Diffstat (limited to 'third_party/libwebrtc/sdk')
-rw-r--r--  third_party/libwebrtc/sdk/BUILD.gn  1704
-rw-r--r--  third_party/libwebrtc/sdk/OWNERS  1
-rw-r--r--  third_party/libwebrtc/sdk/android/AndroidManifest.xml  14
-rw-r--r--  third_party/libwebrtc/sdk/android/BUILD.gn  1749
-rw-r--r--  third_party/libwebrtc/sdk/android/OWNERS  9
-rw-r--r--  third_party/libwebrtc/sdk/android/README  21
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/AddIceObserver.java  20
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java  21
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java  21
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java  20
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/AudioSource.java  26
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/AudioTrack.java  32
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java  23
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java  23
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java  41
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Capturer.java  33
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java  190
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Capturer.java  36
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java  260
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java  206
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerator.java  26
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java  172
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/CapturerObserver.java  27
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/CryptoOptions.java  145
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/DataChannel.java  196
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/Dav1dDecoder.java  20
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java  69
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/DtmfSender.java  96
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase.java  255
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase10.java  20
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase14.java  20
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/EglRenderer.java  787
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/EncodedImage.java  183
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java  22
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java  201
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/FrameDecryptor.java  26
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/FrameEncryptor.java  26
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/GlRectDrawer.java  31
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/GlShader.java  131
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java  122
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/GlUtil.java  66
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java  57
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/IceCandidateErrorEvent.java  43
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java  200
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Decoder.java  22
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Encoder.java  25
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java  20
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java  25
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java  22
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java  27
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/MediaConstraints.java  99
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/MediaSource.java  74
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java  129
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/Metrics.java  81
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java  24
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java  20
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/NetEqFactoryFactory.java  21
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/OWNERS  3
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java  39
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/Predicate.java  73
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/RefCounted.java  28
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/RendererCommon.java  259
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/SSLCertificateVerifier.java  27
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java  212
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/SdpObserver.java  26
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/SessionDescription.java  56
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java  57
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java  54
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/StatsObserver.java  17
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/StatsReport.java  63
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java  160
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java  390
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java  300
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/TextureBufferImpl.java  202
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/TimestampAligner.java  59
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/TurnCustomizer.java  41
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCapturer.java  53
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java  86
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java  42
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoder.java  94
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java  30
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java  31
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoder.java  385
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java  72
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java  36
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java  162
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrame.java  208
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameBufferType.java  33
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java  241
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoProcessor.java  76
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSink.java  23
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSource.java  162
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/VideoTrack.java  76
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoDecoder.java  38
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoEncoder.java  49
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/YuvConverter.java  252
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/YuvHelper.java  200
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java  38
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java  436
-rw-r--r--  third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java  46
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/AndroidManifest.xml  38
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/ant.properties  18
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/build.xml  92
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/loggable_test.cc  31
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/project.properties  16
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java  200
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java  54
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java  205
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java  208
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java  334
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java  793
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java  109
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java  366
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java  129
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java  318
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java  507
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/LoggableTest.java  161
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java  411
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java  1641
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java  65
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java  215
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java  150
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java  70
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java  77
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpTransceiverTest.java  67
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java  518
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java  241
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TestConstants.java  15
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TimestampAlignerTest.java  43
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java  88
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java  530
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoTrackTest.java  112
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java  31
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java  207
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m  5
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/video_frame_buffer_test.cc  45
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/DEPS  4
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc  155
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/audio_device_module/audio_device_android.h  40
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/base/init.cc  24
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/base/init.h  23
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/codecs/wrapper.cc  49
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/codecs/wrapper.h  48
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/jni/class_loader.cc  80
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/jni/class_loader.h  40
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/jni/java_types.cc  355
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/jni/java_types.h  366
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/jni/jni_int_wrapper.h  59
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/jni/jvm.cc  21
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/jni/jvm.h  21
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/jni/scoped_java_ref.h  226
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/network_monitor/network_monitor.cc  31
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/network_monitor/network_monitor.h  36
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/peerconnection/peer_connection_factory.cc  33
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/peerconnection/peer_connection_factory.h  34
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/stacktrace/stacktrace.cc  286
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/stacktrace/stacktrace.h  45
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/video/video_source.cc  115
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/video/video_source.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/video/wrapper.cc  33
-rw-r--r--  third_party/libwebrtc/sdk/android/native_api/video/wrapper.h  36
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/DEPS  5
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/android_network_monitor_unittest.cc  330
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.cc  24
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.h  23
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/audio_device/audio_device_unittest.cc  1161
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/codecs/DEPS  3
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/codecs/wrapper_unittest.cc  57
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/java_types_unittest.cc  76
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/ApplicationContextProvider.java  20
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/BuildInfo.java  59
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java  31
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/FakeVideoEncoder.java  60
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaTypesTestHelper.java  25
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaVideoSourceTestHelper.java  30
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java  33
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/peerconnection/DEPS  6
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc  115
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc  275
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/test_jni_onload.cc  23
-rw-r--r--  third_party/libwebrtc/sdk/android/native_unittests/video/video_source_unittest.cc  175
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java  684
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java  38
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java  33
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java  340
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java  428
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java  458
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java  72
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java  98
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java  365
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java  271
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java  17
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java  26
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java  281
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java  52
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java  763
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java  28
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java  23
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java  129
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java  139
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java  55
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java  22
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java  115
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java  73
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java  69
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java  99
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java  53
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java  63
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java  46
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java  110
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java  81
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java  83
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java  227
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java  122
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java  743
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java  585
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java  308
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/DEPS  15
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/OWNERS  4
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc  50
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc  53
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc  686
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h  198
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc  167
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h  98
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS  4
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS  1
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc  247
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h  154
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc  234
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h  134
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc  501
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h  129
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h  32
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc  650
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h  102
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc  267
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h  140
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc  271
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h  129
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc  144
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h  92
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc  446
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h  199
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc  445
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h  193
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc  25
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc  23
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc  117
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/encoded_image.h  45
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc  63
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_common.cc  45
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc  80
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h  168
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h  80
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jvm.cc  133
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jvm.h  32
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc  25
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc  42
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h  43
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc  80
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc  72
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h  38
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h  12
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc  23
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/audio.h  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc  26
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc  73
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc  43
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h  30
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc  155
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc  55
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc  259
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h  89
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc  59
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h  30
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc  24
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc  152
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h  54
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc  67
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h  60
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc  917
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h  141
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc  550
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h  33
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc  59
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h  33
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc  161
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc  211
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h  35
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc  127
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc  114
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc  176
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h  46
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc  81
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h  69
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc  48
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h  36
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc  44
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc  74
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h  36
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc  35
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/video.cc  55
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/video.h  45
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc  38
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h  49
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc  46
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc  37
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc  25
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc  273
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h  117
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc  130
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc  490
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h  133
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_frame.cc  319
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_frame.h  43
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_sink.cc  32
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_sink.h  36
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_track.cc  49
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc  30
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc  38
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc  40
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc  158
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/resources/robolectric.properties  1
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/AndroidVideoDecoderTest.java  432
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CameraEnumerationTest.java  48
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CodecTestHelper.java  62
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CryptoOptionsTest.java  74
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/FakeMediaCodecWrapper.java  321
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/FramerateBitrateAdjusterTest.java  46
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/GlGenericDrawerTest.java  160
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java  370
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/IceCandidateTest.java  52
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/RefCountDelegateTest.java  83
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/ScalingSettingsTest.java  29
-rw-r--r--  third_party/libwebrtc/sdk/android/tests/src/org/webrtc/audio/LowLatencyAudioBufferManagerTest.java  104
-rw-r--r--  third_party/libwebrtc/sdk/base_objc_gn/moz.build  74
-rw-r--r--  third_party/libwebrtc/sdk/helpers_objc_gn/moz.build  70
-rw-r--r--  third_party/libwebrtc/sdk/media_constraints.cc  259
-rw-r--r--  third_party/libwebrtc/sdk/media_constraints.h  131
-rw-r--r--  third_party/libwebrtc/sdk/media_constraints_unittest.cc  69
-rw-r--r--  third_party/libwebrtc/sdk/objc/DEPS  18
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h  12
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Native/api/audio_device_module.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Native/api/video_decoder_factory.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Native/api/video_encoder_factory.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Native/api/video_frame_buffer.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_decoder_factory.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_encoder_factory.h  11
-rw-r--r--  third_party/libwebrtc/sdk/objc/Info.plist  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/OWNERS  9
-rw-r--r--  third_party/libwebrtc/sdk/objc/README.md  37
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter+Private.h  41
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.h  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.mm  67
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.h  41
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.mm  151
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource+Private.h  34
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.h  32
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.mm  52
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h  31
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.h  28
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.mm  67
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.h  44
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.mm  72
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Native.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Private.h  79
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.h  273
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm  549
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.h  63
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.mm  33
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel+Private.h  52
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.h  132
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.mm  220
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h  52
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm  87
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.h  71
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.mm  74
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm  130
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h  32
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm  58
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.h  74
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.mm  170
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h  36
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.h  49
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.mm  76
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h  42
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.mm  42
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer+Private.h  31
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.h  114
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.mm  196
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h  37
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm  60
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h  34
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.h  46
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.mm  90
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource+Private.h  42
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.h  34
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.mm  82
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream+Private.h  37
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.h  49
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.mm  157
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h  62
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h  50
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm  161
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.h  23
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.mm  34
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h  48
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm  43
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm  34
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h  143
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm  102
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.h  398
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm  939
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h  85
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h  35
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h  105
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm  322
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h  21
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm  49
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h  48
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm  72
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h  38
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm  56
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.h  30
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.mm  40
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h  73
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm  113
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h  76
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm  128
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h  33
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm  43
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.h  58
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.mm  121
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h  32
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h  52
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.h  86
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.mm  159
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Native.h  33
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Private.h  31
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.h  54
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.mm  132
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h  46
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.h  137
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm  190
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.h  20
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.mm  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h  42
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.h  48
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.mm  103
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h  19
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.h  55
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.mm  193
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.h  21
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.mm  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm  38
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm  52
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource+Private.h  51
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.h  37
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.mm  92
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h  30
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.h  38
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.mm  126
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.h  17
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm  18
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.h  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.mm  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm  39
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm  31
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm  39
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm  62
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm  86
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h  23
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm  138
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm  31
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCCodecSpecificInfo.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.h  52
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCI420Buffer.h  22
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCLogging.h  67
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCLogging.mm  48
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCMacros.h  62
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCMutableI420Buffer.h  23
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCMutableYUVPlanarBuffer.h  28
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCSSLCertificateVerifier.h  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.h  35
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.h  36
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m  65
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoDecoder.h  40
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoDecoderFactory.h  32
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoEncoder.h  59
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderFactory.h  52
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.h  28
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.h  42
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.h  86
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm  78
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoFrameBuffer.h  32
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCVideoRenderer.h  43
-rw-r--r--  third_party/libwebrtc/sdk/objc/base/RTCYUVPlanarBuffer.h  46
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Configuration.mm  176
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Private.h  95
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.h  265
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.mm  1000
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.h  48
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.m  133
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h  33
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm  89
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.h  56
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m  535
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.h  51
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m  215
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor+Private.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.mm  126
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h  17
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm  170
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m  122
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h  18
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm  164
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h  22
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm  164
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h  33
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h  61
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm  328
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h  44
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m  265
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h  23
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm  207
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m  59
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h  45
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m  295
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm  157
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h  42
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m  199
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h  33
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m  113
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h  37
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h  21
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm  189
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h  39
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h  25
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m  85
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h  31
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m  102
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h  60
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm  120
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h  18
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m  49
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.h  18
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm  276
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h  18
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m  49
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h  22
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm  819
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.h  19
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.mm  205
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/helpers.cc  90
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/helpers.h  47
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.cc  327
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.h  113
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h  52
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm  352
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.h  23
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm  51
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.h  34
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm  45
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.h  30
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.m  123
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher+Private.h  18
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.h  46
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m  65
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.h  111
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.mm  171
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/noop.mm  13
-rw-r--r--  third_party/libwebrtc/sdk/objc/helpers/scoped_cftyperef.h  116
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/audio_device_module.h  30
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/audio_device_module.mm  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.mm  30
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.h  26
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.mm  48
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_capturer.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_capturer.mm  35
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.h  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.mm  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.h  27
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.mm  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_frame.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_frame.mm  21
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.h  29
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.mm  28
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_renderer.h  28
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/api/video_renderer.mm  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.h  308
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.mm  1165
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.h  143
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.mm  669
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/audio_session_observer.h  41
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/helpers.h  76
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/helpers.mm  109
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.h  141
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.mm  488
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/network_monitor_observer.h  42
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.h  50
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.mm  86
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.h  67
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.mm  95
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.h  41
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.mm  123
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.h  44
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.mm  209
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.h  24
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.mm  28
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.h  39
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.mm  38
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.h  59
-rw-r--r--  third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.mm  131
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm  469
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm  593
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCAudioDevice_xctest.mm  115
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCAudioSessionTest.mm  324
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm  308
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCCallbackLogger_xctest.m  244
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm  569
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCCertificateTest.mm  73
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCConfigurationTest.mm  162
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm  51
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m  30
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCEncodedImage_xctest.mm  55
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm  114
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m  48
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCIceCandidateTest.mm  60
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCIceServerTest.mm  136
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCMTLVideoView_xctest.m  298
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCMediaConstraintsTest.mm  58
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCNV12TextureCache_xctest.m  55
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm  72
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m  381
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionTest.mm  204
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCSessionDescriptionTest.mm  122
-rw-r--r--  third_party/libwebrtc/sdk/objc/unittests/RTCTracingTest.mm  41
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/audio_short16.pcmbin0 -> 643632 bytes
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/audio_short44.pcmbin0 -> 1774010 bytes
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/audio_short48.pcmbin0 -> 1930896 bytes
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/avformatmappertests.mm243
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/foreman.mp4bin0 -> 546651 bytes
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.h22
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.mm126
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/main.mm24
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/nalu_rewriter_xctest.mm374
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/objc_video_decoder_factory_tests.mm107
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/objc_video_encoder_factory_tests.mm148
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/scoped_cftyperef_tests.mm113
-rw-r--r--third_party/libwebrtc/sdk/videocapture_objc_gn/moz.build65
-rw-r--r--third_party/libwebrtc/sdk/videoframebuffer_objc_gn/moz.build68
729 files changed, 82268 insertions, 0 deletions
diff --git a/third_party/libwebrtc/sdk/BUILD.gn b/third_party/libwebrtc/sdk/BUILD.gn
new file mode 100644
index 0000000000..97ce0c49da
--- /dev/null
+++ b/third_party/libwebrtc/sdk/BUILD.gn
@@ -0,0 +1,1704 @@
+# Copyright 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//third_party/libaom/options.gni")
+import("../webrtc.gni")
+if (is_ios) {
+ import("//build/config/ios/ios_sdk.gni")
+ import("//build/config/ios/rules.gni")
+}
+if (is_mac) {
+ import("//build/config/mac/rules.gni")
+}
+
+if (!build_with_mozilla) {
+group("sdk") {
+ public_deps = []
+ if (!build_with_chromium) {
+ if (is_android) {
+ public_deps += [ "android" ]
+ }
+ if (is_ios) {
+ public_deps += [ ":framework_objc" ]
+ }
+ }
+}
+
+rtc_library("media_constraints") {
+ sources = [
+ "media_constraints.cc",
+ "media_constraints.h",
+ ]
+ deps = [
+ "../api:audio_options_api",
+ "../api:libjingle_peerconnection_api",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("sdk_tests") {
+ testonly = true
+ sources = [ "media_constraints_unittest.cc" ]
+ deps = [
+ ":media_constraints",
+ "../test:test_support",
+ ]
+}
+}
+
+if (is_ios || is_mac) {
+ config("common_config_objc") {
+ include_dirs = [
+ "objc",
+
+ # This is needed so that framework headers can include base headers
+ # without pathname (so it works from within the framework module).
+ "objc/base",
+ ]
+ cflags = [
+ "-Wimplicit-retain-self",
+ "-Wstrict-overflow",
+ "-Wmissing-field-initializers",
+ ]
+
+ if (use_clang_coverage) {
+ configs = [ "//build/config/coverage:default_coverage" ]
+ }
+ }
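+
+  # A minimal sketch (hypothetical target, not part of this file): a target
+  # that exports ":common_config_objc" through public_configs lets its own
+  # headers, and those of its dependents, import base headers without a path
+  # prefix:
+  #
+  #   rtc_library("example_objc") {
+  #     sources = [ "objc/example/RTCExample.h" ]
+  #     public_configs = [ ":common_config_objc" ]
+  #   }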
+
+ config("used_from_extension") {
+ if (is_ios && rtc_apprtcmobile_broadcast_extension) {
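+      # -fapplication-extension restricts the code to APIs that are available
+      # inside app extensions; the compiler diagnoses calls to APIs marked
+      # unavailable there (e.g. UIApplication.sharedApplication).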
+ cflags = [ "-fapplication-extension" ]
+ }
+ }
+
+ # TODO(bugs.webrtc.org/9627): Remove this when unused. Targets should depend on base_objc
+ # or helpers_objc directly instead.
+ rtc_library("common_objc") {
+ visibility = [ "*" ]
+
+ sources = [ "objc/helpers/noop.mm" ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ ]
+ }
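+
+  # A minimal sketch (hypothetical target, not part of this file) of what the
+  # TODO above asks for -- depending on the split targets directly:
+  #
+  #   rtc_library("example_direct_deps") {
+  #     deps = [
+  #       ":base_objc",
+  #       ":helpers_objc",
+  #     ]
+  #   }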
+
+ rtc_library("base_objc") {
+ visibility = [ "*" ]
+ sources = [
+ "objc/base/RTCCodecSpecificInfo.h",
+ "objc/base/RTCEncodedImage.h",
+ "objc/base/RTCEncodedImage.m",
+ "objc/base/RTCI420Buffer.h",
+ "objc/base/RTCLogging.h",
+ "objc/base/RTCLogging.mm",
+ "objc/base/RTCMacros.h",
+ "objc/base/RTCMutableI420Buffer.h",
+ "objc/base/RTCMutableYUVPlanarBuffer.h",
+ "objc/base/RTCSSLCertificateVerifier.h",
+ "objc/base/RTCVideoCapturer.h",
+ "objc/base/RTCVideoCapturer.m",
+ "objc/base/RTCVideoCodecInfo.h",
+ "objc/base/RTCVideoCodecInfo.m",
+ "objc/base/RTCVideoDecoder.h",
+ "objc/base/RTCVideoDecoderFactory.h",
+ "objc/base/RTCVideoEncoder.h",
+ "objc/base/RTCVideoEncoderFactory.h",
+ "objc/base/RTCVideoEncoderQpThresholds.h",
+ "objc/base/RTCVideoEncoderQpThresholds.m",
+ "objc/base/RTCVideoEncoderSettings.h",
+ "objc/base/RTCVideoEncoderSettings.m",
+ "objc/base/RTCVideoFrame.h",
+ "objc/base/RTCVideoFrame.mm",
+ "objc/base/RTCVideoFrameBuffer.h",
+ "objc/base/RTCVideoRenderer.h",
+ "objc/base/RTCYUVPlanarBuffer.h",
+ ]
+
+ deps = [
+ "../rtc_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ ]
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+
+ public_configs = [ ":common_config_objc" ]
+ }
+
+ rtc_library("helpers_objc") {
+ sources = [
+ "objc/helpers/AVCaptureSession+DevicePosition.h",
+ "objc/helpers/AVCaptureSession+DevicePosition.mm",
+ "objc/helpers/NSString+StdString.h",
+ "objc/helpers/NSString+StdString.mm",
+ "objc/helpers/RTCDispatcher+Private.h",
+ "objc/helpers/RTCDispatcher.h",
+ "objc/helpers/RTCDispatcher.m",
+ "objc/helpers/scoped_cftyperef.h",
+ ]
+
+ deps = [
+ ":base_objc",
+ "../rtc_base:checks",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+
+ frameworks = [
+ "AVFoundation.framework",
+ "CoreMedia.framework",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ if (is_ios) {
+ sources += [
+ "objc/helpers/RTCCameraPreviewView.h",
+ "objc/helpers/RTCCameraPreviewView.m",
+ "objc/helpers/UIDevice+RTCDevice.h",
+ "objc/helpers/UIDevice+RTCDevice.mm",
+ ]
+ }
+ }
+
+ if (!build_with_chromium && !build_with_mozilla) {
+ rtc_library("callback_logger_objc") {
+ sources = [
+ "objc/api/logging/RTCCallbackLogger.h",
+ "objc/api/logging/RTCCallbackLogger.mm",
+ ]
+
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ "../rtc_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("file_logger_objc") {
+ sources = [
+ "objc/api/peerconnection/RTCFileLogger.h",
+ "objc/api/peerconnection/RTCFileLogger.mm",
+ ]
+
+ deps = [
+ ":base_objc",
+ "../rtc_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+ }
+ }
+
+ if (!build_with_chromium) {
+ if (is_ios) {
+ rtc_library("native_api_audio_device_module") {
+ visibility = [ "*" ]
+
+ sources = [
+ "objc/native/api/audio_device_module.h",
+ "objc/native/api/audio_device_module.mm",
+ ]
+
+ deps = [
+ ":audio_device",
+ "../api:make_ref_counted",
+ "../modules/audio_device:audio_device_api",
+ "../modules/audio_device:audio_device_generic",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../system_wrappers",
+ ]
+ }
+
+ rtc_source_set("audio_session_observer") {
+ visibility = [ ":*" ]
+
+ sources = [ "objc/native/src/audio/audio_session_observer.h" ]
+
+ deps = [
+ "../rtc_base",
+ "../rtc_base:threading",
+ ]
+ }
+
+ rtc_library("audio_device") {
+ visibility = [ "*" ]
+
+ sources = [
+ "objc/native/src/audio/audio_device_ios.h",
+ "objc/native/src/audio/audio_device_ios.mm",
+ "objc/native/src/audio/audio_device_module_ios.h",
+ "objc/native/src/audio/audio_device_module_ios.mm",
+ "objc/native/src/audio/helpers.h",
+ "objc/native/src/audio/helpers.mm",
+ "objc/native/src/audio/voice_processing_audio_unit.h",
+ "objc/native/src/audio/voice_processing_audio_unit.mm",
+ ]
+
+ deps = [
+ ":audio_objc",
+ ":audio_session_observer",
+ ":base_objc",
+ "../api:array_view",
+ "../api:sequence_checker",
+ "../api/task_queue",
+ "../api/task_queue:default_task_queue_factory",
+ "../modules/audio_device:audio_device_api",
+ "../modules/audio_device:audio_device_buffer",
+ "../modules/audio_device:audio_device_generic",
+ "../rtc_base",
+ "../rtc_base:buffer",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:refcount",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../system_wrappers:field_trial",
+ "../system_wrappers:metrics",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
+
+ frameworks = [ "AudioToolbox.framework" ]
+ }
+
+    # This target exists to expose :audio_session_objc and
+    # :audio_session_delegate_adapter_objc for backward compatibility;
+    # it should be considered deprecated.
+ group("audio_objc") {
+ public_deps = [ # no-presubmit-check TODO(webrtc:11238)
+ ":audio_session_delegate_adapter_objc",
+ ":audio_session_objc",
+ ]
+ }
+
+ rtc_library("audio_session_delegate_adapter_objc") {
+ sources = [
+ "objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h",
+ "objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":audio_session_objc",
+ ":audio_session_observer",
+ ":base_objc",
+ ]
+ }
+
+ rtc_library("audio_session_objc") {
+ visibility = [ "*" ]
+
+ sources = [
+ "objc/components/audio/RTCAudioSession+Configuration.mm",
+ "objc/components/audio/RTCAudioSession+Private.h",
+ "objc/components/audio/RTCAudioSession.h",
+ "objc/components/audio/RTCAudioSession.mm",
+ "objc/components/audio/RTCAudioSessionConfiguration.h",
+ "objc/components/audio/RTCAudioSessionConfiguration.m",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ frameworks = [ "AVFoundation.framework" ]
+
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ "../rtc_base",
+ "../rtc_base:checks",
+ "../rtc_base/synchronization:mutex",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
+ }
+
+ rtc_source_set("network_monitor_observer") {
+ visibility = [ ":*" ]
+
+ sources = [ "objc/native/src/network_monitor_observer.h" ]
+
+ deps = [
+ "../rtc_base",
+ "../rtc_base:network_constants",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("network_monitor_objc") {
+ visibility = [ "*" ]
+
+ sources = [
+ "objc/components/network/RTCNetworkMonitor+Private.h",
+ "objc/components/network/RTCNetworkMonitor.h",
+ "objc/components/network/RTCNetworkMonitor.mm",
+ ]
+
+ configs += [ ":used_from_extension" ]
+
+ frameworks = [ "Network.framework" ]
+
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ ":network_monitor_observer",
+ "../rtc_base:stringutils",
+ "../rtc_base/system:gcd_helpers",
+ ]
+ }
+ }
+
+ if (!build_with_mozilla) {
+ rtc_library("videosource_objc") {
+ sources = [
+ "objc/api/peerconnection/RTCVideoSource+Private.h",
+ "objc/api/peerconnection/RTCVideoSource.h",
+ "objc/api/peerconnection/RTCVideoSource.mm",
+ ]
+
+ deps = [
+ ":base_objc",
+ ":mediasource_objc",
+ ":native_video",
+ ":videoframebuffer_objc",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../common_video",
+ "../media:rtc_media_base",
+ "../pc:video_track_source_proxy",
+ "../rtc_base",
+ "../rtc_base:checks",
+ "../rtc_base:threading",
+ "//third_party/libyuv",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+ }
+ }
+
+ rtc_library("videoframebuffer_objc") {
+ visibility = [ "*" ]
+ sources = [
+ "objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h",
+ "objc/api/video_frame_buffer/RTCNativeI420Buffer.h",
+ "objc/api/video_frame_buffer/RTCNativeI420Buffer.mm",
+ "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h",
+ "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm",
+ "objc/components/video_frame_buffer/RTCCVPixelBuffer.h",
+ "objc/components/video_frame_buffer/RTCCVPixelBuffer.mm",
+ ]
+ deps = [
+ ":base_objc",
+ "../rtc_base:logging",
+ "//api/video:video_frame",
+ "//api/video:video_rtp_headers",
+ "//common_video",
+ "//rtc_base:checks",
+ "//third_party/libyuv",
+ ]
+ if (build_with_mozilla) {
+ deps -= [ "//third_party/libyuv" ]
+ include_dirs = [
+ "/media/libyuv",
+ "/media/libyuv/libyuv/include",
+ ]
+ }
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+ frameworks = [
+ "VideoToolbox.framework",
+ "CoreGraphics.framework",
+ "CoreVideo.framework",
+ ]
+ }
+
+ if (!build_with_mozilla) {
+ rtc_library("opengl_objc") {
+ sources = [
+ "objc/components/renderer/opengl/RTCDefaultShader.h",
+ "objc/components/renderer/opengl/RTCDefaultShader.mm",
+ "objc/components/renderer/opengl/RTCI420TextureCache.h",
+ "objc/components/renderer/opengl/RTCI420TextureCache.mm",
+ "objc/components/renderer/opengl/RTCOpenGLDefines.h",
+ "objc/components/renderer/opengl/RTCShader.h",
+ "objc/components/renderer/opengl/RTCShader.mm",
+ "objc/components/renderer/opengl/RTCVideoViewShading.h",
+ ]
+ frameworks = [ "CoreVideo.framework" ]
+ if (is_ios) {
+ sources += [
+ "objc/components/renderer/opengl/RTCNV12TextureCache.h",
+ "objc/components/renderer/opengl/RTCNV12TextureCache.m",
+ ]
+ frameworks += [
+ "GLKit.framework",
+ "OpenGLES.framework",
+ "QuartzCore.framework",
+ ]
+ } else if (is_mac) {
+ frameworks += [
+ "CoreMedia.framework",
+ "OpenGL.framework",
+ ]
+ }
+
+ # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning
+ # workaround.
+ defines = [ "GLES_SILENCE_DEPRECATION" ]
+
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ ":mediaconstraints_objc",
+ ":native_video",
+ ":videoframebuffer_objc",
+ ":videosource_objc",
+ "../api:libjingle_peerconnection_api",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../common_video",
+ "../media:rtc_media_base",
+ "../rtc_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+ }
+
+ rtc_library("opengl_ui_objc") {
+ visibility = [ "*" ]
+ allow_poison = [
+ "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
+ "default_task_queue",
+ ]
+ if (is_ios) {
+ sources = [
+ "objc/components/renderer/opengl/RTCDisplayLinkTimer.h",
+ "objc/components/renderer/opengl/RTCDisplayLinkTimer.m",
+ "objc/components/renderer/opengl/RTCEAGLVideoView.h",
+ "objc/components/renderer/opengl/RTCEAGLVideoView.m",
+ ]
+
+ # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning
+ # workaround.
+ defines = [ "GLES_SILENCE_DEPRECATION" ]
+ }
+ if (is_mac) {
+ sources = [
+ "objc/components/renderer/opengl/RTCNSGLVideoView.h",
+ "objc/components/renderer/opengl/RTCNSGLVideoView.m",
+ ]
+ }
+ configs += [ "..:common_objc" ]
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ ":metal_objc",
+ ":opengl_objc",
+ ":videocapture_objc",
+ ":videoframebuffer_objc",
+ ]
+ }
+
+ rtc_library("metal_objc") {
+ visibility = [ "*" ]
+ allow_poison = [
+ "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
+ "default_task_queue",
+ ]
+ sources = [
+ "objc/components/renderer/metal/RTCMTLI420Renderer.h",
+ "objc/components/renderer/metal/RTCMTLI420Renderer.mm",
+ "objc/components/renderer/metal/RTCMTLNV12Renderer.h",
+ "objc/components/renderer/metal/RTCMTLNV12Renderer.mm",
+ "objc/components/renderer/metal/RTCMTLRGBRenderer.h",
+ "objc/components/renderer/metal/RTCMTLRGBRenderer.mm",
+ "objc/components/renderer/metal/RTCMTLRenderer+Private.h",
+ "objc/components/renderer/metal/RTCMTLRenderer.h",
+ "objc/components/renderer/metal/RTCMTLRenderer.mm",
+ ]
+ frameworks = [
+ "CoreVideo.framework",
+ "Metal.framework",
+ "MetalKit.framework",
+ ]
+ if (is_ios) {
+ sources += [
+ "objc/components/renderer/metal/RTCMTLVideoView.h",
+ "objc/components/renderer/metal/RTCMTLVideoView.m",
+ ]
+ }
+ if (is_mac) {
+ sources += [
+ "objc/components/renderer/metal/RTCMTLNSVideoView.h",
+ "objc/components/renderer/metal/RTCMTLNSVideoView.m",
+ ]
+ frameworks += [ "AppKit.framework" ]
+ }
+ deps = [
+ ":base_objc",
+ ":peerconnectionfactory_base_objc",
+ ":videoframebuffer_objc",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../rtc_base:checks",
+ ]
+ configs += [ "..:common_objc" ]
+ public_configs = [ ":common_config_objc" ]
+ }
+
+ # TODO(bugs.webrtc.org/9627): Remove this target.
+ rtc_library("videocapturebase_objc") {
+ visibility = [ "*" ]
+ sources = [ "objc/helpers/noop.mm" ]
+
+ configs += [ "..:common_objc" ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_objc",
+ ":videoframebuffer_objc",
+ ]
+ }
+ }
+
+ rtc_library("videocapture_objc") {
+ visibility = [ "*" ]
+ allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
+ sources = [
+ "objc/components/capturer/RTCCameraVideoCapturer.h",
+ "objc/components/capturer/RTCCameraVideoCapturer.m",
+ "objc/components/capturer/RTCFileVideoCapturer.h",
+ "objc/components/capturer/RTCFileVideoCapturer.m",
+ ]
+ frameworks = [
+ "AVFoundation.framework",
+ "CoreVideo.framework",
+ "QuartzCore.framework",
+ ]
+
+ configs += [ "..:common_objc" ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ ":videoframebuffer_objc",
+ "../rtc_base/system:gcd_helpers",
+ ]
+ }
+
+ if (!build_with_mozilla) {
+ rtc_library("videocodec_objc") {
+ visibility = [ "*" ]
+ configs += [ "..:no_global_constructors" ]
+ sources = [
+ "objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h",
+ "objc/components/video_codec/RTCCodecSpecificInfoH264.h",
+ "objc/components/video_codec/RTCCodecSpecificInfoH264.mm",
+ "objc/components/video_codec/RTCH264ProfileLevelId.h",
+ "objc/components/video_codec/RTCH264ProfileLevelId.mm",
+ ]
+ if (is_ios) {
+ sources += [
+ "objc/components/video_codec/UIDevice+H264Profile.h",
+ "objc/components/video_codec/UIDevice+H264Profile.mm",
+ ]
+ }
+
+ public_configs = [ ":common_config_objc" ]
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../media:rtc_media_base",
+ "../modules/video_coding:video_codec_interface",
+ "../rtc_base:checks",
+ ]
+ }
+
+ rtc_library("default_codec_factory_objc") {
+ sources = [
+ "objc/components/video_codec/RTCDefaultVideoDecoderFactory.h",
+ "objc/components/video_codec/RTCDefaultVideoDecoderFactory.m",
+ "objc/components/video_codec/RTCDefaultVideoEncoderFactory.h",
+ "objc/components/video_codec/RTCDefaultVideoEncoderFactory.m",
+ ]
+
+ deps = [
+ ":base_objc",
+ ":native_video",
+ ":videocodec_objc",
+ ":videotoolbox_objc",
+ ":vp8",
+ ":vp9",
+ ":vpx_codec_constants",
+ ]
+
+ defines = []
+ if (enable_libaom) {
+ defines += [ "RTC_USE_LIBAOM_AV1_ENCODER" ]
+ deps += [ ":libaom_av1_encoder" ]
+ }
+
+ if (rtc_include_dav1d_in_internal_decoder_factory) {
+ deps += [ ":dav1d_decoder" ]
+ }
+ }
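+
+  # For illustration (hypothetical args.gn, not part of this file): the
+  # optional AV1 dependencies above are toggled at GN-generation time, e.g.
+  #
+  #   # args.gn
+  #   enable_libaom = true
+  #   rtc_include_dav1d_in_internal_decoder_factory = true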
+
+ rtc_library("vpx_codec_constants") {
+ configs += [ "..:no_global_constructors" ]
+ sources = [
+ "objc/api/video_codec/RTCVideoCodecConstants.h",
+ "objc/api/video_codec/RTCVideoCodecConstants.mm",
+ ]
+
+ deps = [
+ ":base_objc",
+ "../media:rtc_media_base",
+ ]
+ }
+
+ rtc_library("vp8") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [
+ "objc/api/video_codec/RTCVideoDecoderVP8.h",
+ "objc/api/video_codec/RTCVideoDecoderVP8.mm",
+ "objc/api/video_codec/RTCVideoEncoderVP8.h",
+ "objc/api/video_codec/RTCVideoEncoderVP8.mm",
+ ]
+
+ deps = [
+ ":base_objc",
+ ":wrapped_native_codec_objc",
+ "../modules/video_coding:webrtc_vp8",
+ ]
+ }
+
+ rtc_library("vp9") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [
+ "objc/api/video_codec/RTCVideoDecoderVP9.h",
+ "objc/api/video_codec/RTCVideoDecoderVP9.mm",
+ "objc/api/video_codec/RTCVideoEncoderVP9.h",
+ "objc/api/video_codec/RTCVideoEncoderVP9.mm",
+ ]
+
+ deps = [
+ ":base_objc",
+ ":wrapped_native_codec_objc",
+ "../media:rtc_media_base",
+ "../modules/video_coding:webrtc_vp9",
+ ]
+ }
+
+ rtc_library("dav1d_decoder") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [
+ "objc/api/video_codec/RTCVideoDecoderAV1.h",
+ "objc/api/video_codec/RTCVideoDecoderAV1.mm",
+ ]
+
+ deps = [
+ ":base_objc",
+ ":wrapped_native_codec_objc",
+ "../media:rtc_media_base",
+ "../modules/video_coding/codecs/av1:dav1d_decoder",
+ ]
+ }
+
+ rtc_library("libaom_av1_encoder") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [
+ "objc/api/video_codec/RTCVideoEncoderAV1.h",
+ "objc/api/video_codec/RTCVideoEncoderAV1.mm",
+ ]
+
+ deps = [
+ ":base_objc",
+ ":wrapped_native_codec_objc",
+ "../media:rtc_media_base",
+ "../modules/video_coding/codecs/av1:libaom_av1_encoder",
+ ]
+ }
+
+ rtc_library("mediaconstraints_objc") {
+ configs += [ "..:no_global_constructors" ]
+ sources = [
+ "objc/api/peerconnection/RTCMediaConstraints+Private.h",
+ "objc/api/peerconnection/RTCMediaConstraints.h",
+ "objc/api/peerconnection/RTCMediaConstraints.mm",
+ ]
+
+ public_configs = [ ":common_config_objc" ]
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ ":media_constraints",
+ ]
+ }
+
+  # TODO(bugs.webrtc.org/9627): Remove; targets should depend on base_objc.
+ rtc_library("videorenderer_objc") {
+ visibility = [ "*" ]
+ sources = [ "objc/helpers/noop.mm" ]
+
+ configs += [ "..:common_objc" ]
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [ ":base_objc" ]
+ }
+
+ rtc_library("videorendereradapter_objc") {
+ visibility = [ "*" ]
+ allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
+ sources = [
+ "objc/api/RTCVideoRendererAdapter+Private.h",
+ "objc/api/RTCVideoRendererAdapter.h",
+ "objc/api/RTCVideoRendererAdapter.mm",
+ ]
+
+ configs += [ "..:common_objc" ]
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_objc",
+ ":native_api",
+ ":videoframebuffer_objc",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ ]
+ }
+
+ rtc_library("mediasource_objc") {
+ sources = [
+ "objc/api/peerconnection/RTCMediaSource+Private.h",
+ "objc/api/peerconnection/RTCMediaSource.h",
+ "objc/api/peerconnection/RTCMediaSource.mm",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_objc",
+ "../api:media_stream_interface",
+ "../rtc_base:checks",
+ ]
+ }
+
+ rtc_library("base_native_additions_objc") {
+ sources = [
+ "objc/api/peerconnection/RTCEncodedImage+Private.h",
+ "objc/api/peerconnection/RTCEncodedImage+Private.mm",
+ "objc/api/peerconnection/RTCVideoCodecInfo+Private.h",
+ "objc/api/peerconnection/RTCVideoCodecInfo+Private.mm",
+ "objc/api/peerconnection/RTCVideoEncoderSettings+Private.h",
+ "objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm",
+ ]
+
+ configs += [ "..:common_objc" ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ "../api/video:encoded_image",
+ "../api/video_codecs:video_codecs_api",
+ "../modules/video_coding:video_codec_interface",
+ "../rtc_base",
+ "../rtc_base:refcount",
+ "../rtc_base:safe_conversions",
+ ]
+ }
+
+ rtc_library("peerconnectionfactory_base_objc") {
+ visibility = [ "*" ]
+ allow_poison = [
+ "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
+ "default_task_queue",
+ ]
+ configs += [ "..:no_global_constructors" ]
+ sources = [
+ "objc/api/peerconnection/RTCAudioSource+Private.h",
+ "objc/api/peerconnection/RTCAudioSource.h",
+ "objc/api/peerconnection/RTCAudioSource.mm",
+ "objc/api/peerconnection/RTCAudioTrack+Private.h",
+ "objc/api/peerconnection/RTCAudioTrack.h",
+ "objc/api/peerconnection/RTCAudioTrack.mm",
+ "objc/api/peerconnection/RTCCertificate.h",
+ "objc/api/peerconnection/RTCCertificate.mm",
+ "objc/api/peerconnection/RTCConfiguration+Native.h",
+ "objc/api/peerconnection/RTCConfiguration+Private.h",
+ "objc/api/peerconnection/RTCConfiguration.h",
+ "objc/api/peerconnection/RTCConfiguration.mm",
+ "objc/api/peerconnection/RTCCryptoOptions.h",
+ "objc/api/peerconnection/RTCCryptoOptions.mm",
+ "objc/api/peerconnection/RTCDataChannel+Private.h",
+ "objc/api/peerconnection/RTCDataChannel.h",
+ "objc/api/peerconnection/RTCDataChannel.mm",
+ "objc/api/peerconnection/RTCDataChannelConfiguration+Private.h",
+ "objc/api/peerconnection/RTCDataChannelConfiguration.h",
+ "objc/api/peerconnection/RTCDataChannelConfiguration.mm",
+ "objc/api/peerconnection/RTCDtmfSender+Private.h",
+ "objc/api/peerconnection/RTCDtmfSender.h",
+ "objc/api/peerconnection/RTCDtmfSender.mm",
+ "objc/api/peerconnection/RTCFieldTrials.h",
+ "objc/api/peerconnection/RTCFieldTrials.mm",
+ "objc/api/peerconnection/RTCIceCandidate+Private.h",
+ "objc/api/peerconnection/RTCIceCandidate.h",
+ "objc/api/peerconnection/RTCIceCandidate.mm",
+ "objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h",
+ "objc/api/peerconnection/RTCIceCandidateErrorEvent.h",
+ "objc/api/peerconnection/RTCIceCandidateErrorEvent.mm",
+ "objc/api/peerconnection/RTCIceServer+Private.h",
+ "objc/api/peerconnection/RTCIceServer.h",
+ "objc/api/peerconnection/RTCIceServer.mm",
+ "objc/api/peerconnection/RTCLegacyStatsReport+Private.h",
+ "objc/api/peerconnection/RTCLegacyStatsReport.h",
+ "objc/api/peerconnection/RTCLegacyStatsReport.mm",
+ "objc/api/peerconnection/RTCMediaStream+Private.h",
+ "objc/api/peerconnection/RTCMediaStream.h",
+ "objc/api/peerconnection/RTCMediaStream.mm",
+ "objc/api/peerconnection/RTCMediaStreamTrack+Private.h",
+ "objc/api/peerconnection/RTCMediaStreamTrack.h",
+ "objc/api/peerconnection/RTCMediaStreamTrack.mm",
+ "objc/api/peerconnection/RTCMetrics.h",
+ "objc/api/peerconnection/RTCMetrics.mm",
+ "objc/api/peerconnection/RTCMetricsSampleInfo+Private.h",
+ "objc/api/peerconnection/RTCMetricsSampleInfo.h",
+ "objc/api/peerconnection/RTCMetricsSampleInfo.mm",
+ "objc/api/peerconnection/RTCPeerConnection+DataChannel.mm",
+ "objc/api/peerconnection/RTCPeerConnection+Private.h",
+ "objc/api/peerconnection/RTCPeerConnection+Stats.mm",
+ "objc/api/peerconnection/RTCPeerConnection.h",
+ "objc/api/peerconnection/RTCPeerConnection.mm",
+ "objc/api/peerconnection/RTCPeerConnectionFactory+Native.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactory+Private.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactory.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactory.mm",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm",
+ "objc/api/peerconnection/RTCRtcpParameters+Private.h",
+ "objc/api/peerconnection/RTCRtcpParameters.h",
+ "objc/api/peerconnection/RTCRtcpParameters.mm",
+ "objc/api/peerconnection/RTCRtpCodecParameters+Private.h",
+ "objc/api/peerconnection/RTCRtpCodecParameters.h",
+ "objc/api/peerconnection/RTCRtpCodecParameters.mm",
+ "objc/api/peerconnection/RTCRtpEncodingParameters+Private.h",
+ "objc/api/peerconnection/RTCRtpEncodingParameters.h",
+ "objc/api/peerconnection/RTCRtpEncodingParameters.mm",
+ "objc/api/peerconnection/RTCRtpHeaderExtension+Private.h",
+ "objc/api/peerconnection/RTCRtpHeaderExtension.h",
+ "objc/api/peerconnection/RTCRtpHeaderExtension.mm",
+ "objc/api/peerconnection/RTCRtpParameters+Private.h",
+ "objc/api/peerconnection/RTCRtpParameters.h",
+ "objc/api/peerconnection/RTCRtpParameters.mm",
+ "objc/api/peerconnection/RTCRtpReceiver+Native.h",
+ "objc/api/peerconnection/RTCRtpReceiver+Private.h",
+ "objc/api/peerconnection/RTCRtpReceiver.h",
+ "objc/api/peerconnection/RTCRtpReceiver.mm",
+ "objc/api/peerconnection/RTCRtpSender+Native.h",
+ "objc/api/peerconnection/RTCRtpSender+Private.h",
+ "objc/api/peerconnection/RTCRtpSender.h",
+ "objc/api/peerconnection/RTCRtpSender.mm",
+ "objc/api/peerconnection/RTCRtpTransceiver+Private.h",
+ "objc/api/peerconnection/RTCRtpTransceiver.h",
+ "objc/api/peerconnection/RTCRtpTransceiver.mm",
+ "objc/api/peerconnection/RTCSSLAdapter.h",
+ "objc/api/peerconnection/RTCSSLAdapter.mm",
+ "objc/api/peerconnection/RTCSessionDescription+Private.h",
+ "objc/api/peerconnection/RTCSessionDescription.h",
+ "objc/api/peerconnection/RTCSessionDescription.mm",
+ "objc/api/peerconnection/RTCStatisticsReport+Private.h",
+ "objc/api/peerconnection/RTCStatisticsReport.h",
+ "objc/api/peerconnection/RTCStatisticsReport.mm",
+ "objc/api/peerconnection/RTCTracing.h",
+ "objc/api/peerconnection/RTCTracing.mm",
+ "objc/api/peerconnection/RTCVideoTrack+Private.h",
+ "objc/api/peerconnection/RTCVideoTrack.h",
+ "objc/api/peerconnection/RTCVideoTrack.mm",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_native_additions_objc",
+ ":base_objc",
+ ":file_logger_objc",
+ ":helpers_objc",
+ ":mediaconstraints_objc",
+ ":mediasource_objc",
+ ":native_api",
+ ":native_video",
+ ":videoframebuffer_objc",
+ ":videorendereradapter_objc",
+ ":videosource_objc",
+ ":videotoolbox_objc",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtc_event_log_output_file",
+ "../api:rtc_stats_api",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/crypto:frame_decryptor_interface",
+ "../api/crypto:frame_encryptor_interface",
+ "../api/rtc_event_log:rtc_event_log_factory",
+ "../api/task_queue:default_task_queue_factory",
+ "../api/transport:field_trial_based_config",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../media:rtc_audio_video",
+ "../media:rtc_media_base",
+ "../modules/audio_device:audio_device_api",
+ "../modules/audio_processing",
+ "../modules/audio_processing:api",
+ "../modules/video_coding:video_codec_interface",
+ "../pc:peer_connection_factory",
+ "../pc:webrtc_sdp",
+ "../rtc_base",
+ "../rtc_base:checks",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:network_constants",
+ "../rtc_base:safe_conversions",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../stats:rtc_stats",
+ "../system_wrappers:field_trial",
+ "../system_wrappers:metrics",
+ ]
+
+ if (is_ios) {
+ deps += [ ":native_api_audio_device_module" ]
+ }
+ }
+
+ if (rtc_include_tests) {
+ if (is_ios) {
+ rtc_library("sdk_unittests_sources") {
+ testonly = true
+ include_dirs = [ "objc/" ]
+
+ sources = [
+ "objc/unittests/ObjCVideoTrackSource_xctest.mm",
+ "objc/unittests/RTCAudioSessionTest.mm",
+ "objc/unittests/RTCCVPixelBuffer_xctest.mm",
+ "objc/unittests/RTCCallbackLogger_xctest.m",
+ "objc/unittests/RTCCameraVideoCapturerTests.mm",
+ "objc/unittests/RTCCertificateTest.mm",
+ "objc/unittests/RTCConfigurationTest.mm",
+ "objc/unittests/RTCDataChannelConfigurationTest.mm",
+ "objc/unittests/RTCEncodedImage_xctest.mm",
+ "objc/unittests/RTCFileVideoCapturer_xctest.mm",
+ "objc/unittests/RTCH264ProfileLevelId_xctest.m",
+ "objc/unittests/RTCIceCandidateTest.mm",
+ "objc/unittests/RTCIceServerTest.mm",
+ "objc/unittests/RTCMTLVideoView_xctest.m",
+ "objc/unittests/RTCMediaConstraintsTest.mm",
+ "objc/unittests/RTCNV12TextureCache_xctest.m",
+ "objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm",
+ "objc/unittests/RTCPeerConnectionFactory_xctest.m",
+ "objc/unittests/RTCPeerConnectionTest.mm",
+ "objc/unittests/RTCSessionDescriptionTest.mm",
+ "objc/unittests/RTCTracingTest.mm",
+ "objc/unittests/frame_buffer_helpers.h",
+ "objc/unittests/frame_buffer_helpers.mm",
+ "objc/unittests/nalu_rewriter_xctest.mm",
+ "objc/unittests/objc_video_decoder_factory_tests.mm",
+ "objc/unittests/objc_video_encoder_factory_tests.mm",
+ "objc/unittests/scoped_cftyperef_tests.mm",
+ ]
+
+ # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning
+ # workaround.
+ defines = [ "GLES_SILENCE_DEPRECATION" ]
+
+ # TODO(peterhanspers): Reenable these tests on simulator.
+ # See bugs.webrtc.org/7812
+ if (target_environment != "simulator") {
+ sources += [
+ "objc/unittests/RTCAudioDeviceModule_xctest.mm",
+ "objc/unittests/RTCAudioDevice_xctest.mm",
+ ]
+ }
+
+ deps = [
+ ":audio_device",
+ ":audio_session_objc",
+ ":base_native_additions_objc",
+ ":base_objc",
+ ":callback_logger_objc",
+ ":framework_objc",
+ ":helpers_objc",
+ ":mediaconstraints_objc",
+ ":metal_objc",
+ ":native_api",
+ ":native_api_audio_device_module",
+ ":native_video",
+ ":peerconnectionfactory_base_objc",
+ ":video_toolbox_cc",
+ ":videocapture_objc",
+ ":videocodec_objc",
+ ":videoframebuffer_objc",
+ ":videosource_objc",
+ ":videotoolbox_objc",
+ "../api:scoped_refptr",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/task_queue:default_task_queue_factory",
+ "../api/video:video_frame",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../media:rtc_media_base",
+ "../media:rtc_media_tests_utils",
+ "../modules/audio_device:audio_device_api",
+ "../modules/audio_processing:api",
+ "../modules/video_coding:video_codec_interface",
+ "../rtc_base",
+ "../rtc_base:gunit_helpers",
+ "../rtc_base:macromagic",
+ "../rtc_base:refcount",
+ "../rtc_base:rtc_event",
+ "../rtc_base/system:unused",
+ "../system_wrappers",
+ "../test:test_support", # TODO(webrtc:8382): Remove use of gtest
+ "//third_party/libyuv",
+ ]
+
+ if (rtc_ios_macos_use_opengl_rendering) {
+ deps += [ ":opengl_objc" ]
+ }
+
+ public_deps = [
+ "//build/config/ios:xctest",
+ "//third_party/ocmock",
+ ]
+ }
+
+ bundle_data("sdk_unittests_bundle_data") {
+ sources = [
+ "objc/unittests/audio_short16.pcm",
+ "objc/unittests/audio_short44.pcm",
+ "objc/unittests/audio_short48.pcm",
+
+ # Sample video taken from https://media.xiph.org/video/derf/
+ "objc/unittests/foreman.mp4",
+ ]
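+
+        # {{source_file_part}} expands to each file's basename, so these
+        # samples land at the top of the test bundle's resource directory.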
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+
+ # These tests use static linking.
+ rtc_test("sdk_unittests") {
+ is_xctest = true
+ info_plist = "//test/ios/Info.plist"
+ sources = [ "objc/unittests/main.mm" ]
+
+ extra_substitutions = [ "GTEST_BUNDLE_ID_SUFFIX=generic-unit-test" ]
+ deps = [
+ ":peerconnectionfactory_base_objc",
+ ":sdk_unittests_bundle_data",
+ ":sdk_unittests_sources",
+ "../rtc_base",
+ "../rtc_base:threading",
+ "//test:test_support",
+ ]
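+
+        # -all_load forces every member of the linked static archives into
+        # the binary; without it, Objective-C classes and XCTest cases that
+        # are only reached through the runtime could be dead-stripped.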
+ ldflags = [ "-all_load" ]
+ }
+
+ # These tests link to the framework.
+ rtc_test("sdk_framework_unittests") {
+ is_xctest = true
+ info_plist = "//test/ios/Info.plist"
+ sources = [
+ "objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m",
+ "objc/unittests/main.mm",
+ ]
+
+ extra_substitutions = [ "GTEST_BUNDLE_ID_SUFFIX=generic-unit-test" ]
+ deps = [
+ ":framework_objc+link",
+ ":ios_framework_bundle",
+ "../rtc_base",
+ "../rtc_base:threading",
+ "//test:test_support",
+ ]
+ }
+ }
+ }
+
+ if (is_ios) {
+ apple_framework_bundle_with_umbrella_header("framework_objc") {
+ info_plist = "objc/Info.plist"
+ output_name = "WebRTC"
+
+ common_objc_headers = [
+ "objc/base/RTCCodecSpecificInfo.h",
+ "objc/base/RTCEncodedImage.h",
+ "objc/base/RTCI420Buffer.h",
+ "objc/base/RTCLogging.h",
+ "objc/base/RTCMacros.h",
+ "objc/base/RTCMutableI420Buffer.h",
+ "objc/base/RTCMutableYUVPlanarBuffer.h",
+ "objc/base/RTCSSLCertificateVerifier.h",
+ "objc/base/RTCVideoCapturer.h",
+ "objc/base/RTCVideoCodecInfo.h",
+ "objc/base/RTCVideoDecoder.h",
+ "objc/base/RTCVideoDecoderFactory.h",
+ "objc/base/RTCVideoEncoder.h",
+ "objc/base/RTCVideoEncoderFactory.h",
+ "objc/base/RTCVideoEncoderQpThresholds.h",
+ "objc/base/RTCVideoEncoderSettings.h",
+ "objc/base/RTCVideoFrame.h",
+ "objc/base/RTCVideoFrameBuffer.h",
+ "objc/base/RTCVideoRenderer.h",
+ "objc/base/RTCYUVPlanarBuffer.h",
+ "objc/components/audio/RTCAudioSession.h",
+ "objc/components/audio/RTCAudioSessionConfiguration.h",
+ "objc/components/capturer/RTCCameraVideoCapturer.h",
+ "objc/components/capturer/RTCFileVideoCapturer.h",
+ "objc/components/network/RTCNetworkMonitor.h",
+ "objc/components/renderer/metal/RTCMTLVideoView.h",
+ "objc/components/renderer/opengl/RTCEAGLVideoView.h",
+ "objc/components/renderer/opengl/RTCVideoViewShading.h",
+ "objc/components/video_codec/RTCCodecSpecificInfoH264.h",
+ "objc/components/video_codec/RTCDefaultVideoDecoderFactory.h",
+ "objc/components/video_codec/RTCDefaultVideoEncoderFactory.h",
+ "objc/components/video_codec/RTCH264ProfileLevelId.h",
+ "objc/components/video_codec/RTCVideoDecoderFactoryH264.h",
+ "objc/components/video_codec/RTCVideoDecoderH264.h",
+ "objc/components/video_codec/RTCVideoEncoderFactoryH264.h",
+ "objc/components/video_codec/RTCVideoEncoderH264.h",
+ "objc/components/video_frame_buffer/RTCCVPixelBuffer.h",
+ "objc/helpers/RTCCameraPreviewView.h",
+ "objc/helpers/RTCDispatcher.h",
+ "objc/helpers/UIDevice+RTCDevice.h",
+ "objc/api/peerconnection/RTCAudioSource.h",
+ "objc/api/peerconnection/RTCAudioTrack.h",
+ "objc/api/peerconnection/RTCConfiguration.h",
+ "objc/api/peerconnection/RTCDataChannel.h",
+ "objc/api/peerconnection/RTCDataChannelConfiguration.h",
+ "objc/api/peerconnection/RTCFieldTrials.h",
+ "objc/api/peerconnection/RTCIceCandidate.h",
+ "objc/api/peerconnection/RTCIceCandidateErrorEvent.h",
+ "objc/api/peerconnection/RTCIceServer.h",
+ "objc/api/peerconnection/RTCLegacyStatsReport.h",
+ "objc/api/peerconnection/RTCMediaConstraints.h",
+ "objc/api/peerconnection/RTCMediaSource.h",
+ "objc/api/peerconnection/RTCMediaStream.h",
+ "objc/api/peerconnection/RTCMediaStreamTrack.h",
+ "objc/api/peerconnection/RTCMetrics.h",
+ "objc/api/peerconnection/RTCMetricsSampleInfo.h",
+ "objc/api/peerconnection/RTCPeerConnection.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactory.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h",
+ "objc/api/peerconnection/RTCRtcpParameters.h",
+ "objc/api/peerconnection/RTCRtpCodecParameters.h",
+ "objc/api/peerconnection/RTCRtpEncodingParameters.h",
+ "objc/api/peerconnection/RTCRtpHeaderExtension.h",
+ "objc/api/peerconnection/RTCRtpParameters.h",
+ "objc/api/peerconnection/RTCRtpReceiver.h",
+ "objc/api/peerconnection/RTCRtpSender.h",
+ "objc/api/peerconnection/RTCRtpTransceiver.h",
+ "objc/api/peerconnection/RTCDtmfSender.h",
+ "objc/api/peerconnection/RTCSSLAdapter.h",
+ "objc/api/peerconnection/RTCSessionDescription.h",
+ "objc/api/peerconnection/RTCStatisticsReport.h",
+ "objc/api/peerconnection/RTCTracing.h",
+ "objc/api/peerconnection/RTCCertificate.h",
+ "objc/api/peerconnection/RTCCryptoOptions.h",
+ "objc/api/peerconnection/RTCVideoSource.h",
+ "objc/api/peerconnection/RTCVideoTrack.h",
+ "objc/api/video_codec/RTCVideoCodecConstants.h",
+ "objc/api/video_codec/RTCVideoDecoderVP8.h",
+ "objc/api/video_codec/RTCVideoDecoderVP9.h",
+ "objc/api/video_codec/RTCVideoDecoderAV1.h",
+ "objc/api/video_codec/RTCVideoEncoderVP8.h",
+ "objc/api/video_codec/RTCVideoEncoderVP9.h",
+ "objc/api/video_codec/RTCVideoEncoderAV1.h",
+ "objc/api/video_frame_buffer/RTCNativeI420Buffer.h",
+ "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h",
+ ]
+
+ if (!build_with_chromium) {
+ common_objc_headers += [
+ "objc/api/logging/RTCCallbackLogger.h",
+ "objc/api/peerconnection/RTCFileLogger.h",
+ ]
+ }
+
+ sources = common_objc_headers
+ public_headers = common_objc_headers
+
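+      # The @rpath-relative install_name lets apps embed WebRTC.framework and
+      # resolve it at load time through their own runpath (LC_RPATH) entries.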
+ ldflags = [
+ "-all_load",
+ "-install_name",
+ "@rpath/$output_name.framework/$output_name",
+ ]
+
+ deps = [
+ ":audio_objc",
+ ":base_objc",
+ ":default_codec_factory_objc",
+ ":metal_objc",
+ ":native_api",
+ ":native_video",
+ ":peerconnectionfactory_base_objc",
+ ":videocapture_objc",
+ ":videocodec_objc",
+ ":videotoolbox_objc",
+ ]
+ if (rtc_ios_macos_use_opengl_rendering) {
+ deps += [ ":opengl_ui_objc" ]
+ }
+ if (!build_with_chromium) {
+ deps += [
+ ":callback_logger_objc",
+ ":file_logger_objc",
+ ]
+ }
+
+ frameworks = [
+ "AVFoundation.framework",
+ "CoreGraphics.framework",
+ "CoreMedia.framework",
+ ]
+
+ configs = [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+
+ public_configs = [ ":common_config_objc" ]
+ }
+
+ bundle_data("ios_framework_bundle") {
+ deps = [ "../sdk:framework_objc" ]
+ sources = [ "$root_build_dir/WebRTC.framework" ]
+ outputs = [ "{{bundle_resources_dir}}/Frameworks/{{source_file_part}}" ]
+ }
+ }
+
+ if (is_mac) {
+ apple_framework_bundle_with_umbrella_header("mac_framework_objc") {
+ info_plist = "objc/Info.plist"
+ output_name = "WebRTC"
+
+ sources = [
+ "objc/api/peerconnection/RTCAudioSource.h",
+ "objc/api/peerconnection/RTCAudioTrack.h",
+ "objc/api/peerconnection/RTCCertificate.h",
+ "objc/api/peerconnection/RTCConfiguration.h",
+ "objc/api/peerconnection/RTCCryptoOptions.h",
+ "objc/api/peerconnection/RTCDataChannel.h",
+ "objc/api/peerconnection/RTCDataChannelConfiguration.h",
+ "objc/api/peerconnection/RTCDtmfSender.h",
+ "objc/api/peerconnection/RTCFieldTrials.h",
+ "objc/api/peerconnection/RTCIceCandidate.h",
+ "objc/api/peerconnection/RTCIceCandidateErrorEvent.h",
+ "objc/api/peerconnection/RTCIceServer.h",
+ "objc/api/peerconnection/RTCLegacyStatsReport.h",
+ "objc/api/peerconnection/RTCMediaConstraints.h",
+ "objc/api/peerconnection/RTCMediaSource.h",
+ "objc/api/peerconnection/RTCMediaStream.h",
+ "objc/api/peerconnection/RTCMediaStreamTrack.h",
+ "objc/api/peerconnection/RTCMetrics.h",
+ "objc/api/peerconnection/RTCMetricsSampleInfo.h",
+ "objc/api/peerconnection/RTCPeerConnection.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactory.h",
+ "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h",
+ "objc/api/peerconnection/RTCRtcpParameters.h",
+ "objc/api/peerconnection/RTCRtpCodecParameters.h",
+ "objc/api/peerconnection/RTCRtpEncodingParameters.h",
+ "objc/api/peerconnection/RTCRtpHeaderExtension.h",
+ "objc/api/peerconnection/RTCRtpParameters.h",
+ "objc/api/peerconnection/RTCRtpReceiver.h",
+ "objc/api/peerconnection/RTCRtpSender.h",
+ "objc/api/peerconnection/RTCRtpTransceiver.h",
+ "objc/api/peerconnection/RTCSSLAdapter.h",
+ "objc/api/peerconnection/RTCSessionDescription.h",
+ "objc/api/peerconnection/RTCStatisticsReport.h",
+ "objc/api/peerconnection/RTCTracing.h",
+ "objc/api/peerconnection/RTCVideoSource.h",
+ "objc/api/peerconnection/RTCVideoTrack.h",
+ "objc/api/video_codec/RTCVideoDecoderAV1.h",
+ "objc/api/video_codec/RTCVideoDecoderVP8.h",
+ "objc/api/video_codec/RTCVideoDecoderVP9.h",
+ "objc/api/video_codec/RTCVideoEncoderAV1.h",
+ "objc/api/video_codec/RTCVideoEncoderVP8.h",
+ "objc/api/video_codec/RTCVideoEncoderVP9.h",
+ "objc/api/video_frame_buffer/RTCNativeI420Buffer.h",
+ "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h",
+ "objc/base/RTCCodecSpecificInfo.h",
+ "objc/base/RTCEncodedImage.h",
+ "objc/base/RTCI420Buffer.h",
+ "objc/base/RTCLogging.h",
+ "objc/base/RTCMacros.h",
+ "objc/base/RTCMutableI420Buffer.h",
+ "objc/base/RTCMutableYUVPlanarBuffer.h",
+ "objc/base/RTCSSLCertificateVerifier.h",
+ "objc/base/RTCVideoCapturer.h",
+ "objc/base/RTCVideoCodecInfo.h",
+ "objc/base/RTCVideoDecoder.h",
+ "objc/base/RTCVideoDecoderFactory.h",
+ "objc/base/RTCVideoEncoder.h",
+ "objc/base/RTCVideoEncoderFactory.h",
+ "objc/base/RTCVideoEncoderQpThresholds.h",
+ "objc/base/RTCVideoEncoderSettings.h",
+ "objc/base/RTCVideoFrame.h",
+ "objc/base/RTCVideoFrameBuffer.h",
+ "objc/base/RTCVideoRenderer.h",
+ "objc/base/RTCYUVPlanarBuffer.h",
+ "objc/components/capturer/RTCCameraVideoCapturer.h",
+ "objc/components/capturer/RTCFileVideoCapturer.h",
+ "objc/components/renderer/metal/RTCMTLNSVideoView.h",
+ "objc/components/renderer/opengl/RTCNSGLVideoView.h",
+ "objc/components/renderer/opengl/RTCVideoViewShading.h",
+ "objc/components/video_codec/RTCCodecSpecificInfoH264.h",
+ "objc/components/video_codec/RTCDefaultVideoDecoderFactory.h",
+ "objc/components/video_codec/RTCDefaultVideoEncoderFactory.h",
+ "objc/components/video_codec/RTCH264ProfileLevelId.h",
+ "objc/components/video_codec/RTCVideoDecoderFactoryH264.h",
+ "objc/components/video_codec/RTCVideoDecoderH264.h",
+ "objc/components/video_codec/RTCVideoEncoderFactoryH264.h",
+ "objc/components/video_codec/RTCVideoEncoderH264.h",
+ "objc/components/video_frame_buffer/RTCCVPixelBuffer.h",
+ "objc/helpers/RTCDispatcher.h",
+ ]
+ if (!build_with_chromium) {
+ sources += [
+ "objc/api/logging/RTCCallbackLogger.h",
+ "objc/api/peerconnection/RTCFileLogger.h",
+ ]
+ }
+
+ deps = [
+ ":base_objc",
+ ":default_codec_factory_objc",
+ ":native_api",
+ ":native_video",
+ ":opengl_ui_objc",
+ ":peerconnectionfactory_base_objc",
+ ":videocapture_objc",
+ ":videocodec_objc",
+ ":videotoolbox_objc",
+ ]
+ if (!build_with_chromium) {
+ deps += [
+ ":callback_logger_objc",
+ ":file_logger_objc",
+ ]
+ }
+
+ frameworks = [
+ "AVFoundation.framework",
+ "CoreGraphics.framework",
+ "CoreMedia.framework",
+ "OpenGL.framework",
+ ]
+
+ configs = [ "..:common_objc" ]
+
+ public_configs = [ ":common_config_objc" ]
+ }
+
+ bundle_data("mac_framework_bundle") {
+ deps = [ "../sdk:mac_framework_objc" ]
+ sources = [ "$root_build_dir/WebRTC.framework" ]
+ outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+ }
+ }
+
+ rtc_library("wrapped_native_codec_objc") {
+ sources = [
+ "objc/api/video_codec/RTCWrappedNativeVideoDecoder.h",
+ "objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm",
+ "objc/api/video_codec/RTCWrappedNativeVideoEncoder.h",
+ "objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm",
+ ]
+
+ configs += [ "..:common_objc" ]
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_objc",
+ ":helpers_objc",
+ "../api/video_codecs:video_codecs_api",
+ "../media:rtc_media_base",
+ ]
+ }
+
+ # The native API is currently experimental and may change without notice.
+ rtc_library("native_api") {
+ visibility = [ "*" ]
+ allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
+ sources = [
+ "objc/native/api/network_monitor_factory.h",
+ "objc/native/api/network_monitor_factory.mm",
+ "objc/native/api/ssl_certificate_verifier.h",
+ "objc/native/api/ssl_certificate_verifier.mm",
+ "objc/native/api/video_capturer.h",
+ "objc/native/api/video_capturer.mm",
+ "objc/native/api/video_decoder_factory.h",
+ "objc/native/api/video_decoder_factory.mm",
+ "objc/native/api/video_encoder_factory.h",
+ "objc/native/api/video_encoder_factory.mm",
+ "objc/native/api/video_frame.h",
+ "objc/native/api/video_frame.mm",
+ "objc/native/api/video_frame_buffer.h",
+ "objc/native/api/video_frame_buffer.mm",
+ "objc/native/api/video_renderer.h",
+ "objc/native/api/video_renderer.mm",
+ ]
+
+ configs += [ "..:common_objc" ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_objc",
+ ":native_video",
+ ":videoframebuffer_objc",
+ "../api:libjingle_peerconnection_api",
+ "../api:make_ref_counted",
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../rtc_base",
+ "../rtc_base:buffer",
+ "../rtc_base:logging",
+ "../rtc_base:threading",
+ ]
+ if (is_ios) {
+ deps += [ ":native_network_monitor" ]
+ }
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+ }
+
+ if (is_ios) {
+ rtc_library("native_network_monitor") {
+ visibility = [ "*" ]
+
+ sources = [
+ "objc/native/src/objc_network_monitor.h",
+ "objc/native/src/objc_network_monitor.mm",
+ ]
+
+ deps = [
+ ":network_monitor_objc",
+ ":network_monitor_observer",
+ "../api:field_trials_view",
+ "../api:sequence_checker",
+ "../api/task_queue:pending_task_safety_flag",
+ "../rtc_base",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+ }
+
+ rtc_library("native_video") {
+ sources = [
+ "objc/native/src/objc_frame_buffer.h",
+ "objc/native/src/objc_frame_buffer.mm",
+ "objc/native/src/objc_video_decoder_factory.h",
+ "objc/native/src/objc_video_decoder_factory.mm",
+ "objc/native/src/objc_video_encoder_factory.h",
+ "objc/native/src/objc_video_encoder_factory.mm",
+ "objc/native/src/objc_video_frame.h",
+ "objc/native/src/objc_video_frame.mm",
+ "objc/native/src/objc_video_renderer.h",
+ "objc/native/src/objc_video_renderer.mm",
+ "objc/native/src/objc_video_track_source.h",
+ "objc/native/src/objc_video_track_source.mm",
+ ]
+
+ configs += [ "..:common_objc" ]
+
+ public_configs = [ ":common_config_objc" ]
+
+ deps = [
+ ":base_native_additions_objc",
+ ":base_objc",
+ ":helpers_objc",
+ ":videocodec_objc",
+ ":videoframebuffer_objc",
+ ":vpx_codec_constants",
+ ":wrapped_native_codec_objc",
+ "../api:make_ref_counted",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../media:rtc_audio_video",
+ "../media:rtc_media_base",
+ "../modules/video_coding:video_codec_interface",
+ "../rtc_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:timestamp_aligner",
+ "../rtc_base:timeutils",
+ ]
+ }
+
+ rtc_library("video_toolbox_cc") {
+ visibility = [
+ ":sdk_unittests_sources",
+ ":videotoolbox_objc",
+ ]
+ sources = [
+ "objc/components/video_codec/helpers.cc",
+ "objc/components/video_codec/helpers.h",
+ "objc/components/video_codec/nalu_rewriter.cc",
+ "objc/components/video_codec/nalu_rewriter.h",
+ ]
+ deps = [
+ "../common_video",
+ "../modules/video_coding:webrtc_h264",
+ "../rtc_base:buffer",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ ]
+ }
+
+ rtc_library("videotoolbox_objc") {
+ visibility = [ "*" ]
+ allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
+ sources = [
+ "objc/components/video_codec/RTCVideoDecoderFactoryH264.h",
+ "objc/components/video_codec/RTCVideoDecoderFactoryH264.m",
+ "objc/components/video_codec/RTCVideoDecoderH264.h",
+ "objc/components/video_codec/RTCVideoDecoderH264.mm",
+ "objc/components/video_codec/RTCVideoEncoderFactoryH264.h",
+ "objc/components/video_codec/RTCVideoEncoderFactoryH264.m",
+ "objc/components/video_codec/RTCVideoEncoderH264.h",
+ "objc/components/video_codec/RTCVideoEncoderH264.mm",
+ ]
+
+ configs += [
+ "..:common_objc",
+ ":used_from_extension",
+ ]
+
+ if (is_ios && rtc_apprtcmobile_broadcast_extension) {
+ defines = [ "RTC_APPRTCMOBILE_BROADCAST_EXTENSION" ]
+ }
+
+ deps = [
+ ":base_native_additions_objc",
+ ":base_objc",
+ ":helpers_objc",
+ ":video_toolbox_cc",
+ ":videocodec_objc",
+ ":videoframebuffer_objc",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../modules/video_coding:video_codec_interface",
+ "../rtc_base:buffer",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:timeutils",
+ "//third_party/libyuv",
+ ]
+
+ frameworks = [
+ "CoreFoundation.framework",
+ "CoreMedia.framework",
+ "CoreVideo.framework",
+ "VideoToolbox.framework",
+ ]
+ }
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/OWNERS b/third_party/libwebrtc/sdk/OWNERS
new file mode 100644
index 0000000000..4d31ffb663
--- /dev/null
+++ b/third_party/libwebrtc/sdk/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/third_party/libwebrtc/sdk/android/AndroidManifest.xml b/third_party/libwebrtc/sdk/android/AndroidManifest.xml
new file mode 100644
index 0000000000..417f45fc5e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.webrtc">
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="23" />
+</manifest>
diff --git a/third_party/libwebrtc/sdk/android/BUILD.gn b/third_party/libwebrtc/sdk/android/BUILD.gn
new file mode 100644
index 0000000000..f082bd353a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/BUILD.gn
@@ -0,0 +1,1749 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+if (is_android) {
+ import("//build/config/android/config.gni")
+ import("//build/config/android/rules.gni")
+ import("../../webrtc.gni")
+
+ group("android") {
+ if (!build_with_chromium && is_android) {
+ public_deps = [
+ ":libjingle_peerconnection_jni",
+ ":libjingle_peerconnection_so",
+ ":libwebrtc",
+ ":native_api",
+ ]
+ }
+ }
+
+ #####################
+ # Aggregate targets #
+ #####################
+
+ dist_jar("libwebrtc") {
+ _target_dir_name = get_label_info(":$target_name", "dir")
+ output = "${root_out_dir}/lib.java${_target_dir_name}/${target_name}.jar"
+ direct_deps_only = true
+ use_unprocessed_jars = true
+ requires_android = true
+ no_build_hooks = true
+
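+    # direct_deps_only means only the jars of the targets listed here are
+    # merged into the output; anything needed transitively must be listed
+    # explicitly.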
+ deps = [
+ ":audio_api_java",
+ ":base_java",
+ ":builtin_audio_codecs_java",
+ ":camera_java",
+ ":default_video_codec_factory_java",
+ ":filevideo_java",
+ ":hwcodecs_java",
+ ":java_audio_device_module_java",
+ ":libaom_av1_java",
+ ":libjingle_peerconnection_java",
+ ":libjingle_peerconnection_metrics_default_java",
+ ":libvpx_vp8_java",
+ ":libvpx_vp9_java",
+ ":logging_java",
+ ":peerconnection_java",
+ ":screencapturer_java",
+ ":surfaceviewrenderer_java",
+ ":swcodecs_java",
+ ":video_api_java",
+ ":video_java",
+ "../../modules/audio_device:audio_device_java",
+ "../../rtc_base:base_java",
+ ]
+ }
+
+ # The native API is currently experimental and may change without notice.
+ group("native_api") {
+ deps = [
+ ":native_api_audio_device_module",
+ ":native_api_base",
+ ":native_api_codecs",
+ ":native_api_jni",
+ ":native_api_network_monitor",
+ ":native_api_peerconnection",
+ ":native_api_stacktrace",
+ ":native_api_video",
+ ]
+ }
+
+ # Old target that pulls in everything. This will be going away in the future;
+ # clients should depend on the individual targets (video_java etc.) instead.
+ rtc_android_library("libjingle_peerconnection_java") {
+ sources = [ "src/java/org/webrtc/Empty.java" ]
+
+ deps = [
+ ":audio_api_java",
+ ":base_java",
+ ":camera_java",
+ ":filevideo_java",
+ ":hwcodecs_java",
+ ":java_audio_device_module_java",
+ ":peerconnection_java",
+ ":screencapturer_java",
+ ":surfaceviewrenderer_java",
+ ":video_api_java",
+ ":video_java",
+ "//modules/audio_device:audio_device_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ rtc_android_library("libjingle_peerconnection_metrics_default_java") {
+ sources = [ "api/org/webrtc/Metrics.java" ]
+
+ deps = [
+ ":base_java",
+ ":libjingle_peerconnection_java",
+ "../../rtc_base:base_java",
+ ]
+ }
+
+ rtc_library("libjingle_peerconnection_jni") {
+ visibility = [ "*" ]
+ allow_poison = [
+ "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
+ "software_video_codecs", # TODO(bugs.webrtc.org/7925): Remove.
+ ]
+ public_deps = [ # no-presubmit-check TODO(webrtc:8603)
+ ":audio_jni",
+ ":base_jni",
+ ":builtin_audio_codecs_jni",
+ ":default_video_codec_factory_jni",
+ ":java_audio_device_module_jni",
+ ":peerconnection_jni",
+ ":video_jni",
+ "../../api:create_peerconnection_factory",
+ ]
+ }
+
+ rtc_shared_library("libjingle_peerconnection_so") {
+ sources = [ "src/jni/jni_onload.cc" ]
+
+ suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
+ configs += [ "//build/config/android:hide_all_but_jni" ]
+ ldflags = [
+ "-lEGL",
+ "-Wl,--build-id",
+ ]
+
+ deps = [
+ ":libjingle_peerconnection_jni",
+ ":libjingle_peerconnection_metrics_default_jni",
+ ":native_api_jni",
+ ":video_egl_jni",
+ "../../pc:libjingle_peerconnection",
+ "../../rtc_base",
+ ]
+ output_extension = "so"
+ }
+
+ #######################
+ # Public Java modules #
+ #######################
+
+ # Core targets.
+
+ # TODO(sakal): Extract files from this target to relevant subtargets: video, audio, etc.
+ rtc_android_library("base_java") {
+ sources = [
+ "api/org/webrtc/Predicate.java",
+ "api/org/webrtc/RefCounted.java",
+ "src/java/org/webrtc/CalledByNative.java",
+ "src/java/org/webrtc/CalledByNativeUnchecked.java",
+ "src/java/org/webrtc/Histogram.java",
+ "src/java/org/webrtc/JniCommon.java",
+ "src/java/org/webrtc/JniHelper.java",
+ "src/java/org/webrtc/RefCountDelegate.java",
+ "src/java/org/webrtc/WebRtcClassLoader.java",
+ ]
+
+ deps = [ "//third_party/androidx:androidx_annotation_annotation_java" ]
+ }
+
+ rtc_android_library("audio_api_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/AudioDecoderFactoryFactory.java",
+ "api/org/webrtc/AudioEncoderFactoryFactory.java",
+ "api/org/webrtc/audio/AudioDeviceModule.java",
+ ]
+
+ deps = [
+ ":base_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ rtc_android_library("video_api_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/CapturerObserver.java",
+ "api/org/webrtc/EncodedImage.java",
+ "api/org/webrtc/VideoCodecInfo.java",
+ "api/org/webrtc/VideoCodecStatus.java",
+ "api/org/webrtc/VideoDecoder.java",
+ "api/org/webrtc/VideoDecoderFactory.java",
+ "api/org/webrtc/VideoEncoder.java",
+ "api/org/webrtc/VideoEncoderFactory.java",
+ "api/org/webrtc/VideoFrame.java",
+ "api/org/webrtc/VideoSink.java",
+ ]
+
+ deps = [
+ ":base_java",
+ "//rtc_base:base_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ srcjar_deps = [ "//api/video:video_frame_enums" ]
+ }
+
+ rtc_android_library("video_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/EglBase.java",
+ "api/org/webrtc/EglBase10.java",
+ "api/org/webrtc/EglBase14.java",
+ "api/org/webrtc/EglRenderer.java",
+ "api/org/webrtc/GlRectDrawer.java",
+ "api/org/webrtc/GlShader.java",
+ "api/org/webrtc/GlTextureFrameBuffer.java",
+ "api/org/webrtc/GlUtil.java",
+ "api/org/webrtc/JavaI420Buffer.java",
+ "api/org/webrtc/RendererCommon.java",
+ "api/org/webrtc/SurfaceTextureHelper.java",
+ "api/org/webrtc/TextureBufferImpl.java",
+ "api/org/webrtc/TimestampAligner.java",
+ "api/org/webrtc/VideoCapturer.java",
+ "api/org/webrtc/VideoDecoderFallback.java",
+ "api/org/webrtc/VideoEncoderFallback.java",
+ "api/org/webrtc/VideoFrameDrawer.java",
+ "api/org/webrtc/WrappedNativeVideoDecoder.java",
+ "api/org/webrtc/WrappedNativeVideoEncoder.java",
+ "api/org/webrtc/YuvConverter.java",
+ "api/org/webrtc/YuvHelper.java",
+ "src/java/org/webrtc/EglBase10Impl.java",
+ "src/java/org/webrtc/EglBase14Impl.java",
+ "src/java/org/webrtc/GlGenericDrawer.java",
+ "src/java/org/webrtc/H264Utils.java",
+ "src/java/org/webrtc/NV21Buffer.java",
+ "src/java/org/webrtc/VideoCodecMimeType.java",
+ "src/java/org/webrtc/VideoDecoderWrapper.java",
+ "src/java/org/webrtc/VideoEncoderWrapper.java",
+ "src/java/org/webrtc/WrappedNativeI420Buffer.java",
+ ]
+
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ "//rtc_base:base_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+
+ rtc_android_library("peerconnection_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/AddIceObserver.java",
+ "api/org/webrtc/AudioProcessingFactory.java",
+ "api/org/webrtc/AudioSource.java",
+ "api/org/webrtc/AudioTrack.java",
+ "api/org/webrtc/CallSessionFileRotatingLogSink.java",
+ "api/org/webrtc/CandidatePairChangeEvent.java",
+ "api/org/webrtc/CryptoOptions.java",
+ "api/org/webrtc/DataChannel.java",
+ "api/org/webrtc/DtmfSender.java",
+ "api/org/webrtc/FecControllerFactoryFactoryInterface.java",
+ "api/org/webrtc/FrameDecryptor.java",
+ "api/org/webrtc/FrameEncryptor.java",
+ "api/org/webrtc/IceCandidate.java",
+ "api/org/webrtc/IceCandidateErrorEvent.java",
+ "api/org/webrtc/MediaConstraints.java",
+ "api/org/webrtc/MediaSource.java",
+ "api/org/webrtc/MediaStream.java",
+ "api/org/webrtc/MediaStreamTrack.java",
+ "api/org/webrtc/NativeLibraryLoader.java",
+ "api/org/webrtc/NativePeerConnectionFactory.java",
+ "api/org/webrtc/NetEqFactoryFactory.java",
+ "api/org/webrtc/NetworkChangeDetector.java",
+ "api/org/webrtc/NetworkChangeDetectorFactory.java",
+ "api/org/webrtc/NetworkControllerFactoryFactory.java",
+
+ # TODO(sakal): Break dependencies and move to base_java.
+ "api/org/webrtc/NetworkMonitor.java",
+ "api/org/webrtc/NetworkMonitorAutoDetect.java",
+ "api/org/webrtc/NetworkStatePredictorFactoryFactory.java",
+ "api/org/webrtc/PeerConnection.java",
+ "api/org/webrtc/PeerConnectionDependencies.java",
+ "api/org/webrtc/PeerConnectionFactory.java",
+ "api/org/webrtc/RTCStats.java",
+ "api/org/webrtc/RTCStatsCollectorCallback.java",
+ "api/org/webrtc/RTCStatsReport.java",
+ "api/org/webrtc/RtcCertificatePem.java",
+ "api/org/webrtc/RtpParameters.java",
+ "api/org/webrtc/RtpReceiver.java",
+ "api/org/webrtc/RtpSender.java",
+ "api/org/webrtc/RtpTransceiver.java",
+ "api/org/webrtc/SSLCertificateVerifier.java",
+ "api/org/webrtc/SdpObserver.java",
+ "api/org/webrtc/SessionDescription.java",
+ "api/org/webrtc/StatsObserver.java",
+ "api/org/webrtc/StatsReport.java",
+ "api/org/webrtc/TurnCustomizer.java",
+ "api/org/webrtc/VideoProcessor.java",
+ "api/org/webrtc/VideoSource.java",
+ "api/org/webrtc/VideoTrack.java",
+ "src/java/org/webrtc/NativeAndroidVideoTrackSource.java",
+ "src/java/org/webrtc/NativeCapturerObserver.java",
+ "src/java/org/webrtc/NativeLibrary.java",
+ ]
+
+ deps = [
+ ":audio_api_java",
+ ":base_java",
+ ":builtin_audio_codecs_java",
+ ":default_video_codec_factory_java",
+
+ # TODO(bugs.webrtc.org/7452): Make injection mandatory and remove this dep.
+ ":java_audio_device_module_java",
+ ":logging_java",
+ ":swcodecs_java",
+ ":video_api_java",
+ ":video_java",
+ "//modules/audio_device:audio_device_java",
+ "//rtc_base:base_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ srcjar_deps = [
+ "//api:priority_enums",
+ "//rtc_base:network_monitor_enums",
+ ]
+ }
+
+ # Modules, in alphabetical order.
+
+ rtc_android_library("camera_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/Camera1Capturer.java",
+ "api/org/webrtc/Camera1Enumerator.java",
+ "api/org/webrtc/Camera2Capturer.java",
+ "api/org/webrtc/Camera2Enumerator.java",
+ "api/org/webrtc/CameraEnumerationAndroid.java",
+ "api/org/webrtc/CameraEnumerator.java",
+ "api/org/webrtc/CameraVideoCapturer.java",
+ "src/java/org/webrtc/Camera1Session.java",
+ "src/java/org/webrtc/Camera2Session.java",
+ "src/java/org/webrtc/CameraCapturer.java",
+ "src/java/org/webrtc/CameraSession.java",
+ ]
+
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+
+ rtc_android_library("default_video_codec_factory_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/DefaultVideoDecoderFactory.java",
+ "api/org/webrtc/DefaultVideoEncoderFactory.java",
+ ]
+
+ deps = [
+ ":hwcodecs_java",
+ ":swcodecs_java",
+ ":video_api_java",
+ ":video_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+
+ rtc_android_library("filevideo_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/FileVideoCapturer.java",
+ "api/org/webrtc/VideoFileRenderer.java",
+ ]
+
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ rtc_android_library("hwcodecs_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/HardwareVideoDecoderFactory.java",
+ "api/org/webrtc/HardwareVideoEncoderFactory.java",
+ "api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java",
+ "src/java/org/webrtc/AndroidVideoDecoder.java",
+ "src/java/org/webrtc/BaseBitrateAdjuster.java",
+ "src/java/org/webrtc/BitrateAdjuster.java",
+ "src/java/org/webrtc/DynamicBitrateAdjuster.java",
+ "src/java/org/webrtc/FramerateBitrateAdjuster.java",
+ "src/java/org/webrtc/HardwareVideoEncoder.java",
+ "src/java/org/webrtc/MediaCodecUtils.java",
+ "src/java/org/webrtc/MediaCodecVideoDecoderFactory.java",
+ "src/java/org/webrtc/MediaCodecWrapper.java",
+ "src/java/org/webrtc/MediaCodecWrapperFactory.java",
+ "src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java",
+ "src/java/org/webrtc/NV12Buffer.java",
+ ]
+
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+
+ rtc_android_library("java_audio_device_module_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/audio/JavaAudioDeviceModule.java",
+ "src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java",
+ "src/java/org/webrtc/audio/VolumeLogger.java",
+ "src/java/org/webrtc/audio/WebRtcAudioEffects.java",
+ "src/java/org/webrtc/audio/WebRtcAudioManager.java",
+ "src/java/org/webrtc/audio/WebRtcAudioRecord.java",
+ "src/java/org/webrtc/audio/WebRtcAudioTrack.java",
+ "src/java/org/webrtc/audio/WebRtcAudioUtils.java",
+ ]
+
+ deps = [
+ ":audio_api_java",
+ ":base_java",
+ "//rtc_base:base_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+
+ rtc_android_library("builtin_audio_codecs_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java",
+ "api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java",
+ ]
+
+ deps = [ ":audio_api_java" ]
+ }
+
+ rtc_android_library("screencapturer_java") {
+ visibility = [ "*" ]
+ sources = [ "api/org/webrtc/ScreenCapturerAndroid.java" ]
+
+ deps = [
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+
+ rtc_android_library("surfaceviewrenderer_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/SurfaceEglRenderer.java",
+ "api/org/webrtc/SurfaceViewRenderer.java",
+ ]
+
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ rtc_android_library("libvpx_vp8_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/LibvpxVp8Decoder.java",
+ "api/org/webrtc/LibvpxVp8Encoder.java",
+ ]
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ rtc_android_library("libvpx_vp9_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/LibvpxVp9Decoder.java",
+ "api/org/webrtc/LibvpxVp9Encoder.java",
+ ]
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ rtc_android_library("libaom_av1_encoder_java") {
+ visibility = [ "*" ]
+ sources = [ "api/org/webrtc/LibaomAv1Encoder.java" ]
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ rtc_android_library("libaom_av1_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/LibaomAv1Decoder.java",
+ "api/org/webrtc/LibaomAv1Encoder.java",
+ ]
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ rtc_android_library("dav1d_java") {
+ visibility = [ "*" ]
+ sources = [ "api/org/webrtc/Dav1dDecoder.java" ]
+ deps = [ ":video_java" ]
+ }
+
+ rtc_android_library("swcodecs_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/SoftwareVideoDecoderFactory.java",
+ "api/org/webrtc/SoftwareVideoEncoderFactory.java",
+ ]
+
+ deps = [
+ ":base_java",
+ ":libaom_av1_java",
+ ":libvpx_vp8_java",
+ ":libvpx_vp9_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+ }
+}
+
+if (current_os == "linux" || is_android) {
+ ################################
+ # JNI targets for Java modules #
+ ################################
+
+ # Mirrors the order of targets in the section above.
+
+ rtc_library("base_jni") {
+ visibility = [ "*" ]
+ sources = [
+ "src/jni/android_histogram.cc",
+ "src/jni/android_network_monitor.cc",
+ "src/jni/android_network_monitor.h",
+ "src/jni/jni_common.cc",
+ "src/jni/jni_helpers.cc",
+ "src/jni/jni_helpers.h",
+ "src/jni/pc/audio.h",
+ "src/jni/pc/logging.cc",
+ "src/jni/pc/video.h",
+ "src/jni/scoped_java_ref_counted.cc",
+ "src/jni/scoped_java_ref_counted.h",
+ ]
+
+ deps = [
+ ":generated_base_jni",
+ ":internal_jni",
+ ":native_api_jni",
+ "../../api:field_trials_view",
+ "../../api:libjingle_peerconnection_api",
+ "../../api:scoped_refptr",
+ "../../api:sequence_checker",
+ "../../api/task_queue:pending_task_safety_flag",
+ "../../rtc_base",
+ "../../rtc_base:checks",
+ "../../rtc_base:ip_address",
+ "../../rtc_base:logging",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:refcount",
+ "../../rtc_base:stringutils",
+ "../../rtc_base:threading",
+ "../../system_wrappers:field_trial",
+ "../../system_wrappers:metrics",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("audio_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "audio_codecs" ]
+ sources = [ "src/jni/pc/audio.cc" ]
+
+ deps = [
+ ":base_jni",
+ "../../modules/audio_processing",
+ "../../modules/audio_processing:api",
+ ]
+ }
+
+ rtc_library("builtin_audio_codecs_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "audio_codecs" ]
+ sources = [
+ "src/jni/builtin_audio_decoder_factory_factory.cc",
+ "src/jni/builtin_audio_encoder_factory_factory.cc",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":generated_builtin_audio_codecs_jni",
+ ":native_api_jni",
+ "../../api/audio_codecs:builtin_audio_decoder_factory",
+ "../../api/audio_codecs:builtin_audio_encoder_factory",
+ ]
+ }
+
+ rtc_library("video_jni") {
+ visibility = [ "*" ]
+ sources = [
+ "src/jni/android_video_track_source.cc",
+ "src/jni/android_video_track_source.h",
+ "src/jni/encoded_image.cc",
+ "src/jni/encoded_image.h",
+ "src/jni/h264_utils.cc",
+ "src/jni/java_i420_buffer.cc",
+ "src/jni/jni_generator_helper.h",
+ "src/jni/native_capturer_observer.cc",
+ "src/jni/native_capturer_observer.h",
+ "src/jni/nv12_buffer.cc",
+ "src/jni/nv21_buffer.cc",
+ "src/jni/pc/video.cc",
+ "src/jni/timestamp_aligner.cc",
+ "src/jni/video_codec_info.cc",
+ "src/jni/video_codec_info.h",
+ "src/jni/video_codec_status.cc",
+ "src/jni/video_codec_status.h",
+ "src/jni/video_decoder_factory_wrapper.cc",
+ "src/jni/video_decoder_factory_wrapper.h",
+ "src/jni/video_decoder_fallback.cc",
+ "src/jni/video_decoder_wrapper.cc",
+ "src/jni/video_decoder_wrapper.h",
+ "src/jni/video_encoder_factory_wrapper.cc",
+ "src/jni/video_encoder_factory_wrapper.h",
+ "src/jni/video_encoder_fallback.cc",
+ "src/jni/video_encoder_wrapper.cc",
+ "src/jni/video_encoder_wrapper.h",
+ "src/jni/video_sink.cc",
+ "src/jni/video_sink.h",
+ "src/jni/video_track.cc",
+ "src/jni/yuv_helper.cc",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":generated_video_jni",
+ ":native_api_jni",
+ ":videoframe_jni",
+ "../../api:libjingle_peerconnection_api",
+ "../../api:media_stream_interface",
+ "../../api:sequence_checker",
+ "../../api/task_queue",
+ "../../api/video:encoded_image",
+ "../../api/video:render_resolution",
+ "../../api/video:video_frame",
+ "../../api/video:video_frame_type",
+ "../../api/video:video_rtp_headers",
+ "../../api/video_codecs:rtc_software_fallback_wrappers",
+ "../../api/video_codecs:video_codecs_api",
+ "../../common_video",
+ "../../media:rtc_media_base",
+ "../../modules/video_coding:codec_globals_headers",
+ "../../modules/video_coding:video_codec_interface",
+ "../../modules/video_coding:video_coding_utility",
+ "../../modules/video_coding/svc:scalable_video_controller",
+ "../../rtc_base",
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ "../../rtc_base:race_checker",
+ "../../rtc_base:refcount",
+ "../../rtc_base:rtc_task_queue",
+ "../../rtc_base:safe_conversions",
+ "../../rtc_base:threading",
+ "../../rtc_base:timestamp_aligner",
+ "../../rtc_base:timeutils",
+ "../../rtc_base/synchronization:mutex",
+ "//third_party/libyuv",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ # Sources here require the -lEGL linker flag. They are separated from the
+ # video_jni target for backwards compatibility.
+ rtc_library("video_egl_jni") {
+ visibility = [ "*" ]
+ sources = [ "src/jni/egl_base_10_impl.cc" ]
+ deps = [
+ ":generated_video_egl_jni",
+ ":native_api_jni",
+ ]
+ }
+
+ rtc_library("peerconnection_jni") {
+ # Do not depend on this target externally unless you absolutely have to. It is
+ # made public because we don't have a proper NDK yet. Header APIs here are not
+ # considered public and are subject to change.
+ visibility = [ "*" ]
+
+ sources = [
+ "src/jni/pc/add_ice_candidate_observer.cc",
+ "src/jni/pc/add_ice_candidate_observer.h",
+ "src/jni/pc/android_network_monitor.h",
+ "src/jni/pc/audio_track.cc",
+ "src/jni/pc/call_session_file_rotating_log_sink.cc",
+ "src/jni/pc/crypto_options.cc",
+ "src/jni/pc/crypto_options.h",
+ "src/jni/pc/data_channel.cc",
+ "src/jni/pc/data_channel.h",
+ "src/jni/pc/dtmf_sender.cc",
+ "src/jni/pc/ice_candidate.cc",
+ "src/jni/pc/ice_candidate.h",
+ "src/jni/pc/media_constraints.cc",
+ "src/jni/pc/media_constraints.h",
+ "src/jni/pc/media_source.cc",
+ "src/jni/pc/media_stream.cc",
+ "src/jni/pc/media_stream.h",
+ "src/jni/pc/media_stream_track.cc",
+ "src/jni/pc/media_stream_track.h",
+ "src/jni/pc/owned_factory_and_threads.cc",
+ "src/jni/pc/owned_factory_and_threads.h",
+ "src/jni/pc/peer_connection.cc",
+ "src/jni/pc/peer_connection.h",
+ "src/jni/pc/peer_connection_factory.cc",
+ "src/jni/pc/peer_connection_factory.h",
+ "src/jni/pc/rtc_certificate.cc",
+ "src/jni/pc/rtc_certificate.h",
+ "src/jni/pc/rtc_stats_collector_callback_wrapper.cc",
+ "src/jni/pc/rtc_stats_collector_callback_wrapper.h",
+ "src/jni/pc/rtp_parameters.cc",
+ "src/jni/pc/rtp_parameters.h",
+ "src/jni/pc/rtp_receiver.cc",
+ "src/jni/pc/rtp_receiver.h",
+ "src/jni/pc/rtp_sender.cc",
+ "src/jni/pc/rtp_sender.h",
+ "src/jni/pc/rtp_transceiver.cc",
+ "src/jni/pc/rtp_transceiver.h",
+ "src/jni/pc/sdp_observer.cc",
+ "src/jni/pc/sdp_observer.h",
+ "src/jni/pc/session_description.cc",
+ "src/jni/pc/session_description.h",
+ "src/jni/pc/ssl_certificate_verifier_wrapper.cc",
+ "src/jni/pc/ssl_certificate_verifier_wrapper.h",
+ "src/jni/pc/stats_observer.cc",
+ "src/jni/pc/stats_observer.h",
+ "src/jni/pc/turn_customizer.cc",
+ "src/jni/pc/turn_customizer.h",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":generated_external_classes_jni",
+ ":generated_peerconnection_jni",
+ ":logging_jni",
+ ":native_api_jni",
+ ":native_api_stacktrace",
+ "..:media_constraints",
+ "../../api:callfactory_api",
+ "../../api:libjingle_peerconnection_api",
+ "../../api:media_stream_interface",
+ "../../api:rtc_event_log_output_file",
+ "../../api:rtp_parameters",
+ "../../api:turn_customizer",
+ "../../api/crypto:options",
+ "../../api/rtc_event_log:rtc_event_log_factory",
+ "../../api/task_queue:default_task_queue_factory",
+ "../../api/video_codecs:video_codecs_api",
+ "../../call:call_interfaces",
+ "../../media:rtc_audio_video",
+ "../../media:rtc_media_base",
+ "../../modules/audio_device",
+ "../../modules/audio_processing:api",
+ "../../modules/utility",
+ "../../pc:media_stream_observer",
+ "../../pc:webrtc_sdp",
+ "../../rtc_base",
+ "../../rtc_base:checks",
+ "../../rtc_base:event_tracer",
+ "../../rtc_base:logging",
+ "../../rtc_base:refcount",
+ "../../rtc_base:rtc_task_queue",
+ "../../rtc_base:safe_conversions",
+ "../../rtc_base:stringutils",
+ "../../rtc_base:threading",
+ "../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ # JNI target for java_audio_device_module_java
+ rtc_library("java_audio_device_module_jni") {
+ visibility = [ "*" ]
+ sources = [ "src/jni/audio_device/java_audio_device_module.cc" ]
+
+ deps = [
+ ":base_jni",
+ ":generated_java_audio_jni",
+ ":java_audio_device_module",
+ ]
+ }
+
+ rtc_library("libjingle_peerconnection_metrics_default_jni") {
+ visibility = [ "*" ]
+ sources = [ "src/jni/android_metrics.cc" ]
+ deps = [
+ ":base_jni",
+ ":generated_metrics_jni",
+ ":native_api_jni",
+ ":peerconnection_jni",
+ "../../pc:peerconnection",
+ "../../rtc_base:stringutils",
+ "../../system_wrappers:metrics",
+ ]
+ }
+
+ rtc_library("default_video_codec_factory_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ deps = [
+ ":swcodecs_jni",
+ ":video_jni",
+ ]
+ }
+
+ rtc_library("libvpx_vp8_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [ "src/jni/vp8_codec.cc" ]
+ deps = [
+ ":base_jni",
+ ":generated_libvpx_vp8_jni",
+ ":video_jni",
+ "../../modules/video_coding:webrtc_vp8",
+ ]
+ }
+
+ rtc_library("libvpx_vp9_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [ "src/jni/vp9_codec.cc" ]
+ deps = [
+ ":base_jni",
+ ":generated_libvpx_vp9_jni",
+ ":video_jni",
+ "../../modules/video_coding:webrtc_vp9",
+ ]
+ }
+
+ rtc_library("libaom_av1_encoder_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [ "src/jni/libaom_av1_encoder.cc" ]
+ deps = [
+ ":base_jni",
+ ":generated_libaom_av1_encoder_jni",
+ ":video_jni",
+ "../../modules/video_coding/codecs/av1:libaom_av1_encoder",
+ ]
+ }
+
+ rtc_library("libaom_av1_decoder_if_supported_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [ "src/jni/libaom_av1_codec.cc" ]
+ deps = [
+ ":base_jni",
+ ":generated_libaom_av1_decoder_if_supported_jni",
+ ":video_jni",
+ "../../modules/video_coding/codecs/av1:libaom_av1_decoder",
+ ]
+ }
+
+ rtc_library("dav1d_av1_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [ "src/jni/dav1d_codec.cc" ]
+ deps = [
+ ":base_jni",
+ ":generated_dav1d_jni",
+ ":video_jni",
+ "../../modules/video_coding/codecs/av1:dav1d_decoder",
+ ]
+ }
+
+ rtc_library("swcodecs_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ deps = [
+ ":libaom_av1_decoder_if_supported_jni",
+ ":libvpx_vp8_jni",
+ ":libvpx_vp9_jni",
+ ]
+ }
+
+ ######################
+ # Native API targets #
+ ######################
+
+ # Core targets.
+
+ # JNI helpers that are also needed from internal JNI code. Cannot depend on
+ # any JNI targets other than internal_jni.
+ rtc_library("native_api_jni") {
+ visibility = [ "*" ]
+ sources = [
+ "native_api/jni/class_loader.cc",
+ "native_api/jni/java_types.cc",
+ "native_api/jni/jvm.cc",
+ "src/jni/jni_generator_helper.cc",
+ "src/jni/jni_generator_helper.h",
+ ]
+
+ public = [
+ "native_api/jni/class_loader.h",
+ "native_api/jni/java_types.h",
+ "native_api/jni/jni_int_wrapper.h",
+ "native_api/jni/jvm.h",
+ "native_api/jni/scoped_java_ref.h",
+ ]
+
+ deps = [
+ ":generated_external_classes_jni",
+ ":generated_native_api_jni",
+ ":internal_jni",
+ "../../api:sequence_checker",
+ "//api:array_view",
+ "//rtc_base:checks",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_library("native_api_base") {
+ visibility = [ "*" ]
+ sources = [
+ "native_api/base/init.cc",
+ "native_api/base/init.h",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":native_api_jni",
+ "//rtc_base",
+ "//rtc_base:checks",
+ ]
+ }
+
+ # Modules, in alphabetical order.
+
+ rtc_library("native_api_audio_device_module") {
+ visibility = [ "*" ]
+
+ sources = [
+ "native_api/audio_device_module/audio_device_android.cc",
+ "native_api/audio_device_module/audio_device_android.h",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":java_audio_device_module",
+ ":opensles_audio_device_module",
+ "../../api:scoped_refptr",
+ "../../modules/audio_device",
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ "../../rtc_base:refcount",
+ "../../system_wrappers",
+ "../../system_wrappers:metrics",
+ ]
+ if (rtc_enable_android_aaudio) {
+ deps += [ ":aaudio_audio_device_module" ]
+ }
+ }
+
+ # API for wrapping Java VideoDecoderFactory/VideoEncoderFactory classes into
+ # C++ objects.
+ rtc_library("native_api_codecs") {
+ visibility = [ "*" ]
+ allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
+ sources = [
+ "native_api/codecs/wrapper.cc",
+ "native_api/codecs/wrapper.h",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":native_api_jni",
+ ":video_jni",
+ "//api/video_codecs:video_codecs_api",
+ "//rtc_base:checks",
+ ]
+ }
+
+ rtc_library("native_api_network_monitor") {
+ visibility = [ "*" ]
+ sources = [
+ "native_api/network_monitor/network_monitor.cc",
+ "native_api/network_monitor/network_monitor.h",
+ ]
+
+ deps = [
+ ":base_jni",
+ "../../rtc_base:threading",
+ "//rtc_base",
+ ]
+ }
+
+ # API for creating Java PeerConnectionFactory from C++ equivalents.
+ rtc_library("native_api_peerconnection") {
+ visibility = [ "*" ]
+ sources = [
+ "native_api/peerconnection/peer_connection_factory.cc",
+ "native_api/peerconnection/peer_connection_factory.h",
+ ]
+ deps = [
+ ":base_jni",
+ ":peerconnection_jni",
+ "../../rtc_base:threading",
+ "//api:libjingle_peerconnection_api",
+ "//api/video_codecs:video_codecs_api",
+ "//rtc_base",
+ ]
+ }
+
+ # API for capturing and printing native stacktraces.
+ rtc_library("native_api_stacktrace") {
+ visibility = [ "*" ]
+ sources = [
+ "native_api/stacktrace/stacktrace.cc",
+ "native_api/stacktrace/stacktrace.h",
+ ]
+
+ deps = [
+ "../../rtc_base:criticalsection",
+ "../../rtc_base:logging",
+ "../../rtc_base:stringutils",
+ "../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
+ }
+
+ # API for creating C++ wrapper implementations of api/mediastreaminterface.h
+ # video interfaces from their Java equivalents.
+ rtc_library("native_api_video") {
+ visibility = [ "*" ]
+ allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
+ sources = [
+ "native_api/video/video_source.cc",
+ "native_api/video/video_source.h",
+ "native_api/video/wrapper.cc",
+ "native_api/video/wrapper.h",
+ ]
+ deps = [
+ ":native_api_jni",
+ ":video_jni",
+ ":videoframe_jni",
+ "../../rtc_base:refcount",
+ "../../rtc_base:threading",
+ "//api:libjingle_peerconnection_api",
+ "//api:media_stream_interface",
+ "//api/video:video_frame",
+ "//api/video:video_rtp_headers",
+ "//rtc_base",
+ ]
+ }
+
+ ####################
+ # Internal targets #
+ ####################
+
+ rtc_android_library("logging_java") {
+ sources = [ "src/java/org/webrtc/JNILogging.java" ]
+
+ deps = [
+ ":base_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
+ # Internal code that is needed by native_api_jni. It cannot live in base_jni
+ # because that would create a dependency cycle: native_api_jni depends on
+ # this code, and base_jni depends on native_api_jni.
+ rtc_library("internal_jni") {
+ sources = [
+ "src/jni/jvm.cc",
+ "src/jni/jvm.h",
+ ]
+
+ deps = [ "../../rtc_base:checks" ]
+ }
+
+ rtc_library("videoframe_jni") {
+ sources = [
+ "src/jni/video_frame.cc",
+ "src/jni/video_frame.h",
+ "src/jni/wrapped_native_i420_buffer.cc",
+ "src/jni/wrapped_native_i420_buffer.h",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":generated_video_jni",
+ ":native_api_jni",
+ "../../api:scoped_refptr",
+ "../../api/video:video_frame",
+ "../../api/video:video_rtp_headers",
+ "../../common_video",
+ "../../rtc_base",
+ "../../rtc_base:checks",
+ "../../rtc_base:refcount",
+ "../../rtc_base:timeutils",
+ ]
+ }
+
+ rtc_library("logging_jni") {
+ visibility = [ "*" ]
+ sources = [
+ "src/jni/logging/log_sink.cc",
+ "src/jni/logging/log_sink.h",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":generated_logging_jni",
+ ":native_api_jni",
+ "../../rtc_base",
+ "../../rtc_base:logging",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("audio_device_module_base") {
+ visibility = [ "*" ]
+
+ sources = [
+ "src/jni/audio_device/audio_common.h",
+ "src/jni/audio_device/audio_device_module.cc",
+ "src/jni/audio_device/audio_device_module.h",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":generated_audio_device_module_base_jni",
+ ":native_api_jni",
+ "../../api:make_ref_counted",
+ "../../api:sequence_checker",
+ "../../api/task_queue",
+ "../../api/task_queue:default_task_queue_factory",
+ "../../modules/audio_device:audio_device_api",
+ "../../modules/audio_device:audio_device_buffer",
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ "../../system_wrappers:metrics",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_library("java_audio_device_module") {
+ visibility = [ "*" ]
+
+ sources = [
+ "src/jni/audio_device/audio_record_jni.cc",
+ "src/jni/audio_device/audio_record_jni.h",
+ "src/jni/audio_device/audio_track_jni.cc",
+ "src/jni/audio_device/audio_track_jni.h",
+ ]
+ deps = [
+ ":audio_device_module_base",
+ ":base_jni",
+ ":generated_java_audio_device_module_native_jni",
+ "../../api:sequence_checker",
+ "../../modules/audio_device",
+ "../../modules/audio_device:audio_device_buffer",
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:platform_thread",
+ "../../rtc_base:timeutils",
+ "../../system_wrappers:field_trial",
+ "../../system_wrappers:metrics",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ if (rtc_enable_android_aaudio) {
+ rtc_library("aaudio_audio_device_module") {
+ visibility = [ "*" ]
+ defines = [ "WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO" ]
+ sources = [
+ "src/jni/audio_device/aaudio_player.cc",
+ "src/jni/audio_device/aaudio_player.h",
+ "src/jni/audio_device/aaudio_recorder.cc",
+ "src/jni/audio_device/aaudio_recorder.h",
+ "src/jni/audio_device/aaudio_wrapper.cc",
+ "src/jni/audio_device/aaudio_wrapper.h",
+ ]
+ libs = [ "aaudio" ]
+ deps = [
+ ":audio_device_module_base",
+ ":base_jni",
+ "../../api:array_view",
+ "../../modules/audio_device",
+ "../../modules/audio_device:audio_device_buffer",
+ "../../rtc_base",
+ "../../rtc_base:checks",
+ "../../system_wrappers",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+ }
+
+ rtc_library("opensles_audio_device_module") {
+ visibility = [ "*" ]
+ sources = [
+ "src/jni/audio_device/opensles_common.cc",
+ "src/jni/audio_device/opensles_common.h",
+ "src/jni/audio_device/opensles_player.cc",
+ "src/jni/audio_device/opensles_player.h",
+ "src/jni/audio_device/opensles_recorder.cc",
+ "src/jni/audio_device/opensles_recorder.h",
+ ]
+ libs = [ "OpenSLES" ]
+ deps = [
+ ":audio_device_module_base",
+ ":base_jni",
+ "../../api:array_view",
+ "../../api:refcountedbase",
+ "../../api:scoped_refptr",
+ "../../api:sequence_checker",
+ "../../modules/audio_device",
+ "../../modules/audio_device:audio_device_buffer",
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:platform_thread",
+ "../../rtc_base:timeutils",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ #########################
+ # Generated JNI targets #
+ #########################
+
+ generate_jar_jni("generated_external_classes_jni") {
+ classes = [
+ "java/lang/Integer.class",
+ "java/lang/Double.class",
+ "java/lang/Long.class",
+ "java/lang/Iterable.class",
+ "java/util/Iterator.class",
+ "java/lang/Boolean.class",
+ "java/math/BigInteger.class",
+ "java/util/Map.class",
+ "java/util/LinkedHashMap.class",
+ "java/util/ArrayList.class",
+ "java/lang/Enum.class",
+ ]
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_metrics_jni") {
+ sources = [ "api/org/webrtc/Metrics.java" ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ # Generated JNI for public JNI targets, matching the order of the targets above.
+
+ generate_jni("generated_base_jni") {
+ sources = [
+ "api/org/webrtc/NetworkChangeDetector.java",
+ "api/org/webrtc/NetworkMonitor.java",
+ "api/org/webrtc/RefCounted.java",
+ "src/java/org/webrtc/Histogram.java",
+ "src/java/org/webrtc/JniCommon.java",
+ ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_video_jni") {
+ sources = [
+ "api/org/webrtc/EncodedImage.java",
+ "api/org/webrtc/JavaI420Buffer.java",
+ "api/org/webrtc/TimestampAligner.java",
+ "api/org/webrtc/VideoCodecInfo.java",
+ "api/org/webrtc/VideoCodecStatus.java",
+ "api/org/webrtc/VideoDecoder.java",
+ "api/org/webrtc/VideoDecoderFactory.java",
+ "api/org/webrtc/VideoDecoderFallback.java",
+ "api/org/webrtc/VideoEncoder.java",
+ "api/org/webrtc/VideoEncoderFactory.java",
+ "api/org/webrtc/VideoEncoderFallback.java",
+ "api/org/webrtc/VideoFrame.java",
+ "api/org/webrtc/VideoSink.java",
+ "api/org/webrtc/VideoTrack.java",
+ "api/org/webrtc/YuvHelper.java",
+ "src/java/org/webrtc/H264Utils.java",
+ "src/java/org/webrtc/NV12Buffer.java",
+ "src/java/org/webrtc/NV21Buffer.java",
+ "src/java/org/webrtc/NativeAndroidVideoTrackSource.java",
+ "src/java/org/webrtc/NativeCapturerObserver.java",
+ "src/java/org/webrtc/VideoDecoderWrapper.java",
+ "src/java/org/webrtc/VideoEncoderWrapper.java",
+ "src/java/org/webrtc/WrappedNativeI420Buffer.java",
+ ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_video_egl_jni") {
+ sources = [ "src/java/org/webrtc/EglBase10Impl.java" ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_libvpx_vp8_jni") {
+ sources = [
+ "api/org/webrtc/LibvpxVp8Decoder.java",
+ "api/org/webrtc/LibvpxVp8Encoder.java",
+ ]
+
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_libvpx_vp9_jni") {
+ sources = [
+ "api/org/webrtc/LibvpxVp9Decoder.java",
+ "api/org/webrtc/LibvpxVp9Encoder.java",
+ ]
+
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_libaom_av1_encoder_jni") {
+ sources = [ "api/org/webrtc/LibaomAv1Encoder.java" ]
+
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_libaom_av1_decoder_if_supported_jni") {
+ sources = [ "api/org/webrtc/LibaomAv1Decoder.java" ]
+
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_dav1d_jni") {
+ sources = [ "api/org/webrtc/Dav1dDecoder.java" ]
+
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_peerconnection_jni") {
+ sources = [
+ "api/org/webrtc/AddIceObserver.java",
+ "api/org/webrtc/AudioTrack.java",
+ "api/org/webrtc/CallSessionFileRotatingLogSink.java",
+ "api/org/webrtc/CandidatePairChangeEvent.java",
+ "api/org/webrtc/CryptoOptions.java",
+ "api/org/webrtc/DataChannel.java",
+ "api/org/webrtc/DtmfSender.java",
+ "api/org/webrtc/IceCandidate.java",
+ "api/org/webrtc/IceCandidateErrorEvent.java",
+ "api/org/webrtc/MediaConstraints.java",
+ "api/org/webrtc/MediaSource.java",
+ "api/org/webrtc/MediaStream.java",
+ "api/org/webrtc/MediaStreamTrack.java",
+ "api/org/webrtc/PeerConnection.java",
+ "api/org/webrtc/PeerConnectionFactory.java",
+ "api/org/webrtc/RTCStats.java",
+ "api/org/webrtc/RTCStatsCollectorCallback.java",
+ "api/org/webrtc/RTCStatsReport.java",
+ "api/org/webrtc/RtcCertificatePem.java",
+ "api/org/webrtc/RtpParameters.java",
+ "api/org/webrtc/RtpReceiver.java",
+ "api/org/webrtc/RtpSender.java",
+ "api/org/webrtc/RtpTransceiver.java",
+ "api/org/webrtc/SSLCertificateVerifier.java",
+ "api/org/webrtc/SdpObserver.java",
+ "api/org/webrtc/SessionDescription.java",
+ "api/org/webrtc/StatsObserver.java",
+ "api/org/webrtc/StatsReport.java",
+ "api/org/webrtc/TurnCustomizer.java",
+ ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_java_audio_jni") {
+ sources = [ "api/org/webrtc/audio/JavaAudioDeviceModule.java" ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_builtin_audio_codecs_jni") {
+ sources = [
+ "api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java",
+ "api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java",
+ ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ # Generated JNI for native API targets
+
+ generate_jni("generated_native_api_jni") {
+ sources = [
+ "src/java/org/webrtc/JniHelper.java",
+ "src/java/org/webrtc/WebRtcClassLoader.java",
+ ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ # Generated JNI for internal targets.
+
+ generate_jni("generated_logging_jni") {
+ sources = [ "src/java/org/webrtc/JNILogging.java" ]
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_audio_device_module_base_jni") {
+ sources = [ "src/java/org/webrtc/audio/WebRtcAudioManager.java" ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ generate_jni("generated_java_audio_device_module_native_jni") {
+ sources = [
+ "src/java/org/webrtc/audio/WebRtcAudioRecord.java",
+ "src/java/org/webrtc/audio/WebRtcAudioTrack.java",
+ ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+}
+
+if (is_android) {
+ ################
+ # Test targets #
+ ################
+
+ if (rtc_include_tests) {
+ rtc_instrumentation_test_apk("android_instrumentation_test_apk") {
+ apk_name = "android_instrumentation_test_apk"
+ android_manifest = "instrumentationtests/AndroidManifest.xml"
+ min_sdk_version = 21
+ target_sdk_version = 21
+
+ sources = [
+ "instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java",
+ "instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java",
+ "instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java",
+ "instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java",
+ "instrumentationtests/src/org/webrtc/Camera2CapturerTest.java",
+ "instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java",
+ "instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java",
+ "instrumentationtests/src/org/webrtc/EglRendererTest.java",
+ "instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java",
+ "instrumentationtests/src/org/webrtc/GlRectDrawerTest.java",
+ "instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java",
+ "instrumentationtests/src/org/webrtc/LoggableTest.java",
+ "instrumentationtests/src/org/webrtc/NetworkMonitorTest.java",
+ "instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java",
+ "instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java",
+ "instrumentationtests/src/org/webrtc/PeerConnectionTest.java",
+ "instrumentationtests/src/org/webrtc/RendererCommonTest.java",
+ "instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java",
+ "instrumentationtests/src/org/webrtc/RtpSenderTest.java",
+ "instrumentationtests/src/org/webrtc/RtpTransceiverTest.java",
+ "instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java",
+ "instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java",
+ "instrumentationtests/src/org/webrtc/TestConstants.java",
+ "instrumentationtests/src/org/webrtc/TimestampAlignerTest.java",
+ "instrumentationtests/src/org/webrtc/VideoFileRendererTest.java",
+ "instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java",
+ "instrumentationtests/src/org/webrtc/VideoTrackTest.java",
+ "instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java",
+ "instrumentationtests/src/org/webrtc/YuvHelperTest.java",
+ ]
+
+ data = [ "../../sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m" ]
+
+ deps = [
+ ":audio_api_java",
+ ":base_java",
+ ":builtin_audio_codecs_java",
+ ":camera_java",
+ ":default_video_codec_factory_java",
+ ":filevideo_java",
+ ":hwcodecs_java",
+ ":libjingle_peerconnection_java",
+ ":libjingle_peerconnection_metrics_default_java",
+ ":peerconnection_java",
+ ":surfaceviewrenderer_java",
+ ":swcodecs_java",
+ ":video_api_java",
+ ":video_java",
+ "//base:base_java_test_support",
+ "//rtc_base:base_java",
+ "//third_party/android_deps:guava_android_java",
+ "//third_party/android_support_test_runner:rules_java",
+ "//third_party/android_support_test_runner:runner_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ "//third_party/androidx:androidx_test_runner_java",
+ "//third_party/google-truth:google_truth_java",
+ "//third_party/hamcrest:hamcrest_java",
+ "//third_party/hamcrest:hamcrest_library_java",
+ "//third_party/junit",
+ "//third_party/mockito:mockito_java",
+ ]
+
+ shared_libraries = [
+ "../../sdk/android:libjingle_peerconnection_instrumentationtests_so",
+ ]
+ }
+ }
+
+ rtc_shared_library("libjingle_peerconnection_instrumentationtests_so") {
+ testonly = true
+ sources = [ "src/jni/jni_onload.cc" ]
+
+ suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
+ configs += [ "//build/config/android:hide_all_but_jni" ]
+
+ deps = [
+ ":instrumentationtests_jni",
+ ":libjingle_peerconnection_jni",
+ ":libjingle_peerconnection_metrics_default_jni",
+ ":native_api_jni",
+ "../../pc:libjingle_peerconnection",
+ "../../rtc_base",
+ ]
+ output_extension = "so"
+ }
+
+ rtc_library("instrumentationtests_jni") {
+ testonly = true
+ sources = [
+ "instrumentationtests/loggable_test.cc",
+ "instrumentationtests/video_frame_buffer_test.cc",
+ ]
+
+ deps = [
+ ":base_jni",
+ ":native_api_jni",
+ ":videoframe_jni",
+ "../../api/video:video_frame",
+ "../../rtc_base:logging",
+ ]
+ }
+
+ rtc_library("native_test_jni_onload") {
+ testonly = true
+
+ sources = [ "native_unittests/test_jni_onload.cc" ]
+
+ deps = [
+ ":base_jni",
+ ":internal_jni",
+ ":native_api_base",
+ ":native_api_jni",
+ "../../rtc_base:checks",
+ ]
+ }
+
+ rtc_library("native_unittests") {
+ testonly = true
+
+ sources = [
+ "native_unittests/android_network_monitor_unittest.cc",
+ "native_unittests/application_context_provider.cc",
+ "native_unittests/application_context_provider.h",
+ "native_unittests/audio_device/audio_device_unittest.cc",
+ "native_unittests/codecs/wrapper_unittest.cc",
+ "native_unittests/java_types_unittest.cc",
+ "native_unittests/peerconnection/peer_connection_factory_unittest.cc",
+ "native_unittests/stacktrace/stacktrace_unittest.cc",
+ "native_unittests/video/video_source_unittest.cc",
+ ]
+
+ data = [
+ "../../resources/audio_device/audio_short44.pcm",
+ "../../resources/audio_device/audio_short48.pcm",
+ ]
+
+ deps = [
+ ":audio_device_module_base",
+ ":audio_jni",
+ ":base_jni",
+ ":generated_native_unittests_jni",
+ ":native_api_audio_device_module",
+ ":native_api_base",
+ ":native_api_codecs",
+ ":native_api_jni",
+ ":native_api_peerconnection",
+ ":native_api_stacktrace",
+ ":native_api_video",
+ ":native_test_jni_onload",
+ ":opensles_audio_device_module",
+ ":video_jni",
+ "../../api:field_trials_view",
+ "../../api:scoped_refptr",
+ "../../api/rtc_event_log:rtc_event_log_factory",
+ "../../api/task_queue:default_task_queue_factory",
+ "../../api/video:video_frame",
+ "../../api/video:video_rtp_headers",
+ "../../media:rtc_audio_video",
+ "../../media:rtc_internal_video_codecs",
+ "../../media:rtc_media_base",
+ "../../media:rtc_media_engine_defaults",
+ "../../modules/audio_device",
+ "../../modules/audio_device:mock_audio_device",
+ "../../modules/audio_processing:api",
+ "../../modules/utility",
+ "../../pc:libjingle_peerconnection",
+ "../../rtc_base:checks",
+ "../../rtc_base:ip_address",
+ "../../rtc_base:logging",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:platform_thread",
+ "../../rtc_base:rtc_base",
+ "../../rtc_base:rtc_event",
+ "../../rtc_base:stringutils",
+ "../../rtc_base:threading",
+ "../../rtc_base:timeutils",
+ "../../rtc_base/synchronization:mutex",
+ "../../rtc_base/system:inline",
+ "../../system_wrappers",
+ "../../test:fileutils",
+ "../../test:scoped_key_value_config",
+ "../../test:test_support",
+ "../../testing/gtest",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+ }
+
+ rtc_android_library("native_unittests_java") {
+ testonly = true
+
+ sources = [
+ "native_unittests/org/webrtc/ApplicationContextProvider.java",
+ "native_unittests/org/webrtc/BuildInfo.java",
+ "native_unittests/org/webrtc/CodecsWrapperTestHelper.java",
+ "native_unittests/org/webrtc/FakeVideoEncoder.java",
+ "native_unittests/org/webrtc/JavaTypesTestHelper.java",
+ "native_unittests/org/webrtc/JavaVideoSourceTestHelper.java",
+ "native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java",
+ ]
+
+ deps = [
+ ":base_java",
+ ":java_audio_device_module_java",
+ ":peerconnection_java",
+ ":video_api_java",
+ ":video_java",
+ "../../rtc_base:base_java",
+ "//third_party/android_support_test_runner:runner_java",
+ "//third_party/androidx:androidx_test_runner_java",
+ ]
+ }
+
+ generate_jni("generated_native_unittests_jni") {
+ testonly = true
+
+ sources = [
+ "native_unittests/org/webrtc/ApplicationContextProvider.java",
+ "native_unittests/org/webrtc/BuildInfo.java",
+ "native_unittests/org/webrtc/CodecsWrapperTestHelper.java",
+ "native_unittests/org/webrtc/JavaTypesTestHelper.java",
+ "native_unittests/org/webrtc/JavaVideoSourceTestHelper.java",
+ "native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java",
+ ]
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
+ robolectric_binary("android_sdk_junit_tests") {
+ sources = [
+ "tests/src/org/webrtc/AndroidVideoDecoderTest.java",
+ "tests/src/org/webrtc/CameraEnumerationTest.java",
+ "tests/src/org/webrtc/CodecTestHelper.java",
+ "tests/src/org/webrtc/CryptoOptionsTest.java",
+ "tests/src/org/webrtc/FakeMediaCodecWrapper.java",
+ "tests/src/org/webrtc/FramerateBitrateAdjusterTest.java",
+ "tests/src/org/webrtc/GlGenericDrawerTest.java",
+ "tests/src/org/webrtc/HardwareVideoEncoderTest.java",
+ "tests/src/org/webrtc/IceCandidateTest.java",
+ "tests/src/org/webrtc/RefCountDelegateTest.java",
+ "tests/src/org/webrtc/ScalingSettingsTest.java",
+ "tests/src/org/webrtc/audio/LowLatencyAudioBufferManagerTest.java",
+ ]
+
+ deps = [
+ ":base_java",
+ ":camera_java",
+ ":hwcodecs_java",
+ ":java_audio_device_module_java",
+ ":libjingle_peerconnection_java",
+ ":peerconnection_java",
+ ":video_api_java",
+ ":video_java",
+ "//third_party/android_deps:guava_android_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ "//third_party/androidx:androidx_test_runner_java",
+ "//third_party/google-truth:google_truth_java",
+ ]
+
+ additional_jar_files = [ [
+ "tests/resources/robolectric.properties",
+ "robolectric.properties",
+ ] ]
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/OWNERS b/third_party/libwebrtc/sdk/android/OWNERS
new file mode 100644
index 0000000000..890f642341
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/OWNERS
@@ -0,0 +1,9 @@
+# New owners
+xalep@webrtc.org
+sartorius@webrtc.org
+ssilkin@webrtc.org
+
+# Legacy owners
+magjed@webrtc.org
+xalep@webrtc.org
+per-file ...Audio*.java=henrika@webrtc.org
diff --git a/third_party/libwebrtc/sdk/android/README b/third_party/libwebrtc/sdk/android/README
new file mode 100644
index 0000000000..53bdc9edc5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/README
@@ -0,0 +1,21 @@
+This directory holds a Java implementation of the webrtc::PeerConnection API, as
+well as the JNI glue C++ code that lets the Java implementation reuse the C++
+implementation of the same API.
+
+To build the Java API and related tests, make sure you have a WebRTC checkout
+with the Android-specific parts. Since it is a superset of the regular WebRTC
+checkout, it can also be used for Linux development by configuring GN
+appropriately:
+fetch --nohooks webrtc_android
+gclient sync
+
+You must also generate GN projects with:
+--args='target_os="android" target_cpu="arm"'
+
+More information on getting the code, compiling and running the AppRTCMobile
+app can be found at:
+https://webrtc.org/native-code/android/
+
+To use the Java API, start by looking at the public interface of
+org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.
+
+To understand the implementation of the API, see the native code in src/jni/pc/.
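A minimal bootstrap sketch of the Java API described in the README above, assuming an Android application Context is available; the wrapper class name WebRtcBootstrap is hypothetical, while PeerConnectionFactory and InitializationOptions are the SDK's public classes:

    import android.content.Context;
    import org.webrtc.PeerConnectionFactory;

    public final class WebRtcBootstrap {
      public static PeerConnectionFactory createFactory(Context appContext) {
        // One-time global setup: loads the native library and initializes JNI.
        PeerConnectionFactory.initialize(
            PeerConnectionFactory.InitializationOptions.builder(appContext)
                .createInitializationOptions());
        // A factory with the default audio and video codec factories.
        return PeerConnectionFactory.builder().createPeerConnectionFactory();
      }
    }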
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AddIceObserver.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AddIceObserver.java
new file mode 100644
index 0000000000..ff2c690029
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AddIceObserver.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface to handle completion of addIceCandidate. */
+public interface AddIceObserver {
+  /** Called when the ICE candidate was added successfully. */
+  @CalledByNative public void onAddSuccess();
+
+  /** Called when adding the ICE candidate failed. */
+  @CalledByNative public void onAddFailure(String error);
+}
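A usage sketch for the observer above, assuming an existing PeerConnection pc and a remote IceCandidate candidate delivered over the application's signaling channel; it pairs with the observer-taking addIceCandidate overload on PeerConnection:

    pc.addIceCandidate(candidate, new AddIceObserver() {
      @Override
      public void onAddSuccess() {
        Logging.d("AddIce", "Candidate added.");
      }

      @Override
      public void onAddFailure(String error) {
        Logging.e("AddIce", "Failed to add candidate: " + error);
      }
    });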
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java
new file mode 100644
index 0000000000..dd3e262896
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioDecoderFactory}.
+ */
+public interface AudioDecoderFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::AudioDecoderFactory}. The caller takes ownership.
+ */
+ long createNativeAudioDecoderFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java
new file mode 100644
index 0000000000..814b71aba1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioEncoderFactory}.
+ */
+public interface AudioEncoderFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::AudioEncoderFactory}. The caller takes ownership.
+ */
+ long createNativeAudioEncoderFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java
new file mode 100644
index 0000000000..bd8fdb8989
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::AudioProcessing instances. */
+public interface AudioProcessingFactory {
+ /**
+ * Dynamically allocates a webrtc::AudioProcessing instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ public long createNative();
+}
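A sketch of how an implementation of the interface above would be plugged in, assuming PeerConnectionFactory.initialize() has already run; MyAudioProcessingFactory is a hypothetical class whose createNative() returns a pointer to a native webrtc::AudioProcessing instance:

    PeerConnectionFactory factory =
        PeerConnectionFactory.builder()
            // Hypothetical implementation of AudioProcessingFactory.
            .setAudioProcessingFactory(new MyAudioProcessingFactory())
            .createPeerConnectionFactory();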
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioSource.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioSource.java
new file mode 100644
index 0000000000..f8104e5904
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioSource.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+ public AudioSource(long nativeSource) {
+ super(nativeSource);
+ }
+
+ /** Returns a pointer to webrtc::AudioSourceInterface. */
+ long getNativeAudioSource() {
+ return getNativeMediaSource();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioTrack.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioTrack.java
new file mode 100644
index 0000000000..ca745db634
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioTrack.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+ public AudioTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+
+ /** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
+ * 0 to 10.
+ */
+ public void setVolume(double volume) {
+ nativeSetVolume(getNativeAudioTrack(), volume);
+ }
+
+ /** Returns a pointer to webrtc::AudioTrackInterface. */
+ long getNativeAudioTrack() {
+ return getNativeMediaStreamTrack();
+ }
+
+ private static native void nativeSetVolume(long track, double volume);
+}
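
Usage sketch for setVolume(); the track is assumed to come from a received remote stream, and reading 1.0 as unity gain is an assumption based on common gain conventions rather than anything stated in this file:

    // Duck a remote track to half gain; valid values span 0 (mute) to 10.
    void duckRemoteAudio(AudioTrack remoteTrack) {
      remoteTrack.setVolume(0.5);
    }
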
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java
new file mode 100644
index 0000000000..5ebc19f25d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Creates a native {@code webrtc::AudioDecoderFactory} with the builtin audio decoders.
+ */
+public class BuiltinAudioDecoderFactoryFactory implements AudioDecoderFactoryFactory {
+ @Override
+ public long createNativeAudioDecoderFactory() {
+ return nativeCreateBuiltinAudioDecoderFactory();
+ }
+
+ private static native long nativeCreateBuiltinAudioDecoderFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java
new file mode 100644
index 0000000000..e884d4c3b9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * This class creates a native {@code webrtc::AudioEncoderFactory} with the builtin audio encoders.
+ */
+public class BuiltinAudioEncoderFactoryFactory implements AudioEncoderFactoryFactory {
+ @Override
+ public long createNativeAudioEncoderFactory() {
+ return nativeCreateBuiltinAudioEncoderFactory();
+ }
+
+ private static native long nativeCreateBuiltinAudioEncoderFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java
new file mode 100644
index 0000000000..f4edb58847
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class CallSessionFileRotatingLogSink {
+ private long nativeSink;
+
+ public static byte[] getLogData(String dirPath) {
+ if (dirPath == null) {
+ throw new IllegalArgumentException("dirPath may not be null.");
+ }
+ return nativeGetLogData(dirPath);
+ }
+
+ public CallSessionFileRotatingLogSink(
+ String dirPath, int maxFileSize, Logging.Severity severity) {
+ if (dirPath == null) {
+ throw new IllegalArgumentException("dirPath may not be null.");
+ }
+ nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+ }
+
+ public void dispose() {
+ if (nativeSink != 0) {
+ nativeDeleteSink(nativeSink);
+ nativeSink = 0;
+ }
+ }
+
+ private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
+ private static native void nativeDeleteSink(long sink);
+ private static native byte[] nativeGetLogData(String dirPath);
+}
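
Usage sketch, assuming an android.content.Context named `context` is in scope; the directory, size cap, and severity are arbitrary choices:

    // Rotate native logs under the app's files dir, capped at 10 MB, at INFO level.
    CallSessionFileRotatingLogSink sink = new CallSessionFileRotatingLogSink(
        context.getFilesDir().getAbsolutePath(), 10 * 1024 * 1024, Logging.Severity.LS_INFO);
    // ... after the call, collect the rotated logs and release the native sink.
    byte[] logData = CallSessionFileRotatingLogSink.getLogData(
        context.getFilesDir().getAbsolutePath());
    sink.dispose();
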
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Capturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Capturer.java
new file mode 100644
index 0000000000..de172aa1d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Capturer.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+public class Camera1Capturer extends CameraCapturer {
+ private final boolean captureToTexture;
+
+ public Camera1Capturer(
+ String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
+ super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
+
+ this.captureToTexture = captureToTexture;
+ }
+
+ @Override
+ protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+ CameraSession.Events events, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+ int framerate) {
+ Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
+ surfaceTextureHelper, cameraName, width, height, framerate);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java
new file mode 100644
index 0000000000..4a1aacdb05
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.SystemClock;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+@SuppressWarnings("deprecation")
+public class Camera1Enumerator implements CameraEnumerator {
+ private final static String TAG = "Camera1Enumerator";
+ // Each entry contains the supported formats for the corresponding camera index. The formats for
+ // all cameras are enumerated on the first call to getSupportedFormats(), and cached for future
+ // reference.
+ private static List<List<CaptureFormat>> cachedSupportedFormats;
+
+ private final boolean captureToTexture;
+
+ public Camera1Enumerator() {
+ this(true /* captureToTexture */);
+ }
+
+ public Camera1Enumerator(boolean captureToTexture) {
+ this.captureToTexture = captureToTexture;
+ }
+
+ // Returns device names that can be used to create a new VideoCapturerAndroid.
+ @Override
+ public String[] getDeviceNames() {
+ ArrayList<String> namesList = new ArrayList<>();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ String name = getDeviceName(i);
+ if (name != null) {
+ namesList.add(name);
+ Logging.d(TAG, "Index: " + i + ". " + name);
+ } else {
+ Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
+ }
+ }
+ String[] namesArray = new String[namesList.size()];
+ return namesList.toArray(namesArray);
+ }
+
+ @Override
+ public boolean isFrontFacing(String deviceName) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+ return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
+ }
+
+ @Override
+ public boolean isBackFacing(String deviceName) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+ return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
+ }
+
+ @Override
+ public boolean isInfrared(String deviceName) {
+ return false;
+ }
+
+ @Override
+ public List<CaptureFormat> getSupportedFormats(String deviceName) {
+ return getSupportedFormats(getCameraIndex(deviceName));
+ }
+
+ @Override
+ public CameraVideoCapturer createCapturer(
+ String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
+ }
+
+ private static @Nullable android.hardware.Camera.CameraInfo getCameraInfo(int index) {
+ android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ try {
+ android.hardware.Camera.getCameraInfo(index, info);
+ } catch (Exception e) {
+ Logging.e(TAG, "getCameraInfo failed on index " + index, e);
+ return null;
+ }
+ return info;
+ }
+
+ static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+ if (cachedSupportedFormats == null) {
+ cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ cachedSupportedFormats.add(enumerateFormats(i));
+ }
+ }
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ private static List<CaptureFormat> enumerateFormats(int cameraId) {
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ final android.hardware.Camera.Parameters parameters;
+ android.hardware.Camera camera = null;
+ try {
+ Logging.d(TAG, "Opening camera with index " + cameraId);
+ camera = android.hardware.Camera.open(cameraId);
+ parameters = camera.getParameters();
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+ return new ArrayList<CaptureFormat>();
+ } finally {
+ if (camera != null) {
+ camera.release();
+ }
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ try {
+ int minFps = 0;
+ int maxFps = 0;
+ final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+ if (listFpsRange != null) {
+ // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+ // corresponding to the highest fps.
+ final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+ minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ }
+ for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
+ formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+ }
+
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+
+ // Convert from android.hardware.Camera.Size to Size.
+ static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
+ final List<Size> sizes = new ArrayList<Size>();
+ for (android.hardware.Camera.Size size : cameraSizes) {
+ sizes.add(new Size(size.width, size.height));
+ }
+ return sizes;
+ }
+
+ // Convert from int[2] to CaptureFormat.FramerateRange.
+ static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
+ final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+ for (int[] range : arrayRanges) {
+ ranges.add(new CaptureFormat.FramerateRange(
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
+ }
+ return ranges;
+ }
+
+ // Returns the camera index for camera with name `deviceName`, or throws IllegalArgumentException
+ // if no such camera can be found.
+ static int getCameraIndex(String deviceName) {
+ Logging.d(TAG, "getCameraIndex: " + deviceName);
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ if (deviceName.equals(getDeviceName(i))) {
+ return i;
+ }
+ }
+ throw new IllegalArgumentException("No such camera: " + deviceName);
+ }
+
+ // Returns the name of the camera with camera index. Returns null if the
+ // camera can not be used.
+ static @Nullable String getDeviceName(int index) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(index);
+ if (info == null) {
+ return null;
+ }
+
+ String facing =
+ (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+ return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
+ }
+}
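
Selection sketch built only on the methods above; passing null for the events handler is assumed to be tolerated here, since that contract lives in CameraCapturer rather than this file:

    // Create a capturer for the first front-facing camera, or return null if none exists.
    CameraVideoCapturer createFrontCapturer() {
      CameraEnumerator enumerator = new Camera1Enumerator(/* captureToTexture= */ true);
      for (String deviceName : enumerator.getDeviceNames()) {
        if (enumerator.isFrontFacing(deviceName)) {
          return enumerator.createCapturer(deviceName, /* eventsHandler= */ null);
        }
      }
      return null;
    }
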
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Capturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Capturer.java
new file mode 100644
index 0000000000..c4becf4819
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Capturer.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.camera2.CameraManager;
+import androidx.annotation.Nullable;
+
+public class Camera2Capturer extends CameraCapturer {
+ private final Context context;
+ @Nullable private final CameraManager cameraManager;
+
+ public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
+ super(cameraName, eventsHandler, new Camera2Enumerator(context));
+
+ this.context = context;
+ cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ @Override
+ protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+ CameraSession.Events events, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+ int framerate) {
+ Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
+ surfaceTextureHelper, cameraName, width, height, framerate);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java
new file mode 100644
index 0000000000..7950393046
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java
@@ -0,0 +1,260 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.AndroidException;
+import android.util.Range;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+public class Camera2Enumerator implements CameraEnumerator {
+ private final static String TAG = "Camera2Enumerator";
+ private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+ // Each entry contains the supported formats for a given camera index. The formats are enumerated
+ // lazily in getSupportedFormats(), and cached for future reference.
+ private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
+ new HashMap<String, List<CaptureFormat>>();
+
+ final Context context;
+ @Nullable final CameraManager cameraManager;
+
+ public Camera2Enumerator(Context context) {
+ this.context = context;
+ this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ @Override
+ public String[] getDeviceNames() {
+ try {
+ return cameraManager.getCameraIdList();
+ // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
+ // catch statement with an Exception from a newer API, even if the code is never executed.
+ // https://code.google.com/p/android/issues/detail?id=209129
+ } catch (/* CameraAccessException */ AndroidException e) {
+ Logging.e(TAG, "Camera access exception", e);
+ return new String[] {};
+ }
+ }
+
+ @Override
+ public boolean isFrontFacing(String deviceName) {
+ CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+ return characteristics != null
+ && characteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_FRONT;
+ }
+
+ @Override
+ public boolean isBackFacing(String deviceName) {
+ CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+ return characteristics != null
+ && characteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_BACK;
+ }
+
+ @Override
+ public boolean isInfrared(String deviceName) {
+ CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ Integer colors = characteristics.get(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
+ return colors != null && colors.equals(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR);
+ }
+
+ return false;
+ }
+
+ @Nullable
+ @Override
+ public List<CaptureFormat> getSupportedFormats(String deviceName) {
+ return getSupportedFormats(context, deviceName);
+ }
+
+ @Override
+ public CameraVideoCapturer createCapturer(
+ String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return new Camera2Capturer(context, deviceName, eventsHandler);
+ }
+
+ private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) {
+ try {
+ return cameraManager.getCameraCharacteristics(deviceName);
+ // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
+ // catch statement with an Exception from a newer API, even if the code is never executed.
+ // https://code.google.com/p/android/issues/detail?id=209129
+ } catch (/* CameraAccessException */ AndroidException e) {
+ Logging.e(TAG, "Camera access exception", e);
+ return null;
+ }
+ }
+
+ /**
+ * Checks if API is supported and all cameras have better than legacy support.
+ */
+ public static boolean isSupported(Context context) {
+ CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ try {
+ String[] cameraIds = cameraManager.getCameraIdList();
+ for (String id : cameraIds) {
+ CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
+ if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
+ == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+ return false;
+ }
+ }
+ // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
+ // catch statement with an Exception from a newer API, even if the code is never executed.
+ // https://code.google.com/p/android/issues/detail?id=209129
+ } catch (/* CameraAccessException */ AndroidException | RuntimeException e) {
+ Logging.e(TAG, "Failed to check if camera2 is supported", e);
+ return false;
+ }
+ return true;
+ }
+
+ static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
+ if (fpsRanges.length == 0) {
+ return 1000;
+ }
+ return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
+ }
+
+ static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
+ final StreamConfigurationMap streamMap =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ final int supportLevel =
+ cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+
+ final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
+ final List<Size> sizes = convertSizes(nativeSizes);
+
+ // Video may be stretched pre LMR1 on legacy implementations.
+ // Filter out formats that have different aspect ratio than the sensor array.
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
+ && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+ final Rect activeArraySize =
+ cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ final ArrayList<Size> filteredSizes = new ArrayList<Size>();
+
+ for (Size size : sizes) {
+ if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
+ filteredSizes.add(size);
+ }
+ }
+
+ return filteredSizes;
+ } else {
+ return sizes;
+ }
+ }
+
+ @Nullable
+ static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
+ return getSupportedFormats(
+ (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
+ }
+
+ @Nullable
+ static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
+ synchronized (cachedSupportedFormats) {
+ if (cachedSupportedFormats.containsKey(cameraId)) {
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+
+ final CameraCharacteristics cameraCharacteristics;
+ try {
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+ } catch (Exception ex) {
+ Logging.e(TAG, "getCameraCharacteristics()", ex);
+ return new ArrayList<CaptureFormat>();
+ }
+
+ final StreamConfigurationMap streamMap =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ Range<Integer>[] fpsRanges =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ List<CaptureFormat.FramerateRange> framerateRanges =
+ convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
+ List<Size> sizes = getSupportedSizes(cameraCharacteristics);
+
+ int defaultMaxFps = 0;
+ for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
+ defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ for (Size size : sizes) {
+ long minFrameDurationNs = 0;
+ try {
+ minFrameDurationNs = streamMap.getOutputMinFrameDuration(
+ SurfaceTexture.class, new android.util.Size(size.width, size.height));
+ } catch (Exception e) {
+ // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+ }
+ final int maxFps = (minFrameDurationNs == 0)
+ ? defaultMaxFps
+ : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
+ formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
+ Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
+ }
+
+ cachedSupportedFormats.put(cameraId, formatList);
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+ }
+
+ // Convert from android.util.Size to Size.
+ private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
+ if (cameraSizes == null || cameraSizes.length == 0) {
+ return Collections.emptyList();
+ }
+ final List<Size> sizes = new ArrayList<>(cameraSizes.length);
+ for (android.util.Size size : cameraSizes) {
+ sizes.add(new Size(size.getWidth(), size.getHeight()));
+ }
+ return sizes;
+ }
+
+ // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
+ static List<CaptureFormat.FramerateRange> convertFramerates(
+ Range<Integer>[] arrayRanges, int unitFactor) {
+ final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+ for (Range<Integer> range : arrayRanges) {
+ ranges.add(new CaptureFormat.FramerateRange(
+ range.getLower() * unitFactor, range.getUpper() * unitFactor));
+ }
+ return ranges;
+ }
+}
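
Since isSupported() rejects devices where any camera only has legacy-level Camera2 support, a common pattern is to fall back to the Camera1 path. A sketch, assuming a Context is available:

    // Prefer Camera2 only when every camera has better-than-legacy support.
    CameraEnumerator chooseEnumerator(Context context) {
      return Camera2Enumerator.isSupported(context)
          ? new Camera2Enumerator(context)
          : new Camera1Enumerator(/* captureToTexture= */ true);
    }
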
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
new file mode 100644
index 0000000000..0c3188fffe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+
+import android.graphics.ImageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+ private final static String TAG = "CameraEnumerationAndroid";
+
+ static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
+ // 0, Unknown resolution
+ new Size(160, 120), // 1, QQVGA
+ new Size(240, 160), // 2, HQVGA
+ new Size(320, 240), // 3, QVGA
+ new Size(400, 240), // 4, WQVGA
+ new Size(480, 320), // 5, HVGA
+ new Size(640, 360), // 6, nHD
+ new Size(640, 480), // 7, VGA
+ new Size(768, 480), // 8, WVGA
+ new Size(854, 480), // 9, FWVGA
+ new Size(800, 600), // 10, SVGA
+ new Size(960, 540), // 11, qHD
+ new Size(960, 640), // 12, DVGA
+ new Size(1024, 576), // 13, WSVGA
+ new Size(1024, 600), // 14, WVSGA
+ new Size(1280, 720), // 15, HD
+ new Size(1280, 1024), // 16, SXGA
+ new Size(1920, 1080), // 17, Full HD
+ new Size(1920, 1440), // 18, Full HD 4:3
+ new Size(2560, 1440), // 19, QHD
+ new Size(3840, 2160) // 20, UHD
+ ));
+
+ public static class CaptureFormat {
+ // Class to represent a framerate range. The framerate varies because of lighting conditions.
+ // The values are multiplied by 1000, so 1000 represents one frame per second.
+ public static class FramerateRange {
+ public int min;
+ public int max;
+
+ public FramerateRange(int min, int max) {
+ this.min = min;
+ this.max = max;
+ }
+
+ @Override
+ public String toString() {
+ return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof FramerateRange)) {
+ return false;
+ }
+ final FramerateRange otherFramerate = (FramerateRange) other;
+ return min == otherFramerate.min && max == otherFramerate.max;
+ }
+
+ @Override
+ public int hashCode() {
+ // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+ return 1 + 65537 * min + max;
+ }
+ }
+
+ public final int width;
+ public final int height;
+ public final FramerateRange framerate;
+
+ // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
+ // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
+ // all imageFormats.
+ public final int imageFormat = ImageFormat.NV21;
+
+ public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
+ this.width = width;
+ this.height = height;
+ this.framerate = new FramerateRange(minFramerate, maxFramerate);
+ }
+
+ public CaptureFormat(int width, int height, FramerateRange framerate) {
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+ }
+
+ // Calculates the frame size of this capture format.
+ public int frameSize() {
+ return frameSize(width, height, imageFormat);
+ }
+
+ // Calculates the frame size of the specified image format. Currently only
+ // supporting ImageFormat.NV21.
+ // The size is width * height * number of bytes per pixel.
+ // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
+ public static int frameSize(int width, int height, int imageFormat) {
+ if (imageFormat != ImageFormat.NV21) {
+ throw new UnsupportedOperationException("Don't know how to calculate "
+ + "the frame size of non-NV21 image formats.");
+ }
+ return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
+ }
+
+ @Override
+ public String toString() {
+ return width + "x" + height + "@" + framerate;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof CaptureFormat)) {
+ return false;
+ }
+ final CaptureFormat otherFormat = (CaptureFormat) other;
+ return width == otherFormat.width && height == otherFormat.height
+ && framerate.equals(otherFormat.framerate);
+ }
+
+ @Override
+ public int hashCode() {
+ return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
+ }
+ }
+
+ // Helper class for finding the closest supported format for the two functions below. It creates a
+ // comparator based on the difference to some requested parameters, where the element with the
+ // minimum difference is the element that is closest to the requested parameters.
+ private static abstract class ClosestComparator<T> implements Comparator<T> {
+ // Difference between supported and requested parameter.
+ abstract int diff(T supportedParameter);
+
+ @Override
+ public int compare(T t1, T t2) {
+ return diff(t1) - diff(t2);
+ }
+ }
+
+ // Prefer an fps range with an upper bound close to `framerate`. Also prefer an fps range with a
+ // low lower bound, to allow the framerate to fluctuate based on lighting conditions.
+ public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
+ List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
+ return Collections.min(
+ supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
+ // Progressive penalty if the upper bound is further away than `MAX_FPS_DIFF_THRESHOLD`
+ // from requested.
+ private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
+ private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
+ private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
+
+ // Progressive penalty if the lower bound is bigger than `MIN_FPS_THRESHOLD`.
+ private static final int MIN_FPS_THRESHOLD = 8000;
+ private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
+ private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
+
+ // Use one weight for small `value` less than `threshold`, and another weight above.
+ private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
+ return (value < threshold) ? value * lowWeight
+ : threshold * lowWeight + (value - threshold) * highWeight;
+ }
+
+ @Override
+ int diff(CaptureFormat.FramerateRange range) {
+ final int minFpsError = progressivePenalty(
+ range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
+ final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
+ MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
+ return minFpsError + maxFpsError;
+ }
+ });
+ }
+
+ public static Size getClosestSupportedSize(
+ List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+ return Collections.min(supportedSizes, new ClosestComparator<Size>() {
+ @Override
+ int diff(Size size) {
+ return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+ }
+ });
+ }
+
+ // Helper method for camera classes.
+ static void reportCameraResolution(Histogram histogram, Size resolution) {
+ int index = COMMON_RESOLUTIONS.indexOf(resolution);
+ // 0 is reserved for unknown resolution, so add 1.
+ // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
+ histogram.addSample(index + 1);
+ }
+}
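
A sketch that ties the two selection helpers together for a 1280x720 @ 30 fps request; it assumes the usual java.util imports and an enumerator from one of the classes above. Note that the fps argument is in plain frames per second, while the stored ranges are multiplied by 1000:

    CaptureFormat chooseFormat(CameraEnumerator enumerator, String deviceName) {
      List<CaptureFormat> formats = enumerator.getSupportedFormats(deviceName);
      List<CaptureFormat.FramerateRange> fpsRanges = new ArrayList<>();
      List<Size> sizes = new ArrayList<>();
      for (CaptureFormat format : formats) {
        fpsRanges.add(format.framerate);
        sizes.add(new Size(format.width, format.height));
      }
      CaptureFormat.FramerateRange fps =
          CameraEnumerationAndroid.getClosestSupportedFramerateRange(fpsRanges, 30);
      Size size = CameraEnumerationAndroid.getClosestSupportedSize(sizes, 1280, 720);
      return new CaptureFormat(size.width, size.height, fps);
    }
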
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerator.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerator.java
new file mode 100644
index 0000000000..db34d542c8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerator.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.List;
+
+public interface CameraEnumerator {
+ public String[] getDeviceNames();
+ public boolean isFrontFacing(String deviceName);
+ public boolean isBackFacing(String deviceName);
+ public boolean isInfrared(String deviceName);
+ public List<CaptureFormat> getSupportedFormats(String deviceName);
+
+ public CameraVideoCapturer createCapturer(
+ String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java
new file mode 100644
index 0000000000..ec26868b5c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaRecorder;
+
+/**
+ * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
+ * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
+ * class for detecting camera freezes.
+ */
+public interface CameraVideoCapturer extends VideoCapturer {
+ /**
+ * Camera events handler - can be used to be notified about camera events. The callbacks are
+ * executed from an arbitrary thread.
+ */
+ public interface CameraEventsHandler {
+ // Camera error handler - invoked when camera can not be opened
+ // or any camera exception happens on camera thread.
+ void onCameraError(String errorDescription);
+
+ // Called when camera is disconnected.
+ void onCameraDisconnected();
+
+ // Invoked when camera stops receiving frames.
+ void onCameraFreezed(String errorDescription);
+
+ // Callback invoked when camera is opening.
+ void onCameraOpening(String cameraName);
+
+ // Callback invoked when first camera frame is available after camera is started.
+ void onFirstFrameAvailable();
+
+ // Callback invoked when camera is closed.
+ void onCameraClosed();
+ }
+
+ /**
+ * Camera switch handler - one of these functions is invoked with the result of switchCamera().
+ * The callback may be called on an arbitrary thread.
+ */
+ public interface CameraSwitchHandler {
+ // Invoked on success. `isFrontCamera` is true if the new camera is front facing.
+ void onCameraSwitchDone(boolean isFrontCamera);
+
+ // Invoked on failure, e.g. the camera is stopped or only one camera is available.
+ void onCameraSwitchError(String errorDescription);
+ }
+
+ /**
+ * Switch camera to the next valid camera id. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ void switchCamera(CameraSwitchHandler switchEventsHandler);
+
+ /**
+ * Switch camera to the specified camera id. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ void switchCamera(CameraSwitchHandler switchEventsHandler, String cameraName);
+
+ /**
+ * MediaRecorder add/remove handler - one of these functions is invoked with the result of an
+ * addMediaRecorderToCamera() or removeMediaRecorderFromCamera() call.
+ * The callback may be called on an arbitrary thread.
+ */
+ @Deprecated
+ public interface MediaRecorderHandler {
+ // Invoked on success.
+ void onMediaRecorderSuccess();
+
+ // Invoked on failure, e.g. the camera is stopped or an exception occurs.
+ void onMediaRecorderError(String errorDescription);
+ }
+
+ /**
+ * Add MediaRecorder to the camera pipeline. This can only be called while the camera is running.
+ * Once a MediaRecorder is added to the camera pipeline, switching cameras is not allowed.
+ * This function can be called from any thread.
+ */
+ @Deprecated
+ default void addMediaRecorderToCamera(
+ MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) {
+ throw new UnsupportedOperationException("Deprecated and not implemented.");
+ }
+
+ /**
+ * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ @Deprecated
+ default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) {
+ throw new UnsupportedOperationException("Deprecated and not implemented.");
+ }
+
+ /**
+ * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
+ * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
+ * thread.
+ */
+ public static class CameraStatistics {
+ private final static String TAG = "CameraStatistics";
+ private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+ private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;
+
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final CameraEventsHandler eventsHandler;
+ private int frameCount;
+ private int freezePeriodCount;
+ // Camera observer - monitors camera framerate. Observer is executed on camera thread.
+ private final Runnable cameraObserver = new Runnable() {
+ @Override
+ public void run() {
+ final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
+ Logging.d(TAG, "Camera fps: " + cameraFps + ".");
+ if (frameCount == 0) {
+ ++freezePeriodCount;
+ if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
+ && eventsHandler != null) {
+ Logging.e(TAG, "Camera freezed.");
+ if (surfaceTextureHelper.isTextureInUse()) {
+ // This can only happen if we are capturing to textures.
+ eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+ } else {
+ eventsHandler.onCameraFreezed("Camera failure.");
+ }
+ return;
+ }
+ } else {
+ freezePeriodCount = 0;
+ }
+ frameCount = 0;
+ surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+ }
+ };
+
+ public CameraStatistics(
+ SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
+ if (surfaceTextureHelper == null) {
+ throw new IllegalArgumentException("SurfaceTextureHelper is null");
+ }
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.eventsHandler = eventsHandler;
+ this.frameCount = 0;
+ this.freezePeriodCount = 0;
+ surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+ }
+
+ private void checkThread() {
+ if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+
+ public void addFrame() {
+ checkThread();
+ ++frameCount;
+ }
+
+ public void release() {
+ surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
+ }
+ }
+}
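
A sketch of the switch flow; the tag strings are arbitrary, and per the interface comment both callbacks may arrive on any thread:

    void flipCamera(CameraVideoCapturer capturer) {
      capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
        @Override
        public void onCameraSwitchDone(boolean isFrontCamera) {
          Logging.d("CameraDemo", "Switched; now front-facing: " + isFrontCamera);
        }

        @Override
        public void onCameraSwitchError(String errorDescription) {
          Logging.e("CameraDemo", "Switch failed: " + errorDescription);
        }
      });
    }
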
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CapturerObserver.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CapturerObserver.java
new file mode 100644
index 0000000000..382dc15b3a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CapturerObserver.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for observing a capturer. Passed to {@link VideoCapturer#initialize}. Provided by
+ * {@link VideoSource#getCapturerObserver}.
+ *
+ * All callbacks must be executed on a single thread.
+ */
+public interface CapturerObserver {
+ /** Notifies whether the capturer was started successfully. */
+ void onCapturerStarted(boolean success);
+ /** Notify that the capturer has been stopped. */
+ void onCapturerStopped();
+
+ /** Delivers a captured frame. */
+ void onFrameCaptured(VideoFrame frame);
+}
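
A pass-through sketch showing the observer shape; the delegate is assumed to be a real observer, such as the one returned by VideoSource.getCapturerObserver():

    class LoggingCapturerObserver implements CapturerObserver {
      private final CapturerObserver delegate;

      LoggingCapturerObserver(CapturerObserver delegate) {
        this.delegate = delegate;
      }

      @Override
      public void onCapturerStarted(boolean success) {
        Logging.d("Capture", "Capturer started: " + success);
        delegate.onCapturerStarted(success);
      }

      @Override
      public void onCapturerStopped() {
        delegate.onCapturerStopped();
      }

      @Override
      public void onFrameCaptured(VideoFrame frame) {
        delegate.onFrameCaptured(frame);
      }
    }
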
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CryptoOptions.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CryptoOptions.java
new file mode 100644
index 0000000000..6e06bc6426
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CryptoOptions.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * CryptoOptions defines advanced cryptographic settings for native WebRTC.
+ * These settings must be passed into RTCConfiguration. WebRTC is secure by
+ * default and you should not need to set any of these options unless you are
+ * specifically looking for an additional crypto feature such as AES_GCM
+ * support. This class is the Java binding of native api/crypto/cryptooptions.h
+ */
+public final class CryptoOptions {
+ /**
+ * SRTP Related Peer Connection Options.
+ */
+ public final class Srtp {
+ /**
+ * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
+ * if both sides enable it.
+ */
+ private final boolean enableGcmCryptoSuites;
+ /**
+ * If set to true, the (potentially insecure) crypto cipher
+ * kSrtpAes128CmSha1_32 will be included in the list of supported ciphers
+ * during negotiation. It will only be used if both peers support it and no
+ * other ciphers get preferred.
+ */
+ private final boolean enableAes128Sha1_32CryptoCipher;
+ /**
+ * If set to true, encrypted RTP header extensions as defined in RFC 6904
+ * will be negotiated. They will only be used if both peers support them.
+ */
+ private final boolean enableEncryptedRtpHeaderExtensions;
+
+ private Srtp(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+ boolean enableEncryptedRtpHeaderExtensions) {
+ this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+ this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+ this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableGcmCryptoSuites() {
+ return enableGcmCryptoSuites;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableAes128Sha1_32CryptoCipher() {
+ return enableAes128Sha1_32CryptoCipher;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableEncryptedRtpHeaderExtensions() {
+ return enableEncryptedRtpHeaderExtensions;
+ }
+ }
+
+ /**
+ * Options to be used when the FrameEncryptor / FrameDecryptor APIs are used.
+ */
+ public final class SFrame {
+ /**
+ * If set, all RtpSenders must have a FrameEncryptor attached to them before
+ * they are allowed to send packets. All RtpReceivers must have a
+ * FrameDecryptor attached to them before they are able to receive packets.
+ */
+ private final boolean requireFrameEncryption;
+
+ private SFrame(boolean requireFrameEncryption) {
+ this.requireFrameEncryption = requireFrameEncryption;
+ }
+
+ @CalledByNative("SFrame")
+ public boolean getRequireFrameEncryption() {
+ return requireFrameEncryption;
+ }
+ }
+
+ private final Srtp srtp;
+ private final SFrame sframe;
+
+ private CryptoOptions(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+ boolean enableEncryptedRtpHeaderExtensions, boolean requireFrameEncryption) {
+ this.srtp = new Srtp(
+ enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher, enableEncryptedRtpHeaderExtensions);
+ this.sframe = new SFrame(requireFrameEncryption);
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ @CalledByNative
+ public Srtp getSrtp() {
+ return srtp;
+ }
+
+ @CalledByNative
+ public SFrame getSFrame() {
+ return sframe;
+ }
+
+ public static class Builder {
+ private boolean enableGcmCryptoSuites;
+ private boolean enableAes128Sha1_32CryptoCipher;
+ private boolean enableEncryptedRtpHeaderExtensions;
+ private boolean requireFrameEncryption;
+
+ private Builder() {}
+
+ public Builder setEnableGcmCryptoSuites(boolean enableGcmCryptoSuites) {
+ this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+ return this;
+ }
+
+ public Builder setEnableAes128Sha1_32CryptoCipher(boolean enableAes128Sha1_32CryptoCipher) {
+ this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+ return this;
+ }
+
+ public Builder setEnableEncryptedRtpHeaderExtensions(
+ boolean enableEncryptedRtpHeaderExtensions) {
+ this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+ return this;
+ }
+
+ public Builder setRequireFrameEncryption(boolean requireFrameEncryption) {
+ this.requireFrameEncryption = requireFrameEncryption;
+ return this;
+ }
+
+ public CryptoOptions createCryptoOptions() {
+ return new CryptoOptions(enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher,
+ enableEncryptedRtpHeaderExtensions, requireFrameEncryption);
+ }
+ }
+}
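
A builder sketch; everything not set explicitly keeps the secure defaults described in the class comment, and the resulting object is then handed to the RTCConfiguration:

    CryptoOptions buildGcmCryptoOptions() {
      return CryptoOptions.builder()
          .setEnableGcmCryptoSuites(true)
          .createCryptoOptions();
    }
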
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/DataChannel.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/DataChannel.java
new file mode 100644
index 0000000000..b9301f1faa
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/DataChannel.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+ /** Java wrapper for WebIDL RTCDataChannel. */
+ public static class Init {
+ public boolean ordered = true;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmitTimeMs = -1;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmits = -1;
+ public String protocol = "";
+ public boolean negotiated;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int id = -1;
+
+ @CalledByNative("Init")
+ boolean getOrdered() {
+ return ordered;
+ }
+
+ @CalledByNative("Init")
+ int getMaxRetransmitTimeMs() {
+ return maxRetransmitTimeMs;
+ }
+
+ @CalledByNative("Init")
+ int getMaxRetransmits() {
+ return maxRetransmits;
+ }
+
+ @CalledByNative("Init")
+ String getProtocol() {
+ return protocol;
+ }
+
+ @CalledByNative("Init")
+ boolean getNegotiated() {
+ return negotiated;
+ }
+
+ @CalledByNative("Init")
+ int getId() {
+ return id;
+ }
+ }
+
+ /** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
+ public static class Buffer {
+ /** The underlying data. */
+ public final ByteBuffer data;
+
+ /**
+ * Indicates whether `data` contains UTF-8 text or "binary data"
+ * (i.e. anything else).
+ */
+ public final boolean binary;
+
+ @CalledByNative("Buffer")
+ public Buffer(ByteBuffer data, boolean binary) {
+ this.data = data;
+ this.binary = binary;
+ }
+ }
+
+ /** Java version of C++ DataChannelObserver. */
+ public interface Observer {
+ /** The data channel's bufferedAmount has changed. */
+ @CalledByNative("Observer") public void onBufferedAmountChange(long previousAmount);
+ /** The data channel state has changed. */
+ @CalledByNative("Observer") public void onStateChange();
+ /**
+ * A data buffer was successfully received. NOTE: `buffer.data` will be
+ * freed once this function returns so callers who want to use the data
+ * asynchronously must make sure to copy it first.
+ */
+ @CalledByNative("Observer") public void onMessage(Buffer buffer);
+ }
+
+ /** Keep in sync with DataChannelInterface::DataState. */
+ public enum State {
+ CONNECTING,
+ OPEN,
+ CLOSING,
+ CLOSED;
+
+ @CalledByNative("State")
+ static State fromNativeIndex(int nativeIndex) {
+ return values()[nativeIndex];
+ }
+ }
+
+ private long nativeDataChannel;
+ private long nativeObserver;
+
+ @CalledByNative
+ public DataChannel(long nativeDataChannel) {
+ this.nativeDataChannel = nativeDataChannel;
+ }
+
+ /** Register `observer`, replacing any previously-registered observer. */
+ public void registerObserver(Observer observer) {
+ checkDataChannelExists();
+ if (nativeObserver != 0) {
+ nativeUnregisterObserver(nativeObserver);
+ }
+ nativeObserver = nativeRegisterObserver(observer);
+ }
+
+ /** Unregister the (only) observer. */
+ public void unregisterObserver() {
+ checkDataChannelExists();
+ nativeUnregisterObserver(nativeObserver);
+ nativeObserver = 0;
+ }
+
+ public String label() {
+ checkDataChannelExists();
+ return nativeLabel();
+ }
+
+ public int id() {
+ checkDataChannelExists();
+ return nativeId();
+ }
+
+ public State state() {
+ checkDataChannelExists();
+ return nativeState();
+ }
+
+ /**
+ * Return the number of bytes of application data (UTF-8 text and binary data)
+ * that have been queued using SendBuffer but have not yet been transmitted
+ * to the network.
+ */
+ public long bufferedAmount() {
+ checkDataChannelExists();
+ return nativeBufferedAmount();
+ }
+
+ /** Close the channel. */
+ public void close() {
+ checkDataChannelExists();
+ nativeClose();
+ }
+
+ /** Send `data` to the remote peer; return success. */
+ public boolean send(Buffer buffer) {
+ checkDataChannelExists();
+ // TODO(fischman): this could be cleverer about avoiding copies if the
+ // ByteBuffer is direct and/or is backed by an array.
+ byte[] data = new byte[buffer.data.remaining()];
+ buffer.data.get(data);
+ return nativeSend(data, buffer.binary);
+ }
+
+ /** Dispose of native resources attached to this channel. */
+ public void dispose() {
+ checkDataChannelExists();
+ JniCommon.nativeReleaseRef(nativeDataChannel);
+ nativeDataChannel = 0;
+ }
+
+ @CalledByNative
+ long getNativeDataChannel() {
+ return nativeDataChannel;
+ }
+
+ private void checkDataChannelExists() {
+ if (nativeDataChannel == 0) {
+ throw new IllegalStateException("DataChannel has been disposed.");
+ }
+ }
+
+ private native long nativeRegisterObserver(Observer observer);
+ private native void nativeUnregisterObserver(long observer);
+ private native String nativeLabel();
+ private native int nativeId();
+ private native State nativeState();
+ private native long nativeBufferedAmount();
+ private native void nativeClose();
+ private native boolean nativeSend(byte[] data, boolean binary);
+};
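
A send-side sketch; it assumes java.nio.ByteBuffer and java.nio.charset.StandardCharsets are imported, and the tag string is arbitrary:

    // Queue a UTF-8 text message; binary=false marks the payload as text.
    boolean sendText(DataChannel channel, String message) {
      ByteBuffer payload = ByteBuffer.wrap(message.getBytes(StandardCharsets.UTF_8));
      boolean queued = channel.send(new DataChannel.Buffer(payload, /* binary= */ false));
      if (!queued) {
        Logging.e("DataChannelDemo", "send() failed in state " + channel.state());
      }
      return queued;
    }
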
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Dav1dDecoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Dav1dDecoder.java
new file mode 100644
index 0000000000..ecb16bc3a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Dav1dDecoder.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class Dav1dDecoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
new file mode 100644
index 0000000000..d7a8694d3d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+/**
+ * Helper class that combines HW and SW decoders.
+ */
+public class DefaultVideoDecoderFactory implements VideoDecoderFactory {
+ private final VideoDecoderFactory hardwareVideoDecoderFactory;
+ private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory();
+ private final @Nullable VideoDecoderFactory platformSoftwareVideoDecoderFactory;
+
+ /**
+ * Create decoder factory using default hardware decoder factory.
+ */
+ public DefaultVideoDecoderFactory(@Nullable EglBase.Context eglContext) {
+ this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext);
+ this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext);
+ }
+
+ /**
+ * Create decoder factory using explicit hardware decoder factory.
+ */
+ DefaultVideoDecoderFactory(VideoDecoderFactory hardwareVideoDecoderFactory) {
+ this.hardwareVideoDecoderFactory = hardwareVideoDecoderFactory;
+ this.platformSoftwareVideoDecoderFactory = null;
+ }
+
+ @Override
+ public @Nullable VideoDecoder createDecoder(VideoCodecInfo codecType) {
+ VideoDecoder softwareDecoder = softwareVideoDecoderFactory.createDecoder(codecType);
+ final VideoDecoder hardwareDecoder = hardwareVideoDecoderFactory.createDecoder(codecType);
+ if (softwareDecoder == null && platformSoftwareVideoDecoderFactory != null) {
+ softwareDecoder = platformSoftwareVideoDecoderFactory.createDecoder(codecType);
+ }
+ if (hardwareDecoder != null && softwareDecoder != null) {
+ // Both hardware and software decoders are supported; wrap the hardware decoder in a
+ // software fallback.
+ return new VideoDecoderFallback(
+ /* fallback= */ softwareDecoder, /* primary= */ hardwareDecoder);
+ }
+ return hardwareDecoder != null ? hardwareDecoder : softwareDecoder;
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<VideoCodecInfo>();
+
+ supportedCodecInfos.addAll(Arrays.asList(softwareVideoDecoderFactory.getSupportedCodecs()));
+ supportedCodecInfos.addAll(Arrays.asList(hardwareVideoDecoderFactory.getSupportedCodecs()));
+ if (platformSoftwareVideoDecoderFactory != null) {
+ supportedCodecInfos.addAll(
+ Arrays.asList(platformSoftwareVideoDecoderFactory.getSupportedCodecs()));
+ }
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+}
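
DefaultVideoDecoderFactory is typically handed to the PeerConnectionFactory builder, so every incoming video track gets the hardware-first, software-fallback behavior implemented in createDecoder() above. A minimal wiring sketch (assuming PeerConnectionFactory.initialize() has already run during application startup; names are illustrative):

EglBase eglBase = EglBase.create();
VideoDecoderFactory decoderFactory =
    new DefaultVideoDecoderFactory(eglBase.getEglBaseContext());
PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
    .setVideoDecoderFactory(decoderFactory)
    .createPeerConnectionFactory();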
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/DtmfSender.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/DtmfSender.java
new file mode 100644
index 0000000000..6549823089
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/DtmfSender.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ DtmfSenderInterface. */
+public class DtmfSender {
+ private long nativeDtmfSender;
+
+ public DtmfSender(long nativeDtmfSender) {
+ this.nativeDtmfSender = nativeDtmfSender;
+ }
+
+ /**
+ * @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
+ */
+ public boolean canInsertDtmf() {
+ checkDtmfSenderExists();
+ return nativeCanInsertDtmf(nativeDtmfSender);
+ }
+
+ /**
+ * Queues a task that sends the provided DTMF tones.
+ * <p>
+ * If insertDtmf is called on the same object while an existing task for this
+ * object to generate DTMF is still running, the previous task is canceled.
+ *
+ * @param tones This parameter is treated as a series of characters. The characters 0
+ * through 9, A through D, #, and * generate the associated DTMF tones. The
+ * characters a to d are equivalent to A to D. The character ',' indicates a
+ * delay of 2 seconds before processing the next character in the tones
+ * parameter. Unrecognized characters are ignored.
+ * @param duration Indicates the duration in ms to use for each character passed in the tones
+ * parameter. The duration cannot be more than 6000 or less than 70.
+ * @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
+ * as short as possible.
+ * @return true on success and false on failure.
+ */
+ public boolean insertDtmf(String tones, int duration, int interToneGap) {
+ checkDtmfSenderExists();
+ return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
+ }
+
+ /**
+ * @return The tones remaining to be played out
+ */
+ public String tones() {
+ checkDtmfSenderExists();
+ return nativeTones(nativeDtmfSender);
+ }
+
+ /**
+ * @return The current tone duration value in ms. This value will be the value last set via the
+ * insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
+ */
+ public int duration() {
+ checkDtmfSenderExists();
+ return nativeDuration(nativeDtmfSender);
+ }
+
+ /**
+ * @return The current value of the between-tone gap in ms. This value will be the value last set
+ * via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
+ * called.
+ */
+ public int interToneGap() {
+ checkDtmfSenderExists();
+ return nativeInterToneGap(nativeDtmfSender);
+ }
+
+ public void dispose() {
+ checkDtmfSenderExists();
+ JniCommon.nativeReleaseRef(nativeDtmfSender);
+ nativeDtmfSender = 0;
+ }
+
+ private void checkDtmfSenderExists() {
+ if (nativeDtmfSender == 0) {
+ throw new IllegalStateException("DtmfSender has been disposed.");
+ }
+ }
+
+ private static native boolean nativeCanInsertDtmf(long dtmfSender);
+ private static native boolean nativeInsertDtmf(
+ long dtmfSender, String tones, int duration, int interToneGap);
+ private static native String nativeTones(long dtmfSender);
+ private static native int nativeDuration(long dtmfSender);
+ private static native int nativeInterToneGap(long dtmfSender);
+}
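
Application code normally obtains a DtmfSender from the RtpSender of an outgoing audio track (RtpSender#dtmf()) rather than via the native-pointer constructor, which exists for internal wiring. A usage sketch, where `audioSender` is an assumed, previously obtained RtpSender:

DtmfSender dtmf = audioSender.dtmf();
if (dtmf != null && dtmf.canInsertDtmf()) {
  // "1,,9" sends tone 1, pauses 2 x 2 seconds, then sends tone 9.
  dtmf.insertDtmf("1,,9", /* duration= */ 100, /* interToneGap= */ 70);
}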
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase.java
new file mode 100644
index 0000000000..64771d004a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase.java
@@ -0,0 +1,255 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import javax.microedition.khronos.egl.EGL10;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 or EGL 1.4 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+public interface EglBase {
+ // EGL wrapper for an actual EGLContext.
+ public interface Context {
+ public final static long NO_CONTEXT = 0;
+
+ /**
+ * Returns an EGL context that can be used by native code. Returns NO_CONTEXT if the method is
+ * unsupported.
+ *
+ * @note This is currently only supported for EGL 1.4 and not for EGL 1.0.
+ */
+ long getNativeEglContext();
+ }
+
+ // According to the documentation, EGL can be used from multiple threads at the same time if each
+ // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
+ // Therefore, synchronize on this global lock before calling dangerous EGL functions that might
+ // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+ public static final Object lock = new Object();
+
+ // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+ // This is similar to what GLSurfaceView does:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+ public static final int EGL_OPENGL_ES2_BIT = 4;
+ public static final int EGL_OPENGL_ES3_BIT = 0x40;
+ // Android-specific extension.
+ public static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+ public static ConfigBuilder configBuilder() {
+ return new ConfigBuilder();
+ }
+
+ public static class ConfigBuilder {
+ private int openGlesVersion = 2;
+ private boolean hasAlphaChannel;
+ private boolean supportsPixelBuffer;
+ private boolean isRecordable;
+
+ public ConfigBuilder setOpenGlesVersion(int version) {
+ if (version < 1 || version > 3) {
+ throw new IllegalArgumentException("OpenGL ES version " + version + " not supported");
+ }
+ this.openGlesVersion = version;
+ return this;
+ }
+
+ public ConfigBuilder setHasAlphaChannel(boolean hasAlphaChannel) {
+ this.hasAlphaChannel = hasAlphaChannel;
+ return this;
+ }
+
+ public ConfigBuilder setSupportsPixelBuffer(boolean supportsPixelBuffer) {
+ this.supportsPixelBuffer = supportsPixelBuffer;
+ return this;
+ }
+
+ public ConfigBuilder setIsRecordable(boolean isRecordable) {
+ this.isRecordable = isRecordable;
+ return this;
+ }
+
+ public int[] createConfigAttributes() {
+ ArrayList<Integer> list = new ArrayList<>();
+ list.add(EGL10.EGL_RED_SIZE);
+ list.add(8);
+ list.add(EGL10.EGL_GREEN_SIZE);
+ list.add(8);
+ list.add(EGL10.EGL_BLUE_SIZE);
+ list.add(8);
+ if (hasAlphaChannel) {
+ list.add(EGL10.EGL_ALPHA_SIZE);
+ list.add(8);
+ }
+ if (openGlesVersion == 2 || openGlesVersion == 3) {
+ list.add(EGL10.EGL_RENDERABLE_TYPE);
+ list.add(openGlesVersion == 3 ? EGL_OPENGL_ES3_BIT : EGL_OPENGL_ES2_BIT);
+ }
+ if (supportsPixelBuffer) {
+ list.add(EGL10.EGL_SURFACE_TYPE);
+ list.add(EGL10.EGL_PBUFFER_BIT);
+ }
+ if (isRecordable) {
+ list.add(EGL_RECORDABLE_ANDROID);
+ list.add(1);
+ }
+ list.add(EGL10.EGL_NONE);
+
+ final int[] res = new int[list.size()];
+ for (int i = 0; i < list.size(); ++i) {
+ res[i] = list.get(i);
+ }
+ return res;
+ }
+ }
+
+ public static final int[] CONFIG_PLAIN = configBuilder().createConfigAttributes();
+ public static final int[] CONFIG_RGBA =
+ configBuilder().setHasAlphaChannel(true).createConfigAttributes();
+ public static final int[] CONFIG_PIXEL_BUFFER =
+ configBuilder().setSupportsPixelBuffer(true).createConfigAttributes();
+ public static final int[] CONFIG_PIXEL_RGBA_BUFFER = configBuilder()
+ .setHasAlphaChannel(true)
+ .setSupportsPixelBuffer(true)
+ .createConfigAttributes();
+ public static final int[] CONFIG_RECORDABLE =
+ configBuilder().setIsRecordable(true).createConfigAttributes();
+
+ static int getOpenGlesVersionFromConfig(int[] configAttributes) {
+ for (int i = 0; i < configAttributes.length - 1; ++i) {
+ if (configAttributes[i] == EGL10.EGL_RENDERABLE_TYPE) {
+ switch (configAttributes[i + 1]) {
+ case EGL_OPENGL_ES2_BIT:
+ return 2;
+ case EGL_OPENGL_ES3_BIT:
+ return 3;
+ default:
+ return 1;
+ }
+ }
+ }
+ // Default to V1 if no renderable type is specified.
+ return 1;
+ }
+
+ /**
+ * Create a new context with the specified config attributes, sharing data with `sharedContext`.
+ * If `sharedContext` is null, a root EGL 1.4 context is created.
+ */
+ public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
+ if (sharedContext == null) {
+ return createEgl14(configAttributes);
+ } else if (sharedContext instanceof EglBase14.Context) {
+ return createEgl14((EglBase14.Context) sharedContext, configAttributes);
+ } else if (sharedContext instanceof EglBase10.Context) {
+ return createEgl10((EglBase10.Context) sharedContext, configAttributes);
+ }
+ throw new IllegalArgumentException("Unrecognized Context");
+ }
+
+ /**
+ * Helper function for creating a plain root context. Since no shared context is given, this
+ * creates a root EGL 1.4 context.
+ */
+ public static EglBase create() {
+ return create(null /* sharedContext */, CONFIG_PLAIN);
+ }
+
+ /**
+ * Helper function for creating a plain context, sharing data with `sharedContext`. The EGL
+ * version of the new context matches that of `sharedContext`.
+ */
+ public static EglBase create(Context sharedContext) {
+ return create(sharedContext, CONFIG_PLAIN);
+ }
+
+ /** Explicitly create a root EGL 1.0 context with the specified config attributes. */
+ public static EglBase10 createEgl10(int[] configAttributes) {
+ return new EglBase10Impl(/* sharedContext= */ null, configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.0 context with the specified config attributes and shared
+ * context.
+ */
+ public static EglBase10 createEgl10(EglBase10.Context sharedContext, int[] configAttributes) {
+ return new EglBase10Impl(
+ sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.0 context with the specified config attributes
+ * and shared context.
+ */
+ public static EglBase10 createEgl10(
+ javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
+ return new EglBase10Impl(sharedContext, configAttributes);
+ }
+
+ /** Explicitly create a root EGL 1.4 context with the specified config attributes. */
+ public static EglBase14 createEgl14(int[] configAttributes) {
+ return new EglBase14Impl(/* sharedContext= */ null, configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.4 context with the specified config attributes and shared
+ * context.
+ */
+ public static EglBase14 createEgl14(EglBase14.Context sharedContext, int[] configAttributes) {
+ return new EglBase14Impl(
+ sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.4 context with the specified config attributes
+ * and shared context.
+ */
+ public static EglBase14 createEgl14(
+ android.opengl.EGLContext sharedContext, int[] configAttributes) {
+ return new EglBase14Impl(sharedContext, configAttributes);
+ }
+
+ void createSurface(Surface surface);
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ void createSurface(SurfaceTexture surfaceTexture);
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ void createDummyPbufferSurface();
+
+ void createPbufferSurface(int width, int height);
+
+ Context getEglBaseContext();
+
+ boolean hasSurface();
+
+ int surfaceWidth();
+
+ int surfaceHeight();
+
+ void releaseSurface();
+
+ void release();
+
+ void makeCurrent();
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ void detachCurrent();
+
+ void swapBuffers();
+
+ void swapBuffers(long presentationTimeStampNs);
+}
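
The interface methods at the end describe the typical standalone lifecycle: create a root context, attach a surface so the context can be made current, issue GL calls, then tear down in reverse order. A sketch using a pbuffer-capable config, assuming no shared context is needed:

EglBase eglBase = EglBase.create(/* sharedContext= */ null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
// ... GLES20 calls go here ...
eglBase.detachCurrent();
eglBase.releaseSurface();
eglBase.release();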
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase10.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase10.java
new file mode 100644
index 0000000000..f8b0a3c0d0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase10.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/** EGL 1.0 implementation of EglBase. */
+public interface EglBase10 extends EglBase {
+ interface Context extends EglBase.Context {
+ EGLContext getRawContext();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase14.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase14.java
new file mode 100644
index 0000000000..69c89c44dc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase14.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.EGLContext;
+
+/** EGL 1.4 implementation of EglBase. */
+public interface EglBase14 extends EglBase {
+ interface Context extends EglBase.Context {
+ EGLContext getRawContext();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EglRenderer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglRenderer.java
new file mode 100644
index 0000000000..5ab0868ef3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglRenderer.java
@@ -0,0 +1,787 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.os.Message;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
+ * be used as a helper class for rendering on SurfaceViews and TextureViews.
+ */
+public class EglRenderer implements VideoSink {
+ private static final String TAG = "EglRenderer";
+ private static final long LOG_INTERVAL_SEC = 4;
+
+ public interface FrameListener { void onFrame(Bitmap frame); }
+
+ /** Callback for clients to be notified about errors encountered during rendering. */
+ public static interface ErrorCallback {
+ /** Called if GLES20.GL_OUT_OF_MEMORY is encountered during rendering. */
+ void onGlOutOfMemory();
+ }
+
+ private static class FrameListenerAndParams {
+ public final FrameListener listener;
+ public final float scale;
+ public final RendererCommon.GlDrawer drawer;
+ public final boolean applyFpsReduction;
+
+ public FrameListenerAndParams(FrameListener listener, float scale,
+ RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
+ this.listener = listener;
+ this.scale = scale;
+ this.drawer = drawer;
+ this.applyFpsReduction = applyFpsReduction;
+ }
+ }
+
+ private class EglSurfaceCreation implements Runnable {
+ private Object surface;
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void setSurface(Object surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void run() {
+ if (surface != null && eglBase != null && !eglBase.hasSurface()) {
+ if (surface instanceof Surface) {
+ eglBase.createSurface((Surface) surface);
+ } else if (surface instanceof SurfaceTexture) {
+ eglBase.createSurface((SurfaceTexture) surface);
+ } else {
+ throw new IllegalStateException("Invalid surface: " + surface);
+ }
+ eglBase.makeCurrent();
+ // Necessary for YUV frames with odd width.
+ GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+ }
+ }
+ }
+
+ /**
+ * Handler that triggers a callback when an uncaught exception happens when handling a message.
+ */
+ private static class HandlerWithExceptionCallback extends Handler {
+ private final Runnable exceptionCallback;
+
+ public HandlerWithExceptionCallback(Looper looper, Runnable exceptionCallback) {
+ super(looper);
+ this.exceptionCallback = exceptionCallback;
+ }
+
+ @Override
+ public void dispatchMessage(Message msg) {
+ try {
+ super.dispatchMessage(msg);
+ } catch (Exception e) {
+ Logging.e(TAG, "Exception on EglRenderer thread", e);
+ exceptionCallback.run();
+ throw e;
+ }
+ }
+ }
+
+ protected final String name;
+
+ // `renderThreadHandler` is a handler for communicating with `renderThread`, and is synchronized
+ // on `handlerLock`.
+ private final Object handlerLock = new Object();
+ @Nullable private Handler renderThreadHandler;
+
+ private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
+
+ private volatile ErrorCallback errorCallback;
+
+ // Variables for fps reduction.
+ private final Object fpsReductionLock = new Object();
+ // Time for when next frame should be rendered.
+ private long nextFrameTimeNs;
+ // Minimum duration between frames when fps reduction is active, or -1 if video is completely
+ // paused.
+ private long minRenderPeriodNs;
+
+ // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only
+ // accessed from the render thread.
+ @Nullable private EglBase eglBase;
+ private final VideoFrameDrawer frameDrawer;
+ @Nullable private RendererCommon.GlDrawer drawer;
+ private boolean usePresentationTimeStamp;
+ private final Matrix drawMatrix = new Matrix();
+
+ // Pending frame to render. Serves as a queue with size 1. Synchronized on `frameLock`.
+ private final Object frameLock = new Object();
+ @Nullable private VideoFrame pendingFrame;
+
+ // These variables are synchronized on `layoutLock`.
+ private final Object layoutLock = new Object();
+ private float layoutAspectRatio;
+ // If true, mirrors the video stream horizontally.
+ private boolean mirrorHorizontally;
+ // If true, mirrors the video stream vertically.
+ private boolean mirrorVertically;
+
+ // These variables are synchronized on `statisticsLock`.
+ private final Object statisticsLock = new Object();
+ // Total number of video frames received in renderFrame() call.
+ private int framesReceived;
+ // Number of video frames dropped by renderFrame() because previous frame has not been rendered
+ // yet.
+ private int framesDropped;
+ // Number of rendered video frames.
+ private int framesRendered;
+ // Start time for counting these statistics, or 0 if we haven't started measuring yet.
+ private long statisticsStartTimeNs;
+ // Time in ns spent in renderFrameOnRenderThread() function.
+ private long renderTimeNs;
+ // Time in ns spent by the render thread in the swapBuffers() function.
+ private long renderSwapBufferTimeNs;
+
+ // Used for bitmap capturing.
+ private final GlTextureFrameBuffer bitmapTextureFramebuffer =
+ new GlTextureFrameBuffer(GLES20.GL_RGBA);
+
+ private final Runnable logStatisticsRunnable = new Runnable() {
+ @Override
+ public void run() {
+ logStatistics();
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.removeCallbacks(logStatisticsRunnable);
+ renderThreadHandler.postDelayed(
+ logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+ }
+ }
+ }
+ };
+
+ private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
+
+ /**
+ * Standard constructor. The name will be used for the render thread name and included when
+ * logging. In order to render something, you must first call init() and createEglSurface().
+ */
+ public EglRenderer(String name) {
+ this(name, new VideoFrameDrawer());
+ }
+
+ public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
+ this.name = name;
+ this.frameDrawer = videoFrameDrawer;
+ }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
+ * set with the frame timestamps, which specifies desired presentation time and might be useful
+ * for e.g. syncing audio and video.
+ */
+ public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ throw new IllegalStateException(name + "Already initialized");
+ }
+ logD("Initializing EglRenderer");
+ this.drawer = drawer;
+ this.usePresentationTimeStamp = usePresentationTimeStamp;
+
+ final HandlerThread renderThread = new HandlerThread(name + "EglRenderer");
+ renderThread.start();
+ renderThreadHandler =
+ new HandlerWithExceptionCallback(renderThread.getLooper(), new Runnable() {
+ @Override
+ public void run() {
+ synchronized (handlerLock) {
+ renderThreadHandler = null;
+ }
+ }
+ });
+ // Create the EGL context on the newly created render thread. It should be possible to create
+ // the context on this thread and make it current on the render thread, but this causes failure
+ // on some Marvell-based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350.
+ ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
+ // If sharedContext is null, then texture frames are disabled. This is typically for old
+ // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
+ // caused trouble on some weird devices.
+ if (sharedContext == null) {
+ logD("EglBase10.create context");
+ eglBase = EglBase.createEgl10(configAttributes);
+ } else {
+ logD("EglBase.create shared context");
+ eglBase = EglBase.create(sharedContext, configAttributes);
+ }
+ });
+ renderThreadHandler.post(eglSurfaceCreationRunnable);
+ final long currentTimeNs = System.nanoTime();
+ resetStatistics(currentTimeNs);
+ renderThreadHandler.postDelayed(
+ logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+ }
+ }
+
+ /**
+ * Same as above with usePresentationTimeStamp set to false.
+ *
+ * @see #init(EglBase.Context, int[], RendererCommon.GlDrawer, boolean)
+ */
+ public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ init(sharedContext, configAttributes, drawer, /* usePresentationTimeStamp= */ false);
+ }
+
+ public void createEglSurface(Surface surface) {
+ createEglSurfaceInternal(surface);
+ }
+
+ public void createEglSurface(SurfaceTexture surfaceTexture) {
+ createEglSurfaceInternal(surfaceTexture);
+ }
+
+ private void createEglSurfaceInternal(Object surface) {
+ eglSurfaceCreationRunnable.setSurface(surface);
+ postToRenderThread(eglSurfaceCreationRunnable);
+ }
+
+ /**
+ * Block until any pending frame is returned and all GL resources released, even if an interrupt
+ * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+ * should be called before the Activity is destroyed, while the EGLContext is still valid. If you
+ * don't call this function, the GL resources might leak.
+ */
+ public void release() {
+ logD("Releasing.");
+ final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ logD("Already released");
+ return;
+ }
+ renderThreadHandler.removeCallbacks(logStatisticsRunnable);
+ // Release EGL and GL resources on render thread.
+ renderThreadHandler.postAtFrontOfQueue(() -> {
+ // Detach current shader program.
+ synchronized (EglBase.lock) {
+ GLES20.glUseProgram(/* program= */ 0);
+ }
+ if (drawer != null) {
+ drawer.release();
+ drawer = null;
+ }
+ frameDrawer.release();
+ bitmapTextureFramebuffer.release();
+ if (eglBase != null) {
+ logD("eglBase detach and release.");
+ eglBase.detachCurrent();
+ eglBase.release();
+ eglBase = null;
+ }
+ frameListeners.clear();
+ eglCleanupBarrier.countDown();
+ });
+ final Looper renderLooper = renderThreadHandler.getLooper();
+ // TODO(magjed): Replace this post() with renderLooper.quitSafely() when API support >= 18.
+ renderThreadHandler.post(() -> {
+ logD("Quitting render thread.");
+ renderLooper.quit();
+ });
+ // Don't accept any more frames or messages to the render thread.
+ renderThreadHandler = null;
+ }
+ // Make sure the EGL/GL cleanup posted above is executed.
+ ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
+ synchronized (frameLock) {
+ if (pendingFrame != null) {
+ pendingFrame.release();
+ pendingFrame = null;
+ }
+ }
+ logD("Releasing done.");
+ }
+
+ /**
+ * Reset the statistics logged in logStatistics().
+ */
+ private void resetStatistics(long currentTimeNs) {
+ synchronized (statisticsLock) {
+ statisticsStartTimeNs = currentTimeNs;
+ framesReceived = 0;
+ framesDropped = 0;
+ framesRendered = 0;
+ renderTimeNs = 0;
+ renderSwapBufferTimeNs = 0;
+ }
+ }
+
+ public void printStackTrace() {
+ synchronized (handlerLock) {
+ final Thread renderThread =
+ (renderThreadHandler == null) ? null : renderThreadHandler.getLooper().getThread();
+ if (renderThread != null) {
+ final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
+ if (renderStackTrace.length > 0) {
+ logW("EglRenderer stack trace:");
+ for (StackTraceElement traceElem : renderStackTrace) {
+ logW(traceElem.toString());
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Set if the video stream should be mirrored horizontally or not.
+ */
+ public void setMirror(final boolean mirror) {
+ logD("setMirrorHorizontally: " + mirror);
+ synchronized (layoutLock) {
+ this.mirrorHorizontally = mirror;
+ }
+ }
+
+ /**
+ * Set if the video stream should be mirrored vertically or not.
+ */
+ public void setMirrorVertically(final boolean mirrorVertically) {
+ logD("setMirrorVertically: " + mirrorVertically);
+ synchronized (layoutLock) {
+ this.mirrorVertically = mirrorVertically;
+ }
+ }
+
+ /**
+ * Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
+ * Set this to 0 to disable cropping.
+ */
+ public void setLayoutAspectRatio(float layoutAspectRatio) {
+ logD("setLayoutAspectRatio: " + layoutAspectRatio);
+ synchronized (layoutLock) {
+ this.layoutAspectRatio = layoutAspectRatio;
+ }
+ }
+
+ /**
+ * Limit render framerate.
+ *
+ * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+ * reduction.
+ */
+ public void setFpsReduction(float fps) {
+ logD("setFpsReduction: " + fps);
+ synchronized (fpsReductionLock) {
+ final long previousRenderPeriodNs = minRenderPeriodNs;
+ if (fps <= 0) {
+ minRenderPeriodNs = Long.MAX_VALUE;
+ } else {
+ minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
+ }
+ if (minRenderPeriodNs != previousRenderPeriodNs) {
+ // Fps reduction changed - reset frame time.
+ nextFrameTimeNs = System.nanoTime();
+ }
+ }
+ }
+
+ public void disableFpsReduction() {
+ setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
+ }
+
+ public void pauseVideo() {
+ setFpsReduction(0 /* fps */);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received. This version uses
+ * the drawer of the EglRenderer that was passed in init.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ */
+ public void addFrameListener(final FrameListener listener, final float scale) {
+ addFrameListener(listener, scale, null, false /* applyFpsReduction */);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ * @param drawer Custom drawer to use for this frame listener or null to use the default one.
+ */
+ public void addFrameListener(
+ final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
+ addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ * @param drawer Custom drawer to use for this frame listener or null to use the default one.
+ * @param applyFpsReduction If true, the callback will not be invoked for frames that have been
+ * dropped by fps reduction.
+ */
+ public void addFrameListener(final FrameListener listener, final float scale,
+ @Nullable final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
+ postToRenderThread(() -> {
+ final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
+ frameListeners.add(
+ new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
+ });
+ }
+
+ /**
+ * Remove any pending callback that was added with addFrameListener. If the callback is not in
+ * the queue, nothing happens. It is guaranteed that the callback won't be called after this
+ * method returns.
+ *
+ * @param listener The callback to remove.
+ */
+ public void removeFrameListener(final FrameListener listener) {
+ final CountDownLatch latch = new CountDownLatch(1);
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ return;
+ }
+ if (Thread.currentThread() == renderThreadHandler.getLooper().getThread()) {
+ throw new RuntimeException("removeFrameListener must not be called on the render thread.");
+ }
+ postToRenderThread(() -> {
+ latch.countDown();
+ final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
+ while (iter.hasNext()) {
+ if (iter.next().listener == listener) {
+ iter.remove();
+ }
+ }
+ });
+ }
+ ThreadUtils.awaitUninterruptibly(latch);
+ }
+
+ /** Can be set in order to be notified about errors encountered during rendering. */
+ public void setErrorCallback(ErrorCallback errorCallback) {
+ this.errorCallback = errorCallback;
+ }
+
+ // VideoSink interface.
+ @Override
+ public void onFrame(VideoFrame frame) {
+ synchronized (statisticsLock) {
+ ++framesReceived;
+ }
+ final boolean dropOldFrame;
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ logD("Dropping frame - Not initialized or already released.");
+ return;
+ }
+ synchronized (frameLock) {
+ dropOldFrame = (pendingFrame != null);
+ if (dropOldFrame) {
+ pendingFrame.release();
+ }
+ pendingFrame = frame;
+ pendingFrame.retain();
+ renderThreadHandler.post(this::renderFrameOnRenderThread);
+ }
+ }
+ if (dropOldFrame) {
+ synchronized (statisticsLock) {
+ ++framesDropped;
+ }
+ }
+ }
+
+ /**
+ * Release the EGL surface. The surface is released asynchronously on the render thread, and the
+ * given completionCallback runs once the render thread is no longer touching the surface.
+ */
+ public void releaseEglSurface(final Runnable completionCallback) {
+ // Ensure that the render thread is no longer touching the Surface before returning from this
+ // function.
+ eglSurfaceCreationRunnable.setSurface(null /* surface */);
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.removeCallbacks(eglSurfaceCreationRunnable);
+ renderThreadHandler.postAtFrontOfQueue(() -> {
+ if (eglBase != null) {
+ eglBase.detachCurrent();
+ eglBase.releaseSurface();
+ }
+ completionCallback.run();
+ });
+ return;
+ }
+ }
+ completionCallback.run();
+ }
+
+ /**
+ * Private helper function to post tasks safely.
+ */
+ private void postToRenderThread(Runnable runnable) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.post(runnable);
+ }
+ }
+ }
+
+ private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
+ if (eglBase != null && eglBase.hasSurface()) {
+ logD("clearSurface");
+ GLES20.glClearColor(r, g, b, a);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+ }
+ }
+
+ /**
+ * Post a task to clear the surface to a transparent uniform color.
+ */
+ public void clearImage() {
+ clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+ }
+
+ /**
+ * Post a task to clear the surface to a specific color.
+ */
+ public void clearImage(final float r, final float g, final float b, final float a) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ return;
+ }
+ renderThreadHandler.postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
+ }
+ }
+
+ /**
+ * Renders and releases `pendingFrame`.
+ */
+ private void renderFrameOnRenderThread() {
+ // Fetch and render `pendingFrame`.
+ final VideoFrame frame;
+ synchronized (frameLock) {
+ if (pendingFrame == null) {
+ return;
+ }
+ frame = pendingFrame;
+ pendingFrame = null;
+ }
+ if (eglBase == null || !eglBase.hasSurface()) {
+ logD("Dropping frame - No surface");
+ frame.release();
+ return;
+ }
+ // Check if fps reduction is active.
+ final boolean shouldRenderFrame;
+ synchronized (fpsReductionLock) {
+ if (minRenderPeriodNs == Long.MAX_VALUE) {
+ // Rendering is paused.
+ shouldRenderFrame = false;
+ } else if (minRenderPeriodNs <= 0) {
+ // FPS reduction is disabled.
+ shouldRenderFrame = true;
+ } else {
+ final long currentTimeNs = System.nanoTime();
+ if (currentTimeNs < nextFrameTimeNs) {
+ logD("Skipping frame rendering - fps reduction is active.");
+ shouldRenderFrame = false;
+ } else {
+ nextFrameTimeNs += minRenderPeriodNs;
+ // The time for the next frame should always be in the future.
+ nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
+ shouldRenderFrame = true;
+ }
+ }
+ }
+
+ final long startTimeNs = System.nanoTime();
+
+ final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
+ final float drawnAspectRatio;
+ synchronized (layoutLock) {
+ drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
+ }
+
+ final float scaleX;
+ final float scaleY;
+
+ if (frameAspectRatio > drawnAspectRatio) {
+ scaleX = drawnAspectRatio / frameAspectRatio;
+ scaleY = 1f;
+ } else {
+ scaleX = 1f;
+ scaleY = frameAspectRatio / drawnAspectRatio;
+ }
+
+ drawMatrix.reset();
+ drawMatrix.preTranslate(0.5f, 0.5f);
+ drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+ drawMatrix.preScale(scaleX, scaleY);
+ drawMatrix.preTranslate(-0.5f, -0.5f);
+
+ try {
+ if (shouldRenderFrame) {
+ GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
+ eglBase.surfaceWidth(), eglBase.surfaceHeight());
+
+ final long swapBuffersStartTimeNs = System.nanoTime();
+ if (usePresentationTimeStamp) {
+ eglBase.swapBuffers(frame.getTimestampNs());
+ } else {
+ eglBase.swapBuffers();
+ }
+
+ final long currentTimeNs = System.nanoTime();
+ synchronized (statisticsLock) {
+ ++framesRendered;
+ renderTimeNs += (currentTimeNs - startTimeNs);
+ renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
+ }
+ }
+
+ notifyCallbacks(frame, shouldRenderFrame);
+ } catch (GlUtil.GlOutOfMemoryException e) {
+ logE("Error while drawing frame", e);
+ final ErrorCallback errorCallback = this.errorCallback;
+ if (errorCallback != null) {
+ errorCallback.onGlOutOfMemory();
+ }
+ // Attempt to free up some resources.
+ drawer.release();
+ frameDrawer.release();
+ bitmapTextureFramebuffer.release();
+ // Continue here on purpose and retry again for the next frame. In the worst case, this is a
+ // continuous problem and no more frames will be drawn.
+ } finally {
+ frame.release();
+ }
+ }
+
+ private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
+ if (frameListeners.isEmpty())
+ return;
+
+ drawMatrix.reset();
+ drawMatrix.preTranslate(0.5f, 0.5f);
+ drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+ drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
+ drawMatrix.preTranslate(-0.5f, -0.5f);
+
+ Iterator<FrameListenerAndParams> it = frameListeners.iterator();
+ while (it.hasNext()) {
+ FrameListenerAndParams listenerAndParams = it.next();
+ if (!wasRendered && listenerAndParams.applyFpsReduction) {
+ continue;
+ }
+ it.remove();
+
+ final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
+ final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
+
+ if (scaledWidth == 0 || scaledHeight == 0) {
+ listenerAndParams.listener.onFrame(null);
+ continue;
+ }
+
+ bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+ GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
+
+ GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
+ 0 /* viewportY */, scaledWidth, scaledHeight);
+
+ final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
+ GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
+ GLES20.glReadPixels(
+ 0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
+
+ final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(bitmapBuffer);
+ listenerAndParams.listener.onFrame(bitmap);
+ }
+ }
+
+ private String averageTimeAsString(long sumTimeNs, int count) {
+ return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " us";
+ }
+
+ private void logStatistics() {
+ final DecimalFormat fpsFormat = new DecimalFormat("#.0");
+ final long currentTimeNs = System.nanoTime();
+ synchronized (statisticsLock) {
+ final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
+ if (elapsedTimeNs <= 0 || (minRenderPeriodNs == Long.MAX_VALUE && framesReceived == 0)) {
+ return;
+ }
+ final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
+ logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
+ + " Frames received: " + framesReceived + "."
+ + " Dropped: " + framesDropped + "."
+ + " Rendered: " + framesRendered + "."
+ + " Render fps: " + fpsFormat.format(renderFps) + "."
+ + " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
+ + " Average swapBuffer time: "
+ + averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
+ resetStatistics(currentTimeNs);
+ }
+ }
+
+ private void logE(String string, Throwable e) {
+ Logging.e(TAG, name + string, e);
+ }
+
+ private void logD(String string) {
+ Logging.d(TAG, name + string);
+ }
+
+ private void logW(String string) {
+ Logging.w(TAG, name + string);
+ }
+}
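
Putting the lifecycle together: construct, init() against a shared EGL context, attach a surface, then register the renderer as a sink. A sketch in which `eglBase`, `surfaceTexture`, and `remoteVideoTrack` are assumed to come from elsewhere:

EglRenderer renderer = new EglRenderer("remote: ");
renderer.init(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN, new GlRectDrawer());
renderer.createEglSurface(surfaceTexture);
remoteVideoTrack.addSink(renderer); // EglRenderer implements VideoSink.
// ... on teardown, in reverse order:
remoteVideoTrack.removeSink(renderer);
renderer.release();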
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EncodedImage.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EncodedImage.java
new file mode 100644
index 0000000000..a6eef67da8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EncodedImage.java
@@ -0,0 +1,183 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * An encoded frame from a video stream. Used as an input for decoders and as an output for
+ * encoders.
+ */
+public class EncodedImage implements RefCounted {
+ // Must be kept in sync with common_types.h FrameType.
+ public enum FrameType {
+ EmptyFrame(0),
+ VideoFrameKey(3),
+ VideoFrameDelta(4);
+
+ private final int nativeIndex;
+
+ private FrameType(int nativeIndex) {
+ this.nativeIndex = nativeIndex;
+ }
+
+ public int getNative() {
+ return nativeIndex;
+ }
+
+ @CalledByNative("FrameType")
+ static FrameType fromNativeIndex(int nativeIndex) {
+ for (FrameType type : FrameType.values()) {
+ if (type.getNative() == nativeIndex) {
+ return type;
+ }
+ }
+ throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
+ }
+ }
+
+ private final RefCountDelegate refCountDelegate;
+ public final ByteBuffer buffer;
+ public final int encodedWidth;
+ public final int encodedHeight;
+ public final long captureTimeMs; // Deprecated
+ public final long captureTimeNs;
+ public final FrameType frameType;
+ public final int rotation;
+ public final @Nullable Integer qp;
+
+ // TODO(bugs.webrtc.org/9378): Use retain and release from jni code.
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @CalledByNative
+ private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int encodedWidth,
+ int encodedHeight, long captureTimeNs, FrameType frameType, int rotation,
+ @Nullable Integer qp) {
+ this.buffer = buffer;
+ this.encodedWidth = encodedWidth;
+ this.encodedHeight = encodedHeight;
+ this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
+ this.captureTimeNs = captureTimeNs;
+ this.frameType = frameType;
+ this.rotation = rotation;
+ this.qp = qp;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @CalledByNative
+ private ByteBuffer getBuffer() {
+ return buffer;
+ }
+
+ @CalledByNative
+ private int getEncodedWidth() {
+ return encodedWidth;
+ }
+
+ @CalledByNative
+ private int getEncodedHeight() {
+ return encodedHeight;
+ }
+
+ @CalledByNative
+ private long getCaptureTimeNs() {
+ return captureTimeNs;
+ }
+
+ @CalledByNative
+ private int getFrameType() {
+ return frameType.getNative();
+ }
+
+ @CalledByNative
+ private int getRotation() {
+ return rotation;
+ }
+
+ @CalledByNative
+ private @Nullable Integer getQp() {
+ return qp;
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ public static class Builder {
+ private ByteBuffer buffer;
+ private @Nullable Runnable releaseCallback;
+ private int encodedWidth;
+ private int encodedHeight;
+ private long captureTimeNs;
+ private EncodedImage.FrameType frameType;
+ private int rotation;
+ private @Nullable Integer qp;
+
+ private Builder() {}
+
+ public Builder setBuffer(ByteBuffer buffer, @Nullable Runnable releaseCallback) {
+ this.buffer = buffer;
+ this.releaseCallback = releaseCallback;
+ return this;
+ }
+
+ public Builder setEncodedWidth(int encodedWidth) {
+ this.encodedWidth = encodedWidth;
+ return this;
+ }
+
+ public Builder setEncodedHeight(int encodedHeight) {
+ this.encodedHeight = encodedHeight;
+ return this;
+ }
+
+ @Deprecated
+ public Builder setCaptureTimeMs(long captureTimeMs) {
+ this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
+ return this;
+ }
+
+ public Builder setCaptureTimeNs(long captureTimeNs) {
+ this.captureTimeNs = captureTimeNs;
+ return this;
+ }
+
+ public Builder setFrameType(EncodedImage.FrameType frameType) {
+ this.frameType = frameType;
+ return this;
+ }
+
+ public Builder setRotation(int rotation) {
+ this.rotation = rotation;
+ return this;
+ }
+
+ public Builder setQp(@Nullable Integer qp) {
+ this.qp = qp;
+ return this;
+ }
+
+ public EncodedImage createEncodedImage() {
+ return new EncodedImage(buffer, releaseCallback, encodedWidth, encodedHeight, captureTimeNs,
+ frameType, rotation, qp);
+ }
+ }
+}
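
The builder is the intended construction path for application code, for example a custom VideoEncoder packaging its output. All values below are illustrative:

ByteBuffer payload = ByteBuffer.allocateDirect(4096); // encoded bitstream bytes
EncodedImage image = EncodedImage.builder()
    .setBuffer(payload, /* releaseCallback= */ null)
    .setEncodedWidth(640)
    .setEncodedHeight(480)
    .setCaptureTimeNs(System.nanoTime())
    .setFrameType(EncodedImage.FrameType.VideoFrameKey)
    .setRotation(0)
    .setQp(null)
    .createEncodedImage();
// EncodedImage is ref-counted; the consumer calls image.release() when done.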
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java
new file mode 100644
index 0000000000..6d39390f72
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Factory for creating webrtc::FecControllerFactory instances.
+ */
+public interface FecControllerFactoryFactoryInterface {
+ /**
+ * Dynamically allocates a webrtc::FecControllerFactory instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ public long createNative();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java
new file mode 100644
index 0000000000..8270367970
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.Charset;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.TimeUnit;
+
+public class FileVideoCapturer implements VideoCapturer {
+ private interface VideoReader {
+ VideoFrame getNextFrame();
+ void close();
+ }
+
+ /**
+ * Read video data from file for the .y4m container.
+ */
+ @SuppressWarnings("StringSplitter")
+ private static class VideoReaderY4M implements VideoReader {
+ private static final String TAG = "VideoReaderY4M";
+ private static final String Y4M_FRAME_DELIMETER = "FRAME";
+ private static final int FRAME_DELIMETER_LENGTH = Y4M_FRAME_DELIMETER.length() + 1;
+
+ private final int frameWidth;
+ private final int frameHeight;
+ // File position of the first byte after the header.
+ private final long videoStart;
+ private final RandomAccessFile mediaFile;
+ private final FileChannel mediaFileChannel;
+
+ public VideoReaderY4M(String file) throws IOException {
+ mediaFile = new RandomAccessFile(file, "r");
+ mediaFileChannel = mediaFile.getChannel();
+ StringBuilder builder = new StringBuilder();
+ for (;;) {
+ int c = mediaFile.read();
+ if (c == -1) {
+ // End of file reached.
+ throw new RuntimeException("Found end of file before end of header for file: " + file);
+ }
+ if (c == '\n') {
+ // End of header found.
+ break;
+ }
+ builder.append((char) c);
+ }
+ videoStart = mediaFileChannel.position();
+ String header = builder.toString();
+ String[] headerTokens = header.split("[ ]");
+ int w = 0;
+ int h = 0;
+ String colorSpace = "";
+ for (String tok : headerTokens) {
+ char c = tok.charAt(0);
+ switch (c) {
+ case 'W':
+ w = Integer.parseInt(tok.substring(1));
+ break;
+ case 'H':
+ h = Integer.parseInt(tok.substring(1));
+ break;
+ case 'C':
+ colorSpace = tok.substring(1);
+ break;
+ }
+ }
+ Logging.d(TAG, "Color space: " + colorSpace);
+ if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
+ throw new IllegalArgumentException(
+ "Does not support any other color space than I420 or I420mpeg2");
+ }
+ if ((w % 2) == 1 || (h % 2) == 1) {
+ throw new IllegalArgumentException("Does not support odd width or height");
+ }
+ frameWidth = w;
+ frameHeight = h;
+ Logging.d(TAG, "frame dim: (" + w + ", " + h + ")");
+ }
+
+ @Override
+ public VideoFrame getNextFrame() {
+ final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+ final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
+ final ByteBuffer dataY = buffer.getDataY();
+ final ByteBuffer dataU = buffer.getDataU();
+ final ByteBuffer dataV = buffer.getDataV();
+ final int chromaHeight = (frameHeight + 1) / 2;
+ final int sizeY = frameHeight * buffer.getStrideY();
+ final int sizeU = chromaHeight * buffer.getStrideU();
+ final int sizeV = chromaHeight * buffer.getStrideV();
+
+ try {
+ ByteBuffer frameDelim = ByteBuffer.allocate(FRAME_DELIMETER_LENGTH);
+ if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
+ // We reached the end of the file; loop back to the start.
+ mediaFileChannel.position(videoStart);
+ if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
+ throw new RuntimeException("Error looping video");
+ }
+ }
+ String frameDelimStr = new String(frameDelim.array(), Charset.forName("US-ASCII"));
+ if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
+ throw new RuntimeException(
+ "Frames should be delimited by FRAME plus newline, found delimter was: '"
+ + frameDelimStr + "'");
+ }
+
+ mediaFileChannel.read(dataY);
+ mediaFileChannel.read(dataU);
+ mediaFileChannel.read(dataV);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+
+ return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs);
+ }
+
+ @Override
+ public void close() {
+ try {
+ // Closing a file also closes the channel.
+ mediaFile.close();
+ } catch (IOException e) {
+ Logging.e(TAG, "Problem closing file", e);
+ }
+ }
+ }
+
+ private final static String TAG = "FileVideoCapturer";
+ private final VideoReader videoReader;
+ private CapturerObserver capturerObserver;
+ private final Timer timer = new Timer();
+
+ private final TimerTask tickTask = new TimerTask() {
+ @Override
+ public void run() {
+ tick();
+ }
+ };
+
+ public FileVideoCapturer(String inputFile) throws IOException {
+ try {
+ videoReader = new VideoReaderY4M(inputFile);
+ } catch (IOException e) {
+ Logging.d(TAG, "Could not open video file: " + inputFile);
+ throw e;
+ }
+ }
+
+ public void tick() {
+ VideoFrame videoFrame = videoReader.getNextFrame();
+ capturerObserver.onFrameCaptured(videoFrame);
+ videoFrame.release();
+ }
+
+ @Override
+ public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ CapturerObserver capturerObserver) {
+ this.capturerObserver = capturerObserver;
+ }
+
+ @Override
+ public void startCapture(int width, int height, int framerate) {
+ timer.schedule(tickTask, 0, 1000 / framerate);
+ }
+
+ @Override
+ public void stopCapture() throws InterruptedException {
+ timer.cancel();
+ }
+
+ @Override
+ public void changeCaptureFormat(int width, int height, int framerate) {
+ // Empty on purpose
+ }
+
+ @Override
+ public void dispose() {
+ videoReader.close();
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return false;
+ }
+}
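
Like any VideoCapturer, it is initialized with a CapturerObserver and then started. A sketch where `surfaceTextureHelper`, `appContext`, and `videoSource` are assumed to exist and the file path is illustrative:

FileVideoCapturer capturer = new FileVideoCapturer("/sdcard/clip.y4m");
capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver());
// Width and height are ignored here; the .y4m header determines the frame size.
capturer.startCapture(/* width= */ 640, /* height= */ 480, /* framerate= */ 30);
// ... later:
capturer.stopCapture();
capturer.dispose();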
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameDecryptor.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameDecryptor.java
new file mode 100644
index 0000000000..2932f3d94a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameDecryptor.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameDecryptor interface allows Java API users to provide a
+ * pointer to their native implementation of the FrameDecryptorInterface.
+ * FrameDecryptors are extremely performance sensitive as they must process all
+ * incoming video and audio frames. For this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameDecryptor {
+ /**
+ * @return A FrameDecryptorInterface pointer.
+ */
+ long getNativeFrameDecryptor();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameEncryptor.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameEncryptor.java
new file mode 100644
index 0000000000..bc81223f21
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameEncryptor.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameEncryptor interface allows Java API users to provide a pointer to
+ * their native implementation of the FrameEncryptorInterface.
+ * FrameEncryptors are extremely performance sensitive as they must process all
+ * outgoing video and audio frames. For this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameEncryptor {
+ /**
+ * @return A FrameEncryptorInterface pointer.
+ */
+ long getNativeFrameEncryptor();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/GlRectDrawer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlRectDrawer.java
new file mode 100644
index 0000000000..d1fbd1b7bc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlRectDrawer.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Simplest possible GL shader that just draws frames as opaque quads. */
+public class GlRectDrawer extends GlGenericDrawer {
+ private static final String FRAGMENT_SHADER = "void main() {\n"
+ + " gl_FragColor = sample(tc);\n"
+ + "}\n";
+
+ private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
+ @Override
+ public void onNewShader(GlShader shader) {}
+
+ @Override
+ public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight) {}
+ }
+
+ public GlRectDrawer() {
+ super(FRAGMENT_SHADER, new ShaderCallbacks());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/GlShader.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlShader.java
new file mode 100644
index 0000000000..7efd8d3a95
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlShader.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.FloatBuffer;
+
+// Helper class for handling OpenGL shaders and shader programs.
+public class GlShader {
+ private static final String TAG = "GlShader";
+
+ private static int compileShader(int shaderType, String source) {
+ final int shader = GLES20.glCreateShader(shaderType);
+ if (shader == 0) {
+ throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
+ }
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compileStatus = new int[] {GLES20.GL_FALSE};
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
+ if (compileStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(
+ TAG, "Compile error " + GLES20.glGetShaderInfoLog(shader) + " in shader:\n" + source);
+ throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+ }
+ GlUtil.checkNoGLES2Error("compileShader");
+ return shader;
+ }
+
+ private int program;
+
+ public GlShader(String vertexSource, String fragmentSource) {
+ final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ program = GLES20.glCreateProgram();
+ if (program == 0) {
+ throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[] {GLES20.GL_FALSE};
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
+ throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+ }
+ // According to the documentation of glLinkProgram():
+ // "After the link operation, applications are free to modify attached shader objects, compile
+ // attached shader objects, detach shader objects, delete shader objects, and attach additional
+ // shader objects. None of these operations affects the information log or the program that is
+ // part of the program object."
+ // But in practice, detaching shaders from the program seems to break some devices. Deleting the
+ // shaders is fine, however; they will be deleted once they are no longer attached to a program.
+ GLES20.glDeleteShader(vertexShader);
+ GLES20.glDeleteShader(fragmentShader);
+ GlUtil.checkNoGLES2Error("Creating GlShader");
+ }
+
+ public int getAttribLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetAttribLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate '" + label + "' in program");
+ }
+ return location;
+ }
+
+ /**
+ * Enable and upload a vertex array for attribute `label`. The vertex data is specified in
+ * `buffer` with `dimension` number of components per vertex.
+ */
+ public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
+ setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
+ }
+
+ /**
+ * Enable and upload a vertex array for attribute `label`. The vertex data is specified in
+ * `buffer` with `dimension` number of components per vertex and specified `stride`.
+ */
+ public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = getAttribLocation(label);
+ GLES20.glEnableVertexAttribArray(location);
+ GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer);
+ GlUtil.checkNoGLES2Error("setVertexAttribArray");
+ }
+
+ public int getUniformLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetUniformLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate uniform '" + label + "' in program");
+ }
+ return location;
+ }
+
+ public void useProgram() {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ synchronized (EglBase.lock) {
+ GLES20.glUseProgram(program);
+ }
+ GlUtil.checkNoGLES2Error("glUseProgram");
+ }
+
+ public void release() {
+ Logging.d(TAG, "Deleting shader.");
+ // Delete program, automatically detaching any shaders from it.
+ if (program != -1) {
+ GLES20.glDeleteProgram(program);
+ program = -1;
+ }
+ }
+}
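+
+// Illustrative usage sketch, not part of the upstream file: compile a trivial
+// program and draw a full-screen quad. Assumes a current EGL context on the
+// calling thread.
+class GlShaderExample {
+ static void drawSolidQuad() {
+ GlShader shader = new GlShader(
+ "attribute vec4 in_pos;\nvoid main() { gl_Position = in_pos; }\n",
+ "precision mediump float;\nvoid main() { gl_FragColor = vec4(1.0); }\n");
+ shader.useProgram();
+ shader.setVertexAttribArray("in_pos", /* dimension= */ 2,
+ GlUtil.createFloatBuffer(new float[] {-1, -1, 1, -1, -1, 1, 1, 1}));
+ android.opengl.GLES20.glDrawArrays(
+ android.opengl.GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
+ shader.release();
+ }
+}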
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java
new file mode 100644
index 0000000000..b906fe56e0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+/**
+ * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
+ * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
+ * conversion. This class is not thread safe and must be used by a thread with an active GL context.
+ */
+// TODO(magjed): Add unittests for this class.
+public class GlTextureFrameBuffer {
+ private final int pixelFormat;
+ private int frameBufferId;
+ private int textureId;
+ private int width;
+ private int height;
+
+ /**
+ * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
+ * when calling this function. The framebuffer is not complete until setSize() is called.
+ */
+ public GlTextureFrameBuffer(int pixelFormat) {
+ switch (pixelFormat) {
+ case GLES20.GL_LUMINANCE:
+ case GLES20.GL_RGB:
+ case GLES20.GL_RGBA:
+ this.pixelFormat = pixelFormat;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
+ }
+ this.width = 0;
+ this.height = 0;
+ }
+
+ /**
+ * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
+ * EGLContext must be bound on the current thread when calling this function. Must be called at
+ * least once before using the framebuffer. May be called multiple times to change size.
+ */
+ public void setSize(int width, int height) {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
+ }
+ if (width == this.width && height == this.height) {
+ return;
+ }
+ this.width = width;
+ this.height = height;
+ // Allocate lazily the first time setSize() is called.
+ if (textureId == 0) {
+ textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ if (frameBufferId == 0) {
+ final int[] frameBuffers = new int[1];
+ GLES20.glGenFramebuffers(1, frameBuffers, 0);
+ frameBufferId = frameBuffers[0];
+ }
+
+ // Allocate texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
+ GLES20.GL_UNSIGNED_BYTE, null);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");
+
+ // Attach the texture to the framebuffer as color attachment.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+ GLES20.glFramebufferTexture2D(
+ GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
+
+ // Check that the framebuffer is in a good state.
+ final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+ if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ throw new IllegalStateException("Framebuffer not complete, status: " + status);
+ }
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ }
+
+ public int getWidth() {
+ return width;
+ }
+
+ public int getHeight() {
+ return height;
+ }
+
+ /** Gets the OpenGL frame buffer id. This value is only valid after setSize() has been called. */
+ public int getFrameBufferId() {
+ return frameBufferId;
+ }
+
+ /** Gets the OpenGL texture id. This value is only valid after setSize() has been called. */
+ public int getTextureId() {
+ return textureId;
+ }
+
+ /**
+ * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
+ * this function. This object should not be used after this call.
+ */
+ public void release() {
+ GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
+ textureId = 0;
+ GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+ frameBufferId = 0;
+ width = 0;
+ height = 0;
+ }
+}
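+
+// Illustrative usage sketch, not part of the upstream file: render offscreen
+// into the attached texture. Assumes a current EGL context; the size is
+// arbitrary.
+class GlTextureFrameBufferExample {
+ static void renderOffscreen() {
+ GlTextureFrameBuffer frameBuffer = new GlTextureFrameBuffer(android.opengl.GLES20.GL_RGBA);
+ frameBuffer.setSize(/* width= */ 640, /* height= */ 360);
+ android.opengl.GLES20.glBindFramebuffer(
+ android.opengl.GLES20.GL_FRAMEBUFFER, frameBuffer.getFrameBufferId());
+ // ... issue draw calls here; the output lands in frameBuffer.getTextureId() ...
+ android.opengl.GLES20.glBindFramebuffer(android.opengl.GLES20.GL_FRAMEBUFFER, 0);
+ frameBuffer.release();
+ }
+}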
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/GlUtil.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlUtil.java
new file mode 100644
index 0000000000..e2dd0c56d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlUtil.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+import android.opengl.GLException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL static utility functions.
+ */
+public class GlUtil {
+ private GlUtil() {}
+
+ public static class GlOutOfMemoryException extends GLException {
+ public GlOutOfMemoryException(int error, String msg) {
+ super(error, msg);
+ }
+ }
+
+ // Assert that no OpenGL ES 2.0 error has been raised.
+ public static void checkNoGLES2Error(String msg) {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ throw error == GLES20.GL_OUT_OF_MEMORY
+ ? new GlOutOfMemoryException(error, msg)
+ : new GLException(error, msg + ": GLES20 error: " + error);
+ }
+ }
+
+ public static FloatBuffer createFloatBuffer(float[] coords) {
+ // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+ ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
+ bb.order(ByteOrder.nativeOrder());
+ FloatBuffer fb = bb.asFloatBuffer();
+ fb.put(coords);
+ fb.position(0);
+ return fb;
+ }
+
+ /**
+ * Generate texture with standard parameters.
+ */
+ public static int generateTexture(int target) {
+ final int[] textureArray = new int[1];
+ GLES20.glGenTextures(1, textureArray, 0);
+ final int textureId = textureArray[0];
+ GLES20.glBindTexture(target, textureId);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ checkNoGLES2Error("generateTexture");
+ return textureId;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
new file mode 100644
index 0000000000..215598a85d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+
+/** Factory for Android hardware VideoDecoders. */
+public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
+ private static final Predicate<MediaCodecInfo> defaultAllowedPredicate =
+ new Predicate<MediaCodecInfo>() {
+ @Override
+ public boolean test(MediaCodecInfo arg) {
+ return MediaCodecUtils.isHardwareAccelerated(arg);
+ }
+ };
+
+ /** Creates a HardwareVideoDecoderFactory that does not use surface textures. */
+ @Deprecated // Not removed yet to avoid breaking callers.
+ public HardwareVideoDecoderFactory() {
+ this(null);
+ }
+
+ /**
+ * Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null;
+ * if null, texture support is disabled.
+ */
+ public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) {
+ this(sharedContext, /* codecAllowedPredicate= */ null);
+ }
+
+ /**
+ * Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null;
+ * if null, texture support is disabled.
+ * @param codecAllowedPredicate predicate to filter codecs. It is combined with the default
+ * predicate that only allows hardware codecs.
+ */
+ public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
+ @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) {
+ super(sharedContext,
+ (codecAllowedPredicate == null ? defaultAllowedPredicate
+ : codecAllowedPredicate.and(defaultAllowedPredicate)));
+ }
+}
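+
+// Illustrative usage sketch, not part of the upstream file: narrow the factory
+// with a custom predicate, which is ANDed with the default hardware-only
+// predicate. The codec-name prefix is an assumption; eglBase is assumed to be
+// created elsewhere.
+class HardwareVideoDecoderFactoryExample {
+ static VideoDecoderFactory create(EglBase eglBase) {
+ return new HardwareVideoDecoderFactory(eglBase.getEglBaseContext(),
+ (android.media.MediaCodecInfo info) -> info.getName().startsWith("c2.qti."));
+ }
+}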
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/IceCandidateErrorEvent.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/IceCandidateErrorEvent.java
new file mode 100644
index 0000000000..aae9da7061
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/IceCandidateErrorEvent.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public final class IceCandidateErrorEvent {
+ /** The local IP address used to communicate with the STUN or TURN server. */
+ public final String address;
+ /** The port used to communicate with the STUN or TURN server. */
+ public final int port;
+ /**
+ * The STUN or TURN URL that identifies the STUN or TURN server for which the failure occurred.
+ */
+ public final String url;
+ /**
+ * The numeric STUN error code returned by the STUN or TURN server. If no host candidate can reach
+ * the server, errorCode will be set to the value 701, which is outside the STUN error code range.
+ * This error is only fired once per server URL while in the RTCIceGatheringState of "gathering".
+ */
+ public final int errorCode;
+ /**
+ * The STUN reason text returned by the STUN or TURN server. If the server could not be reached,
+ * errorText will be set to an implementation-specific value providing details about the error.
+ */
+ public final String errorText;
+
+ @CalledByNative
+ public IceCandidateErrorEvent(
+ String address, int port, String url, int errorCode, String errorText) {
+ this.address = address;
+ this.port = port;
+ this.url = url;
+ this.errorCode = errorCode;
+ this.errorText = errorText;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java
new file mode 100644
index 0000000000..322b8f38c9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.VideoFrame.I420Buffer;
+
+/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. */
+public class JavaI420Buffer implements VideoFrame.I420Buffer {
+ private final int width;
+ private final int height;
+ private final ByteBuffer dataY;
+ private final ByteBuffer dataU;
+ private final ByteBuffer dataV;
+ private final int strideY;
+ private final int strideU;
+ private final int strideV;
+ private final RefCountDelegate refCountDelegate;
+
+ private JavaI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
+ int strideU, ByteBuffer dataV, int strideV, @Nullable Runnable releaseCallback) {
+ this.width = width;
+ this.height = height;
+ this.dataY = dataY;
+ this.dataU = dataU;
+ this.dataV = dataV;
+ this.strideY = strideY;
+ this.strideU = strideU;
+ this.strideV = strideV;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ private static void checkCapacity(ByteBuffer data, int width, int height, int stride) {
+ // The last row does not necessarily need padding.
+ final int minCapacity = stride * (height - 1) + width;
+ if (data.capacity() < minCapacity) {
+ throw new IllegalArgumentException(
+ "Buffer must be at least " + minCapacity + " bytes, but was " + data.capacity());
+ }
+ }
+
+ /** Wraps existing ByteBuffers into JavaI420Buffer object without copying the contents. */
+ public static JavaI420Buffer wrap(int width, int height, ByteBuffer dataY, int strideY,
+ ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV,
+ @Nullable Runnable releaseCallback) {
+ if (dataY == null || dataU == null || dataV == null) {
+ throw new IllegalArgumentException("Data buffers cannot be null.");
+ }
+ if (!dataY.isDirect() || !dataU.isDirect() || !dataV.isDirect()) {
+ throw new IllegalArgumentException("Data buffers must be direct byte buffers.");
+ }
+
+ // Slice the buffers to prevent external modifications to the position / limit of the buffer.
+ // Note that this doesn't protect the contents of the buffers from modifications.
+ dataY = dataY.slice();
+ dataU = dataU.slice();
+ dataV = dataV.slice();
+
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (height + 1) / 2;
+ checkCapacity(dataY, width, height, strideY);
+ checkCapacity(dataU, chromaWidth, chromaHeight, strideU);
+ checkCapacity(dataV, chromaWidth, chromaHeight, strideV);
+
+ return new JavaI420Buffer(
+ width, height, dataY, strideY, dataU, strideU, dataV, strideV, releaseCallback);
+ }
+
+ /** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
+ public static JavaI420Buffer allocate(int width, int height) {
+ int chromaHeight = (height + 1) / 2;
+ int strideUV = (width + 1) / 2;
+ int yPos = 0;
+ int uPos = yPos + width * height;
+ int vPos = uPos + strideUV * chromaHeight;
+
+ ByteBuffer buffer =
+ JniCommon.nativeAllocateByteBuffer(width * height + 2 * strideUV * chromaHeight);
+
+ buffer.position(yPos);
+ buffer.limit(uPos);
+ ByteBuffer dataY = buffer.slice();
+
+ buffer.position(uPos);
+ buffer.limit(vPos);
+ ByteBuffer dataU = buffer.slice();
+
+ buffer.position(vPos);
+ buffer.limit(vPos + strideUV * chromaHeight);
+ ByteBuffer dataV = buffer.slice();
+
+ return new JavaI420Buffer(width, height, dataY, width, dataU, strideUV, dataV, strideUV,
+ () -> { JniCommon.nativeFreeByteBuffer(buffer); });
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public ByteBuffer getDataY() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataY.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataU() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataU.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataV() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataV.slice();
+ }
+
+ @Override
+ public int getStrideY() {
+ return strideY;
+ }
+
+ @Override
+ public int getStrideU() {
+ return strideU;
+ }
+
+ @Override
+ public int getStrideV() {
+ return strideV;
+ }
+
+ @Override
+ public I420Buffer toI420() {
+ retain();
+ return this;
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ return cropAndScaleI420(this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ }
+
+ public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
+ int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
+ // No scaling.
+ ByteBuffer dataY = buffer.getDataY();
+ ByteBuffer dataU = buffer.getDataU();
+ ByteBuffer dataV = buffer.getDataV();
+
+ dataY.position(cropX + cropY * buffer.getStrideY());
+ dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
+ dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());
+
+ buffer.retain();
+ return JavaI420Buffer.wrap(scaleWidth, scaleHeight, dataY.slice(), buffer.getStrideY(),
+ dataU.slice(), buffer.getStrideU(), dataV.slice(), buffer.getStrideV(), buffer::release);
+ }
+
+ JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+ nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
+ buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
+ cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
+ newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
+ scaleHeight);
+ return newBuffer;
+ }
+
+ private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
+ ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
+ int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+ int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
+}
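+
+// Illustrative usage sketch, not part of the upstream file: allocate a small
+// I420 buffer, fill the Y plane with mid-gray, and hand it to a VideoFrame.
+class JavaI420BufferExample {
+ static VideoFrame makeGrayFrame() {
+ JavaI420Buffer buffer = JavaI420Buffer.allocate(/* width= */ 16, /* height= */ 16);
+ java.nio.ByteBuffer dataY = buffer.getDataY();
+ while (dataY.hasRemaining()) {
+ dataY.put((byte) 128);
+ }
+ // The frame adopts the buffer's reference; call frame.release() when done.
+ return new VideoFrame(buffer, /* rotation= */ 0, /* timestampNs= */ 0);
+ }
+}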
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Decoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Decoder.java
new file mode 100644
index 0000000000..609203fe3f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Decoder.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibaomAv1Decoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+
+ static native boolean nativeIsSupported();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Encoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
new file mode 100644
index 0000000000..569a719f44
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibaomAv1Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java
new file mode 100644
index 0000000000..54ad0aa137
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp8Decoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java
new file mode 100644
index 0000000000..4be9e52c14
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp8Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java
new file mode 100644
index 0000000000..90a24433a3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp9Decoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+
+ static native boolean nativeIsSupported();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java
new file mode 100644
index 0000000000..1211ae93fb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+
+ static native boolean nativeIsSupported();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaConstraints.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaConstraints.java
new file mode 100644
index 0000000000..bae04e532c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaConstraints.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Description of media constraints for {@code MediaStream} and
+ * {@code PeerConnection}.
+ */
+public class MediaConstraints {
+ /** Simple String key/value pair. */
+ public static class KeyValuePair {
+ private final String key;
+ private final String value;
+
+ public KeyValuePair(String key, String value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ @CalledByNative("KeyValuePair")
+ public String getKey() {
+ return key;
+ }
+
+ @CalledByNative("KeyValuePair")
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public String toString() {
+ return key + ": " + value;
+ }
+
+ @Override
+ public boolean equals(@Nullable Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+ KeyValuePair that = (KeyValuePair) other;
+ return key.equals(that.key) && value.equals(that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return key.hashCode() + value.hashCode();
+ }
+ }
+
+ public final List<KeyValuePair> mandatory;
+ public final List<KeyValuePair> optional;
+
+ public MediaConstraints() {
+ mandatory = new ArrayList<KeyValuePair>();
+ optional = new ArrayList<KeyValuePair>();
+ }
+
+ private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
+ StringBuilder builder = new StringBuilder("[");
+ for (KeyValuePair pair : list) {
+ if (builder.length() > 1) {
+ builder.append(", ");
+ }
+ builder.append(pair.toString());
+ }
+ return builder.append("]").toString();
+ }
+
+ @Override
+ public String toString() {
+ return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
+ + stringifyKeyValuePairList(optional);
+ }
+
+ @CalledByNative
+ List<KeyValuePair> getMandatory() {
+ return mandatory;
+ }
+
+ @CalledByNative
+ List<KeyValuePair> getOptional() {
+ return optional;
+ }
+}
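+
+// Illustrative usage sketch, not part of the upstream file. The key names
+// below are legacy, implementation-specific constraint strings, shown purely
+// as examples.
+class MediaConstraintsExample {
+ static MediaConstraints audioConstraints() {
+ MediaConstraints constraints = new MediaConstraints();
+ constraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("googEchoCancellation", "true"));
+ constraints.optional.add(
+ new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
+ return constraints;
+ }
+}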
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaSource.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaSource.java
new file mode 100644
index 0000000000..9245e3e2eb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaSource.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaSourceInterface. */
+public class MediaSource {
+ /** Tracks MediaSourceInterface.SourceState */
+ public enum State {
+ INITIALIZING,
+ LIVE,
+ ENDED,
+ MUTED;
+
+ @CalledByNative("State")
+ static State fromNativeIndex(int nativeIndex) {
+ return values()[nativeIndex];
+ }
+ }
+
+ private final RefCountDelegate refCountDelegate;
+ private long nativeSource;
+
+ public MediaSource(long nativeSource) {
+ refCountDelegate = new RefCountDelegate(() -> JniCommon.nativeReleaseRef(nativeSource));
+ this.nativeSource = nativeSource;
+ }
+
+ public State state() {
+ checkMediaSourceExists();
+ return nativeGetState(nativeSource);
+ }
+
+ public void dispose() {
+ checkMediaSourceExists();
+ refCountDelegate.release();
+ nativeSource = 0;
+ }
+
+ /** Returns a pointer to webrtc::MediaSourceInterface. */
+ protected long getNativeMediaSource() {
+ checkMediaSourceExists();
+ return nativeSource;
+ }
+
+ /**
+ * Runs code in {@code runnable} holding a reference to the media source. If the object has
+ * already been released, does nothing.
+ */
+ void runWithReference(Runnable runnable) {
+ if (refCountDelegate.safeRetain()) {
+ try {
+ runnable.run();
+ } finally {
+ refCountDelegate.release();
+ }
+ }
+ }
+
+ private void checkMediaSourceExists() {
+ if (nativeSource == 0) {
+ throw new IllegalStateException("MediaSource has been disposed.");
+ }
+ }
+
+ private static native State nativeGetState(long pointer);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java
new file mode 100644
index 0000000000..2e4c3e18f7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/** Java wrapper for a C++ MediaStreamTrackInterface. */
+public class MediaStreamTrack {
+ public static final String AUDIO_TRACK_KIND = "audio";
+ public static final String VIDEO_TRACK_KIND = "video";
+
+ /** Tracks MediaStreamTrackInterface.TrackState */
+ public enum State {
+ LIVE,
+ ENDED;
+
+ @CalledByNative("State")
+ static State fromNativeIndex(int nativeIndex) {
+ return values()[nativeIndex];
+ }
+ }
+
+ // Must be kept in sync with cricket::MediaType.
+ public enum MediaType {
+ MEDIA_TYPE_AUDIO(0),
+ MEDIA_TYPE_VIDEO(1);
+
+ private final int nativeIndex;
+
+ private MediaType(int nativeIndex) {
+ this.nativeIndex = nativeIndex;
+ }
+
+ @CalledByNative("MediaType")
+ int getNative() {
+ return nativeIndex;
+ }
+
+ @CalledByNative("MediaType")
+ static MediaType fromNativeIndex(int nativeIndex) {
+ for (MediaType type : MediaType.values()) {
+ if (type.getNative() == nativeIndex) {
+ return type;
+ }
+ }
+ throw new IllegalArgumentException("Unknown native media type: " + nativeIndex);
+ }
+ }
+
+ /** Factory method to create an AudioTrack or VideoTrack subclass. */
+ static @Nullable MediaStreamTrack createMediaStreamTrack(long nativeTrack) {
+ if (nativeTrack == 0) {
+ return null;
+ }
+ String trackKind = nativeGetKind(nativeTrack);
+ if (trackKind.equals(AUDIO_TRACK_KIND)) {
+ return new AudioTrack(nativeTrack);
+ } else if (trackKind.equals(VIDEO_TRACK_KIND)) {
+ return new VideoTrack(nativeTrack);
+ } else {
+ return null;
+ }
+ }
+
+ private long nativeTrack;
+
+ public MediaStreamTrack(long nativeTrack) {
+ if (nativeTrack == 0) {
+ throw new IllegalArgumentException("nativeTrack may not be null");
+ }
+ this.nativeTrack = nativeTrack;
+ }
+
+ public String id() {
+ checkMediaStreamTrackExists();
+ return nativeGetId(nativeTrack);
+ }
+
+ public String kind() {
+ checkMediaStreamTrackExists();
+ return nativeGetKind(nativeTrack);
+ }
+
+ public boolean enabled() {
+ checkMediaStreamTrackExists();
+ return nativeGetEnabled(nativeTrack);
+ }
+
+ public boolean setEnabled(boolean enable) {
+ checkMediaStreamTrackExists();
+ return nativeSetEnabled(nativeTrack, enable);
+ }
+
+ public State state() {
+ checkMediaStreamTrackExists();
+ return nativeGetState(nativeTrack);
+ }
+
+ public void dispose() {
+ checkMediaStreamTrackExists();
+ JniCommon.nativeReleaseRef(nativeTrack);
+ nativeTrack = 0;
+ }
+
+ long getNativeMediaStreamTrack() {
+ checkMediaStreamTrackExists();
+ return nativeTrack;
+ }
+
+ private void checkMediaStreamTrackExists() {
+ if (nativeTrack == 0) {
+ throw new IllegalStateException("MediaStreamTrack has been disposed.");
+ }
+ }
+
+ private static native String nativeGetId(long track);
+ private static native String nativeGetKind(long track);
+ private static native boolean nativeGetEnabled(long track);
+ private static native boolean nativeSetEnabled(long track, boolean enabled);
+ private static native State nativeGetState(long track);
+}
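+
+// Illustrative usage sketch, not part of the upstream file: mute or unmute a
+// track without removing it from its sender.
+class MediaStreamTrackExample {
+ static void setMuted(MediaStreamTrack track, boolean muted) {
+ if (track.state() == MediaStreamTrack.State.LIVE) {
+ track.setEnabled(!muted);
+ }
+ }
+}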
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Metrics.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Metrics.java
new file mode 100644
index 0000000000..253376831c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Metrics.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+// Java-side of androidmetrics.cc
+//
+// RTC histograms can be queried through the API, getAndReset().
+// The returned map holds the name of a histogram and its samples.
+//
+// Example of `map` with one histogram:
+// `name`: "WebRTC.Video.InputFramesPerSecond"
+// `min`: 1
+// `max`: 100
+// `bucketCount`: 50
+// `samples`: [30]:1
+//
+// Most histograms are not updated frequently (e.g. most video metrics are an
+// average over the call and recorded when a stream is removed).
+// The metrics can for example be retrieved when a peer connection is closed.
+public class Metrics {
+ private static final String TAG = "Metrics";
+
+ public final Map<String, HistogramInfo> map =
+ new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
+
+ @CalledByNative
+ Metrics() {}
+
+ /**
+ * Class holding histogram information.
+ */
+ public static class HistogramInfo {
+ public final int min;
+ public final int max;
+ public final int bucketCount;
+ public final Map<Integer, Integer> samples =
+ new HashMap<Integer, Integer>(); // <value, # of events>
+
+ @CalledByNative("HistogramInfo")
+ public HistogramInfo(int min, int max, int bucketCount) {
+ this.min = min;
+ this.max = max;
+ this.bucketCount = bucketCount;
+ }
+
+ @CalledByNative("HistogramInfo")
+ public void addSample(int value, int numEvents) {
+ samples.put(value, numEvents);
+ }
+ }
+
+ @CalledByNative
+ private void add(String name, HistogramInfo info) {
+ map.put(name, info);
+ }
+
+ // Enables gathering of metrics (which can be fetched with getAndReset()).
+ // Must be called before PeerConnectionFactory is created.
+ public static void enable() {
+ nativeEnable();
+ }
+
+ // Gets and clears native histograms.
+ public static Metrics getAndReset() {
+ return nativeGetAndReset();
+ }
+
+ private static native void nativeEnable();
+ private static native Metrics nativeGetAndReset();
+}
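+
+// Illustrative usage sketch, not part of the upstream file. Metrics.enable()
+// must have been called before the PeerConnectionFactory was created.
+class MetricsExample {
+ static void logAndResetMetrics() {
+ Metrics metrics = Metrics.getAndReset();
+ for (java.util.Map.Entry<String, Metrics.HistogramInfo> entry : metrics.map.entrySet()) {
+ Logging.d("MetricsExample", entry.getKey() + " samples: " + entry.getValue().samples);
+ }
+ }
+}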
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java
new file mode 100644
index 0000000000..8bd7b3b250
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for loading native libraries. A custom loader can be passed to
+ * PeerConnectionFactory.initialize.
+ */
+public interface NativeLibraryLoader {
+ /**
+ * Loads a native library with the given name.
+ *
+ * @return True on success
+ */
+ boolean load(String name);
+}
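+
+// Illustrative sketch, not part of the upstream file: a loader that reports
+// failure instead of crashing when a native library is missing.
+class SafeNativeLibraryLoader implements NativeLibraryLoader {
+ @Override
+ public boolean load(String name) {
+ try {
+ System.loadLibrary(name);
+ return true;
+ } catch (UnsatisfiedLinkError e) {
+ Logging.e("SafeNativeLibraryLoader", "Failed to load " + name);
+ return false;
+ }
+ }
+}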
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java
new file mode 100644
index 0000000000..aeb91e1750
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::jni::OwnedPeerConnection instances. */
+public interface NativePeerConnectionFactory {
+ /**
+ * Create a new webrtc::jni::OwnedPeerConnection instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ long createNativePeerConnection();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/NetEqFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/NetEqFactoryFactory.java
new file mode 100644
index 0000000000..8464324cbc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/NetEqFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::NetEqFactory}.
+ */
+public interface NetEqFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::NetEqFactory}. The caller takes ownership.
+ */
+ long createNativeNetEqFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/OWNERS b/third_party/libwebrtc/sdk/android/api/org/webrtc/OWNERS
new file mode 100644
index 0000000000..b64df86672
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/OWNERS
@@ -0,0 +1,3 @@
+per-file Camera*=xalep@webrtc.org
+per-file Histogram.java=xalep@webrtc.org
+per-file Metrics.java=xalep@webrtc.org
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
new file mode 100644
index 0000000000..caca5e5889
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+
+/** Factory for Android platform software VideoDecoders. */
+public class PlatformSoftwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
+ /**
+ * Default allowed predicate.
+ */
+ private static final Predicate<MediaCodecInfo> defaultAllowedPredicate =
+ new Predicate<MediaCodecInfo>() {
+ @Override
+ public boolean test(MediaCodecInfo arg) {
+ return MediaCodecUtils.isSoftwareOnly(arg);
+ }
+ };
+
+ /**
+ * Creates a PlatformSoftwareVideoDecoderFactory that supports surface texture rendering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null;
+ * if null, texture support is disabled.
+ */
+ public PlatformSoftwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) {
+ super(sharedContext, defaultAllowedPredicate);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Predicate.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Predicate.java
new file mode 100644
index 0000000000..50e6975000
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Predicate.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Represents a predicate (boolean-valued function) of one argument.
+ */
+public interface Predicate<T> {
+ /**
+ * Evaluates this predicate on the given argument.
+ *
+ * @param arg the input argument
+ * @return true if the input argument matches the predicate, otherwise false
+ */
+ boolean test(T arg);
+
+ /**
+ * Returns a composed predicate that represents a short-circuiting logical OR of this predicate
+ * and another. When evaluating the composed predicate, if this predicate is true, then the other
+ * predicate is not evaluated.
+ *
+ * @param other a predicate that will be logically-ORed with this predicate
+ * @return a composed predicate that represents the short-circuiting logical OR of this predicate
+ * and the other predicate
+ */
+ default Predicate<T> or(Predicate<? super T> other) {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return Predicate.this.test(arg) || other.test(arg);
+ }
+ };
+ }
+
+ /**
+ * Returns a composed predicate that represents a short-circuiting logical AND of this predicate
+ * and another.
+ *
+ * @param other a predicate that will be logically-ANDed with this predicate
+ * @return a composed predicate that represents the short-circuiting logical AND of this predicate
+ * and the other predicate
+ */
+ default Predicate<T> and(Predicate<? super T> other) {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return Predicate.this.test(arg) && other.test(arg);
+ }
+ };
+ }
+
+ /**
+ * Returns a predicate that represents the logical negation of this predicate.
+ *
+ * @return a predicate that represents the logical negation of this predicate
+ */
+ default Predicate<T> negate() {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return !Predicate.this.test(arg);
+ }
+ };
+ }
+} \ No newline at end of file
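+
+// Illustrative usage sketch, not part of the upstream file: the default
+// methods compose predicates without extra classes.
+class PredicateExample {
+ static void demo() {
+ Predicate<String> nonEmpty = s -> !s.isEmpty();
+ Predicate<String> shortString = s -> s.length() < 8;
+ boolean both = nonEmpty.and(shortString).test("abc"); // true
+ boolean either = nonEmpty.negate().or(shortString).test(""); // true
+ }
+}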
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/RefCounted.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/RefCounted.java
new file mode 100644
index 0000000000..0c1c3bf1f9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/RefCounted.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for ref counted objects in WebRTC. These objects have significant resources that need
+ * to be freed when they are no longer in use. Each object starts with a ref count of one when
+ * created. If a reference is passed as a parameter to a method, the caller has ownership of the
+ * object by default; calling release is not necessary unless retain is called.
+ */
+public interface RefCounted {
+ /** Increases ref count by one. */
+ @CalledByNative void retain();
+
+ /**
+ * Decreases ref count by one. When the ref count reaches zero, resources related to the object
+ * will be freed.
+ */
+ @CalledByNative void release();
+}
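+
+// Illustrative sketch, not part of the upstream file: retaining a ref counted
+// VideoFrame so it outlives the callback that delivered it. The handler and
+// the processing step are hypothetical.
+class RefCountedExample {
+ static void handOff(android.os.Handler handler, VideoFrame frame) {
+ frame.retain(); // Keep the frame alive beyond this method.
+ handler.post(() -> {
+ // ... process the frame ...
+ frame.release(); // Balance the retain once processing is done.
+ });
+ }
+}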
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/RendererCommon.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/RendererCommon.java
new file mode 100644
index 0000000000..b97901c634
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/RendererCommon.java
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.opengl.Matrix;
+import android.view.View;
+
+/**
+ * Static helper functions for renderer implementations.
+ */
+public class RendererCommon {
+ /** Interface for reporting rendering events. */
+ public static interface RendererEvents {
+ /**
+ * Callback fired once first frame is rendered.
+ */
+ public void onFirstFrameRendered();
+
+ /**
+ * Callback fired when rendered frame resolution or rotation has changed.
+ */
+ public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
+ }
+
+ /**
+ * Interface for rendering frames on an EGLSurface with specified viewport location. Rotation,
+ * mirror, and cropping is specified using a 4x4 texture coordinate transform matrix. The frame
+ * input can either be an OES texture, RGB texture, or YUV textures in I420 format. The function
+ * release() must be called manually to free the resources held by this object.
+ */
+ public static interface GlDrawer {
+ /**
+ * Functions for drawing frames with different sources. The rendering surface target is
+ * implied by the current EGL context of the calling thread and requires no explicit argument.
+ * The coordinates specify the viewport location on the surface target.
+ */
+ void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+ void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
+ int viewportY, int viewportWidth, int viewportHeight);
+ void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+
+ /**
+ * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+ */
+ void release();
+ }
+
+ /**
+ * Helper class for determining layout size based on layout requirements, scaling type, and video
+ * aspect ratio.
+ */
+ public static class VideoLayoutMeasure {
+ // The scaling type determines how the video will fill the allowed layout area in measure(). It
+ // can be specified separately for the case when video has matched orientation with layout size
+ // and when there is an orientation mismatch.
+ private float visibleFractionMatchOrientation =
+ convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED);
+ private float visibleFractionMismatchOrientation =
+ convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED);
+
+ public void setScalingType(ScalingType scalingType) {
+ setScalingType(/* scalingTypeMatchOrientation= */ scalingType,
+ /* scalingTypeMismatchOrientation= */ scalingType);
+ }
+
+ public void setScalingType(
+ ScalingType scalingTypeMatchOrientation, ScalingType scalingTypeMismatchOrientation) {
+ this.visibleFractionMatchOrientation =
+ convertScalingTypeToVisibleFraction(scalingTypeMatchOrientation);
+ this.visibleFractionMismatchOrientation =
+ convertScalingTypeToVisibleFraction(scalingTypeMismatchOrientation);
+ }
+
+ public void setVisibleFraction(
+ float visibleFractionMatchOrientation, float visibleFractionMismatchOrientation) {
+ this.visibleFractionMatchOrientation = visibleFractionMatchOrientation;
+ this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation;
+ }
+
+ public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
+ // Calculate max allowed layout size.
+ final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
+ final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
+ if (frameWidth == 0 || frameHeight == 0 || maxWidth == 0 || maxHeight == 0) {
+ return new Point(maxWidth, maxHeight);
+ }
+ // Calculate desired display size based on scaling type, video aspect ratio,
+ // and maximum layout size.
+ final float frameAspect = frameWidth / (float) frameHeight;
+ final float displayAspect = maxWidth / (float) maxHeight;
+ final float visibleFraction = (frameAspect > 1.0f) == (displayAspect > 1.0f)
+ ? visibleFractionMatchOrientation
+ : visibleFractionMismatchOrientation;
+ final Point layoutSize = getDisplaySize(visibleFraction, frameAspect, maxWidth, maxHeight);
+
+ // If the measure specification is forcing a specific size, obey it.
+ if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
+ layoutSize.x = maxWidth;
+ }
+ if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
+ layoutSize.y = maxHeight;
+ }
+ return layoutSize;
+ }
+ }
+
+ // Types of video scaling:
+ // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+ // maintaining the aspect ratio (black borders may be displayed).
+ // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+ // maintaining the aspect ratio. Some portion of the video frame may be
+ // clipped.
+ // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
+ // possible of the view while maintaining aspect ratio, under the constraint that at least
+ // `BALANCED_VISIBLE_FRACTION` of the frame content will be shown.
+ public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
+ // The minimum fraction of the frame content that will be shown for `SCALE_ASPECT_BALANCED`.
+ // This limits excessive cropping when adjusting display size.
+ private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
+
+ /**
+ * Returns layout transformation matrix that applies an optional mirror effect and compensates
+ * for video vs display aspect ratio.
+ */
+ public static float[] getLayoutMatrix(
+ boolean mirror, float videoAspectRatio, float displayAspectRatio) {
+ float scaleX = 1;
+ float scaleY = 1;
+ // Scale X or Y dimension so that video and display size have same aspect ratio.
+ if (displayAspectRatio > videoAspectRatio) {
+ scaleY = videoAspectRatio / displayAspectRatio;
+ } else {
+ scaleX = displayAspectRatio / videoAspectRatio;
+ }
+ // Apply optional horizontal flip.
+ if (mirror) {
+ scaleX *= -1;
+ }
+ final float matrix[] = new float[16];
+ Matrix.setIdentityM(matrix, 0);
+ Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
+ adjustOrigin(matrix);
+ return matrix;
+ }
+
+ /** Converts a float[16] matrix array to android.graphics.Matrix. */
+ public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) {
+ // clang-format off
+ float[] values = {
+ matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0],
+ matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1],
+ matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3],
+ };
+ // clang-format on
+
+ android.graphics.Matrix matrix = new android.graphics.Matrix();
+ matrix.setValues(values);
+ return matrix;
+ }
+
+ /** Converts android.graphics.Matrix to a float[16] matrix array. */
+ public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) {
+ float[] values = new float[9];
+ matrix.getValues(values);
+
+ // The android.graphics.Matrix looks like this:
+ // [x1 y1 w1]
+ // [x2 y2 w2]
+ // [x3 y3 w3]
+ // We want to construct a matrix that looks like this:
+ // [x1 y1 0 w1]
+ // [x2 y2 0 w2]
+ // [ 0 0 1 0]
+ // [x3 y3 0 w3]
+ // Since it is stored in column-major order, it looks like this:
+ // [x1 x2 0 x3
+ // y1 y2 0 y3
+ // 0 0 1 0
+ // w1 w2 0 w3]
+ // clang-format off
+ float[] matrix4x4 = {
+ values[0 * 3 + 0], values[1 * 3 + 0], 0, values[2 * 3 + 0],
+ values[0 * 3 + 1], values[1 * 3 + 1], 0, values[2 * 3 + 1],
+ 0, 0, 1, 0,
+ values[0 * 3 + 2], values[1 * 3 + 2], 0, values[2 * 3 + 2],
+ };
+ // clang-format on
+ return matrix4x4;
+ }
+
+ /**
+ * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
+ */
+ public static Point getDisplaySize(
+ ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
+ return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
+ maxDisplayWidth, maxDisplayHeight);
+ }
+
+ /**
+ * Move `matrix` transformation origin to (0.5, 0.5). This is the origin for texture coordinates
+ * that are in the range 0 to 1.
+ */
+ private static void adjustOrigin(float[] matrix) {
+ // Note that OpenGL is using column-major order.
+ // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
+ matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
+ matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
+ // Post translate with 0.5 to move coordinates to range [0, 1].
+ matrix[12] += 0.5f;
+ matrix[13] += 0.5f;
+ }
+
+ /**
+ * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
+ * that must remain visible.
+ */
+ private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
+ switch (scalingType) {
+ case SCALE_ASPECT_FIT:
+ return 1.0f;
+ case SCALE_ASPECT_FILL:
+ return 0.0f;
+ case SCALE_ASPECT_BALANCED:
+ return BALANCED_VISIBLE_FRACTION;
+ default:
+ throw new IllegalArgumentException();
+ }
+ }
+
+ /**
+ * Calculate display size based on minimum fraction of the video that must remain visible,
+ * video aspect ratio, and maximum display size.
+ */
+ public static Point getDisplaySize(
+ float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
+ // If there is no constraint on the amount of cropping, fill the allowed display area.
+ if (minVisibleFraction == 0 || videoAspectRatio == 0) {
+ return new Point(maxDisplayWidth, maxDisplayHeight);
+ }
+ // Each dimension is constrained on max display size and how much we are allowed to crop.
+ final int width = Math.min(
+ maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+ final int height = Math.min(
+ maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+ return new Point(width, height);
+ }
+}
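A minimal sketch of how the static helpers above compose, assuming the Android framework classes are available at runtime; the wrapper class and the literal sizes are illustrative. For a 16:9 frame measured into a 720x1280 portrait area, SCALE_ASPECT_BALANCED caps each dimension by the 0.5625 visible fraction, which works out to a 720x720 layout:

    import android.graphics.Point;
    import org.webrtc.RendererCommon;

    final class LayoutMathSketch {
      public static void main(String[] args) {
        // width  = min(720,  round(1280 / 0.5625 * (16/9))) = 720
        // height = min(1280, round(720  / 0.5625 / (16/9))) = 720
        Point size = RendererCommon.getDisplaySize(
            RendererCommon.ScalingType.SCALE_ASPECT_BALANCED,
            /* videoAspectRatio= */ 16 / 9f,
            /* maxDisplayWidth= */ 720, /* maxDisplayHeight= */ 1280);
        System.out.println(size.x + "x" + size.y); // prints 720x720

        // A layout matrix that mirrors horizontally and crops a 16:9 frame so it
        // fills a square display area.
        float[] texMatrix = RendererCommon.getLayoutMatrix(
            /* mirror= */ true, /* videoAspectRatio= */ 16 / 9f, /* displayAspectRatio= */ 1f);
      }
    }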
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SSLCertificateVerifier.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SSLCertificateVerifier.java
new file mode 100644
index 0000000000..461cd3b143
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SSLCertificateVerifier.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The SSLCertificateVerifier interface allows API users to provide custom
+ * logic to verify certificates.
+ */
+public interface SSLCertificateVerifier {
+ /**
+ * Implementations of verify allow applications to provide custom logic for
+ * verifying certificates. This is not required by default and should be used
+ * with care.
+ *
+ * @param certificate A byte array containing a DER encoded X509 certificate.
+ * @return True if the certificate is verified and trusted else false.
+ */
+ @CalledByNative boolean verify(byte[] certificate);
+}
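One way an application can implement this interface is certificate pinning; a minimal sketch, assuming the app supplies the expected DER bytes (PinnedCertificateVerifier is an illustrative name, not part of the SDK):

    import java.io.ByteArrayInputStream;
    import java.security.cert.CertificateFactory;
    import java.security.cert.X509Certificate;
    import java.util.Arrays;
    import org.webrtc.SSLCertificateVerifier;

    class PinnedCertificateVerifier implements SSLCertificateVerifier {
      private final byte[] pinnedDerCertificate; // expected DER bytes, e.g. loaded from assets

      PinnedCertificateVerifier(byte[] pinnedDerCertificate) {
        this.pinnedDerCertificate = pinnedDerCertificate;
      }

      @Override
      public boolean verify(byte[] certificate) {
        try {
          // Parse to confirm the peer sent a well-formed X.509 certificate.
          X509Certificate parsed = (X509Certificate) CertificateFactory.getInstance("X.509")
              .generateCertificate(new ByteArrayInputStream(certificate));
          parsed.checkValidity(); // reject expired or not-yet-valid certificates
          return Arrays.equals(certificate, pinnedDerCertificate);
        } catch (Exception e) {
          return false; // fail closed on any parsing or validity error
        }
      }
    }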
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
new file mode 100644
index 0000000000..231d507155
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
@@ -0,0 +1,212 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.hardware.display.DisplayManager;
+import android.hardware.display.VirtualDisplay;
+import android.media.projection.MediaProjection;
+import android.media.projection.MediaProjectionManager;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+
+/**
+ * An implementation of VideoCapturer to capture the screen content as a video stream.
+ * Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this
+ * {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
+ * The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
+ * {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
+ * as a texture to the native code via {@code CapturerObserver.onFrameCaptured()}. This takes
+ * place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
+ * the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
+ * frames. At any time, at most one frame is being processed.
+ */
+public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
+ private static final int DISPLAY_FLAGS =
+ DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
+ // DPI for the VirtualDisplay; it does not seem to matter for us.
+ private static final int VIRTUAL_DISPLAY_DPI = 400;
+
+ private final Intent mediaProjectionPermissionResultData;
+ private final MediaProjection.Callback mediaProjectionCallback;
+
+ private int width;
+ private int height;
+ @Nullable private VirtualDisplay virtualDisplay;
+ @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+ @Nullable private CapturerObserver capturerObserver;
+ private long numCapturedFrames;
+ @Nullable private MediaProjection mediaProjection;
+ private boolean isDisposed;
+ @Nullable private MediaProjectionManager mediaProjectionManager;
+
+ /**
+ * Constructs a new Screen Capturer.
+ *
+ * @param mediaProjectionPermissionResultData the result data of the MediaProjection permission
+ * activity; the calling app must validate that the result code is Activity.RESULT_OK before
+ * calling this method.
+ * @param mediaProjectionCallback MediaProjection callback to implement application-specific
+ * logic in events such as when the user revokes a previously granted capture permission.
+ **/
+ public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
+ MediaProjection.Callback mediaProjectionCallback) {
+ this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
+ this.mediaProjectionCallback = mediaProjectionCallback;
+ }
+
+ private void checkNotDisposed() {
+ if (isDisposed) {
+ throw new RuntimeException("capturer is disposed.");
+ }
+ }
+
+ @Nullable
+ public MediaProjection getMediaProjection() {
+ return mediaProjection;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
+ final Context applicationContext, final CapturerObserver capturerObserver) {
+ checkNotDisposed();
+
+ if (capturerObserver == null) {
+ throw new RuntimeException("capturerObserver not set.");
+ }
+ this.capturerObserver = capturerObserver;
+
+ if (surfaceTextureHelper == null) {
+ throw new RuntimeException("surfaceTextureHelper not set.");
+ }
+ this.surfaceTextureHelper = surfaceTextureHelper;
+
+ mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
+ Context.MEDIA_PROJECTION_SERVICE);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void startCapture(
+ final int width, final int height, final int ignoredFramerate) {
+ checkNotDisposed();
+
+ this.width = width;
+ this.height = height;
+
+ mediaProjection = mediaProjectionManager.getMediaProjection(
+ Activity.RESULT_OK, mediaProjectionPermissionResultData);
+
+ // Let MediaProjection callback use the SurfaceTextureHelper thread.
+ mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler());
+
+ createVirtualDisplay();
+ capturerObserver.onCapturerStarted(true);
+ surfaceTextureHelper.startListening(ScreenCapturerAndroid.this);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void stopCapture() {
+ checkNotDisposed();
+ ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
+ @Override
+ public void run() {
+ surfaceTextureHelper.stopListening();
+ capturerObserver.onCapturerStopped();
+
+ if (virtualDisplay != null) {
+ virtualDisplay.release();
+ virtualDisplay = null;
+ }
+
+ if (mediaProjection != null) {
+ // Unregister the callback before stopping, otherwise the callback recursively
+ // calls this method.
+ mediaProjection.unregisterCallback(mediaProjectionCallback);
+ mediaProjection.stop();
+ mediaProjection = null;
+ }
+ }
+ });
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void dispose() {
+ isDisposed = true;
+ }
+
+ /**
+ * Changes output video format. This method can be used to scale the output
+ * video, or to change orientation when the captured screen is rotated for example.
+ *
+ * @param width new output video width
+ * @param height new output video height
+ * @param ignoredFramerate ignored
+ */
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void changeCaptureFormat(
+ final int width, final int height, final int ignoredFramerate) {
+ checkNotDisposed();
+
+ this.width = width;
+ this.height = height;
+
+ if (virtualDisplay == null) {
+ // The capturer is stopped; the virtual display will be created in startCapture().
+ return;
+ }
+
+ // Create a new virtual display on the surfaceTextureHelper thread to avoid interference
+ // with frame processing, which happens on the same thread (we serialize events by running
+ // them on the same thread).
+ ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
+ @Override
+ public void run() {
+ virtualDisplay.release();
+ createVirtualDisplay();
+ }
+ });
+ }
+
+ private void createVirtualDisplay() {
+ surfaceTextureHelper.setTextureSize(width, height);
+ virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
+ VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
+ null /* callback */, null /* callback handler */);
+ }
+
+ // This is called on the internal looper thread of {@code SurfaceTextureHelper}.
+ @Override
+ public void onFrame(VideoFrame frame) {
+ numCapturedFrames++;
+ capturerObserver.onFrameCaptured(frame);
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return true;
+ }
+
+ public long getNumCapturedFrames() {
+ return numCapturedFrames;
+ }
+}
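The constructor above leaves the permission handshake to the application. A minimal sketch of that handshake, assuming an Activity host; CaptureActivity and REQUEST_CODE are illustrative names:

    import android.app.Activity;
    import android.content.Context;
    import android.content.Intent;
    import android.media.projection.MediaProjection;
    import android.media.projection.MediaProjectionManager;
    import org.webrtc.ScreenCapturerAndroid;
    import org.webrtc.VideoCapturer;

    public class CaptureActivity extends Activity {
      private static final int REQUEST_CODE = 1;

      private void requestScreenCapture() {
        MediaProjectionManager manager =
            (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        startActivityForResult(manager.createScreenCaptureIntent(), REQUEST_CODE);
      }

      @Override
      protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode != REQUEST_CODE || resultCode != Activity.RESULT_OK) {
          return; // the ScreenCapturerAndroid constructor requires a RESULT_OK payload
        }
        VideoCapturer capturer = new ScreenCapturerAndroid(data, new MediaProjection.Callback() {
          @Override
          public void onStop() {
            // The user revoked the projection; stop and dispose the capturer here.
          }
        });
        // initialize() with a SurfaceTextureHelper and CapturerObserver, then
        // startCapture(width, height, 0) as described in the class comment above.
      }
    }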
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SdpObserver.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SdpObserver.java
new file mode 100644
index 0000000000..afa99bc552
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SdpObserver.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for observing SDP-related events. */
+public interface SdpObserver {
+ /** Called on success of Create{Offer,Answer}(). */
+ @CalledByNative void onCreateSuccess(SessionDescription sdp);
+
+ /** Called on success of Set{Local,Remote}Description(). */
+ @CalledByNative void onSetSuccess();
+
+ /** Called on error of Create{Offer,Answer}(). */
+ @CalledByNative void onCreateFailure(String error);
+
+ /** Called on error of Set{Local,Remote}Description(). */
+ @CalledByNative void onSetFailure(String error);
+}
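Since the same observer type serves both create and set operations, call sites often go through a no-op adapter and override only the callbacks they need; a minimal sketch (SdpObserverAdapter is an illustrative name):

    import org.webrtc.SdpObserver;
    import org.webrtc.SessionDescription;

    class SdpObserverAdapter implements SdpObserver {
      @Override public void onCreateSuccess(SessionDescription sdp) {}
      @Override public void onSetSuccess() {}
      @Override public void onCreateFailure(String error) {}
      @Override public void onSetFailure(String error) {}
    }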
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SessionDescription.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SessionDescription.java
new file mode 100644
index 0000000000..be89599a5f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SessionDescription.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Locale;
+
+/**
+ * Description of an RFC 4566 Session.
+ * SDPs are passed as serialized Strings in Java-land and are materialized
+ * to SessionDescriptionInterface as appropriate in the JNI layer.
+ */
+public class SessionDescription {
+ /** Java-land enum version of SessionDescriptionInterface's type() string. */
+ public static enum Type {
+ OFFER,
+ PRANSWER,
+ ANSWER,
+ ROLLBACK;
+
+ public String canonicalForm() {
+ return name().toLowerCase(Locale.US);
+ }
+
+ @CalledByNative("Type")
+ public static Type fromCanonicalForm(String canonical) {
+ return Type.valueOf(Type.class, canonical.toUpperCase(Locale.US));
+ }
+ }
+
+ public final Type type;
+ public final String description;
+
+ @CalledByNative
+ public SessionDescription(Type type, String description) {
+ this.type = type;
+ this.description = description;
+ }
+
+ @CalledByNative
+ String getDescription() {
+ return description;
+ }
+
+ @CalledByNative
+ String getTypeInCanonicalForm() {
+ return type.canonicalForm();
+ }
+}
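A minimal sketch of the canonical-form round trip used at the JNI boundary. Locale.US keeps "OFFER" and "offer" stable regardless of the device locale (the Turkish dotless i would otherwise break Type.valueOf); the SDP body here is elided and illustrative:

    import org.webrtc.SessionDescription;

    final class SdpTypeSketch {
      public static void main(String[] args) {
        SessionDescription.Type type = SessionDescription.Type.fromCanonicalForm("offer");
        SessionDescription sdp = new SessionDescription(type, "v=0\r\n..."); // body elided
        System.out.println(sdp.type.canonicalForm()); // prints "offer" in any locale
      }
    }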
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
new file mode 100644
index 0000000000..ebcf204320
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class SoftwareVideoDecoderFactory implements VideoDecoderFactory {
+ @Nullable
+ @Override
+ public VideoDecoder createDecoder(VideoCodecInfo codecInfo) {
+ String codecName = codecInfo.getName();
+
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP8.name())) {
+ return new LibvpxVp8Decoder();
+ }
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP9.name())
+ && LibvpxVp9Decoder.nativeIsSupported()) {
+ return new LibvpxVp9Decoder();
+ }
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.AV1.name())
+ && LibaomAv1Decoder.nativeIsSupported()) {
+ return new LibaomAv1Decoder();
+ }
+
+ return null;
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ return supportedCodecs();
+ }
+
+ static VideoCodecInfo[] supportedCodecs() {
+ List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
+
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP8.name(), new HashMap<>()));
+ if (LibvpxVp9Decoder.nativeIsSupported()) {
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP9.name(), new HashMap<>()));
+ }
+ if (LibaomAv1Decoder.nativeIsSupported()) {
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.AV1.name(), new HashMap<>()));
+ }
+
+ return codecs.toArray(new VideoCodecInfo[codecs.size()]);
+ }
+}
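The factory resolves codecs purely by name, so every VideoCodecInfo advertised by getSupportedCodecs() round-trips to a non-null decoder; a minimal sketch, assuming the native library has been loaded:

    import org.webrtc.SoftwareVideoDecoderFactory;
    import org.webrtc.VideoCodecInfo;
    import org.webrtc.VideoDecoder;

    final class DecoderRoundTripSketch {
      public static void main(String[] args) {
        SoftwareVideoDecoderFactory factory = new SoftwareVideoDecoderFactory();
        for (VideoCodecInfo info : factory.getSupportedCodecs()) {
          // VP9 and AV1 are only advertised when nativeIsSupported() holds, so the
          // matching guard in createDecoder() cannot return null here.
          VideoDecoder decoder = factory.createDecoder(info);
        }
      }
    }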
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
new file mode 100644
index 0000000000..c4ac229071
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class SoftwareVideoEncoderFactory implements VideoEncoderFactory {
+ @Nullable
+ @Override
+ public VideoEncoder createEncoder(VideoCodecInfo codecInfo) {
+ String codecName = codecInfo.getName();
+
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP8.name())) {
+ return new LibvpxVp8Encoder();
+ }
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP9.name())
+ && LibvpxVp9Encoder.nativeIsSupported()) {
+ return new LibvpxVp9Encoder();
+ }
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.AV1.name())) {
+ return new LibaomAv1Encoder();
+ }
+
+ return null;
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ return supportedCodecs();
+ }
+
+ static VideoCodecInfo[] supportedCodecs() {
+ List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
+
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP8.name(), new HashMap<>()));
+ if (LibvpxVp9Encoder.nativeIsSupported()) {
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP9.name(), new HashMap<>()));
+ }
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.AV1.name(), new HashMap<>()));
+
+ return codecs.toArray(new VideoCodecInfo[codecs.size()]);
+ }
+}
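A minimal sketch wiring both software factories into a PeerConnectionFactory through its public builder, assuming PeerConnectionFactory.initialize() has already run:

    import org.webrtc.PeerConnectionFactory;
    import org.webrtc.SoftwareVideoDecoderFactory;
    import org.webrtc.SoftwareVideoEncoderFactory;

    final class FactoryWiringSketch {
      static PeerConnectionFactory createSoftwareOnlyFactory() {
        // Forces software codecs for both directions of video.
        return PeerConnectionFactory.builder()
            .setVideoEncoderFactory(new SoftwareVideoEncoderFactory())
            .setVideoDecoderFactory(new SoftwareVideoDecoderFactory())
            .createPeerConnectionFactory();
      }
    }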
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsObserver.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsObserver.java
new file mode 100644
index 0000000000..b9984c18db
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsObserver.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for observing Stats reports (see webrtc::StatsObservers). */
+public interface StatsObserver {
+ /** Called when the reports are ready.*/
+ @CalledByNative public void onComplete(StatsReport[] reports);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsReport.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsReport.java
new file mode 100644
index 0000000000..b8f1cf87fe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsReport.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java version of webrtc::StatsReport. */
+public class StatsReport {
+ /** Java version of webrtc::StatsReport::Value. */
+ public static class Value {
+ public final String name;
+ public final String value;
+
+ @CalledByNative("Value")
+ public Value(String name, String value) {
+ this.name = name;
+ this.value = value;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("[").append(name).append(": ").append(value).append("]");
+ return builder.toString();
+ }
+ }
+
+ public final String id;
+ public final String type;
+ // Time since 1970-01-01T00:00:00Z in milliseconds.
+ public final double timestamp;
+ public final Value[] values;
+
+ @CalledByNative
+ public StatsReport(String id, String type, double timestamp, Value[] values) {
+ this.id = id;
+ this.type = type;
+ this.timestamp = timestamp;
+ this.values = values;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("id: ")
+ .append(id)
+ .append(", type: ")
+ .append(type)
+ .append(", timestamp: ")
+ .append(timestamp)
+ .append(", values: ");
+ for (int i = 0; i < values.length; ++i) {
+ builder.append(values[i].toString()).append(", ");
+ }
+ return builder.toString();
+ }
+}
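A minimal sketch of consuming these reports through the legacy PeerConnection.getStats(StatsObserver, MediaStreamTrack) entry point; a connected peerConnection is assumed:

    import org.webrtc.Logging;
    import org.webrtc.PeerConnection;
    import org.webrtc.StatsObserver;
    import org.webrtc.StatsReport;

    final class StatsSketch {
      static void dumpStats(PeerConnection peerConnection) {
        peerConnection.getStats(new StatsObserver() {
          @Override
          public void onComplete(StatsReport[] reports) {
            for (StatsReport report : reports) {
              Logging.d("StatsSketch", report.toString()); // uses the toString() defined above
            }
          }
        }, /* track= */ null);
      }
    }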
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
new file mode 100644
index 0000000000..6cba3f473b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.view.SurfaceHolder;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Display the video stream on a Surface.
+ * renderFrame() is asynchronous to avoid blocking the calling thread.
+ * This class is thread safe and handles access from potentially three different threads:
+ * Interaction from the main app in init, release and setMirror.
+ * Interaction from C++ rtc::VideoSinkInterface in renderFrame.
+ * Interaction from SurfaceHolder lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
+ */
+public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Callback {
+ private static final String TAG = "SurfaceEglRenderer";
+
+ // Callback for reporting renderer events. Read-only after initialization so no lock required.
+ private RendererCommon.RendererEvents rendererEvents;
+
+ private final Object layoutLock = new Object();
+ private boolean isRenderingPaused;
+ private boolean isFirstFrameRendered;
+ private int rotatedFrameWidth;
+ private int rotatedFrameHeight;
+ private int frameRotation;
+
+ /**
+ * In order to render something, you must first call init().
+ */
+ public SurfaceEglRenderer(String name) {
+ super(name);
+ }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle.
+ */
+ public void init(final EglBase.Context sharedContext,
+ RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ ThreadUtils.checkIsOnMainThread();
+ this.rendererEvents = rendererEvents;
+ synchronized (layoutLock) {
+ isFirstFrameRendered = false;
+ rotatedFrameWidth = 0;
+ rotatedFrameHeight = 0;
+ frameRotation = 0;
+ }
+ super.init(sharedContext, configAttributes, drawer);
+ }
+
+ @Override
+ public void init(final EglBase.Context sharedContext, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ init(sharedContext, null /* rendererEvents */, configAttributes, drawer);
+ }
+
+ /**
+ * Limit render framerate.
+ *
+ * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+ * reduction.
+ */
+ @Override
+ public void setFpsReduction(float fps) {
+ synchronized (layoutLock) {
+ isRenderingPaused = fps == 0f;
+ }
+ super.setFpsReduction(fps);
+ }
+
+ @Override
+ public void disableFpsReduction() {
+ synchronized (layoutLock) {
+ isRenderingPaused = false;
+ }
+ super.disableFpsReduction();
+ }
+
+ @Override
+ public void pauseVideo() {
+ synchronized (layoutLock) {
+ isRenderingPaused = true;
+ }
+ super.pauseVideo();
+ }
+
+ // VideoSink interface.
+ @Override
+ public void onFrame(VideoFrame frame) {
+ updateFrameDimensionsAndReportEvents(frame);
+ super.onFrame(frame);
+ }
+
+ // SurfaceHolder.Callback interface.
+ @Override
+ public void surfaceCreated(final SurfaceHolder holder) {
+ ThreadUtils.checkIsOnMainThread();
+ createEglSurface(holder.getSurface());
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ ThreadUtils.checkIsOnMainThread();
+ final CountDownLatch completionLatch = new CountDownLatch(1);
+ releaseEglSurface(completionLatch::countDown);
+ ThreadUtils.awaitUninterruptibly(completionLatch);
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ ThreadUtils.checkIsOnMainThread();
+ logD("surfaceChanged: format: " + format + " size: " + width + "x" + height);
+ }
+
+ // Update frame dimensions and report any changes to `rendererEvents`.
+ private void updateFrameDimensionsAndReportEvents(VideoFrame frame) {
+ synchronized (layoutLock) {
+ if (isRenderingPaused) {
+ return;
+ }
+ if (!isFirstFrameRendered) {
+ isFirstFrameRendered = true;
+ logD("Reporting first rendered frame.");
+ if (rendererEvents != null) {
+ rendererEvents.onFirstFrameRendered();
+ }
+ }
+ if (rotatedFrameWidth != frame.getRotatedWidth()
+ || rotatedFrameHeight != frame.getRotatedHeight()
+ || frameRotation != frame.getRotation()) {
+ logD("Reporting frame resolution changed to " + frame.getBuffer().getWidth() + "x"
+ + frame.getBuffer().getHeight() + " with rotation " + frame.getRotation());
+ if (rendererEvents != null) {
+ rendererEvents.onFrameResolutionChanged(
+ frame.getBuffer().getWidth(), frame.getBuffer().getHeight(), frame.getRotation());
+ }
+ rotatedFrameWidth = frame.getRotatedWidth();
+ rotatedFrameHeight = frame.getRotatedHeight();
+ frameRotation = frame.getRotation();
+ }
+ }
+ }
+
+ private void logD(String string) {
+ Logging.d(TAG, name + ": " + string);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
new file mode 100644
index 0000000000..3ea22736ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -0,0 +1,390 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import androidx.annotation.Nullable;
+import java.util.concurrent.Callable;
+import org.webrtc.EglBase.Context;
+import org.webrtc.TextureBufferImpl.RefCountMonitor;
+import org.webrtc.VideoFrame.TextureBuffer;
+
+/**
+ * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
+ * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
+ * one texture frame can be in flight at once, so the frame must be released in order to receive a
+ * new frame. Call stopListening() to stop receiving new frames. Call dispose() to release all
+ * resources once the texture frame is released.
+ */
+public class SurfaceTextureHelper {
+ /**
+ * Interface for monitoring texture buffers created from this SurfaceTexture. Since only one
+ * texture buffer can exist at a time, this can be used to monitor for stuck frames.
+ */
+ public interface FrameRefMonitor {
+ /** A new frame was created. New frames start with ref count of 1. */
+ void onNewBuffer(TextureBuffer textureBuffer);
+ /** Ref count of the frame was incremented by the calling thread. */
+ void onRetainBuffer(TextureBuffer textureBuffer);
+ /** Ref count of the frame was decremented by the calling thread. */
+ void onReleaseBuffer(TextureBuffer textureBuffer);
+ /** Frame was destroyed (ref count reached 0). */
+ void onDestroyBuffer(TextureBuffer textureBuffer);
+ }
+
+ private static final String TAG = "SurfaceTextureHelper";
+ /**
+ * Construct a new SurfaceTextureHelper sharing OpenGL resources with `sharedContext`. A dedicated
+ * thread and handler are created for handling the SurfaceTexture. May return null if EGL fails to
+ * initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
+ * timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
+ * rtc::TimeNanos() there is no need for aligning timestamps again in
+ * PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
+ * closer to actual creation time.
+ */
+ public static SurfaceTextureHelper create(final String threadName,
+ final EglBase.Context sharedContext, boolean alignTimestamps, final YuvConverter yuvConverter,
+ FrameRefMonitor frameRefMonitor) {
+ final HandlerThread thread = new HandlerThread(threadName);
+ thread.start();
+ final Handler handler = new Handler(thread.getLooper());
+
+ // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
+ // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
+ // is constructed on the `handler` thread.
+ return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
+ @Nullable
+ @Override
+ public SurfaceTextureHelper call() {
+ try {
+ return new SurfaceTextureHelper(
+ sharedContext, handler, alignTimestamps, yuvConverter, frameRefMonitor);
+ } catch (RuntimeException e) {
+ Logging.e(TAG, threadName + " create failure", e);
+ return null;
+ }
+ }
+ });
+ }
+
+ /**
+ * Same as above with alignTimestamps set to false and yuvConverter set to new YuvConverter.
+ *
+ * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
+ */
+ public static SurfaceTextureHelper create(
+ final String threadName, final EglBase.Context sharedContext) {
+ return create(threadName, sharedContext, /* alignTimestamps= */ false, new YuvConverter(),
+ /*frameRefMonitor=*/null);
+ }
+
+ /**
+ * Same as above with yuvConverter set to new YuvConverter.
+ *
+ * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
+ */
+ public static SurfaceTextureHelper create(
+ final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
+ return create(
+ threadName, sharedContext, alignTimestamps, new YuvConverter(), /*frameRefMonitor=*/null);
+ }
+
+ /**
+ * Create a SurfaceTextureHelper without frame ref monitor.
+ *
+ * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
+ */
+ public static SurfaceTextureHelper create(final String threadName,
+ final EglBase.Context sharedContext, boolean alignTimestamps, YuvConverter yuvConverter) {
+ return create(
+ threadName, sharedContext, alignTimestamps, yuvConverter, /*frameRefMonitor=*/null);
+ }
+
+ private final RefCountMonitor textureRefCountMonitor = new RefCountMonitor() {
+ @Override
+ public void onRetain(TextureBufferImpl textureBuffer) {
+ if (frameRefMonitor != null) {
+ frameRefMonitor.onRetainBuffer(textureBuffer);
+ }
+ }
+
+ @Override
+ public void onRelease(TextureBufferImpl textureBuffer) {
+ if (frameRefMonitor != null) {
+ frameRefMonitor.onReleaseBuffer(textureBuffer);
+ }
+ }
+
+ @Override
+ public void onDestroy(TextureBufferImpl textureBuffer) {
+ returnTextureFrame();
+ if (frameRefMonitor != null) {
+ frameRefMonitor.onDestroyBuffer(textureBuffer);
+ }
+ }
+ };
+
+ private final Handler handler;
+ private final EglBase eglBase;
+ private final SurfaceTexture surfaceTexture;
+ private final int oesTextureId;
+ private final YuvConverter yuvConverter;
+ @Nullable private final TimestampAligner timestampAligner;
+ private final FrameRefMonitor frameRefMonitor;
+
+ // These variables are only accessed from the `handler` thread.
+ @Nullable private VideoSink listener;
+ // The possible states of this class.
+ private boolean hasPendingTexture;
+ private volatile boolean isTextureInUse;
+ private boolean isQuitting;
+ private int frameRotation;
+ private int textureWidth;
+ private int textureHeight;
+ // `pendingListener` is set in startListening() and the runnable is posted to the handler thread.
+ // startListening() must not be called again before stopListening(), so this is thread safe.
+ @Nullable private VideoSink pendingListener;
+ final Runnable setListenerRunnable = new Runnable() {
+ @Override
+ public void run() {
+ Logging.d(TAG, "Setting listener to " + pendingListener);
+ listener = pendingListener;
+ pendingListener = null;
+ // May have a pending frame from the previous capture session - drop it.
+ if (hasPendingTexture) {
+ // Calling updateTexImage() is necessary in order to receive new frames.
+ updateTexImage();
+ hasPendingTexture = false;
+ }
+ }
+ };
+
+ private SurfaceTextureHelper(Context sharedContext, Handler handler, boolean alignTimestamps,
+ YuvConverter yuvConverter, FrameRefMonitor frameRefMonitor) {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
+ }
+ this.handler = handler;
+ this.timestampAligner = alignTimestamps ? new TimestampAligner() : null;
+ this.yuvConverter = yuvConverter;
+ this.frameRefMonitor = frameRefMonitor;
+
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+ try {
+ // Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+ } catch (RuntimeException e) {
+ // Clean up before rethrowing the exception.
+ eglBase.release();
+ handler.getLooper().quit();
+ throw e;
+ }
+
+ oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ surfaceTexture = new SurfaceTexture(oesTextureId);
+ surfaceTexture.setOnFrameAvailableListener(st -> {
+ if (hasPendingTexture) {
+ Logging.d(TAG, "A frame is already pending, dropping frame.");
+ }
+
+ hasPendingTexture = true;
+ tryDeliverTextureFrame();
+ }, handler);
+ }
+
+ /**
+ * Start to stream textures to the given `listener`. If you need to change listener, you need to
+ * call stopListening() first.
+ */
+ public void startListening(final VideoSink listener) {
+ if (this.listener != null || this.pendingListener != null) {
+ throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
+ }
+ this.pendingListener = listener;
+ handler.post(setListenerRunnable);
+ }
+
+ /**
+ * Stop listening. The listener set in startListening() is guaranteed to not receive any more
+ * onFrame() callbacks after this function returns.
+ */
+ public void stopListening() {
+ Logging.d(TAG, "stopListening()");
+ handler.removeCallbacks(setListenerRunnable);
+ ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+ listener = null;
+ pendingListener = null;
+ });
+ }
+
+ /**
+ * Use this function to set the texture size. Note: do not call setDefaultBufferSize() yourself
+ * since this class needs to be aware of the texture size.
+ */
+ public void setTextureSize(int textureWidth, int textureHeight) {
+ if (textureWidth <= 0) {
+ throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
+ }
+ if (textureHeight <= 0) {
+ throw new IllegalArgumentException(
+ "Texture height must be positive, but was " + textureHeight);
+ }
+ surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
+ handler.post(() -> {
+ this.textureWidth = textureWidth;
+ this.textureHeight = textureHeight;
+ tryDeliverTextureFrame();
+ });
+ }
+
+ /**
+ * Forces a frame to be produced. If no new frame is available, the last frame is sent to the
+ * listener again.
+ */
+ public void forceFrame() {
+ handler.post(() -> {
+ hasPendingTexture = true;
+ tryDeliverTextureFrame();
+ });
+ }
+
+ /** Set the rotation of the delivered frames. */
+ public void setFrameRotation(int rotation) {
+ handler.post(() -> this.frameRotation = rotation);
+ }
+
+ /**
+ * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
+ * producer such as a camera or decoder.
+ */
+ public SurfaceTexture getSurfaceTexture() {
+ return surfaceTexture;
+ }
+
+ /** Retrieve the handler that calls onFrame(). This handler is valid until dispose() is called. */
+ public Handler getHandler() {
+ return handler;
+ }
+
+ /**
+ * This function is called when the texture frame is released. Only one texture frame can be in
+ * flight at once, so this function must be called before a new frame is delivered.
+ */
+ private void returnTextureFrame() {
+ handler.post(() -> {
+ isTextureInUse = false;
+ if (isQuitting) {
+ release();
+ } else {
+ tryDeliverTextureFrame();
+ }
+ });
+ }
+
+ public boolean isTextureInUse() {
+ return isTextureInUse;
+ }
+
+ /**
+ * Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
+ * stopped once the texture frame has been released. You are guaranteed to not receive any more
+ * onFrame() callbacks after this function returns.
+ */
+ public void dispose() {
+ Logging.d(TAG, "dispose()");
+ ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+ isQuitting = true;
+ if (!isTextureInUse) {
+ release();
+ }
+ });
+ }
+
+ /**
+ * Posts to the correct thread to convert `textureBuffer` to I420.
+ *
+ * @deprecated Use toI420() instead.
+ */
+ @Deprecated
+ public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
+ return textureBuffer.toI420();
+ }
+
+ private void updateTexImage() {
+ // SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
+ // as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
+ // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+ synchronized (EglBase.lock) {
+ surfaceTexture.updateTexImage();
+ }
+ }
+
+ private void tryDeliverTextureFrame() {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("Wrong thread.");
+ }
+ if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
+ return;
+ }
+ if (textureWidth == 0 || textureHeight == 0) {
+ // Information about the resolution needs to be provided by a call to setTextureSize() before
+ // frames are produced.
+ Logging.w(TAG, "Texture size has not been set.");
+ return;
+ }
+ isTextureInUse = true;
+ hasPendingTexture = false;
+
+ updateTexImage();
+
+ final float[] transformMatrix = new float[16];
+ surfaceTexture.getTransformMatrix(transformMatrix);
+ long timestampNs = surfaceTexture.getTimestamp();
+ if (timestampAligner != null) {
+ timestampNs = timestampAligner.translateTimestamp(timestampNs);
+ }
+ final VideoFrame.TextureBuffer buffer =
+ new TextureBufferImpl(textureWidth, textureHeight, TextureBuffer.Type.OES, oesTextureId,
+ RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix), handler,
+ yuvConverter, textureRefCountMonitor);
+ if (frameRefMonitor != null) {
+ frameRefMonitor.onNewBuffer(buffer);
+ }
+ final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
+ listener.onFrame(frame);
+ frame.release();
+ }
+
+ private void release() {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("Wrong thread.");
+ }
+ if (isTextureInUse || !isQuitting) {
+ throw new IllegalStateException("Unexpected release.");
+ }
+ yuvConverter.release();
+ GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+ surfaceTexture.release();
+ eglBase.release();
+ handler.getLooper().quit();
+ if (timestampAligner != null) {
+ timestampAligner.dispose();
+ }
+ }
+}
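A minimal sketch of the lifecycle the class comment describes: create, set a texture size, listen, then dispose. The thread name and sizes are illustrative, and the shared eglContext is assumed to exist:

    import org.webrtc.EglBase;
    import org.webrtc.SurfaceTextureHelper;

    final class CaptureSketch {
      static SurfaceTextureHelper startCapture(EglBase.Context eglContext) {
        SurfaceTextureHelper helper = SurfaceTextureHelper.create("CaptureThread", eglContext);
        helper.setTextureSize(1280, 720); // frames are not delivered until a size is set
        helper.startListening(frame -> {
          // Runs on helper.getHandler()'s thread. The helper releases the frame after
          // this callback returns; call frame.retain() first if it must outlive it.
        });
        // Anything rendered into helper.getSurfaceTexture() now arrives as VideoFrames.
        // When done: helper.stopListening() followed by helper.dispose().
        return helper;
      }
    }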
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java
new file mode 100644
index 0000000000..6c9140abbd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java
@@ -0,0 +1,300 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.content.res.Resources.NotFoundException;
+import android.graphics.Point;
+import android.os.Looper;
+import android.util.AttributeSet;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+/**
+ * Display the video stream on a SurfaceView.
+ */
+public class SurfaceViewRenderer extends SurfaceView
+ implements SurfaceHolder.Callback, VideoSink, RendererCommon.RendererEvents {
+ private static final String TAG = "SurfaceViewRenderer";
+
+ // Cached resource name.
+ private final String resourceName;
+ private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure =
+ new RendererCommon.VideoLayoutMeasure();
+ private final SurfaceEglRenderer eglRenderer;
+
+ // Callback for reporting renderer events. Read-only after initialization so no lock required.
+ private RendererCommon.RendererEvents rendererEvents;
+
+ // Accessed only on the main thread.
+ private int rotatedFrameWidth;
+ private int rotatedFrameHeight;
+ private boolean enableFixedSize;
+ private int surfaceWidth;
+ private int surfaceHeight;
+
+ /**
+ * Standard View constructor. In order to render something, you must first call init().
+ */
+ public SurfaceViewRenderer(Context context) {
+ super(context);
+ this.resourceName = getResourceName();
+ eglRenderer = new SurfaceEglRenderer(resourceName);
+ getHolder().addCallback(this);
+ getHolder().addCallback(eglRenderer);
+ }
+
+ /**
+ * Standard View constructor. In order to render something, you must first call init().
+ */
+ public SurfaceViewRenderer(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ this.resourceName = getResourceName();
+ eglRenderer = new SurfaceEglRenderer(resourceName);
+ getHolder().addCallback(this);
+ getHolder().addCallback(eglRenderer);
+ }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. It is allowed to call init() to
+ * reinitialize the renderer after a previous init()/release() cycle.
+ */
+ public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+ init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+ }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle.
+ */
+ public void init(final EglBase.Context sharedContext,
+ RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ ThreadUtils.checkIsOnMainThread();
+ this.rendererEvents = rendererEvents;
+ rotatedFrameWidth = 0;
+ rotatedFrameHeight = 0;
+ eglRenderer.init(sharedContext, this /* rendererEvents */, configAttributes, drawer);
+ }
+
+ /**
+ * Block until any pending frame is returned and all GL resources released, even if an interrupt
+ * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+ * should be called before the Activity is destroyed and the EGLContext is still valid. If you
+ * don't call this function, the GL resources might leak.
+ */
+ public void release() {
+ eglRenderer.release();
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ * @param drawerParam Custom drawer to use for this frame listener.
+ */
+ public void addFrameListener(
+ EglRenderer.FrameListener listener, float scale, RendererCommon.GlDrawer drawerParam) {
+ eglRenderer.addFrameListener(listener, scale, drawerParam);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received. This version uses
+ * the drawer of the EglRenderer that was passed in init.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ */
+ public void addFrameListener(EglRenderer.FrameListener listener, float scale) {
+ eglRenderer.addFrameListener(listener, scale);
+ }
+
+ public void removeFrameListener(EglRenderer.FrameListener listener) {
+ eglRenderer.removeFrameListener(listener);
+ }
+
+ /**
+ * Enables fixed size for the surface. This provides better performance but might be buggy on some
+ * devices. By default this is turned off.
+ */
+ public void setEnableHardwareScaler(boolean enabled) {
+ ThreadUtils.checkIsOnMainThread();
+ enableFixedSize = enabled;
+ updateSurfaceSize();
+ }
+
+ /**
+ * Set if the video stream should be mirrored or not.
+ */
+ public void setMirror(final boolean mirror) {
+ eglRenderer.setMirror(mirror);
+ }
+
+ /**
+ * Set how the video will fill the allowed layout area.
+ */
+ public void setScalingType(RendererCommon.ScalingType scalingType) {
+ ThreadUtils.checkIsOnMainThread();
+ videoLayoutMeasure.setScalingType(scalingType);
+ requestLayout();
+ }
+
+ public void setScalingType(RendererCommon.ScalingType scalingTypeMatchOrientation,
+ RendererCommon.ScalingType scalingTypeMismatchOrientation) {
+ ThreadUtils.checkIsOnMainThread();
+ videoLayoutMeasure.setScalingType(scalingTypeMatchOrientation, scalingTypeMismatchOrientation);
+ requestLayout();
+ }
+
+ /**
+ * Limit render framerate.
+ *
+ * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+ * reduction.
+ */
+ public void setFpsReduction(float fps) {
+ eglRenderer.setFpsReduction(fps);
+ }
+
+ public void disableFpsReduction() {
+ eglRenderer.disableFpsReduction();
+ }
+
+ public void pauseVideo() {
+ eglRenderer.pauseVideo();
+ }
+
+ // VideoSink interface.
+ @Override
+ public void onFrame(VideoFrame frame) {
+ eglRenderer.onFrame(frame);
+ }
+
+ // View layout interface.
+ @Override
+ protected void onMeasure(int widthSpec, int heightSpec) {
+ ThreadUtils.checkIsOnMainThread();
+ Point size =
+ videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
+ setMeasuredDimension(size.x, size.y);
+ logD("onMeasure(). New size: " + size.x + "x" + size.y);
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+ ThreadUtils.checkIsOnMainThread();
+ eglRenderer.setLayoutAspectRatio((right - left) / (float) (bottom - top));
+ updateSurfaceSize();
+ }
+
+ private void updateSurfaceSize() {
+ ThreadUtils.checkIsOnMainThread();
+ if (enableFixedSize && rotatedFrameWidth != 0 && rotatedFrameHeight != 0 && getWidth() != 0
+ && getHeight() != 0) {
+ final float layoutAspectRatio = getWidth() / (float) getHeight();
+ final float frameAspectRatio = rotatedFrameWidth / (float) rotatedFrameHeight;
+ final int drawnFrameWidth;
+ final int drawnFrameHeight;
+ if (frameAspectRatio > layoutAspectRatio) {
+ drawnFrameWidth = (int) (rotatedFrameHeight * layoutAspectRatio);
+ drawnFrameHeight = rotatedFrameHeight;
+ } else {
+ drawnFrameWidth = rotatedFrameWidth;
+ drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio);
+ }
+ // Aspect ratio of the drawn frame and the view is the same.
+ final int width = Math.min(getWidth(), drawnFrameWidth);
+ final int height = Math.min(getHeight(), drawnFrameHeight);
+ logD("updateSurfaceSize. Layout size: " + getWidth() + "x" + getHeight() + ", frame size: "
+ + rotatedFrameWidth + "x" + rotatedFrameHeight + ", requested surface size: " + width
+ + "x" + height + ", old surface size: " + surfaceWidth + "x" + surfaceHeight);
+ if (width != surfaceWidth || height != surfaceHeight) {
+ surfaceWidth = width;
+ surfaceHeight = height;
+ getHolder().setFixedSize(width, height);
+ }
+ } else {
+ surfaceWidth = surfaceHeight = 0;
+ getHolder().setSizeFromLayout();
+ }
+ }
+
+ // SurfaceHolder.Callback interface.
+ @Override
+ public void surfaceCreated(final SurfaceHolder holder) {
+ ThreadUtils.checkIsOnMainThread();
+ surfaceWidth = surfaceHeight = 0;
+ updateSurfaceSize();
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {}
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
+
+ private String getResourceName() {
+ try {
+ return getResources().getResourceEntryName(getId());
+ } catch (NotFoundException e) {
+ return "";
+ }
+ }
+
+ /**
+ * Post a task to clear the SurfaceView to a transparent uniform color.
+ */
+ public void clearImage() {
+ eglRenderer.clearImage();
+ }
+
+ @Override
+ public void onFirstFrameRendered() {
+ if (rendererEvents != null) {
+ rendererEvents.onFirstFrameRendered();
+ }
+ }
+
+ @Override
+ public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
+ if (rendererEvents != null) {
+ rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
+ }
+ int rotatedWidth = rotation == 0 || rotation == 180 ? videoWidth : videoHeight;
+ int rotatedHeight = rotation == 0 || rotation == 180 ? videoHeight : videoWidth;
+ // Run immediately if possible, for UI-thread tests.
+ postOrRun(() -> {
+ rotatedFrameWidth = rotatedWidth;
+ rotatedFrameHeight = rotatedHeight;
+ updateSurfaceSize();
+ requestLayout();
+ });
+ }
+
+ private void postOrRun(Runnable r) {
+ if (Thread.currentThread() == Looper.getMainLooper().getThread()) {
+ r.run();
+ } else {
+ post(r);
+ }
+ }
+
+ private void logD(String string) {
+ Logging.d(TAG, resourceName + ": " + string);
+ }
+}
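A minimal sketch of typical use from application code, assuming an existing EglBase and a remote VideoTrack; the wrapper methods are illustrative:

    import org.webrtc.EglBase;
    import org.webrtc.RendererCommon;
    import org.webrtc.SurfaceViewRenderer;
    import org.webrtc.VideoTrack;

    final class RendererSketch {
      static void attach(SurfaceViewRenderer renderer, EglBase eglBase, VideoTrack track) {
        renderer.init(eglBase.getEglBaseContext(), /* rendererEvents= */ null);
        renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
        renderer.setMirror(false);
        track.addSink(renderer); // SurfaceViewRenderer implements VideoSink
      }

      static void detach(SurfaceViewRenderer renderer, VideoTrack track) {
        track.removeSink(renderer);
        renderer.release(); // while the EGLContext is still valid
      }
    }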
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/TextureBufferImpl.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/TextureBufferImpl.java
new file mode 100644
index 0000000000..6cff1d28a5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/TextureBufferImpl.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.os.Handler;
+import androidx.annotation.Nullable;
+
+/**
+ * Android texture buffer that glues the necessary information together with a generic
+ * release callback. ToI420() is implemented by providing a Handler and a YuvConverter.
+ */
+public class TextureBufferImpl implements VideoFrame.TextureBuffer {
+ interface RefCountMonitor {
+ void onRetain(TextureBufferImpl textureBuffer);
+ void onRelease(TextureBufferImpl textureBuffer);
+ void onDestroy(TextureBufferImpl textureBuffer);
+ }
+
+ // This is the full resolution the texture has in memory after applying the transformation matrix
+ // that might include cropping. This resolution is useful to know when sampling the texture to
+ // avoid downscaling artifacts.
+ private final int unscaledWidth;
+ private final int unscaledHeight;
+ // This is the resolution that has been applied after cropAndScale().
+ private final int width;
+ private final int height;
+ private final Type type;
+ private final int id;
+ private final Matrix transformMatrix;
+ private final Handler toI420Handler;
+ private final YuvConverter yuvConverter;
+ private final RefCountDelegate refCountDelegate;
+ private final RefCountMonitor refCountMonitor;
+
+ public TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
+ Handler toI420Handler, YuvConverter yuvConverter, @Nullable Runnable releaseCallback) {
+ this(width, height, width, height, type, id, transformMatrix, toI420Handler, yuvConverter,
+ new RefCountMonitor() {
+ @Override
+ public void onRetain(TextureBufferImpl textureBuffer) {}
+
+ @Override
+ public void onRelease(TextureBufferImpl textureBuffer) {}
+
+ @Override
+ public void onDestroy(TextureBufferImpl textureBuffer) {
+ if (releaseCallback != null) {
+ releaseCallback.run();
+ }
+ }
+ });
+ }
+
+ TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
+ Handler toI420Handler, YuvConverter yuvConverter, RefCountMonitor refCountMonitor) {
+ this(width, height, width, height, type, id, transformMatrix, toI420Handler, yuvConverter,
+ refCountMonitor);
+ }
+
+ private TextureBufferImpl(int unscaledWidth, int unscaledHeight, int width, int height, Type type,
+ int id, Matrix transformMatrix, Handler toI420Handler, YuvConverter yuvConverter,
+ RefCountMonitor refCountMonitor) {
+ this.unscaledWidth = unscaledWidth;
+ this.unscaledHeight = unscaledHeight;
+ this.width = width;
+ this.height = height;
+ this.type = type;
+ this.id = id;
+ this.transformMatrix = transformMatrix;
+ this.toI420Handler = toI420Handler;
+ this.yuvConverter = yuvConverter;
+ this.refCountDelegate = new RefCountDelegate(() -> refCountMonitor.onDestroy(this));
+ this.refCountMonitor = refCountMonitor;
+ }
+
+ @Override
+ public VideoFrame.TextureBuffer.Type getType() {
+ return type;
+ }
+
+ @Override
+ public int getTextureId() {
+ return id;
+ }
+
+ @Override
+ public Matrix getTransformMatrix() {
+ return transformMatrix;
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ return ThreadUtils.invokeAtFrontUninterruptibly(
+ toI420Handler, () -> yuvConverter.convert(this));
+ }
+
+ @Override
+ public void retain() {
+ refCountMonitor.onRetain(this);
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountMonitor.onRelease(this);
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ final Matrix cropAndScaleMatrix = new Matrix();
+ // In WebRTC, Y=0 is the top row, while in OpenGL Y=0 is the bottom row. This means that the Y
+ // direction is effectively reversed.
+ final int cropYFromBottom = height - (cropY + cropHeight);
+ cropAndScaleMatrix.preTranslate(cropX / (float) width, cropYFromBottom / (float) height);
+ cropAndScaleMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height);
+
+ return applyTransformMatrix(cropAndScaleMatrix,
+ Math.round(unscaledWidth * cropWidth / (float) width),
+ Math.round(unscaledHeight * cropHeight / (float) height), scaleWidth, scaleHeight);
+ }
+
+ /**
+ * Returns the width of the texture in memory. This should only be used for downscaling, and you
+ * should still respect the width from getWidth().
+ */
+ public int getUnscaledWidth() {
+ return unscaledWidth;
+ }
+
+ /**
+ * Returns the height of the texture in memory. This should only be used for downscaling, and you
+ * should still respect the height from getHeight().
+ */
+ public int getUnscaledHeight() {
+ return unscaledHeight;
+ }
+
+ public Handler getToI420Handler() {
+ return toI420Handler;
+ }
+
+ public YuvConverter getYuvConverter() {
+ return yuvConverter;
+ }
+
+ /**
+ * Create a new TextureBufferImpl with an applied transform matrix and a new size. The
+ * existing buffer is unchanged. The given transform matrix is applied first when texture
+ * coordinates are still in the unmodified [0, 1] range.
+ */
+ public TextureBufferImpl applyTransformMatrix(
+ Matrix transformMatrix, int newWidth, int newHeight) {
+ return applyTransformMatrix(transformMatrix, /* unscaledWidth= */ newWidth,
+ /* unscaledHeight= */ newHeight, /* scaledWidth= */ newWidth,
+ /* scaledHeight= */ newHeight);
+ }
+
+ private TextureBufferImpl applyTransformMatrix(Matrix transformMatrix, int unscaledWidth,
+ int unscaledHeight, int scaledWidth, int scaledHeight) {
+ final Matrix newMatrix = new Matrix(this.transformMatrix);
+ newMatrix.preConcat(transformMatrix);
+ retain();
+ return new TextureBufferImpl(unscaledWidth, unscaledHeight, scaledWidth, scaledHeight, type, id,
+ newMatrix, toI420Handler, yuvConverter, new RefCountMonitor() {
+ @Override
+ public void onRetain(TextureBufferImpl textureBuffer) {
+ refCountMonitor.onRetain(TextureBufferImpl.this);
+ }
+
+ @Override
+ public void onRelease(TextureBufferImpl textureBuffer) {
+ refCountMonitor.onRelease(TextureBufferImpl.this);
+ }
+
+ @Override
+ public void onDestroy(TextureBufferImpl textureBuffer) {
+ release();
+ }
+ });
+ }
+}
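A minimal sketch of how applyTransformMatrix() is meant to be used, assuming a hypothetical helper and a vertical-flip use case; the method retains the source buffer internally, so each buffer still needs its own release():

```java
import android.graphics.Matrix;
import org.webrtc.TextureBufferImpl;

class TextureFlipExample {
  // Returns a vertically flipped view of `buffer`. The source buffer is left
  // unchanged; each buffer must eventually be release()d by its owner.
  static TextureBufferImpl flipVertically(TextureBufferImpl buffer) {
    Matrix flip = new Matrix();
    // In the [0, 1] texture coordinate range, a vertical flip is y -> 1 - y.
    flip.preTranslate(0f, 1f);
    flip.preScale(1f, -1f);
    // applyTransformMatrix() retains the source buffer; the returned buffer
    // releases that reference when its own reference count reaches zero.
    return buffer.applyTransformMatrix(flip, buffer.getWidth(), buffer.getHeight());
  }
}
```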
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/TimestampAligner.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/TimestampAligner.java
new file mode 100644
index 0000000000..d96c939595
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/TimestampAligner.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The TimestampAligner class helps translate camera timestamps into the same timescale as is
+ * used by rtc::TimeNanos(). Some cameras have built-in timestamping that is more accurate than
+ * reading the system clock, but uses a different epoch and has unknown clock drift. Frame
+ * timestamps in webrtc should use rtc::TimeNanos (system monotonic time), and this class provides
+ * a filter that lets us use the rtc::TimeNanos timescale while still taking advantage of the
+ * higher accuracy of the camera clock. This class is a wrapper on top of rtc::TimestampAligner.
+ */
+public class TimestampAligner {
+  /**
+   * Wrapper around rtc::TimeNanos(). This is normally the same as System.nanoTime(), but call
+   * this function to be safe.
+   */
+ public static long getRtcTimeNanos() {
+ return nativeRtcTimeNanos();
+ }
+
+ private volatile long nativeTimestampAligner = nativeCreateTimestampAligner();
+
+ /**
+ * Translates camera timestamps to the same timescale as is used by rtc::TimeNanos().
+ * `cameraTimeNs` is assumed to be accurate, but with an unknown epoch and clock drift. Returns
+ * the translated timestamp.
+ */
+ public long translateTimestamp(long cameraTimeNs) {
+ checkNativeAlignerExists();
+ return nativeTranslateTimestamp(nativeTimestampAligner, cameraTimeNs);
+ }
+
+ /** Dispose native timestamp aligner. */
+ public void dispose() {
+ checkNativeAlignerExists();
+ nativeReleaseTimestampAligner(nativeTimestampAligner);
+ nativeTimestampAligner = 0;
+ }
+
+ private void checkNativeAlignerExists() {
+ if (nativeTimestampAligner == 0) {
+ throw new IllegalStateException("TimestampAligner has been disposed.");
+ }
+ }
+
+ private static native long nativeRtcTimeNanos();
+ private static native long nativeCreateTimestampAligner();
+ private static native void nativeReleaseTimestampAligner(long timestampAligner);
+ private static native long nativeTranslateTimestamp(long timestampAligner, long cameraTimeNs);
+}
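A minimal usage sketch, assuming camera timestamps arrive from an external capture callback; the wrapper class is hypothetical:

```java
import org.webrtc.TimestampAligner;

class CameraTimestampExample {
  private final TimestampAligner aligner = new TimestampAligner();

  // Translates a camera-provided timestamp into the rtc::TimeNanos()
  // timescale that WebRTC frame pipelines expect.
  long toRtcTimeNs(long cameraTimeNs) {
    return aligner.translateTimestamp(cameraTimeNs);
  }

  // Must be called exactly once when done; any later call to
  // translateTimestamp() throws IllegalStateException.
  void close() {
    aligner.dispose();
  }
}
```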
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/TurnCustomizer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/TurnCustomizer.java
new file mode 100644
index 0000000000..41bedb7dcb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/TurnCustomizer.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ TurnCustomizer. */
+public class TurnCustomizer {
+ private long nativeTurnCustomizer;
+
+ public TurnCustomizer(long nativeTurnCustomizer) {
+ this.nativeTurnCustomizer = nativeTurnCustomizer;
+ }
+
+ public void dispose() {
+ checkTurnCustomizerExists();
+ nativeFreeTurnCustomizer(nativeTurnCustomizer);
+ nativeTurnCustomizer = 0;
+ }
+
+ private static native void nativeFreeTurnCustomizer(long turnCustomizer);
+
+ /** Return a pointer to webrtc::TurnCustomizer. */
+ @CalledByNative
+ long getNativeTurnCustomizer() {
+ checkTurnCustomizerExists();
+ return nativeTurnCustomizer;
+ }
+
+ private void checkTurnCustomizerExists() {
+ if (nativeTurnCustomizer == 0) {
+ throw new IllegalStateException("TurnCustomizer has been disposed.");
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCapturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCapturer.java
new file mode 100644
index 0000000000..67eb7ab086
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCapturer.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+// Base interface for all VideoCapturers to implement.
+public interface VideoCapturer {
+  /**
+   * This function is used to initialize the camera thread, the Android application context, and
+   * the capture observer. It will be called only once and before any startCapture() request. The
+   * camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
+   * to deliver texture frames, it should do this by rendering on the SurfaceTexture in
+   * {@code surfaceTextureHelper}, registering itself as a listener, and forwarding the frames to
+   * CapturerObserver.onFrameCaptured(). The caller still has ownership of {@code
+   * surfaceTextureHelper} and is responsible for making sure surfaceTextureHelper.dispose() is
+   * called. This also means that the caller can reuse the SurfaceTextureHelper to initialize a new
+   * VideoCapturer once the previous VideoCapturer has been disposed.
+   */
+ void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ CapturerObserver capturerObserver);
+
+ /**
+ * Start capturing frames in a format that is as close as possible to {@code width x height} and
+ * {@code framerate}.
+ */
+ void startCapture(int width, int height, int framerate);
+
+ /**
+ * Stop capturing. This function should block until capture is actually stopped.
+ */
+ void stopCapture() throws InterruptedException;
+
+ void changeCaptureFormat(int width, int height, int framerate);
+
+ /**
+ * Perform any final cleanup here. No more capturing will be done after this call.
+ */
+ void dispose();
+
+ /**
+ * @return true if-and-only-if this is a screen capturer.
+ */
+ boolean isScreencast();
+}
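The lifecycle above can be sketched as follows. The capturer, observer, application context, and EGL context are assumed to be created elsewhere (for example a camera capturer and a VideoSource's observer):

```java
import android.content.Context;
import org.webrtc.CapturerObserver;
import org.webrtc.EglBase;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;

class CaptureLifecycleExample {
  static void runCaptureSession(VideoCapturer capturer, Context appContext,
      CapturerObserver observer, EglBase.Context eglContext) throws InterruptedException {
    SurfaceTextureHelper helper = SurfaceTextureHelper.create("CaptureThread", eglContext);
    capturer.initialize(helper, appContext, observer); // Called exactly once.
    capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);
    // ... frames are delivered to observer.onFrameCaptured() ...
    capturer.stopCapture(); // Blocks until capture has actually stopped.
    capturer.dispose();
    // The caller keeps ownership of the helper: dispose it here, or reuse it
    // to initialize the next capturer.
    helper.dispose();
  }
}
```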
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java
new file mode 100644
index 0000000000..363be347b5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.Locale;
+import java.util.Map;
+
+/**
+ * Represent a video codec as encoded in SDP.
+ */
+public class VideoCodecInfo {
+ // Keys for H264 VideoCodecInfo properties.
+ public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
+ public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
+ public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
+
+ public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0";
+ public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
+ public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
+ public static final String H264_CONSTRAINED_HIGH_3_1 =
+ H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
+ public static final String H264_CONSTRAINED_BASELINE_3_1 =
+ H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
+
+ public final String name;
+ public final Map<String, String> params;
+ @Deprecated public final int payload;
+
+ @CalledByNative
+ public VideoCodecInfo(String name, Map<String, String> params) {
+ this.payload = 0;
+ this.name = name;
+ this.params = params;
+ }
+
+ @Deprecated
+ public VideoCodecInfo(int payload, String name, Map<String, String> params) {
+ this.payload = payload;
+ this.name = name;
+ this.params = params;
+ }
+
+ @Override
+ public boolean equals(@Nullable Object obj) {
+ if (obj == null)
+ return false;
+ if (obj == this)
+ return true;
+ if (!(obj instanceof VideoCodecInfo))
+ return false;
+
+ VideoCodecInfo otherInfo = (VideoCodecInfo) obj;
+ return name.equalsIgnoreCase(otherInfo.name) && params.equals(otherInfo.params);
+ }
+
+ @Override
+ public int hashCode() {
+ Object[] values = {name.toUpperCase(Locale.ROOT), params};
+ return Arrays.hashCode(values);
+ }
+
+ @Override
+ public String toString() {
+ return "VideoCodec{" + name + " " + params + "}";
+ }
+
+ @CalledByNative
+ String getName() {
+ return name;
+ }
+
+ @CalledByNative
+ Map<String, String> getParams() {
+ return params;
+ }
+}
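As an example of how the H264 constants combine, a Constrained Baseline 3.1 entry might be built as below; the fmtp values mirror common SDP defaults and are illustrative. Note that equals() compares the name case-insensitively but requires the params map to match exactly.

```java
import java.util.HashMap;
import java.util.Map;
import org.webrtc.VideoCodecInfo;

class H264CodecInfoExample {
  static VideoCodecInfo constrainedBaseline31() {
    Map<String, String> params = new HashMap<>();
    // "42e0" + "1f" = "42e01f".
    params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
        VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
    params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
    params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
    return new VideoCodecInfo("H264", params);
  }
}
```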
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java
new file mode 100644
index 0000000000..a86d6fbf67
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Status codes reported by video encoding/decoding components. This should be kept in sync with
+ * video_error_codes.h.
+ */
+public enum VideoCodecStatus {
+ REQUEST_SLI(2),
+ NO_OUTPUT(1),
+ OK(0),
+ ERROR(-1),
+ LEVEL_EXCEEDED(-2),
+ MEMORY(-3),
+ ERR_PARAMETER(-4),
+ ERR_SIZE(-5),
+ TIMEOUT(-6),
+ UNINITIALIZED(-7),
+ ERR_REQUEST_SLI(-12),
+ FALLBACK_SOFTWARE(-13),
+ TARGET_BITRATE_OVERSHOOT(-14);
+
+ private final int number;
+
+ private VideoCodecStatus(int number) {
+ this.number = number;
+ }
+
+ @CalledByNative
+ public int getNumber() {
+ return number;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoder.java
new file mode 100644
index 0000000000..a80fa4fef2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoder.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for a video decoder that can be used in WebRTC. All calls to the class will be made on
+ * a single decoding thread.
+ */
+public interface VideoDecoder {
+ /** Settings passed to the decoder by WebRTC. */
+ public class Settings {
+ public final int numberOfCores;
+ public final int width;
+ public final int height;
+
+ @CalledByNative("Settings")
+ public Settings(int numberOfCores, int width, int height) {
+ this.numberOfCores = numberOfCores;
+ this.width = width;
+ this.height = height;
+ }
+ }
+
+ /** Additional info for decoding. */
+ public class DecodeInfo {
+ public final boolean isMissingFrames;
+ public final long renderTimeMs;
+
+ public DecodeInfo(boolean isMissingFrames, long renderTimeMs) {
+ this.isMissingFrames = isMissingFrames;
+ this.renderTimeMs = renderTimeMs;
+ }
+ }
+
+ public interface Callback {
+ /**
+ * Call to return a decoded frame. Can be called on any thread.
+ *
+ * @param frame Decoded frame
+ * @param decodeTimeMs Time it took to decode the frame in milliseconds or null if not available
+ * @param qp QP value of the decoded frame or null if not available
+ */
+ void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp);
+ }
+
+ /**
+ * The decoder implementation backing this interface is either 1) a Java
+ * decoder (e.g., an Android platform decoder), or alternatively 2) a native
+ * decoder (e.g., a software decoder or a C++ decoder adapter).
+ *
+ * For case 1), createNativeVideoDecoder() should return zero.
+ * In this case, we expect the native library to call the decoder through
+ * JNI using the Java interface declared below.
+ *
+ * For case 2), createNativeVideoDecoder() should return a non-zero value.
+ * In this case, we expect the native library to treat the returned value as
+ * a raw pointer of type webrtc::VideoDecoder* (ownership is transferred to
+ * the caller). The native library should then directly call the
+ * webrtc::VideoDecoder interface without going through JNI. All calls to
+ * the Java interface methods declared below should thus throw an
+ * UnsupportedOperationException.
+ */
+ @CalledByNative
+ default long createNativeVideoDecoder() {
+ return 0;
+ }
+
+ /**
+ * Initializes the decoding process with specified settings. Will be called on the decoding thread
+ * before any decode calls.
+ */
+ @CalledByNative VideoCodecStatus initDecode(Settings settings, Callback decodeCallback);
+ /**
+ * Called when the decoder is no longer needed. Any more calls to decode will not be made.
+ */
+ @CalledByNative VideoCodecStatus release();
+ /**
+ * Request the decoder to decode a frame.
+ */
+ @CalledByNative VideoCodecStatus decode(EncodedImage frame, DecodeInfo info);
+ /**
+ * Should return a descriptive name for the implementation. Gets called once and cached. May be
+ * called from arbitrary thread.
+ */
+ @CalledByNative String getImplementationName();
+}
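A skeleton of case 1 above, a pure-Java decoder: createNativeVideoDecoder() keeps its default of returning zero, so the native library drives the methods below over JNI. This is a hypothetical stub, not a working codec:

```java
import org.webrtc.EncodedImage;
import org.webrtc.VideoCodecStatus;
import org.webrtc.VideoDecoder;

class NoOpDecoder implements VideoDecoder {
  private Callback callback;

  @Override
  public VideoCodecStatus initDecode(Settings settings, Callback decodeCallback) {
    // Allocate codec resources for settings.width x settings.height here.
    this.callback = decodeCallback;
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
    // A real decoder would decode `frame` and report each decoded VideoFrame
    // through callback.onDecodedFrame(...), possibly from another thread.
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus release() {
    return VideoCodecStatus.OK;
  }

  @Override
  public String getImplementationName() {
    return "NoOpDecoder";
  }
}
```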
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java
new file mode 100644
index 0000000000..8b25516e99
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/** Factory for creating VideoDecoders. */
+public interface VideoDecoderFactory {
+ /**
+ * Creates a VideoDecoder for the given codec. Supports the same codecs supported by
+ * VideoEncoderFactory.
+ */
+ @Nullable @CalledByNative VideoDecoder createDecoder(VideoCodecInfo info);
+
+ /**
+ * Enumerates the list of supported video codecs.
+ */
+ @CalledByNative
+ default VideoCodecInfo[] getSupportedCodecs() {
+ return new VideoCodecInfo[0];
+ }
+}
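A factory would then hand out such a decoder only for codecs it recognizes; this sketch reuses the hypothetical NoOpDecoder from the previous example:

```java
import androidx.annotation.Nullable;
import java.util.HashMap;
import org.webrtc.VideoCodecInfo;
import org.webrtc.VideoDecoder;
import org.webrtc.VideoDecoderFactory;

class Vp8OnlyDecoderFactory implements VideoDecoderFactory {
  @Nullable
  @Override
  public VideoDecoder createDecoder(VideoCodecInfo info) {
    // Returning null signals that this factory cannot handle the codec.
    return info.name.equalsIgnoreCase("VP8") ? new NoOpDecoder() : null;
  }

  @Override
  public VideoCodecInfo[] getSupportedCodecs() {
    return new VideoCodecInfo[] {new VideoCodecInfo("VP8", new HashMap<>())};
  }
}
```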
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java
new file mode 100644
index 0000000000..ddfa3ecd40
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * A combined video decoder that falls back on a secondary decoder if the primary decoder fails.
+ */
+public class VideoDecoderFallback extends WrappedNativeVideoDecoder {
+ private final VideoDecoder fallback;
+ private final VideoDecoder primary;
+
+ public VideoDecoderFallback(VideoDecoder fallback, VideoDecoder primary) {
+ this.fallback = fallback;
+ this.primary = primary;
+ }
+
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder(fallback, primary);
+ }
+
+ private static native long nativeCreateDecoder(VideoDecoder fallback, VideoDecoder primary);
+}
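Note the constructor's argument order: the fallback comes first, the primary second. A small wiring sketch, with the two decoders assumed to come from a hardware and a software factory respectively:

```java
import org.webrtc.VideoDecoder;
import org.webrtc.VideoDecoderFallback;

class DecoderFallbackWiring {
  static VideoDecoder hardwareWithSoftwareFallback(
      VideoDecoder hardware, VideoDecoder software) {
    return new VideoDecoderFallback(/* fallback= */ software, /* primary= */ hardware);
  }
}
```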
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoder.java
new file mode 100644
index 0000000000..0d8cf830ae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoder.java
@@ -0,0 +1,385 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import org.webrtc.EncodedImage;
+
+/**
+ * Interface for a video encoder that can be used with WebRTC. All calls will be made on the
+ * encoding thread. The encoder may be constructed on a different thread, and changing threads
+ * after calling release is allowed.
+ */
+public interface VideoEncoder {
+ /** Settings passed to the encoder by WebRTC. */
+ public class Settings {
+ public final int numberOfCores;
+ public final int width;
+ public final int height;
+ public final int startBitrate; // Kilobits per second.
+ public final int maxFramerate;
+ public final int numberOfSimulcastStreams;
+ public final boolean automaticResizeOn;
+ public final Capabilities capabilities;
+
+ // TODO(bugs.webrtc.org/10720): Remove.
+ @Deprecated
+ public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
+ int numberOfSimulcastStreams, boolean automaticResizeOn) {
+ this(numberOfCores, width, height, startBitrate, maxFramerate, numberOfSimulcastStreams,
+ automaticResizeOn, new VideoEncoder.Capabilities(false /* lossNotification */));
+ }
+
+ @CalledByNative("Settings")
+ public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
+ int numberOfSimulcastStreams, boolean automaticResizeOn, Capabilities capabilities) {
+ this.numberOfCores = numberOfCores;
+ this.width = width;
+ this.height = height;
+ this.startBitrate = startBitrate;
+ this.maxFramerate = maxFramerate;
+ this.numberOfSimulcastStreams = numberOfSimulcastStreams;
+ this.automaticResizeOn = automaticResizeOn;
+ this.capabilities = capabilities;
+ }
+ }
+
+ /** Capabilities (loss notification, etc.) passed to the encoder by WebRTC. */
+ public class Capabilities {
+ /**
+ * The remote side has support for the loss notification RTCP feedback message format, and will
+ * be sending these feedback messages if necessary.
+ */
+ public final boolean lossNotification;
+
+ @CalledByNative("Capabilities")
+ public Capabilities(boolean lossNotification) {
+ this.lossNotification = lossNotification;
+ }
+ }
+
+ /** Additional info for encoding. */
+ public class EncodeInfo {
+ public final EncodedImage.FrameType[] frameTypes;
+
+ @CalledByNative("EncodeInfo")
+ public EncodeInfo(EncodedImage.FrameType[] frameTypes) {
+ this.frameTypes = frameTypes;
+ }
+ }
+
+ // TODO(sakal): Add values to these classes as necessary.
+ /** Codec specific information about the encoded frame. */
+ public class CodecSpecificInfo {}
+
+ public class CodecSpecificInfoVP8 extends CodecSpecificInfo {}
+
+ public class CodecSpecificInfoVP9 extends CodecSpecificInfo {}
+
+ public class CodecSpecificInfoH264 extends CodecSpecificInfo {}
+
+ public class CodecSpecificInfoAV1 extends CodecSpecificInfo {}
+
+ /**
+ * Represents bitrate allocated for an encoder to produce frames. Bitrate can be divided between
+ * spatial and temporal layers.
+ */
+ public class BitrateAllocation {
+ // First index is the spatial layer and second the temporal layer.
+ public final int[][] bitratesBbs;
+
+    /**
+     * Initializes the allocation with a two-dimensional array of bitrates. The first index of
+     * the array is the spatial layer and the second index is the temporal layer.
+     */
+ @CalledByNative("BitrateAllocation")
+ public BitrateAllocation(int[][] bitratesBbs) {
+ this.bitratesBbs = bitratesBbs;
+ }
+
+ /**
+ * Gets the total bitrate allocated for all layers.
+ */
+ public int getSum() {
+ int sum = 0;
+ for (int[] spatialLayer : bitratesBbs) {
+ for (int bitrate : spatialLayer) {
+ sum += bitrate;
+ }
+ }
+ return sum;
+ }
+ }
+
+ /** Settings for WebRTC quality based scaling. */
+ public class ScalingSettings {
+ public final boolean on;
+ @Nullable public final Integer low;
+ @Nullable public final Integer high;
+
+ /**
+ * Settings to disable quality based scaling.
+ */
+ public static final ScalingSettings OFF = new ScalingSettings();
+
+ /**
+ * Creates settings to enable quality based scaling.
+ *
+ * @param low Average QP at which to scale up the resolution.
+ * @param high Average QP at which to scale down the resolution.
+ */
+ public ScalingSettings(int low, int high) {
+ this.on = true;
+ this.low = low;
+ this.high = high;
+ }
+
+ private ScalingSettings() {
+ this.on = false;
+ this.low = null;
+ this.high = null;
+ }
+
+ // TODO(bugs.webrtc.org/8830): Below constructors are deprecated.
+ // Default thresholds are going away, so thresholds have to be set
+ // when scaling is on.
+ /**
+ * Creates quality based scaling setting.
+ *
+ * @param on True if quality scaling is turned on.
+ */
+ @Deprecated
+ public ScalingSettings(boolean on) {
+ this.on = on;
+ this.low = null;
+ this.high = null;
+ }
+
+ /**
+ * Creates quality based scaling settings with custom thresholds.
+ *
+ * @param on True if quality scaling is turned on.
+ * @param low Average QP at which to scale up the resolution.
+ * @param high Average QP at which to scale down the resolution.
+ */
+ @Deprecated
+ public ScalingSettings(boolean on, int low, int high) {
+ this.on = on;
+ this.low = low;
+ this.high = high;
+ }
+
+ @Override
+ public String toString() {
+ return on ? "[ " + low + ", " + high + " ]" : "OFF";
+ }
+ }
+
+ /**
+ * Bitrate limits for resolution.
+ */
+ public class ResolutionBitrateLimits {
+ /**
+ * Maximum size of video frame, in pixels, the bitrate limits are intended for.
+ */
+ public final int frameSizePixels;
+
+ /**
+ * Recommended minimum bitrate to start encoding.
+ */
+ public final int minStartBitrateBps;
+
+ /**
+ * Recommended minimum bitrate.
+ */
+ public final int minBitrateBps;
+
+ /**
+ * Recommended maximum bitrate.
+ */
+ public final int maxBitrateBps;
+
+ public ResolutionBitrateLimits(
+ int frameSizePixels, int minStartBitrateBps, int minBitrateBps, int maxBitrateBps) {
+ this.frameSizePixels = frameSizePixels;
+ this.minStartBitrateBps = minStartBitrateBps;
+ this.minBitrateBps = minBitrateBps;
+ this.maxBitrateBps = maxBitrateBps;
+ }
+
+ @CalledByNative("ResolutionBitrateLimits")
+ public int getFrameSizePixels() {
+ return frameSizePixels;
+ }
+
+ @CalledByNative("ResolutionBitrateLimits")
+ public int getMinStartBitrateBps() {
+ return minStartBitrateBps;
+ }
+
+ @CalledByNative("ResolutionBitrateLimits")
+ public int getMinBitrateBps() {
+ return minBitrateBps;
+ }
+
+ @CalledByNative("ResolutionBitrateLimits")
+ public int getMaxBitrateBps() {
+ return maxBitrateBps;
+ }
+ }
+
+ /** Rate control parameters. */
+ public class RateControlParameters {
+ /**
+ * Adjusted target bitrate, per spatial/temporal layer. May be lower or higher than the target
+ * depending on encoder behaviour.
+ */
+ public final BitrateAllocation bitrate;
+
+    /**
+     * Target framerate, in fps. A value <= 0.0 is invalid and should be interpreted as the
+     * framerate target not being available. In this case the encoder should fall back to the max
+     * framerate specified in `codec_settings` of the last InitEncode() call.
+     */
+ public final double framerateFps;
+
+ @CalledByNative("RateControlParameters")
+ public RateControlParameters(BitrateAllocation bitrate, double framerateFps) {
+ this.bitrate = bitrate;
+ this.framerateFps = framerateFps;
+ }
+ }
+
+ /**
+ * Metadata about the Encoder.
+ */
+ public class EncoderInfo {
+    /**
+     * The width and height of the incoming video frames should be divisible by
+     * |requested_resolution_alignment|.
+     */
+ public final int requestedResolutionAlignment;
+
+    /**
+     * If true, the width and height of each simulcast layer should also be divisible by
+     * |requested_resolution_alignment|.
+     */
+ public final boolean applyAlignmentToAllSimulcastLayers;
+
+ public EncoderInfo(
+ int requestedResolutionAlignment, boolean applyAlignmentToAllSimulcastLayers) {
+ this.requestedResolutionAlignment = requestedResolutionAlignment;
+ this.applyAlignmentToAllSimulcastLayers = applyAlignmentToAllSimulcastLayers;
+ }
+
+ @CalledByNative("EncoderInfo")
+ public int getRequestedResolutionAlignment() {
+ return requestedResolutionAlignment;
+ }
+
+ @CalledByNative("EncoderInfo")
+ public boolean getApplyAlignmentToAllSimulcastLayers() {
+ return applyAlignmentToAllSimulcastLayers;
+ }
+ }
+
+ public interface Callback {
+ /**
+ * Old encoders assume that the byte buffer held by `frame` is not accessed after the call to
+ * this method returns. If the pipeline downstream needs to hold on to the buffer, it then has
+ * to make its own copy. We want to move to a model where no copying is needed, and instead use
+ * retain()/release() to signal to the encoder when it is safe to reuse the buffer.
+ *
+ * Over the transition, implementations of this class should use the maybeRetain() method if
+ * they want to keep a reference to the buffer, and fall back to copying if that method returns
+ * false.
+ */
+ void onEncodedFrame(EncodedImage frame, CodecSpecificInfo info);
+ }
+
+ /**
+ * The encoder implementation backing this interface is either 1) a Java
+ * encoder (e.g., an Android platform encoder), or alternatively 2) a native
+ * encoder (e.g., a software encoder or a C++ encoder adapter).
+ *
+ * For case 1), createNativeVideoEncoder() should return zero.
+ * In this case, we expect the native library to call the encoder through
+ * JNI using the Java interface declared below.
+ *
+ * For case 2), createNativeVideoEncoder() should return a non-zero value.
+ * In this case, we expect the native library to treat the returned value as
+ * a raw pointer of type webrtc::VideoEncoder* (ownership is transferred to
+ * the caller). The native library should then directly call the
+ * webrtc::VideoEncoder interface without going through JNI. All calls to
+ * the Java interface methods declared below should thus throw an
+ * UnsupportedOperationException.
+ */
+ @CalledByNative
+ default long createNativeVideoEncoder() {
+ return 0;
+ }
+
+ /**
+ * Returns true if the encoder is backed by hardware.
+ */
+ @CalledByNative
+ default boolean isHardwareEncoder() {
+ return true;
+ }
+
+ /**
+ * Initializes the encoding process. Call before any calls to encode.
+ */
+ @CalledByNative VideoCodecStatus initEncode(Settings settings, Callback encodeCallback);
+
+ /**
+ * Releases the encoder. No more calls to encode will be made after this call.
+ */
+ @CalledByNative VideoCodecStatus release();
+
+ /**
+ * Requests the encoder to encode a frame.
+ */
+ @CalledByNative VideoCodecStatus encode(VideoFrame frame, EncodeInfo info);
+
+ /** Sets the bitrate allocation and the target framerate for the encoder. */
+ VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate);
+
+ /** Sets the bitrate allocation and the target framerate for the encoder. */
+ default @CalledByNative VideoCodecStatus setRates(RateControlParameters rcParameters) {
+ // Round frame rate up to avoid overshoots.
+ int framerateFps = (int) Math.ceil(rcParameters.framerateFps);
+ return setRateAllocation(rcParameters.bitrate, framerateFps);
+ }
+
+ /** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */
+ @CalledByNative ScalingSettings getScalingSettings();
+
+ /** Returns the list of bitrate limits. */
+ @CalledByNative
+ default ResolutionBitrateLimits[] getResolutionBitrateLimits() {
+ // TODO(ssilkin): Update downstream projects and remove default implementation.
+    return new ResolutionBitrateLimits[0];
+ }
+
+ /**
+ * Should return a descriptive name for the implementation. Gets called once and cached. May be
+ * called from arbitrary thread.
+ */
+ @CalledByNative String getImplementationName();
+
+ @CalledByNative
+ default EncoderInfo getEncoderInfo() {
+ return new EncoderInfo(
+ /* requestedResolutionAlignment= */ 1, /* applyAlignmentToAllSimulcastLayers= */ false);
+ }
+}
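To make the BitrateAllocation layout concrete, the sketch below builds one spatial layer with three temporal layers and relies on getSum() for the total; all bitrate values are illustrative:

```java
import org.webrtc.VideoEncoder;

class BitrateAllocationExample {
  static VideoEncoder.BitrateAllocation exampleAllocation() {
    // First index: spatial layer. Second index: temporal layer.
    int[][] bitratesBbs = new int[][] {{500_000, 250_000, 150_000}};
    VideoEncoder.BitrateAllocation allocation =
        new VideoEncoder.BitrateAllocation(bitratesBbs);
    // allocation.getSum() adds up every layer: 900_000 bps here.
    return allocation;
  }
}
```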
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java
new file mode 100644
index 0000000000..2a46662d14
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/** Factory for creating VideoEncoders. */
+public interface VideoEncoderFactory {
+ public interface VideoEncoderSelector {
+ /** Called with the VideoCodecInfo of the currently used encoder. */
+ @CalledByNative("VideoEncoderSelector") void onCurrentEncoder(VideoCodecInfo info);
+
+    /**
+     * Called with the currently available bitrate. Returns null if the encoder selector prefers
+     * to keep the current encoder, or a VideoCodecInfo if a new encoder is preferred.
+     */
+ @Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onAvailableBitrate(int kbps);
+
+    /**
+     * Called every time the encoder input resolution changes. Returns null if the encoder
+     * selector prefers to keep the current encoder, or a VideoCodecInfo if a new encoder is
+     * preferred.
+     */
+ @Nullable
+ @CalledByNative("VideoEncoderSelector")
+    default VideoCodecInfo onResolutionChange(int width, int height) {
+ return null;
+ }
+
+    /**
+     * Called when the currently used encoder signals itself as broken. Returns null if the
+     * encoder selector prefers to keep the current encoder, or a VideoCodecInfo if a new encoder
+     * is preferred.
+     */
+ @Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onEncoderBroken();
+ }
+
+ /** Creates an encoder for the given video codec. */
+ @Nullable @CalledByNative VideoEncoder createEncoder(VideoCodecInfo info);
+
+ /**
+ * Enumerates the list of supported video codecs. This method will only be called once and the
+ * result will be cached.
+ */
+ @CalledByNative VideoCodecInfo[] getSupportedCodecs();
+
+ /**
+ * Enumerates the list of supported video codecs that can also be tagged with
+ * implementation information. This method will only be called once and the
+ * result will be cached.
+ */
+ @CalledByNative
+ default VideoCodecInfo[] getImplementations() {
+ return getSupportedCodecs();
+ }
+
+ /**
+ * Returns a VideoEncoderSelector if implemented by the VideoEncoderFactory,
+ * null otherwise.
+ */
+ @CalledByNative
+ default VideoEncoderSelector getEncoderSelector() {
+ return null;
+ }
+}
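A common pattern is to wrap another factory and restrict it to a single codec; the delegate below is assumed to be any existing VideoEncoderFactory:

```java
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.VideoCodecInfo;
import org.webrtc.VideoEncoder;
import org.webrtc.VideoEncoderFactory;

class SingleCodecEncoderFactory implements VideoEncoderFactory {
  private final VideoEncoderFactory delegate;
  private final String codecName;

  SingleCodecEncoderFactory(VideoEncoderFactory delegate, String codecName) {
    this.delegate = delegate;
    this.codecName = codecName;
  }

  @Nullable
  @Override
  public VideoEncoder createEncoder(VideoCodecInfo info) {
    return info.name.equalsIgnoreCase(codecName) ? delegate.createEncoder(info) : null;
  }

  @Override
  public VideoCodecInfo[] getSupportedCodecs() {
    // Expose only the chosen codec; other defaults (getImplementations,
    // getEncoderSelector) are inherited from the interface.
    List<VideoCodecInfo> filtered = new ArrayList<>();
    for (VideoCodecInfo info : delegate.getSupportedCodecs()) {
      if (info.name.equalsIgnoreCase(codecName)) {
        filtered.add(info);
      }
    }
    return filtered.toArray(new VideoCodecInfo[0]);
  }
}
```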
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java
new file mode 100644
index 0000000000..fa36b7c989
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * A combined video encoder that falls back on a secondary encoder if the primary encoder fails.
+ */
+public class VideoEncoderFallback extends WrappedNativeVideoEncoder {
+ private final VideoEncoder fallback;
+ private final VideoEncoder primary;
+
+ public VideoEncoderFallback(VideoEncoder fallback, VideoEncoder primary) {
+ this.fallback = fallback;
+ this.primary = primary;
+ }
+
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder(fallback, primary);
+ }
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return primary.isHardwareEncoder();
+ }
+
+ private static native long nativeCreateEncoder(VideoEncoder fallback, VideoEncoder primary);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java
new file mode 100644
index 0000000000..aef8030459
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.Handler;
+import android.os.HandlerThread;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Can be used to save the video frames to file.
+ */
+public class VideoFileRenderer implements VideoSink {
+ private static final String TAG = "VideoFileRenderer";
+
+ private final HandlerThread renderThread;
+ private final Handler renderThreadHandler;
+ private final HandlerThread fileThread;
+ private final Handler fileThreadHandler;
+ private final FileOutputStream videoOutFile;
+ private final String outputFileName;
+ private final int outputFileWidth;
+ private final int outputFileHeight;
+ private final int outputFrameSize;
+ private final ByteBuffer outputFrameBuffer;
+ private EglBase eglBase;
+ private YuvConverter yuvConverter;
+ private int frameCount;
+
+ public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
+ final EglBase.Context sharedContext) throws IOException {
+ if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
+ throw new IllegalArgumentException("Does not support uneven width or height");
+ }
+
+ this.outputFileName = outputFile;
+ this.outputFileWidth = outputFileWidth;
+ this.outputFileHeight = outputFileHeight;
+
+ outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
+ outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
+
+ videoOutFile = new FileOutputStream(outputFile);
+ videoOutFile.write(
+ ("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
+ .getBytes(Charset.forName("US-ASCII")));
+
+ renderThread = new HandlerThread(TAG + "RenderThread");
+ renderThread.start();
+ renderThreadHandler = new Handler(renderThread.getLooper());
+
+ fileThread = new HandlerThread(TAG + "FileThread");
+ fileThread.start();
+ fileThreadHandler = new Handler(fileThread.getLooper());
+
+ ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() {
+ @Override
+ public void run() {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+ yuvConverter = new YuvConverter();
+ }
+ });
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ frame.retain();
+ renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
+ }
+
+ private void renderFrameOnRenderThread(VideoFrame frame) {
+ final VideoFrame.Buffer buffer = frame.getBuffer();
+
+ // If the frame is rotated, it will be applied after cropAndScale. Therefore, if the frame is
+ // rotated by 90 degrees, swap width and height.
+ final int targetWidth = frame.getRotation() % 180 == 0 ? outputFileWidth : outputFileHeight;
+ final int targetHeight = frame.getRotation() % 180 == 0 ? outputFileHeight : outputFileWidth;
+
+ final float frameAspectRatio = (float) buffer.getWidth() / (float) buffer.getHeight();
+ final float fileAspectRatio = (float) targetWidth / (float) targetHeight;
+
+ // Calculate cropping to equalize the aspect ratio.
+ int cropWidth = buffer.getWidth();
+ int cropHeight = buffer.getHeight();
+ if (fileAspectRatio > frameAspectRatio) {
+ cropHeight = (int) (cropHeight * (frameAspectRatio / fileAspectRatio));
+ } else {
+ cropWidth = (int) (cropWidth * (fileAspectRatio / frameAspectRatio));
+ }
+
+ final int cropX = (buffer.getWidth() - cropWidth) / 2;
+ final int cropY = (buffer.getHeight() - cropHeight) / 2;
+
+ final VideoFrame.Buffer scaledBuffer =
+ buffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, targetWidth, targetHeight);
+ frame.release();
+
+ final VideoFrame.I420Buffer i420 = scaledBuffer.toI420();
+ scaledBuffer.release();
+
+ fileThreadHandler.post(() -> {
+ YuvHelper.I420Rotate(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+ i420.getDataV(), i420.getStrideV(), outputFrameBuffer, i420.getWidth(), i420.getHeight(),
+ frame.getRotation());
+ i420.release();
+
+ try {
+ videoOutFile.write("FRAME\n".getBytes(Charset.forName("US-ASCII")));
+ videoOutFile.write(
+ outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
+ } catch (IOException e) {
+ throw new RuntimeException("Error writing video to disk", e);
+ }
+ frameCount++;
+ });
+ }
+
+ /**
+ * Release all resources. All already posted frames will be rendered first.
+ */
+ public void release() {
+ final CountDownLatch cleanupBarrier = new CountDownLatch(1);
+ renderThreadHandler.post(() -> {
+ yuvConverter.release();
+ eglBase.release();
+ renderThread.quit();
+ cleanupBarrier.countDown();
+ });
+ ThreadUtils.awaitUninterruptibly(cleanupBarrier);
+ fileThreadHandler.post(() -> {
+ try {
+ videoOutFile.close();
+ Logging.d(TAG,
+ "Video written to disk as " + outputFileName + ". The number of frames is " + frameCount
+ + " and the dimensions of the frames are " + outputFileWidth + "x"
+ + outputFileHeight + ".");
+ } catch (IOException e) {
+ throw new RuntimeException("Error closing output file", e);
+ }
+ fileThread.quit();
+ });
+ try {
+ fileThread.join();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ Logging.e(TAG, "Interrupted while waiting for the write to disk to complete.", e);
+ }
+ }
+}
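A usage sketch: the renderer is attached as one of a track's sinks and detached again before release(). The VideoTrack and the output path are hypothetical, and the dimensions must be even:

```java
import java.io.IOException;
import org.webrtc.EglBase;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoTrack;

class RecordTrackExample {
  static VideoFileRenderer startRecording(VideoTrack track, EglBase.Context eglContext)
      throws IOException {
    VideoFileRenderer renderer =
        new VideoFileRenderer("/sdcard/capture.y4m", 640, 480, eglContext);
    track.addSink(renderer); // Frames now flow into the Y4M file.
    return renderer;
  }

  static void stopRecording(VideoTrack track, VideoFileRenderer renderer) {
    track.removeSink(renderer);
    renderer.release(); // Renders already-posted frames, then closes the file.
  }
}
```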
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrame.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrame.java
new file mode 100644
index 0000000000..e9f3b52455
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrame.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+/**
+ * Java version of webrtc::VideoFrame and webrtc::VideoFrameBuffer. A difference from the C++
+ * version is that no explicit tag is used, and clients are expected to use 'instanceof' to find the
+ * right subclass of the buffer. This allows clients to create custom VideoFrame.Buffer in
+ * arbitrary format in their custom VideoSources, and then cast it back to the correct subclass in
+ * their custom VideoSinks. All implementations must also implement the toI420() function,
+ * converting from the underlying representation if necessary. I420 is the most widely accepted
+ * format and serves as a fallback for video sinks that can only handle I420, e.g. the internal
+ * WebRTC software encoders.
+ */
+public class VideoFrame implements RefCounted {
+ /**
+ * Implements image storage medium. Might be for example an OpenGL texture or a memory region
+ * containing I420-data.
+ *
+ * <p>Reference counting is needed since a video buffer can be shared between multiple VideoSinks,
+ * and the buffer needs to be returned to the VideoSource as soon as all references are gone.
+ */
+ public interface Buffer extends RefCounted {
+ /**
+ * Representation of the underlying buffer. Currently, only NATIVE and I420 are supported.
+ */
+ @CalledByNative("Buffer")
+ @VideoFrameBufferType
+ default int getBufferType() {
+ return VideoFrameBufferType.NATIVE;
+ }
+
+ /**
+ * Resolution of the buffer in pixels.
+ */
+ @CalledByNative("Buffer") int getWidth();
+ @CalledByNative("Buffer") int getHeight();
+
+ /**
+ * Returns a memory-backed frame in I420 format. If the pixel data is in another format, a
+ * conversion will take place. All implementations must provide a fallback to I420 for
+ * compatibility with e.g. the internal WebRTC software encoders.
+ *
+ * <p> Conversion may fail, for example if reading the pixel data from a texture fails. If the
+ * conversion fails, null is returned.
+ */
+ @Nullable @CalledByNative("Buffer") I420Buffer toI420();
+
+ @Override @CalledByNative("Buffer") void retain();
+ @Override @CalledByNative("Buffer") void release();
+
+    /**
+     * Crops a region defined by `cropX`, `cropY`, `cropWidth` and `cropHeight`. Scales it to
+     * size `scaleWidth` x `scaleHeight`.
+     */
+ @CalledByNative("Buffer")
+ Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
+ }
+
+ /**
+ * Interface for I420 buffers.
+ */
+ public interface I420Buffer extends Buffer {
+ @Override
+ default int getBufferType() {
+ return VideoFrameBufferType.I420;
+ }
+
+ /**
+ * Returns a direct ByteBuffer containing Y-plane data. The buffer capacity is at least
+ * getStrideY() * getHeight() bytes. The position of the returned buffer is ignored and must
+     * be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
+ * implementations must return a new ByteBuffer or slice for each call.
+ */
+ @CalledByNative("I420Buffer") ByteBuffer getDataY();
+ /**
+ * Returns a direct ByteBuffer containing U-plane data. The buffer capacity is at least
+ * getStrideU() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
+     * and must be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
+ * implementations must return a new ByteBuffer or slice for each call.
+ */
+ @CalledByNative("I420Buffer") ByteBuffer getDataU();
+ /**
+ * Returns a direct ByteBuffer containing V-plane data. The buffer capacity is at least
+ * getStrideV() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
+     * and must be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
+ * implementations must return a new ByteBuffer or slice for each call.
+ */
+ @CalledByNative("I420Buffer") ByteBuffer getDataV();
+
+ @CalledByNative("I420Buffer") int getStrideY();
+ @CalledByNative("I420Buffer") int getStrideU();
+ @CalledByNative("I420Buffer") int getStrideV();
+ }
+
+ /**
+ * Interface for buffers that are stored as a single texture, either in OES or RGB format.
+ */
+ public interface TextureBuffer extends Buffer {
+ enum Type {
+ OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
+ RGB(GLES20.GL_TEXTURE_2D);
+
+ private final int glTarget;
+
+ private Type(final int glTarget) {
+ this.glTarget = glTarget;
+ }
+
+ public int getGlTarget() {
+ return glTarget;
+ }
+ }
+
+ Type getType();
+ int getTextureId();
+
+ /**
+ * Retrieve the transform matrix associated with the frame. This transform matrix maps 2D
+ * homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to
+ * the coordinate that should be used to sample that location from the buffer.
+ */
+ Matrix getTransformMatrix();
+ }
+
+ private final Buffer buffer;
+ private final int rotation;
+ private final long timestampNs;
+
+ /**
+ * Constructs a new VideoFrame backed by the given {@code buffer}.
+ *
+   * @note Ownership of the buffer object is transferred to the new VideoFrame.
+ */
+ @CalledByNative
+ public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
+ if (buffer == null) {
+ throw new IllegalArgumentException("buffer not allowed to be null");
+ }
+ if (rotation % 90 != 0) {
+ throw new IllegalArgumentException("rotation must be a multiple of 90");
+ }
+ this.buffer = buffer;
+ this.rotation = rotation;
+ this.timestampNs = timestampNs;
+ }
+
+ @CalledByNative
+ public Buffer getBuffer() {
+ return buffer;
+ }
+
+ /**
+ * Rotation of the frame in degrees.
+ */
+ @CalledByNative
+ public int getRotation() {
+ return rotation;
+ }
+
+ /**
+   * Timestamp of the frame in nanoseconds.
+ */
+ @CalledByNative
+ public long getTimestampNs() {
+ return timestampNs;
+ }
+
+ public int getRotatedWidth() {
+ if (rotation % 180 == 0) {
+ return buffer.getWidth();
+ }
+ return buffer.getHeight();
+ }
+
+ public int getRotatedHeight() {
+ if (rotation % 180 == 0) {
+ return buffer.getHeight();
+ }
+ return buffer.getWidth();
+ }
+
+ @Override
+ public void retain() {
+ buffer.retain();
+ }
+
+ @Override
+ @CalledByNative
+ public void release() {
+ buffer.release();
+ }
+}
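A sketch of the reference-counting discipline when deriving buffers: every buffer obtained from cropAndScale() or toI420() carries its own reference and is released independently of the frame it came from. The helper is hypothetical:

```java
import androidx.annotation.Nullable;
import org.webrtc.VideoFrame;

class FrameScalingExample {
  // Produces a 320x240 I420 copy of the frame's buffer, or null if the
  // conversion fails. The caller keeps ownership of `frame` and must
  // release() the returned buffer when done with it.
  @Nullable
  static VideoFrame.I420Buffer scaleToQvga(VideoFrame frame) {
    VideoFrame.Buffer buffer = frame.getBuffer();
    VideoFrame.Buffer scaled =
        buffer.cropAndScale(0, 0, buffer.getWidth(), buffer.getHeight(), 320, 240);
    VideoFrame.I420Buffer i420 = scaled.toI420(); // May be null on failure.
    scaled.release();
    return i420;
  }
}
```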
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameBufferType.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameBufferType.java
new file mode 100644
index 0000000000..7b05b88cba
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameBufferType.java
@@ -0,0 +1,33 @@
+
+// Copyright 2022 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// java_cpp_enum.py
+// From
+// ../../api/video/video_frame_buffer.h
+
+package org.webrtc;
+
+import androidx.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+ VideoFrameBufferType.NATIVE, VideoFrameBufferType.I420, VideoFrameBufferType.I420A,
+ VideoFrameBufferType.I422, VideoFrameBufferType.I444, VideoFrameBufferType.I010,
+ VideoFrameBufferType.I210, VideoFrameBufferType.NV12
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface VideoFrameBufferType {
+ int NATIVE = 0;
+ int I420 = 1;
+ int I420A = 2;
+ int I422 = 3;
+ int I444 = 4;
+ int I010 = 5;
+ int I210 = 6;
+ int NV12 = 7;
+}
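Because the constants are plain compile-time ints, they can be used directly in a switch over Buffer.getBufferType(); a hypothetical sketch:

```java
import org.webrtc.VideoFrame;
import org.webrtc.VideoFrameBufferType;

class BufferTypeExample {
  static String describe(VideoFrame.Buffer buffer) {
    switch (buffer.getBufferType()) {
      case VideoFrameBufferType.I420:
        return "memory-backed I420";
      case VideoFrameBufferType.NATIVE:
        return "opaque native buffer";
      default:
        return "other (" + buffer.getBufferType() + ")";
    }
  }
}
```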
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java
new file mode 100644
index 0000000000..af32587886
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.graphics.Point;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+/**
+ * Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
+ * drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
+ * taken into account. You can supply an additional render matrix for custom transformations.
+ */
+public class VideoFrameDrawer {
+ public static final String TAG = "VideoFrameDrawer";
+  /**
+   * Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
+   * depending on the type of the buffer. You can supply an additional render matrix, which is
+   * multiplied together with the transformation matrix of the frame (M = renderMatrix *
+   * transformationMatrix).
+   */
+ public static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
+ Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
+ int viewportWidth, int viewportHeight) {
+ Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
+ finalMatrix.preConcat(renderMatrix);
+ float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
+ switch (buffer.getType()) {
+ case OES:
+ drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
+ viewportY, viewportWidth, viewportHeight);
+ break;
+ case RGB:
+ drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
+ viewportY, viewportWidth, viewportHeight);
+ break;
+ default:
+ throw new RuntimeException("Unknown texture type.");
+ }
+ }
+
+ /**
+ * Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
+ * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
+ */
+ private static class YuvUploader {
+ // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
+ // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
+ // that handles stride and compare performance with intermediate copy.
+ @Nullable private ByteBuffer copyBuffer;
+ @Nullable private int[] yuvTextures;
+
+ /**
+ * Upload `planes` into OpenGL textures, taking stride into consideration.
+ *
+ * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
+ */
+ @Nullable
+ public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
+ final int[] planeWidths = new int[] {width, width / 2, width / 2};
+ final int[] planeHeights = new int[] {height, height / 2, height / 2};
+ // Make a first pass to see if we need a temporary copy buffer.
+ int copyCapacityNeeded = 0;
+ for (int i = 0; i < 3; ++i) {
+ if (strides[i] > planeWidths[i]) {
+ copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
+ }
+ }
+ // Allocate copy buffer if necessary.
+ if (copyCapacityNeeded > 0
+ && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+ copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+ }
+ // Make sure YUV textures are allocated.
+ if (yuvTextures == null) {
+ yuvTextures = new int[3];
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ }
+ // Upload each plane.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ // GLES only accepts packed data, i.e. stride == planeWidth.
+ final ByteBuffer packedByteBuffer;
+ if (strides[i] == planeWidths[i]) {
+ // Input is packed already.
+ packedByteBuffer = planes[i];
+ } else {
+ YuvHelper.copyPlane(
+ planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]);
+ packedByteBuffer = copyBuffer;
+ }
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
+ planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ }
+ return yuvTextures;
+ }
+
+ @Nullable
+ public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
+ int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
+ ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
+ return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
+ }
+
+ @Nullable
+ public int[] getYuvTextures() {
+ return yuvTextures;
+ }
+
+ /**
+ * Releases cached resources. Uploader can still be used and the resources will be reallocated
+ * on first use.
+ */
+ public void release() {
+ copyBuffer = null;
+ if (yuvTextures != null) {
+ GLES20.glDeleteTextures(3, yuvTextures, 0);
+ yuvTextures = null;
+ }
+ }
+ }
+
+ private static int distance(float x0, float y0, float x1, float y1) {
+ return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
+ }
+
+ // These points are used to calculate the size of the part of the frame we are rendering.
+  static final float[] srcPoints =
+ new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
+ private final float[] dstPoints = new float[6];
+ private final Point renderSize = new Point();
+ private int renderWidth;
+ private int renderHeight;
+
+ // Calculate the frame size after `renderMatrix` is applied. Stores the output in member variables
+ // `renderWidth` and `renderHeight` to avoid allocations since this function is called for every
+ // frame.
+ private void calculateTransformedRenderSize(
+ int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) {
+ if (renderMatrix == null) {
+ renderWidth = frameWidth;
+ renderHeight = frameHeight;
+ return;
+ }
+ // Transform the texture coordinates (in the range [0, 1]) according to `renderMatrix`.
+ renderMatrix.mapPoints(dstPoints, srcPoints);
+
+ // Multiply with the width and height to get the positions in terms of pixels.
+ for (int i = 0; i < 3; ++i) {
+ dstPoints[i * 2 + 0] *= frameWidth;
+ dstPoints[i * 2 + 1] *= frameHeight;
+ }
+
+ // Get the length of the sides of the transformed rectangle in terms of pixels.
+ renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
+ renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
+ }
+
+ private final YuvUploader yuvUploader = new YuvUploader();
+  // This variable is only used for checking reference equality, so that the I420 data is
+  // uploaded to textures at most once per frame.
+ @Nullable private VideoFrame lastI420Frame;
+ private final Matrix renderMatrix = new Matrix();
+
+ public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
+ drawFrame(frame, drawer, null /* additionalRenderMatrix */);
+ }
+
+ public void drawFrame(
+ VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
+ drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
+ frame.getRotatedWidth(), frame.getRotatedHeight());
+ }
+
+ public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
+ @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
+ int viewportHeight) {
+ final int width = frame.getRotatedWidth();
+ final int height = frame.getRotatedHeight();
+ calculateTransformedRenderSize(width, height, additionalRenderMatrix);
+ if (renderWidth <= 0 || renderHeight <= 0) {
+ Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight);
+ return;
+ }
+
+ final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
+ renderMatrix.reset();
+ renderMatrix.preTranslate(0.5f, 0.5f);
+ if (!isTextureFrame) {
+ renderMatrix.preScale(1f, -1f); // I420-frames are upside down
+ }
+ renderMatrix.preRotate(frame.getRotation());
+ renderMatrix.preTranslate(-0.5f, -0.5f);
+ if (additionalRenderMatrix != null) {
+ renderMatrix.preConcat(additionalRenderMatrix);
+ }
+
+ if (isTextureFrame) {
+ lastI420Frame = null;
+ drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
+ renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
+ } else {
+ // Only upload the I420 data to textures once per frame, if we are called multiple times
+ // with the same frame.
+ if (frame != lastI420Frame) {
+ lastI420Frame = frame;
+ final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
+ yuvUploader.uploadFromBuffer(i420Buffer);
+ i420Buffer.release();
+ }
+
+ drawer.drawYuv(yuvUploader.getYuvTextures(),
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
+ renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
+ }
+ }
+
+ public VideoFrame.Buffer prepareBufferForViewportSize(
+ VideoFrame.Buffer buffer, int width, int height) {
+ buffer.retain();
+ return buffer;
+ }
+
+ public void release() {
+ yuvUploader.release();
+ lastI420Frame = null;
+ }
+}
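A minimal sketch of how this class is typically driven, assuming a thread that holds a current EGL context and a GlRectDrawer as the concrete GlDrawer:

    import org.webrtc.GlRectDrawer;
    import org.webrtc.RendererCommon;
    import org.webrtc.VideoFrame;
    import org.webrtc.VideoFrameDrawer;

    class SimpleFrameRenderer {
      private final VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
      private final RendererCommon.GlDrawer glDrawer = new GlRectDrawer();

      // Called once per frame on the EGL thread; rotation is handled internally.
      void renderFrame(VideoFrame frame) {
        frameDrawer.drawFrame(frame, glDrawer);
      }

      // Frees cached YUV textures, the copy buffer, and the drawer's shaders.
      void release() {
        frameDrawer.release();
        glDrawer.release();
      }
    }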
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoProcessor.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoProcessor.java
new file mode 100644
index 0000000000..c39a55c27e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoProcessor.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/**
+ * Lightweight abstraction for an object that can receive video frames, process them, and pass them
+ * on to another object. This object is also allowed to observe capturer start/stop.
+ */
+public interface VideoProcessor extends CapturerObserver {
+ public static class FrameAdaptationParameters {
+ public final int cropX;
+ public final int cropY;
+ public final int cropWidth;
+ public final int cropHeight;
+ public final int scaleWidth;
+ public final int scaleHeight;
+ public final long timestampNs;
+ public final boolean drop;
+
+ public FrameAdaptationParameters(int cropX, int cropY, int cropWidth, int cropHeight,
+ int scaleWidth, int scaleHeight, long timestampNs, boolean drop) {
+ this.cropX = cropX;
+ this.cropY = cropY;
+ this.cropWidth = cropWidth;
+ this.cropHeight = cropHeight;
+ this.scaleWidth = scaleWidth;
+ this.scaleHeight = scaleHeight;
+ this.timestampNs = timestampNs;
+ this.drop = drop;
+ }
+ }
+
+ /**
+ * This is a chance to access an unadapted frame. The default implementation applies the
+ * adaptation and forwards the frame to {@link #onFrameCaptured(VideoFrame)}.
+ */
+ default void onFrameCaptured(VideoFrame frame, FrameAdaptationParameters parameters) {
+ VideoFrame adaptedFrame = applyFrameAdaptationParameters(frame, parameters);
+ if (adaptedFrame != null) {
+ onFrameCaptured(adaptedFrame);
+ adaptedFrame.release();
+ }
+ }
+
+ /**
+ * Set the sink that receives the output from this processor. Null can be passed in to unregister
+ * a sink.
+ */
+ void setSink(@Nullable VideoSink sink);
+
+ /**
+ * Applies the frame adaptation parameters to a frame. Returns null if the frame is meant to be
+ * dropped. Returns a new frame. The caller is responsible for releasing the returned frame.
+ */
+ public static @Nullable VideoFrame applyFrameAdaptationParameters(
+ VideoFrame frame, FrameAdaptationParameters parameters) {
+ if (parameters.drop) {
+ return null;
+ }
+
+ final VideoFrame.Buffer adaptedBuffer =
+ frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
+ parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
+ return new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs);
+ }
+}
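As a sketch, a pass-through VideoProcessor only needs to track its sink; the default onFrameCaptured(frame, parameters) overload already applies the adaptation before calling the single-argument version:

    import androidx.annotation.Nullable;
    import org.webrtc.VideoFrame;
    import org.webrtc.VideoProcessor;
    import org.webrtc.VideoSink;

    class PassThroughProcessor implements VideoProcessor {
      @Nullable private VideoSink sink;

      @Override
      public void setSink(@Nullable VideoSink sink) {
        this.sink = sink;
      }

      @Override
      public void onCapturerStarted(boolean success) {}

      @Override
      public void onCapturerStopped() {}

      @Override
      public void onFrameCaptured(VideoFrame frame) {
        VideoSink currentSink = sink;
        if (currentSink != null) {
          // A real processor would transform the frame before delivering it.
          currentSink.onFrame(frame);
        }
      }
    }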
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSink.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSink.java
new file mode 100644
index 0000000000..5a0a6c719c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSink.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Java version of rtc::VideoSinkInterface.
+ */
+public interface VideoSink {
+ /**
+ * Implementations should call frame.retain() if they need to hold a reference to the frame after
+ * this function returns. Each call to retain() should be followed by a call to frame.release()
+ * when the reference is no longer needed.
+ */
+ @CalledByNative void onFrame(VideoFrame frame);
+}
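The retain()/release() contract matters most when a sink defers work to another thread. A sketch, assuming a dedicated HandlerThread:

    import android.os.Handler;
    import android.os.HandlerThread;
    import org.webrtc.VideoFrame;
    import org.webrtc.VideoSink;

    class AsyncVideoSink implements VideoSink {
      private final HandlerThread thread = new HandlerThread("FrameWorker");
      private final Handler handler;

      AsyncVideoSink() {
        thread.start();
        handler = new Handler(thread.getLooper());
      }

      @Override
      public void onFrame(VideoFrame frame) {
        frame.retain(); // Keep the frame alive after onFrame() returns.
        handler.post(() -> {
          // ... inspect or copy the frame here ...
          frame.release(); // Balance the retain() above.
        });
      }
    }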
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSource.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSource.java
new file mode 100644
index 0000000000..2e22d1a2db
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSource.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/**
+ * Java wrapper of native AndroidVideoTrackSource.
+ */
+public class VideoSource extends MediaSource {
+  /** Simple aspect ratio class for use in constraining output format. */
+ public static class AspectRatio {
+ public static final AspectRatio UNDEFINED = new AspectRatio(/* width= */ 0, /* height= */ 0);
+
+ public final int width;
+ public final int height;
+
+ public AspectRatio(int width, int height) {
+ this.width = width;
+ this.height = height;
+ }
+ }
+
+ private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
+ private final Object videoProcessorLock = new Object();
+ @Nullable private VideoProcessor videoProcessor;
+ private boolean isCapturerRunning;
+
+ private final CapturerObserver capturerObserver = new CapturerObserver() {
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeAndroidVideoTrackSource.setState(success);
+ synchronized (videoProcessorLock) {
+ isCapturerRunning = success;
+ if (videoProcessor != null) {
+ videoProcessor.onCapturerStarted(success);
+ }
+ }
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
+ synchronized (videoProcessorLock) {
+ isCapturerRunning = false;
+ if (videoProcessor != null) {
+ videoProcessor.onCapturerStopped();
+ }
+ }
+ }
+
+ @Override
+ public void onFrameCaptured(VideoFrame frame) {
+ final VideoProcessor.FrameAdaptationParameters parameters =
+ nativeAndroidVideoTrackSource.adaptFrame(frame);
+ synchronized (videoProcessorLock) {
+ if (videoProcessor != null) {
+ videoProcessor.onFrameCaptured(frame, parameters);
+ return;
+ }
+ }
+
+ VideoFrame adaptedFrame = VideoProcessor.applyFrameAdaptationParameters(frame, parameters);
+ if (adaptedFrame != null) {
+ nativeAndroidVideoTrackSource.onFrameCaptured(adaptedFrame);
+ adaptedFrame.release();
+ }
+ }
+ };
+
+ public VideoSource(long nativeSource) {
+ super(nativeSource);
+ this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
+ }
+
+ /**
+ * Calling this function will cause frames to be scaled down to the requested resolution. Also,
+ * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
+ * the requested fps. The requested aspect ratio is orientation agnostic and will be adjusted to
+ * maintain the input orientation, so it doesn't matter if e.g. 1280x720 or 720x1280 is requested.
+ */
+ public void adaptOutputFormat(int width, int height, int fps) {
+ final int maxSide = Math.max(width, height);
+ final int minSide = Math.min(width, height);
+ adaptOutputFormat(maxSide, minSide, minSide, maxSide, fps);
+ }
+
+ /**
+ * Same as above, but allows setting two different target resolutions depending on incoming
+ * frame orientation. This gives more fine-grained control and can e.g. be used to force landscape
+ * video to be cropped to portrait video.
+ */
+ public void adaptOutputFormat(
+ int landscapeWidth, int landscapeHeight, int portraitWidth, int portraitHeight, int fps) {
+ adaptOutputFormat(new AspectRatio(landscapeWidth, landscapeHeight),
+ /* maxLandscapePixelCount= */ landscapeWidth * landscapeHeight,
+ new AspectRatio(portraitWidth, portraitHeight),
+ /* maxPortraitPixelCount= */ portraitWidth * portraitHeight, fps);
+ }
+
+ /** Same as above, with even more control as each constraint is optional. */
+ public void adaptOutputFormat(AspectRatio targetLandscapeAspectRatio,
+ @Nullable Integer maxLandscapePixelCount, AspectRatio targetPortraitAspectRatio,
+ @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+ nativeAndroidVideoTrackSource.adaptOutputFormat(targetLandscapeAspectRatio,
+ maxLandscapePixelCount, targetPortraitAspectRatio, maxPortraitPixelCount, maxFps);
+ }
+
+ public void setIsScreencast(boolean isScreencast) {
+ nativeAndroidVideoTrackSource.setIsScreencast(isScreencast);
+ }
+
+ /**
+ * Hook for injecting a custom video processor before frames are passed onto WebRTC. The frames
+ * will be cropped and scaled depending on CPU and network conditions before they are passed to
+ * the video processor. Frames will be delivered to the video processor on the same thread they
+ * are passed to this object. The video processor is allowed to deliver the processed frames
+ * back on any thread.
+ */
+ public void setVideoProcessor(@Nullable VideoProcessor newVideoProcessor) {
+ synchronized (videoProcessorLock) {
+ if (videoProcessor != null) {
+ videoProcessor.setSink(/* sink= */ null);
+ if (isCapturerRunning) {
+ videoProcessor.onCapturerStopped();
+ }
+ }
+ videoProcessor = newVideoProcessor;
+ if (newVideoProcessor != null) {
+ newVideoProcessor.setSink(
+ (frame)
+ -> runWithReference(() -> nativeAndroidVideoTrackSource.onFrameCaptured(frame)));
+ if (isCapturerRunning) {
+ newVideoProcessor.onCapturerStarted(/* success= */ true);
+ }
+ }
+ }
+ }
+
+ public CapturerObserver getCapturerObserver() {
+ return capturerObserver;
+ }
+
+ /** Returns a pointer to webrtc::VideoTrackSourceInterface. */
+ long getNativeVideoTrackSource() {
+ return getNativeMediaSource();
+ }
+
+ @Override
+ public void dispose() {
+ setVideoProcessor(/* newVideoProcessor= */ null);
+ super.dispose();
+ }
+}
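A usage sketch tying a capturer to the source and constraining its output; `factory`, `capturer`, `surfaceTextureHelper`, and `appContext` are assumed to exist already:

    VideoSource source = factory.createVideoSource(/* isScreencast= */ false);
    capturer.initialize(surfaceTextureHelper, appContext, source.getCapturerObserver());
    capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);

    // Frames larger than 1280x720 are scaled/cropped down; fps above 30 is dropped.
    source.adaptOutputFormat(1280, 720, 30);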
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoTrack.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoTrack.java
new file mode 100644
index 0000000000..5593d424f3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoTrack.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.IdentityHashMap;
+
+/** Java version of VideoTrackInterface. */
+public class VideoTrack extends MediaStreamTrack {
+ private final IdentityHashMap<VideoSink, Long> sinks = new IdentityHashMap<VideoSink, Long>();
+
+ public VideoTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+
+ /**
+ * Adds a VideoSink to the track.
+ *
+   * A track can have any number of VideoSinks. VideoSinks will replace
+   * renderers. However, converting old-style texture frames will involve a costly
+   * conversion to I420, so it is not recommended to upgrade before all your
+   * sources produce VideoFrames.
+ */
+ public void addSink(VideoSink sink) {
+ if (sink == null) {
+ throw new IllegalArgumentException("The VideoSink is not allowed to be null");
+ }
+ // We allow calling addSink() with the same sink multiple times. This is similar to the C++
+ // VideoTrack::AddOrUpdateSink().
+ if (!sinks.containsKey(sink)) {
+ final long nativeSink = nativeWrapSink(sink);
+ sinks.put(sink, nativeSink);
+ nativeAddSink(getNativeMediaStreamTrack(), nativeSink);
+ }
+ }
+
+ /**
+ * Removes a VideoSink from the track.
+ *
+ * If the VideoSink was not attached to the track, this is a no-op.
+ */
+ public void removeSink(VideoSink sink) {
+ final Long nativeSink = sinks.remove(sink);
+ if (nativeSink != null) {
+ nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink);
+ nativeFreeSink(nativeSink);
+ }
+ }
+
+ @Override
+ public void dispose() {
+ for (long nativeSink : sinks.values()) {
+ nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink);
+ nativeFreeSink(nativeSink);
+ }
+ sinks.clear();
+ super.dispose();
+ }
+
+ /** Returns a pointer to webrtc::VideoTrackInterface. */
+ long getNativeVideoTrack() {
+ return getNativeMediaStreamTrack();
+ }
+
+ private static native void nativeAddSink(long track, long nativeSink);
+ private static native void nativeRemoveSink(long track, long nativeSink);
+ private static native long nativeWrapSink(VideoSink sink);
+ private static native void nativeFreeSink(long sink);
+}
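A sketch of the sink lifecycle; `factory`, `source`, and a `renderer` implementing VideoSink (e.g. a SurfaceViewRenderer) are assumed:

    VideoTrack track = factory.createVideoTrack("video0", source);
    track.addSink(renderer);    // Calling again with the same sink is a no-op.
    // ...
    track.removeSink(renderer); // Removing an unknown sink is a no-op.
    track.dispose();            // Also detaches and frees any remaining sinks.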
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoDecoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoDecoder.java
new file mode 100644
index 0000000000..027120e48e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoDecoder.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Wraps a native webrtc::VideoDecoder.
+ */
+public abstract class WrappedNativeVideoDecoder implements VideoDecoder {
+ @Override public abstract long createNativeVideoDecoder();
+
+ @Override
+ public final VideoCodecStatus initDecode(Settings settings, Callback decodeCallback) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus release() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final String getImplementationName() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+}
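Concrete subclasses only supply the native factory method, as in this sketch; the JNI entry point name `nativeCreateDecoder` is hypothetical:

    import org.webrtc.WrappedNativeVideoDecoder;

    class MyNativeDecoder extends WrappedNativeVideoDecoder {
      @Override
      public long createNativeVideoDecoder() {
        return nativeCreateDecoder(); // Returns an owned webrtc::VideoDecoder*.
      }

      private static native long nativeCreateDecoder();
    }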
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoEncoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoEncoder.java
new file mode 100644
index 0000000000..7d0908a6ac
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoEncoder.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Wraps a native webrtc::VideoEncoder.
+ */
+public abstract class WrappedNativeVideoEncoder implements VideoEncoder {
+ @Override public abstract long createNativeVideoEncoder();
+ @Override public abstract boolean isHardwareEncoder();
+
+ @Override
+ public final VideoCodecStatus initEncode(Settings settings, Callback encodeCallback) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus release() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus encode(VideoFrame frame, EncodeInfo info) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final ScalingSettings getScalingSettings() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final String getImplementationName() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+}
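The encoder case mirrors the decoder: only the native factory method and the hardware flag live in Java. A sketch with a hypothetical `nativeCreateEncoder` entry point:

    import org.webrtc.WrappedNativeVideoEncoder;

    class MyNativeEncoder extends WrappedNativeVideoEncoder {
      @Override
      public long createNativeVideoEncoder() {
        return nativeCreateEncoder(); // Returns an owned webrtc::VideoEncoder*.
      }

      @Override
      public boolean isHardwareEncoder() {
        return false; // This sketch wraps a software encoder.
      }

      private static native long nativeCreateEncoder();
    }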
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvConverter.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvConverter.java
new file mode 100644
index 0000000000..c855d4be41
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvConverter.java
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.opengl.GLES20;
+import android.opengl.GLException;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.VideoFrame.I420Buffer;
+import org.webrtc.VideoFrame.TextureBuffer;
+
+/**
+ * Class for converting OES textures to a YUV ByteBuffer. It can be constructed on any thread, but
+ * should only be operated from a single thread with an active EGL context.
+ */
+public final class YuvConverter {
+ private static final String TAG = "YuvConverter";
+
+ private static final String FRAGMENT_SHADER =
+ // Difference in texture coordinate corresponding to one
+ // sub-pixel in the x direction.
+ "uniform vec2 xUnit;\n"
+ // Color conversion coefficients, including constant term
+ + "uniform vec4 coeffs;\n"
+ + "\n"
+ + "void main() {\n"
+ // Since the alpha read from the texture is always 1, this could
+ // be written as a mat4 x vec4 multiply. However, that seems to
+ // give a worse framerate, possibly because the additional
+ // multiplies by 1.0 consume resources.
+ + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ + " sample(tc - 1.5 * xUnit).rgb);\n"
+ + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ + " sample(tc - 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ + " sample(tc + 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ + " sample(tc + 1.5 * xUnit).rgb);\n"
+ + "}\n";
+
+ private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
+ // Y'UV444 to RGB888, see https://en.wikipedia.org/wiki/YUV#Y%E2%80%B2UV444_to_RGB888_conversion
+ // We use the ITU-R BT.601 coefficients for Y, U and V.
+    // The values in Wikipedia are inaccurate; the accurate values derived from the spec are:
+ // Y = 0.299 * R + 0.587 * G + 0.114 * B
+ // U = -0.168736 * R - 0.331264 * G + 0.5 * B + 0.5
+ // V = 0.5 * R - 0.418688 * G - 0.0813124 * B + 0.5
+ // To map the Y-values to range [16-235] and U- and V-values to range [16-240], the matrix has
+ // been multiplied with matrix:
+ // {{219 / 255, 0, 0, 16 / 255},
+ // {0, 224 / 255, 0, 16 / 255},
+ // {0, 0, 224 / 255, 16 / 255},
+ // {0, 0, 0, 1}}
+ private static final float[] yCoeffs =
+ new float[] {0.256788f, 0.504129f, 0.0979059f, 0.0627451f};
+ private static final float[] uCoeffs =
+ new float[] {-0.148223f, -0.290993f, 0.439216f, 0.501961f};
+ private static final float[] vCoeffs =
+ new float[] {0.439216f, -0.367788f, -0.0714274f, 0.501961f};
+
+ private int xUnitLoc;
+ private int coeffsLoc;
+
+ private float[] coeffs;
+ private float stepSize;
+
+ public void setPlaneY() {
+ coeffs = yCoeffs;
+ stepSize = 1.0f;
+ }
+
+ public void setPlaneU() {
+ coeffs = uCoeffs;
+ stepSize = 2.0f;
+ }
+
+ public void setPlaneV() {
+ coeffs = vCoeffs;
+ stepSize = 2.0f;
+ }
+
+ @Override
+ public void onNewShader(GlShader shader) {
+ xUnitLoc = shader.getUniformLocation("xUnit");
+ coeffsLoc = shader.getUniformLocation("coeffs");
+ }
+
+ @Override
+ public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight) {
+ GLES20.glUniform4fv(coeffsLoc, /* count= */ 1, coeffs, /* offset= */ 0);
+ // Matrix * (1;0;0;0) / (width / stepSize). Note that OpenGL uses column major order.
+ GLES20.glUniform2f(
+ xUnitLoc, stepSize * texMatrix[0] / frameWidth, stepSize * texMatrix[1] / frameWidth);
+ }
+ }
+
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+ private final GlTextureFrameBuffer i420TextureFrameBuffer =
+ new GlTextureFrameBuffer(GLES20.GL_RGBA);
+ private final ShaderCallbacks shaderCallbacks = new ShaderCallbacks();
+ private final GlGenericDrawer drawer = new GlGenericDrawer(FRAGMENT_SHADER, shaderCallbacks);
+ private final VideoFrameDrawer videoFrameDrawer;
+
+ /**
+ * This class should be constructed on a thread that has an active EGL context.
+ */
+ public YuvConverter() {
+ this(new VideoFrameDrawer());
+ }
+
+ public YuvConverter(VideoFrameDrawer videoFrameDrawer) {
+ this.videoFrameDrawer = videoFrameDrawer;
+ threadChecker.detachThread();
+ }
+
+ /** Converts the texture buffer to I420. */
+ @Nullable
+ public I420Buffer convert(TextureBuffer inputTextureBuffer) {
+ try {
+ return convertInternal(inputTextureBuffer);
+ } catch (GLException e) {
+ Logging.w(TAG, "Failed to convert TextureBuffer", e);
+ }
+ return null;
+ }
+
+ private I420Buffer convertInternal(TextureBuffer inputTextureBuffer) {
+ TextureBuffer preparedBuffer = (TextureBuffer) videoFrameDrawer.prepareBufferForViewportSize(
+ inputTextureBuffer, inputTextureBuffer.getWidth(), inputTextureBuffer.getHeight());
+
+ // We draw into a buffer laid out like
+ //
+ // +---------+
+ // | |
+ // | Y |
+ // | |
+ // | |
+ // +----+----+
+ // | U | V |
+ // | | |
+ // +----+----+
+ //
+ // In memory, we use the same stride for all of Y, U and V. The
+ // U data starts at offset `height` * `stride` from the Y data,
+    // and the V data starts at offset `stride / 2` from the U
+ // data, with rows of U and V data alternating.
+ //
+ // Now, it would have made sense to allocate a pixel buffer with
+ // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+    // EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
+ // unsupported by devices. So do the following hack: Allocate an
+ // RGBA buffer, of width `stride`/4. To render each of these
+ // large pixels, sample the texture at 4 different x coordinates
+ // and store the results in the four components.
+ //
+ // Since the V data needs to start on a boundary of such a
+ // larger pixel, it is not sufficient that `stride` is even, it
+ // has to be a multiple of 8 pixels.
+ final int frameWidth = preparedBuffer.getWidth();
+ final int frameHeight = preparedBuffer.getHeight();
+ final int stride = ((frameWidth + 7) / 8) * 8;
+ final int uvHeight = (frameHeight + 1) / 2;
+ // Total height of the combined memory layout.
+ final int totalHeight = frameHeight + uvHeight;
+ final ByteBuffer i420ByteBuffer = JniCommon.nativeAllocateByteBuffer(stride * totalHeight);
+ // Viewport width is divided by four since we are squeezing in four color bytes in each RGBA
+ // pixel.
+ final int viewportWidth = stride / 4;
+
+ // Produce a frame buffer starting at top-left corner, not bottom-left.
+ final Matrix renderMatrix = new Matrix();
+ renderMatrix.preTranslate(0.5f, 0.5f);
+ renderMatrix.preScale(1f, -1f);
+ renderMatrix.preTranslate(-0.5f, -0.5f);
+
+ i420TextureFrameBuffer.setSize(viewportWidth, totalHeight);
+
+ // Bind our framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, i420TextureFrameBuffer.getFrameBufferId());
+ GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+ // Draw Y.
+ shaderCallbacks.setPlaneY();
+ VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+ /* viewportX= */ 0, /* viewportY= */ 0, viewportWidth,
+ /* viewportHeight= */ frameHeight);
+
+ // Draw U.
+ shaderCallbacks.setPlaneU();
+ VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+ /* viewportX= */ 0, /* viewportY= */ frameHeight, viewportWidth / 2,
+ /* viewportHeight= */ uvHeight);
+
+ // Draw V.
+ shaderCallbacks.setPlaneV();
+ VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+ /* viewportX= */ viewportWidth / 2, /* viewportY= */ frameHeight, viewportWidth / 2,
+ /* viewportHeight= */ uvHeight);
+
+ GLES20.glReadPixels(0, 0, i420TextureFrameBuffer.getWidth(), i420TextureFrameBuffer.getHeight(),
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, i420ByteBuffer);
+
+ GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+ // Restore normal framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+
+ // Prepare Y, U, and V ByteBuffer slices.
+ final int yPos = 0;
+ final int uPos = yPos + stride * frameHeight;
+ // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
+ final int vPos = uPos + stride / 2;
+
+ i420ByteBuffer.position(yPos);
+ i420ByteBuffer.limit(yPos + stride * frameHeight);
+ final ByteBuffer dataY = i420ByteBuffer.slice();
+
+ i420ByteBuffer.position(uPos);
+ // The last row does not have padding.
+ final int uvSize = stride * (uvHeight - 1) + stride / 2;
+ i420ByteBuffer.limit(uPos + uvSize);
+ final ByteBuffer dataU = i420ByteBuffer.slice();
+
+ i420ByteBuffer.position(vPos);
+ i420ByteBuffer.limit(vPos + uvSize);
+ final ByteBuffer dataV = i420ByteBuffer.slice();
+
+ preparedBuffer.release();
+
+ return JavaI420Buffer.wrap(frameWidth, frameHeight, dataY, stride, dataU, stride, dataV, stride,
+ () -> { JniCommon.nativeFreeByteBuffer(i420ByteBuffer); });
+ }
+
+ public void release() {
+ threadChecker.checkIsOnValidThread();
+ drawer.release();
+ i420TextureFrameBuffer.release();
+ videoFrameDrawer.release();
+ // Allow this class to be reused.
+ threadChecker.detachThread();
+ }
+}
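A usage sketch, run on the thread that owns the EGL context; `textureBuffer` is assumed to come from a capturer or another texture source:

    YuvConverter converter = new YuvConverter();
    VideoFrame.I420Buffer i420 = converter.convert(textureBuffer);
    if (i420 != null) {
      // ... read i420.getDataY() / getDataU() / getDataV() ...
      i420.release(); // The caller owns the returned buffer.
    }
    converter.release();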
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvHelper.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvHelper.java
new file mode 100644
index 0000000000..afb8e837d1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvHelper.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrappers around libyuv methods. All passed byte buffers must be direct byte buffers. */
+public class YuvHelper {
+ /**
+ * Copy I420 Buffer to a contiguously allocated buffer.
+   * <p> On Android, MediaCodec can request a buffer with a specific layout, given by a stride and
+   * a slice-height (or plane height); this function produces such a layout.
+   * <p> For more information, see
+   * https://cs.android.com/android/platform/superproject/+/64fea7e5726daebc40f46890100837c01091100d:frameworks/base/media/java/android/media/MediaFormat.java;l=568
+   * @param dstStrideY the stride of the output buffer's Y plane.
+   * @param dstSliceHeightY the slice-height of the output buffer's Y plane.
+   * @param dstStrideU the stride of the output buffer's U (and V) plane.
+   * @param dstSliceHeightU the slice-height of the output buffer's U (and V) plane.
+ */
+ public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStrideY,
+ int dstSliceHeightY, int dstStrideU, int dstSliceHeightU) {
+ final int chromaWidth = (dstWidth + 1) / 2;
+ final int chromaHeight = (dstHeight + 1) / 2;
+
+ final int dstStartY = 0;
+ final int dstEndY = dstStartY + dstStrideY * dstHeight;
+ final int dstStartU = dstStartY + dstStrideY * dstSliceHeightY;
+ final int dstEndU = dstStartU + dstStrideU * chromaHeight;
+ final int dstStartV = dstStartU + dstStrideU * dstSliceHeightU;
+ // The last line doesn't need any padding, so use chromaWidth
+ // to calculate the exact end position.
+ final int dstEndV = dstStartV + dstStrideU * (chromaHeight - 1) + chromaWidth;
+ if (dst.capacity() < dstEndV) {
+ throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ + dstEndV + " was " + dst.capacity());
+ }
+
+ dst.limit(dstEndY);
+ dst.position(dstStartY);
+ final ByteBuffer dstY = dst.slice();
+ dst.limit(dstEndU);
+ dst.position(dstStartU);
+ final ByteBuffer dstU = dst.slice();
+ dst.limit(dstEndV);
+ dst.position(dstStartV);
+ final ByteBuffer dstV = dst.slice();
+
+ I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+ dstStrideU, dstV, dstStrideU, dstWidth, dstHeight);
+ }
+
+ /** Helper method for copying I420 to tightly packed destination buffer. */
+ public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight) {
+ I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
+ dstWidth, dstHeight, (dstWidth + 1) / 2, (dstHeight + 1) / 2);
+ }
+
+ /**
+   * Converts an I420 buffer to NV12 in a contiguously allocated buffer.
+   * @param dstStrideY the stride of the output buffer's Y plane.
+   * @param dstSliceHeightY the slice-height of the output buffer's Y plane.
+ */
+ public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStrideY,
+ int dstSliceHeightY) {
+ final int chromaHeight = (dstHeight + 1) / 2;
+ final int chromaWidth = (dstWidth + 1) / 2;
+
+ final int dstStartY = 0;
+ final int dstEndY = dstStartY + dstStrideY * dstHeight;
+ final int dstStartUV = dstStartY + dstStrideY * dstSliceHeightY;
+ final int dstEndUV = dstStartUV + chromaWidth * chromaHeight * 2;
+ if (dst.capacity() < dstEndUV) {
+ throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ + dstEndUV + " was " + dst.capacity());
+ }
+
+ dst.limit(dstEndY);
+ dst.position(dstStartY);
+ final ByteBuffer dstY = dst.slice();
+ dst.limit(dstEndUV);
+ dst.position(dstStartUV);
+ final ByteBuffer dstUV = dst.slice();
+
+ I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
+ chromaWidth * 2, dstWidth, dstHeight);
+ }
+
+ /** Helper method for copying I420 to tightly packed NV12 destination buffer. */
+ public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight) {
+ I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
+ dstWidth, dstHeight);
+ }
+
+ /** Helper method for rotating I420 to tightly packed destination buffer. */
+ public static void I420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int srcWidth, int srcHeight,
+ int rotationMode) {
+ final int dstWidth = rotationMode % 180 == 0 ? srcWidth : srcHeight;
+ final int dstHeight = rotationMode % 180 == 0 ? srcHeight : srcWidth;
+
+ final int dstChromaHeight = (dstHeight + 1) / 2;
+ final int dstChromaWidth = (dstWidth + 1) / 2;
+
+ final int minSize = dstWidth * dstHeight + dstChromaWidth * dstChromaHeight * 2;
+ if (dst.capacity() < minSize) {
+ throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ + minSize + " was " + dst.capacity());
+ }
+
+ final int startY = 0;
+ final int startU = dstHeight * dstWidth;
+ final int startV = startU + dstChromaHeight * dstChromaWidth;
+
+ dst.position(startY);
+ final ByteBuffer dstY = dst.slice();
+ dst.position(startU);
+ final ByteBuffer dstU = dst.slice();
+ dst.position(startV);
+ final ByteBuffer dstV = dst.slice();
+
+ nativeI420Rotate(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstWidth, dstU,
+ dstChromaWidth, dstV, dstChromaWidth, srcWidth, srcHeight, rotationMode);
+ }
+
+  /** Helper method for copying a single color plane. */
+ public static void copyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+ nativeCopyPlane(src, srcStride, dst, dstStride, width, height);
+ }
+
+ /** Converts ABGR little endian (rgba in memory) to I420. */
+ public static void ABGRToI420(ByteBuffer src, int srcStride, ByteBuffer dstY, int dstStrideY,
+ ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
+ nativeABGRToI420(
+ src, srcStride, dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV, width, height);
+ }
+
+ /**
+ * Copies I420 to the I420 dst buffer.
+   * <p> Unlike `libyuv::I420Copy`, this function rejects `height <= 0`, so vertical flipping
+   * (requested in libyuv by passing a negative height) is not supported.
+ */
+ public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+ int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
+ if (srcY == null || srcU == null || srcV == null || dstY == null || dstU == null || dstV == null
+ || width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Invalid I420Copy input arguments");
+ }
+ nativeI420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+ dstStrideU, dstV, dstStrideV, width, height);
+ }
+
+ public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstUV,
+ int dstStrideUV, int width, int height) {
+ if (srcY == null || srcU == null || srcV == null || dstY == null || dstUV == null || width <= 0
+ || height <= 0) {
+ throw new IllegalArgumentException("Invalid I420ToNV12 input arguments");
+ }
+ nativeI420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
+ dstStrideUV, width, height);
+ }
+
+ public static void I420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+ int dstStrideU, ByteBuffer dstV, int dstStrideV, int srcWidth, int srcHeight,
+ int rotationMode) {
+ nativeI420Rotate(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+ dstStrideU, dstV, dstStrideV, srcWidth, srcHeight, rotationMode);
+ }
+
+ private static native void nativeCopyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height);
+ private static native void nativeI420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+ int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+ ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height);
+ private static native void nativeI420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+ int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+ ByteBuffer dstUV, int dstStrideUV, int width, int height);
+ private static native void nativeI420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+ int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+ ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int srcWidth, int srcHeight,
+ int rotationMode);
+ private static native void nativeABGRToI420(ByteBuffer src, int srcStride, ByteBuffer dstY,
+ int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width,
+ int height);
+}
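A sketch of the MediaCodec-layout copy; here the destination stride and slice-height are simply the tight values, but in practice they come from the codec's input MediaFormat:

    void copyForCodec(VideoFrame.I420Buffer src, ByteBuffer codecInput) {
      int width = src.getWidth();
      int height = src.getHeight();
      YuvHelper.I420Copy(src.getDataY(), src.getStrideY(), src.getDataU(), src.getStrideU(),
          src.getDataV(), src.getStrideV(), codecInput, width, height,
          /* dstStrideY= */ width, /* dstSliceHeightY= */ height,
          /* dstStrideU= */ (width + 1) / 2, /* dstSliceHeightU= */ (height + 1) / 2);
    }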
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java
new file mode 100644
index 0000000000..502c68cc9a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+/**
+ * This interface is a thin wrapper on top of a native C++ webrtc::AudioDeviceModule (ADM). The
+ * reason for basing it on a native ADM instead of a pure Java interface is that we have two native
+ * Android implementations (OpenSLES and AAudio) that it does not make sense to wrap through JNI.
+ *
+ * <p>Note: This class is still under development and may change without notice.
+ */
+public interface AudioDeviceModule {
+ /**
+ * Returns a C++ pointer to a webrtc::AudioDeviceModule. Caller does _not_ take ownership and
+ * lifetime is handled through the release() call.
+ */
+ long getNativeAudioDeviceModulePointer();
+
+ /**
+ * Release resources for this AudioDeviceModule, including native resources. The object should not
+ * be used after this call.
+ */
+ void release();
+
+ /** Control muting/unmuting the speaker. */
+ void setSpeakerMute(boolean mute);
+
+ /** Control muting/unmuting the microphone. */
+ void setMicrophoneMute(boolean mute);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
new file mode 100644
index 0000000000..d3d57602a8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
@@ -0,0 +1,436 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.os.Build;
+import androidx.annotation.RequiresApi;
+import java.util.concurrent.ScheduledExecutorService;
+import org.webrtc.JniCommon;
+import org.webrtc.Logging;
+
+/**
+ * AudioDeviceModule implemented using android.media.AudioRecord as input and
+ * android.media.AudioTrack as output.
+ */
+public class JavaAudioDeviceModule implements AudioDeviceModule {
+ private static final String TAG = "JavaAudioDeviceModule";
+
+ public static Builder builder(Context context) {
+ return new Builder(context);
+ }
+
+ public static class Builder {
+ private final Context context;
+ private ScheduledExecutorService scheduler;
+ private final AudioManager audioManager;
+ private int inputSampleRate;
+ private int outputSampleRate;
+ private int audioSource = WebRtcAudioRecord.DEFAULT_AUDIO_SOURCE;
+ private int audioFormat = WebRtcAudioRecord.DEFAULT_AUDIO_FORMAT;
+ private AudioTrackErrorCallback audioTrackErrorCallback;
+ private AudioRecordErrorCallback audioRecordErrorCallback;
+ private SamplesReadyCallback samplesReadyCallback;
+ private AudioTrackStateCallback audioTrackStateCallback;
+ private AudioRecordStateCallback audioRecordStateCallback;
+ private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported();
+ private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported();
+ private boolean useStereoInput;
+ private boolean useStereoOutput;
+ private AudioAttributes audioAttributes;
+ private boolean useLowLatency;
+ private boolean enableVolumeLogger;
+
+ private Builder(Context context) {
+ this.context = context;
+ this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+ this.inputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
+ this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
+ this.useLowLatency = false;
+ this.enableVolumeLogger = true;
+ }
+
+ public Builder setScheduler(ScheduledExecutorService scheduler) {
+ this.scheduler = scheduler;
+ return this;
+ }
+
+ /**
+ * Call this method if the default handling of querying the native sample rate shall be
+ * overridden. Can be useful on some devices where the available Android APIs are known to
+ * return invalid results.
+ */
+ public Builder setSampleRate(int sampleRate) {
+ Logging.d(TAG, "Input/Output sample rate overridden to: " + sampleRate);
+ this.inputSampleRate = sampleRate;
+ this.outputSampleRate = sampleRate;
+ return this;
+ }
+
+ /**
+ * Call this method to specifically override input sample rate.
+ */
+ public Builder setInputSampleRate(int inputSampleRate) {
+ Logging.d(TAG, "Input sample rate overridden to: " + inputSampleRate);
+ this.inputSampleRate = inputSampleRate;
+ return this;
+ }
+
+ /**
+ * Call this method to specifically override output sample rate.
+ */
+ public Builder setOutputSampleRate(int outputSampleRate) {
+ Logging.d(TAG, "Output sample rate overridden to: " + outputSampleRate);
+ this.outputSampleRate = outputSampleRate;
+ return this;
+ }
+
+ /**
+ * Call this to change the audio source. The argument should be one of the values from
+ * android.media.MediaRecorder.AudioSource. The default is AudioSource.VOICE_COMMUNICATION.
+ */
+ public Builder setAudioSource(int audioSource) {
+ this.audioSource = audioSource;
+ return this;
+ }
+
+ /**
+     * Call this to change the audio format. The argument should be one of the values from
+     * android.media.AudioFormat: ENCODING_PCM_8BIT, ENCODING_PCM_16BIT or ENCODING_PCM_FLOAT.
+     * The default audio data format is PCM 16 bit per sample, which is guaranteed to be
+     * supported by all devices.
+ */
+ public Builder setAudioFormat(int audioFormat) {
+ this.audioFormat = audioFormat;
+ return this;
+ }
+
+ /**
+ * Set a callback to retrieve errors from the AudioTrack.
+ */
+ public Builder setAudioTrackErrorCallback(AudioTrackErrorCallback audioTrackErrorCallback) {
+ this.audioTrackErrorCallback = audioTrackErrorCallback;
+ return this;
+ }
+
+ /**
+ * Set a callback to retrieve errors from the AudioRecord.
+ */
+ public Builder setAudioRecordErrorCallback(AudioRecordErrorCallback audioRecordErrorCallback) {
+ this.audioRecordErrorCallback = audioRecordErrorCallback;
+ return this;
+ }
+
+ /**
+ * Set a callback to listen to the raw audio input from the AudioRecord.
+ */
+ public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback) {
+ this.samplesReadyCallback = samplesReadyCallback;
+ return this;
+ }
+
+ /**
+     * Set a callback to retrieve information from the AudioTrack on when audio starts and stops.
+ */
+ public Builder setAudioTrackStateCallback(AudioTrackStateCallback audioTrackStateCallback) {
+ this.audioTrackStateCallback = audioTrackStateCallback;
+ return this;
+ }
+
+ /**
+ * Set a callback to retrieve information from the AudioRecord on when audio starts and stops.
+ */
+ public Builder setAudioRecordStateCallback(AudioRecordStateCallback audioRecordStateCallback) {
+ this.audioRecordStateCallback = audioRecordStateCallback;
+ return this;
+ }
+
+ /**
+ * Control if the built-in HW noise suppressor should be used or not. The default is on if it is
+ * supported. It is possible to query support by calling isBuiltInNoiseSuppressorSupported().
+ */
+ public Builder setUseHardwareNoiseSuppressor(boolean useHardwareNoiseSuppressor) {
+ if (useHardwareNoiseSuppressor && !isBuiltInNoiseSuppressorSupported()) {
+ Logging.e(TAG, "HW NS not supported");
+ useHardwareNoiseSuppressor = false;
+ }
+ this.useHardwareNoiseSuppressor = useHardwareNoiseSuppressor;
+ return this;
+ }
+
+ /**
+ * Control if the built-in HW acoustic echo canceler should be used or not. The default is on if
+ * it is supported. It is possible to query support by calling
+ * isBuiltInAcousticEchoCancelerSupported().
+ */
+ public Builder setUseHardwareAcousticEchoCanceler(boolean useHardwareAcousticEchoCanceler) {
+ if (useHardwareAcousticEchoCanceler && !isBuiltInAcousticEchoCancelerSupported()) {
+ Logging.e(TAG, "HW AEC not supported");
+ useHardwareAcousticEchoCanceler = false;
+ }
+ this.useHardwareAcousticEchoCanceler = useHardwareAcousticEchoCanceler;
+ return this;
+ }
+
+ /**
+ * Control if stereo input should be used or not. The default is mono.
+ */
+ public Builder setUseStereoInput(boolean useStereoInput) {
+ this.useStereoInput = useStereoInput;
+ return this;
+ }
+
+ /**
+ * Control if stereo output should be used or not. The default is mono.
+ */
+ public Builder setUseStereoOutput(boolean useStereoOutput) {
+ this.useStereoOutput = useStereoOutput;
+ return this;
+ }
+
+ /**
+ * Control if the low-latency mode should be used. The default is disabled.
+ */
+ public Builder setUseLowLatency(boolean useLowLatency) {
+ this.useLowLatency = useLowLatency;
+ return this;
+ }
+
+ /**
+ * Set custom {@link AudioAttributes} to use.
+ */
+ public Builder setAudioAttributes(AudioAttributes audioAttributes) {
+ this.audioAttributes = audioAttributes;
+ return this;
+ }
+
+    /** Controls whether the volume logger on the audio output track is enabled (default: enabled). */
+ public Builder setEnableVolumeLogger(boolean enableVolumeLogger) {
+ this.enableVolumeLogger = enableVolumeLogger;
+ return this;
+ }
+
+ /**
+ * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
+ * and is responsible for calling release().
+ */
+ public JavaAudioDeviceModule createAudioDeviceModule() {
+ Logging.d(TAG, "createAudioDeviceModule");
+ if (useHardwareNoiseSuppressor) {
+ Logging.d(TAG, "HW NS will be used.");
+ } else {
+ if (isBuiltInNoiseSuppressorSupported()) {
+ Logging.d(TAG, "Overriding default behavior; now using WebRTC NS!");
+ }
+ Logging.d(TAG, "HW NS will not be used.");
+ }
+ if (useHardwareAcousticEchoCanceler) {
+ Logging.d(TAG, "HW AEC will be used.");
+ } else {
+ if (isBuiltInAcousticEchoCancelerSupported()) {
+ Logging.d(TAG, "Overriding default behavior; now using WebRTC AEC!");
+ }
+ Logging.d(TAG, "HW AEC will not be used.");
+ }
+ // Low-latency mode was introduced in API version 26, see
+ // https://developer.android.com/reference/android/media/AudioTrack#PERFORMANCE_MODE_LOW_LATENCY
+ final int MIN_LOW_LATENCY_SDK_VERSION = 26;
+ if (useLowLatency && Build.VERSION.SDK_INT >= MIN_LOW_LATENCY_SDK_VERSION) {
+ Logging.d(TAG, "Low latency mode will be used.");
+ }
+ ScheduledExecutorService executor = this.scheduler;
+ if (executor == null) {
+ executor = WebRtcAudioRecord.newDefaultScheduler();
+ }
+ final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager,
+ audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
+ samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
+ final WebRtcAudioTrack audioOutput =
+ new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback,
+ audioTrackStateCallback, useLowLatency, enableVolumeLogger);
+ return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
+ inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
+ }
+ }
+
+ /* AudioRecord */
+ // Audio recording error handler functions.
+ public enum AudioRecordStartErrorCode {
+ AUDIO_RECORD_START_EXCEPTION,
+ AUDIO_RECORD_START_STATE_MISMATCH,
+ }
+
+ public static interface AudioRecordErrorCallback {
+ void onWebRtcAudioRecordInitError(String errorMessage);
+ void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage);
+ void onWebRtcAudioRecordError(String errorMessage);
+ }
+
+ /** Called when audio recording starts and stops. */
+ public static interface AudioRecordStateCallback {
+ void onWebRtcAudioRecordStart();
+ void onWebRtcAudioRecordStop();
+ }
+
+ /**
+ * Contains audio sample information.
+ */
+ public static class AudioSamples {
+ /** See {@link AudioRecord#getAudioFormat()} */
+ private final int audioFormat;
+ /** See {@link AudioRecord#getChannelCount()} */
+ private final int channelCount;
+ /** See {@link AudioRecord#getSampleRate()} */
+ private final int sampleRate;
+
+ private final byte[] data;
+
+ public AudioSamples(int audioFormat, int channelCount, int sampleRate, byte[] data) {
+ this.audioFormat = audioFormat;
+ this.channelCount = channelCount;
+ this.sampleRate = sampleRate;
+ this.data = data;
+ }
+
+ public int getAudioFormat() {
+ return audioFormat;
+ }
+
+ public int getChannelCount() {
+ return channelCount;
+ }
+
+ public int getSampleRate() {
+ return sampleRate;
+ }
+
+ public byte[] getData() {
+ return data;
+ }
+ }
+
+  /** Called when new audio samples are ready. This should only be set for debug purposes. */
+ public static interface SamplesReadyCallback {
+ void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
+ }
+
+ /* AudioTrack */
+ // Audio playout/track error handler functions.
+ public enum AudioTrackStartErrorCode {
+ AUDIO_TRACK_START_EXCEPTION,
+ AUDIO_TRACK_START_STATE_MISMATCH,
+ }
+
+ public static interface AudioTrackErrorCallback {
+ void onWebRtcAudioTrackInitError(String errorMessage);
+ void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
+ void onWebRtcAudioTrackError(String errorMessage);
+ }
+
+ /** Called when audio playout starts and stops. */
+ public static interface AudioTrackStateCallback {
+ void onWebRtcAudioTrackStart();
+ void onWebRtcAudioTrackStop();
+ }
+
+ /**
+ * Returns true if the device supports built-in HW AEC, and the UUID is approved (some UUIDs can
+ * be excluded).
+ */
+ public static boolean isBuiltInAcousticEchoCancelerSupported() {
+ return WebRtcAudioEffects.isAcousticEchoCancelerSupported();
+ }
+
+ /**
+ * Returns true if the device supports built-in HW NS, and the UUID is approved (some UUIDs can be
+ * excluded).
+ */
+ public static boolean isBuiltInNoiseSuppressorSupported() {
+ return WebRtcAudioEffects.isNoiseSuppressorSupported();
+ }
+
+ private final Context context;
+ private final AudioManager audioManager;
+ private final WebRtcAudioRecord audioInput;
+ private final WebRtcAudioTrack audioOutput;
+ private final int inputSampleRate;
+ private final int outputSampleRate;
+ private final boolean useStereoInput;
+ private final boolean useStereoOutput;
+
+ private final Object nativeLock = new Object();
+ private long nativeAudioDeviceModule;
+
+ private JavaAudioDeviceModule(Context context, AudioManager audioManager,
+ WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput, int inputSampleRate,
+ int outputSampleRate, boolean useStereoInput, boolean useStereoOutput) {
+ this.context = context;
+ this.audioManager = audioManager;
+ this.audioInput = audioInput;
+ this.audioOutput = audioOutput;
+ this.inputSampleRate = inputSampleRate;
+ this.outputSampleRate = outputSampleRate;
+ this.useStereoInput = useStereoInput;
+ this.useStereoOutput = useStereoOutput;
+ }
+
+ @Override
+ public long getNativeAudioDeviceModulePointer() {
+ synchronized (nativeLock) {
+ if (nativeAudioDeviceModule == 0) {
+ nativeAudioDeviceModule = nativeCreateAudioDeviceModule(context, audioManager, audioInput,
+ audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
+ }
+ return nativeAudioDeviceModule;
+ }
+ }
+
+ @Override
+ public void release() {
+ synchronized (nativeLock) {
+ if (nativeAudioDeviceModule != 0) {
+ JniCommon.nativeReleaseRef(nativeAudioDeviceModule);
+ nativeAudioDeviceModule = 0;
+ }
+ }
+ }
+
+ @Override
+ public void setSpeakerMute(boolean mute) {
+ Logging.d(TAG, "setSpeakerMute: " + mute);
+ audioOutput.setSpeakerMute(mute);
+ }
+
+ @Override
+ public void setMicrophoneMute(boolean mute) {
+ Logging.d(TAG, "setMicrophoneMute: " + mute);
+ audioInput.setMicrophoneMute(mute);
+ }
+
+ /**
+ * Start to prefer a specific {@link AudioDeviceInfo} device for recording. Typically this should
+ * only be used if a client gives an explicit option for choosing a physical device to record
+ * from. Otherwise the best-matching device for other parameters will be used. Calling after
+ * recording is started may cause a temporary interruption if the audio routing changes.
+ */
+ @RequiresApi(Build.VERSION_CODES.M)
+ public void setPreferredInputDevice(AudioDeviceInfo preferredInputDevice) {
+ Logging.d(TAG, "setPreferredInputDevice: " + preferredInputDevice);
+ audioInput.setPreferredDevice(preferredInputDevice);
+ }
+
+ private static native long nativeCreateAudioDeviceModule(Context context,
+ AudioManager audioManager, WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput,
+ int inputSampleRate, int outputSampleRate, boolean useStereoInput, boolean useStereoOutput);
+}
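As the setPreferredInputDevice() javadoc above suggests, a client enumerates input devices and hands one to the module. A sketch for API level 23+, where appContext and a JavaAudioDeviceModule instance adm are assumed to exist:

  // Requires android.media.AudioManager / android.media.AudioDeviceInfo (API 23+).
  AudioManager am = (AudioManager) appContext.getSystemService(Context.AUDIO_SERVICE);
  for (AudioDeviceInfo device : am.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
    // Prefer a wired headset microphone when one is plugged in.
    if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
      adm.setPreferredInputDevice(device);
      break;
    }
  }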
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java
new file mode 100644
index 0000000000..de0d0d61f9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import org.webrtc.voiceengine.WebRtcAudioRecord;
+import org.webrtc.voiceengine.WebRtcAudioTrack;
+
+/**
+ * This class represents the legacy AudioDeviceModule that is currently hardcoded into C++ WebRTC.
+ * It will return a null native AudioDeviceModule pointer, leading to an internal object being
+ * created inside WebRTC that is controlled by static calls to the classes under the voiceengine
+ * package. Please use the new JavaAudioDeviceModule instead of this class.
+ */
+@Deprecated
+public class LegacyAudioDeviceModule implements AudioDeviceModule {
+ @Override
+ public long getNativeAudioDeviceModulePointer() {
+ // Returning a null pointer will make WebRTC construct the built-in legacy AudioDeviceModule for
+ // Android internally.
+ return 0;
+ }
+
+ @Override
+ public void release() {
+ // All control for this ADM goes through static global methods and the C++ object is owned
+ // internally by WebRTC.
+ }
+
+ @Override
+ public void setSpeakerMute(boolean mute) {
+ WebRtcAudioTrack.setSpeakerMute(mute);
+ }
+
+ @Override
+ public void setMicrophoneMute(boolean mute) {
+ WebRtcAudioRecord.setMicrophoneMute(mute);
+ }
+}
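Both modules implement AudioDeviceModule, so they are handed to the peer connection factory the same way; only the native backing differs. A sketch, assuming the factory builder exposes a setAudioDeviceModule() setter (not shown in this excerpt):

  AudioDeviceModule adm = new LegacyAudioDeviceModule(); // deprecated; prefer JavaAudioDeviceModule
  PeerConnectionFactory factory = PeerConnectionFactory.builder()
      .setAudioDeviceModule(adm)
      .createPeerConnectionFactory();
  // release() is a no-op for the legacy module, but keeping the call makes the
  // code correct if the module is later swapped for a JavaAudioDeviceModule.
  adm.release();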
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/AndroidManifest.xml b/third_party/libwebrtc/sdk/android/instrumentationtests/AndroidManifest.xml
new file mode 100644
index 0000000000..55028da703
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/AndroidManifest.xml
@@ -0,0 +1,38 @@
+<!--
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+-->
+
+<manifest
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ package="org.webrtc">
+ <uses-feature android:name="android.hardware.camera" />
+ <uses-feature android:name="android.hardware.camera.autofocus" />
+ <uses-feature android:glEsVersion="0x00020000" android:required="true" />
+
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="21" />
+
+ <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+ <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+ <uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
+ <uses-permission android:name="android.permission.RUN_INSTRUMENTATION" />
+
+ <application>
+ <uses-library android:name="android.test.runner" />
+ </application>
+
+ <instrumentation android:name="org.chromium.base.test.BaseChromiumAndroidJUnitRunner"
+ tools:ignore="MissingPrefix"
+ android:targetPackage="org.webrtc"
+ android:label="Tests for WebRTC Android SDK"/>
+</manifest>
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/ant.properties b/third_party/libwebrtc/sdk/android/instrumentationtests/ant.properties
new file mode 100644
index 0000000000..bc05353865
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/ant.properties
@@ -0,0 +1,18 @@
+# This file is used to override default values used by the Ant build system.
+#
+# This file must be checked into Version Control Systems, as it is
+# integral to the build system of your project.
+
+# This file is only used by the Ant script.
+
+# You can use this to override default values such as
+# 'source.dir' for the location of your java source folder and
+# 'out.dir' for the location of your output folder.
+
+# You can also use it to define how the release builds are signed by declaring
+# the following properties:
+# 'key.store' for the location of your keystore and
+# 'key.alias' for the name of the key to use.
+# The password will be asked during the build when you use the 'release' target.
+
+source.dir=../java/testcommon/src;src
\ No newline at end of file
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/build.xml b/third_party/libwebrtc/sdk/android/instrumentationtests/build.xml
new file mode 100644
index 0000000000..cb4cb7ac94
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="libjingle_peerconnection_android_unittest" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+    <!-- if sdk.dir was not set from one of the property files, then
+         get it from the ANDROID_SDK_ROOT env var.
+ This must be done before we load project.properties since
+ the proguard config can use sdk.dir -->
+ <property environment="env" />
+ <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+ <isset property="env.ANDROID_SDK_ROOT" />
+ </condition>
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
+ unless="sdk.dir"
+ />
+
+ <!--
+ Import per project custom build rules if present at the root of the project.
+ This is the place to put custom intermediary targets such as:
+ -pre-build
+ -pre-compile
+ -post-compile (This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir})
+ -post-package
+ -post-build
+ -pre-clean
+ -->
+ <import file="custom_rules.xml" optional="true" />
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/loggable_test.cc b/third_party/libwebrtc/sdk/android/instrumentationtests/loggable_test.cc
new file mode 100644
index 0000000000..1a11075216
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/loggable_test.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "rtc_base/logging.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+JNI_FUNCTION_DECLARATION(void,
+ LoggableTest_nativeLogInfoTestMessage,
+ JNIEnv* jni,
+ jclass,
+ jstring j_message) {
+ std::string message =
+ JavaToNativeString(jni, JavaParamRef<jstring>(j_message));
+ RTC_LOG(LS_INFO) << message;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/project.properties b/third_party/libwebrtc/sdk/android/instrumentationtests/project.properties
new file mode 100644
index 0000000000..a6ca533fe3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked into Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+
+java.compilerargs=-Xlint:all -Werror
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java
new file mode 100644
index 0000000000..8166f5b544
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import androidx.annotation.Nullable;
+import androidx.test.filters.SmallTest;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/** Unit tests for {@link AndroidVideoDecoder}. */
+@RunWith(Parameterized.class)
+public final class AndroidVideoDecoderInstrumentationTest {
+ @Parameters(name = "{0};useEglContext={1}")
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(new Object[] {/*codecName=*/"VP8", /*useEglContext=*/false},
+ new Object[] {/*codecName=*/"VP8", /*useEglContext=*/true},
+ new Object[] {/*codecName=*/"H264", /*useEglContext=*/false},
+ new Object[] {/*codecName=*/"H264", /*useEglContext=*/true});
+ }
+
+ private final VideoCodecInfo codecType;
+ private final boolean useEglContext;
+
+ public AndroidVideoDecoderInstrumentationTest(String codecName, boolean useEglContext) {
+ if (codecName.equals("H264")) {
+ this.codecType = H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC;
+ } else {
+ this.codecType = new VideoCodecInfo(codecName, new HashMap<>());
+ }
+ this.useEglContext = useEglContext;
+ }
+
+ private static final String TAG = "AndroidVideoDecoderInstrumentationTest";
+
+ private static final int TEST_FRAME_COUNT = 10;
+ private static final int TEST_FRAME_WIDTH = 640;
+ private static final int TEST_FRAME_HEIGHT = 360;
+ private VideoFrame.I420Buffer[] TEST_FRAMES;
+
+ private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
+ private static final boolean ENABLE_H264_HIGH_PROFILE = true;
+ private static final VideoEncoder.Settings ENCODER_SETTINGS = new VideoEncoder.Settings(
+ 1 /* core */,
+ getAlignedNumber(TEST_FRAME_WIDTH, HardwareVideoEncoderTest.getPixelAlignmentRequired()),
+ getAlignedNumber(TEST_FRAME_HEIGHT, HardwareVideoEncoderTest.getPixelAlignmentRequired()),
+ 300 /* kbps */, 30 /* fps */, 1 /* numberOfSimulcastStreams */, true /* automaticResizeOn */,
+ /* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */));
+
+ private static final int DECODE_TIMEOUT_MS = 1000;
+ private static final VideoDecoder.Settings SETTINGS = new VideoDecoder.Settings(1 /* core */,
+ getAlignedNumber(TEST_FRAME_WIDTH, HardwareVideoEncoderTest.getPixelAlignmentRequired()),
+ getAlignedNumber(TEST_FRAME_HEIGHT, HardwareVideoEncoderTest.getPixelAlignmentRequired()));
+
+ private static class MockDecodeCallback implements VideoDecoder.Callback {
+ private BlockingQueue<VideoFrame> frameQueue = new LinkedBlockingQueue<>();
+
+ @Override
+ public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
+ assertNotNull(frame);
+ frameQueue.offer(frame);
+ }
+
+ public void assertFrameDecoded(EncodedImage testImage, VideoFrame.I420Buffer testBuffer) {
+ VideoFrame decodedFrame = poll();
+ VideoFrame.Buffer decodedBuffer = decodedFrame.getBuffer();
+ assertEquals(testImage.encodedWidth, decodedBuffer.getWidth());
+ assertEquals(testImage.encodedHeight, decodedBuffer.getHeight());
+      // TODO(sakal): Decoder loses the nanosecond precision. This is not a problem in practice
+ // because C++ EncodedImage stores the timestamp in milliseconds.
+ assertEquals(testImage.captureTimeNs / 1000, decodedFrame.getTimestampNs() / 1000);
+ assertEquals(testImage.rotation, decodedFrame.getRotation());
+ }
+
+ public VideoFrame poll() {
+ try {
+ VideoFrame frame = frameQueue.poll(DECODE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+ assertNotNull("Timed out waiting for the frame to be decoded.", frame);
+ return frame;
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+ private static VideoFrame.I420Buffer[] generateTestFrames() {
+ VideoFrame.I420Buffer[] result = new VideoFrame.I420Buffer[TEST_FRAME_COUNT];
+ for (int i = 0; i < TEST_FRAME_COUNT; i++) {
+ result[i] = JavaI420Buffer.allocate(
+ getAlignedNumber(TEST_FRAME_WIDTH, HardwareVideoEncoderTest.getPixelAlignmentRequired()),
+ getAlignedNumber(
+ TEST_FRAME_HEIGHT, HardwareVideoEncoderTest.getPixelAlignmentRequired()));
+ // TODO(sakal): Generate content for the test frames.
+ }
+ return result;
+ }
+
+ private final EncodedImage[] encodedTestFrames = new EncodedImage[TEST_FRAME_COUNT];
+ private EglBase14 eglBase;
+
+ private VideoDecoderFactory createDecoderFactory(EglBase.Context eglContext) {
+ return new HardwareVideoDecoderFactory(eglContext);
+ }
+
+ private @Nullable VideoDecoder createDecoder() {
+ VideoDecoderFactory factory =
+ createDecoderFactory(useEglContext ? eglBase.getEglBaseContext() : null);
+ return factory.createDecoder(codecType);
+ }
+
+ private void encodeTestFrames() {
+ VideoEncoderFactory encoderFactory = new HardwareVideoEncoderFactory(
+ eglBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+ VideoEncoder encoder = encoderFactory.createEncoder(codecType);
+ HardwareVideoEncoderTest.MockEncoderCallback encodeCallback =
+ new HardwareVideoEncoderTest.MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(ENCODER_SETTINGS, encodeCallback));
+
+ long lastTimestampNs = 0;
+ for (int i = 0; i < TEST_FRAME_COUNT; i++) {
+ lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / ENCODER_SETTINGS.maxFramerate;
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+ HardwareVideoEncoderTest.testEncodeFrame(
+ encoder, new VideoFrame(TEST_FRAMES[i], 0 /* rotation */, lastTimestampNs), info);
+ encodedTestFrames[i] = encodeCallback.poll();
+ }
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
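+  // Rounds number down to the nearest multiple of alignment.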
+ private static int getAlignedNumber(int number, int alignment) {
+ return (number / alignment) * alignment;
+ }
+
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+
+ TEST_FRAMES = generateTestFrames();
+
+ eglBase = EglBase.createEgl14(EglBase.CONFIG_PLAIN);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ encodeTestFrames();
+ }
+
+ @After
+ public void tearDown() {
+ eglBase.release();
+ }
+
+ @Test
+ @SmallTest
+ public void testInitialize() {
+ VideoDecoder decoder = createDecoder();
+ assertEquals(VideoCodecStatus.OK, decoder.initDecode(SETTINGS, null /* decodeCallback */));
+ assertEquals(VideoCodecStatus.OK, decoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testDecode() {
+ VideoDecoder decoder = createDecoder();
+ MockDecodeCallback callback = new MockDecodeCallback();
+ assertEquals(VideoCodecStatus.OK, decoder.initDecode(SETTINGS, callback));
+
+ for (int i = 0; i < TEST_FRAME_COUNT; i++) {
+ assertEquals(VideoCodecStatus.OK,
+ decoder.decode(encodedTestFrames[i],
+ new VideoDecoder.DecodeInfo(false /* isMissingFrames */, 0 /* renderTimeMs */)));
+ callback.assertFrameDecoded(encodedTestFrames[i], TEST_FRAMES[i]);
+ }
+
+ assertEquals(VideoCodecStatus.OK, decoder.release());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java
new file mode 100644
index 0000000000..8c9119eb7b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import androidx.test.filters.SmallTest;
+import org.junit.Before;
+import org.junit.Test;
+
+public final class BuiltinAudioCodecsFactoryFactoryTest {
+ @Before
+ public void setUp() {
+ System.loadLibrary(TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testAudioEncoderFactoryFactoryTest() throws Exception {
+ BuiltinAudioEncoderFactoryFactory factory = new BuiltinAudioEncoderFactoryFactory();
+ long aef = 0;
+ try {
+ aef = factory.createNativeAudioEncoderFactory();
+ assertThat(aef).isNotEqualTo(0);
+ } finally {
+ if (aef != 0) {
+ JniCommon.nativeReleaseRef(aef);
+ }
+ }
+ }
+
+ @Test
+ @SmallTest
+ public void testAudioDecoderFactoryFactoryTest() throws Exception {
+ BuiltinAudioDecoderFactoryFactory factory = new BuiltinAudioDecoderFactoryFactory();
+ long adf = 0;
+ try {
+ adf = factory.createNativeAudioDecoderFactory();
+ assertThat(adf).isNotEqualTo(0);
+ } finally {
+ if (adf != 0) {
+ JniCommon.nativeReleaseRef(adf);
+ }
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
new file mode 100644
index 0000000000..37d03d99d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class Camera1CapturerUsingByteBufferTest {
+ static final String TAG = "Camera1CapturerUsingByteBufferTest";
+
+ private static class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+ @Override
+ public boolean isCapturingToTexture() {
+ return false;
+ }
+
+ @Override
+ public CameraEnumerator getCameraEnumerator() {
+ return new Camera1Enumerator(false);
+ }
+
+ @Override
+ public Context getAppContext() {
+ return InstrumentationRegistry.getTargetContext();
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public Object rawOpenCamera(String cameraName) {
+ return android.hardware.Camera.open(Camera1Enumerator.getCameraIndex(cameraName));
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void rawCloseCamera(Object camera) {
+ ((android.hardware.Camera) camera).release();
+ }
+ }
+
+ private CameraVideoCapturerTestFixtures fixtures;
+
+ @Before
+ public void setUp() {
+ fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
+ }
+
+ @After
+ public void tearDown() {
+ fixtures.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateAndDispose() throws InterruptedException {
+ fixtures.createCapturerAndDispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateNonExistingCamera() throws InterruptedException {
+ fixtures.createNonExistingCamera();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using a "default" capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testCreateCapturerAndRender() throws InterruptedException {
+ fixtures.createCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the front facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartFrontFacingVideoCapturer() throws InterruptedException {
+ fixtures.createFrontFacingCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the back facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartBackFacingVideoCapturer() throws InterruptedException {
+ fixtures.createBackFacingCapturerAndRender();
+ }
+
+  // This tests that the default camera can be started and that the camera can
+ // later be switched to another camera.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturer() throws InterruptedException {
+ fixtures.switchCamera();
+ }
+
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturerToSpecificCameraName() throws InterruptedException {
+ fixtures.switchCamera(true /* specifyCameraName */);
+ }
+
+ @Test
+ @MediumTest
+ public void testCameraEvents() throws InterruptedException {
+ fixtures.cameraEventsInvoked();
+ }
+
+ // Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
+ @Test
+ @MediumTest
+ public void testCameraCallsAfterStop() throws InterruptedException {
+ fixtures.cameraCallsAfterStop();
+ }
+
+  // This tests that the VideoSource that the CameraVideoCapturer is connected to can
+ // be stopped and restarted. It tests both the Java and the C++ layer.
+ @Test
+ @LargeTest
+ public void testStopRestartVideoSource() throws InterruptedException {
+ fixtures.stopRestartVideoSource();
+ }
+
+  // This tests that the camera can be started at different resolutions.
+ // It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testStartStopWithDifferentResolutions() throws InterruptedException {
+ fixtures.startStopWithDifferentResolutions();
+ }
+
+  // This tests what happens if buffers are returned after the capturer has
+ // been stopped and restarted. It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testReturnBufferLate() throws InterruptedException {
+ fixtures.returnBufferLate();
+ }
+
+  // This tests that we can capture frames, keep the frames in a local renderer, stop capturing,
+  // and then return the frames. The difference from testReturnBufferLate() is that we
+ // also test the JNI and C++ AndroidVideoCapturer parts.
+ @Test
+ @MediumTest
+ public void testReturnBufferLateEndToEnd() throws InterruptedException {
+ fixtures.returnBufferLateEndToEnd();
+ }
+
+  // This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
+  // called. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testScaleCameraOutput() throws InterruptedException {
+ fixtures.scaleCameraOutput();
+ }
+
+  // This tests that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testCropCameraOutput() throws InterruptedException {
+ fixtures.cropCameraOutput();
+ }
+
+  // This tests that an error is reported if the camera is already opened
+ // when CameraVideoCapturer is started.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpen();
+ }
+
+  // This tests that CameraVideoCapturer can be started even if the camera is already open,
+  // provided the camera is closed while CameraVideoCapturer is re-trying to start.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
+ }
+
+  // This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
+ // re-trying to start.
+ @Test
+ @MediumTest
+ public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndStop();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
new file mode 100644
index 0000000000..e0419178c6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class Camera1CapturerUsingTextureTest {
+ static final String TAG = "Camera1CapturerUsingTextureTest";
+
+ private static class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+ @Override
+ public CameraEnumerator getCameraEnumerator() {
+ return new Camera1Enumerator();
+ }
+
+ @Override
+ public Context getAppContext() {
+ return InstrumentationRegistry.getTargetContext();
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public Object rawOpenCamera(String cameraName) {
+ return android.hardware.Camera.open(Camera1Enumerator.getCameraIndex(cameraName));
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void rawCloseCamera(Object camera) {
+ ((android.hardware.Camera) camera).release();
+ }
+ }
+
+ private CameraVideoCapturerTestFixtures fixtures;
+
+ @Before
+ public void setUp() {
+ fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
+ }
+
+ @After
+ public void tearDown() {
+ fixtures.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateAndDispose() throws InterruptedException {
+ fixtures.createCapturerAndDispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateNonExistingCamera() throws InterruptedException {
+ fixtures.createNonExistingCamera();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using a "default" capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testCreateCapturerAndRender() throws InterruptedException {
+ fixtures.createCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the front facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartFrontFacingVideoCapturer() throws InterruptedException {
+ fixtures.createFrontFacingCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the back facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartBackFacingVideoCapturer() throws InterruptedException {
+ fixtures.createBackFacingCapturerAndRender();
+ }
+
+  // This tests that the default camera can be started and that the camera can
+ // later be switched to another camera.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturer() throws InterruptedException {
+ fixtures.switchCamera();
+ }
+
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturerToSpecificCameraName() throws InterruptedException {
+ fixtures.switchCamera(true /* specifyCameraName */);
+ }
+
+ @Test
+ @MediumTest
+ public void testCameraEvents() throws InterruptedException {
+ fixtures.cameraEventsInvoked();
+ }
+
+ // Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
+ @Test
+ @MediumTest
+ public void testCameraCallsAfterStop() throws InterruptedException {
+ fixtures.cameraCallsAfterStop();
+ }
+
+  // This tests that the VideoSource that the CameraVideoCapturer is connected to can
+ // be stopped and restarted. It tests both the Java and the C++ layer.
+ @Test
+ @LargeTest
+ public void testStopRestartVideoSource() throws InterruptedException {
+ fixtures.stopRestartVideoSource();
+ }
+
+  // This tests that the camera can be started at different resolutions.
+ // It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testStartStopWithDifferentResolutions() throws InterruptedException {
+ fixtures.startStopWithDifferentResolutions();
+ }
+
+  // This tests what happens if buffers are returned after the capturer has
+ // been stopped and restarted. It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testReturnBufferLate() throws InterruptedException {
+ fixtures.returnBufferLate();
+ }
+
+  // This tests that we can capture frames, keep the frames in a local renderer, stop capturing,
+  // and then return the frames. The difference from testReturnBufferLate() is that we
+ // also test the JNI and C++ AndroidVideoCapturer parts.
+ @Test
+ @MediumTest
+ public void testReturnBufferLateEndToEnd() throws InterruptedException {
+ fixtures.returnBufferLateEndToEnd();
+ }
+
+  // This tests that CameraEventsHandler.onCameraFreezed is triggered if video buffers are not returned to
+ // the capturer.
+ @Test
+ @LargeTest
+ public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
+ fixtures.cameraFreezedEventOnBufferStarvation();
+ }
+
+  // This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
+  // called. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testScaleCameraOutput() throws InterruptedException {
+ fixtures.scaleCameraOutput();
+ }
+
+  // This tests that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testCropCameraOutput() throws InterruptedException {
+ fixtures.cropCameraOutput();
+ }
+
+  // This tests that an error is reported if the camera is already opened
+ // when CameraVideoCapturer is started.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpen();
+ }
+
+  // This tests that CameraVideoCapturer can be started even if the camera is already open,
+  // provided the camera is closed while CameraVideoCapturer is re-trying to start.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
+ }
+
+  // This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
+ // re-trying to start.
+ @Test
+ @MediumTest
+ public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndStop();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
new file mode 100644
index 0000000000..b01737197a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
@@ -0,0 +1,334 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.fail;
+
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.os.Handler;
+import android.os.Looper;
+import android.support.test.InstrumentationRegistry;
+import androidx.annotation.Nullable;
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.util.concurrent.CountDownLatch;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class Camera2CapturerTest {
+ static final String TAG = "Camera2CapturerTest";
+
+ /**
+ * Simple camera2 implementation that only knows how to open the camera and close it.
+ */
+ private class SimpleCamera2 {
+ final CameraManager cameraManager;
+ final LooperThread looperThread;
+ final CountDownLatch openDoneSignal;
+ final Object cameraDeviceLock;
+ @Nullable CameraDevice cameraDevice; // Guarded by cameraDeviceLock
+ boolean openSucceeded; // Guarded by cameraDeviceLock
+
+ private class LooperThread extends Thread {
+ final CountDownLatch startedSignal = new CountDownLatch(1);
+ private Handler handler;
+
+ @Override
+ public void run() {
+ Looper.prepare();
+ handler = new Handler();
+ startedSignal.countDown();
+ Looper.loop();
+ }
+
+ public void waitToStart() {
+ ThreadUtils.awaitUninterruptibly(startedSignal);
+ }
+
+ public void requestStop() {
+ handler.getLooper().quit();
+ }
+
+ public Handler getHandler() {
+ return handler;
+ }
+ }
+
+ private class CameraStateCallback extends CameraDevice.StateCallback {
+ @Override
+ public void onClosed(CameraDevice cameraDevice) {
+ Logging.d(TAG, "Simple camera2 closed.");
+
+ synchronized (cameraDeviceLock) {
+ SimpleCamera2.this.cameraDevice = null;
+ }
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice cameraDevice) {
+ Logging.d(TAG, "Simple camera2 disconnected.");
+
+ synchronized (cameraDeviceLock) {
+ SimpleCamera2.this.cameraDevice = null;
+ }
+ }
+
+ @Override
+ public void onError(CameraDevice cameraDevice, int errorCode) {
+ Logging.w(TAG, "Simple camera2 error: " + errorCode);
+
+ synchronized (cameraDeviceLock) {
+ SimpleCamera2.this.cameraDevice = cameraDevice;
+ openSucceeded = false;
+ }
+
+ openDoneSignal.countDown();
+ }
+
+ @Override
+ public void onOpened(CameraDevice cameraDevice) {
+ Logging.d(TAG, "Simple camera2 opened.");
+
+ synchronized (cameraDeviceLock) {
+ SimpleCamera2.this.cameraDevice = cameraDevice;
+ openSucceeded = true;
+ }
+
+ openDoneSignal.countDown();
+ }
+ }
+
+ SimpleCamera2(Context context, String deviceName) {
+ cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ looperThread = new LooperThread();
+ looperThread.start();
+ looperThread.waitToStart();
+ cameraDeviceLock = new Object();
+ openDoneSignal = new CountDownLatch(1);
+ cameraDevice = null;
+ Logging.d(TAG, "Opening simple camera2.");
+ try {
+ cameraManager.openCamera(deviceName, new CameraStateCallback(), looperThread.getHandler());
+ } catch (CameraAccessException e) {
+ fail("Simple camera2 CameraAccessException: " + e.getMessage());
+ }
+
+ Logging.d(TAG, "Waiting for simple camera2 to open.");
+ ThreadUtils.awaitUninterruptibly(openDoneSignal);
+ synchronized (cameraDeviceLock) {
+ if (!openSucceeded) {
+ fail("Opening simple camera2 failed.");
+ }
+ }
+ }
+
+ public void close() {
+ Logging.d(TAG, "Closing simple camera2.");
+ synchronized (cameraDeviceLock) {
+ if (cameraDevice != null) {
+ cameraDevice.close();
+ }
+ }
+
+ looperThread.requestStop();
+ ThreadUtils.joinUninterruptibly(looperThread);
+ }
+ }
+
+ private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+ @Override
+ public CameraEnumerator getCameraEnumerator() {
+ return new Camera2Enumerator(getAppContext());
+ }
+
+ @Override
+ public Context getAppContext() {
+ return InstrumentationRegistry.getTargetContext();
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public Object rawOpenCamera(String cameraName) {
+ return new SimpleCamera2(getAppContext(), cameraName);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void rawCloseCamera(Object camera) {
+ ((SimpleCamera2) camera).close();
+ }
+ }
+
+ private CameraVideoCapturerTestFixtures fixtures;
+
+ @Before
+ public void setUp() {
+ fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
+ }
+
+ @After
+ public void tearDown() {
+ fixtures.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateAndDispose() throws InterruptedException {
+ fixtures.createCapturerAndDispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateNonExistingCamera() throws InterruptedException {
+ fixtures.createNonExistingCamera();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using a "default" capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testCreateCapturerAndRender() throws InterruptedException {
+ fixtures.createCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the front facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartFrontFacingVideoCapturer() throws InterruptedException {
+ fixtures.createFrontFacingCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the back facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartBackFacingVideoCapturer() throws InterruptedException {
+ fixtures.createBackFacingCapturerAndRender();
+ }
+
+  // This tests that the default camera can be started and that the camera can
+ // later be switched to another camera.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturer() throws InterruptedException {
+ fixtures.switchCamera();
+ }
+
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturerToSpecificCameraName() throws InterruptedException {
+ fixtures.switchCamera(true /* specifyCameraName */);
+ }
+
+ @Test
+ @MediumTest
+ public void testCameraEvents() throws InterruptedException {
+ fixtures.cameraEventsInvoked();
+ }
+
+ // Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
+ @Test
+ @MediumTest
+ public void testCameraCallsAfterStop() throws InterruptedException {
+ fixtures.cameraCallsAfterStop();
+ }
+
+  // This tests that the VideoSource that the CameraVideoCapturer is connected to can
+ // be stopped and restarted. It tests both the Java and the C++ layer.
+ @Test
+ @LargeTest
+ public void testStopRestartVideoSource() throws InterruptedException {
+ fixtures.stopRestartVideoSource();
+ }
+
+  // This tests that the camera can be started at different resolutions.
+ // It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testStartStopWithDifferentResolutions() throws InterruptedException {
+ fixtures.startStopWithDifferentResolutions();
+ }
+
+  // This tests what happens if buffers are returned after the capturer has
+ // been stopped and restarted. It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testReturnBufferLate() throws InterruptedException {
+ fixtures.returnBufferLate();
+ }
+
+  // This tests that we can capture frames, keep the frames in a local renderer, stop capturing,
+  // and then return the frames. The difference from testReturnBufferLate() is that we
+ // also test the JNI and C++ AndroidVideoCapturer parts.
+ @Test
+ @MediumTest
+ public void testReturnBufferLateEndToEnd() throws InterruptedException {
+ fixtures.returnBufferLateEndToEnd();
+ }
+
+  // This tests that CameraEventsHandler.onCameraFreezed is triggered if video buffers are not returned to
+ // the capturer.
+ @Test
+ @LargeTest
+ public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
+ fixtures.cameraFreezedEventOnBufferStarvation();
+ }
+
+  // This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
+  // called. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testScaleCameraOutput() throws InterruptedException {
+ fixtures.scaleCameraOutput();
+ }
+
+  // This tests that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testCropCameraOutput() throws InterruptedException {
+ fixtures.cropCameraOutput();
+ }
+
+  // This tests that an error is reported if the camera is already opened
+ // when CameraVideoCapturer is started.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpen();
+ }
+
+  // This tests that CameraVideoCapturer can be started even if the camera is already open,
+  // provided the camera is closed while CameraVideoCapturer is re-trying to start.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
+ }
+
+  // This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
+ // re-trying to start.
+ @Test
+ @MediumTest
+ public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndStop();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
new file mode 100644
index 0000000000..aa5fb0c1c9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
@@ -0,0 +1,793 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.content.Context;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.VideoFrame;
+
+class CameraVideoCapturerTestFixtures {
+ static final String TAG = "CameraVideoCapturerTestFixtures";
+  // Default values used when starting capture.
+ static final int DEFAULT_WIDTH = 640;
+ static final int DEFAULT_HEIGHT = 480;
+ static final int DEFAULT_FPS = 15;
+
+ static private class RendererCallbacks implements VideoSink {
+ private final Object frameLock = new Object();
+ private int framesRendered;
+ private int width;
+ private int height;
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ synchronized (frameLock) {
+ ++framesRendered;
+ width = frame.getRotatedWidth();
+ height = frame.getRotatedHeight();
+ frameLock.notify();
+ }
+ }
+
+ public int frameWidth() {
+ synchronized (frameLock) {
+ return width;
+ }
+ }
+
+ public int frameHeight() {
+ synchronized (frameLock) {
+ return height;
+ }
+ }
+
+ public int waitForNextFrameToRender() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the next frame to render");
+ synchronized (frameLock) {
+ final int framesRenderedStart = framesRendered;
+ while (framesRendered == framesRenderedStart) {
+ frameLock.wait();
+ }
+ return framesRendered;
+ }
+ }
+ }
+
+ static private class FakeAsyncRenderer implements VideoSink {
+ private final List<VideoFrame> pendingFrames = new ArrayList<VideoFrame>();
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ synchronized (pendingFrames) {
+ frame.retain();
+ pendingFrames.add(frame);
+ pendingFrames.notifyAll();
+ }
+ }
+
+    // Wait until at least one frame has been received before returning the pending frames.
+ public List<VideoFrame> waitForPendingFrames() throws InterruptedException {
+ Logging.d(TAG, "Waiting for pending frames");
+ synchronized (pendingFrames) {
+ while (pendingFrames.isEmpty()) {
+ pendingFrames.wait();
+ }
+ return new ArrayList<VideoFrame>(pendingFrames);
+ }
+ }
+ }
+
+ static private class FakeCapturerObserver implements CapturerObserver {
+ private int framesCaptured;
+ private @Nullable VideoFrame videoFrame;
+ final private Object frameLock = new Object();
+ final private Object capturerStartLock = new Object();
+ private Boolean capturerStartResult;
+ final private List<Long> timestamps = new ArrayList<Long>();
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ Logging.d(TAG, "onCapturerStarted: " + success);
+
+ synchronized (capturerStartLock) {
+ capturerStartResult = success;
+ capturerStartLock.notifyAll();
+ }
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ Logging.d(TAG, "onCapturerStopped");
+ }
+
+ @Override
+ public void onFrameCaptured(VideoFrame frame) {
+ synchronized (frameLock) {
+ ++framesCaptured;
+ if (videoFrame != null) {
+ videoFrame.release();
+ }
+ videoFrame = frame;
+ videoFrame.retain();
+ timestamps.add(videoFrame.getTimestampNs());
+ frameLock.notify();
+ }
+ }
+
+ public boolean waitForCapturerToStart() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the capturer to start");
+ synchronized (capturerStartLock) {
+ while (capturerStartResult == null) {
+ capturerStartLock.wait();
+ }
+ return capturerStartResult;
+ }
+ }
+
+ public int waitForNextCapturedFrame() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the next captured frame");
+ synchronized (frameLock) {
+ final int framesCapturedStart = framesCaptured;
+ while (framesCaptured == framesCapturedStart) {
+ frameLock.wait();
+ }
+ return framesCaptured;
+ }
+ }
+
+ int frameWidth() {
+ synchronized (frameLock) {
+ return videoFrame.getBuffer().getWidth();
+ }
+ }
+
+ int frameHeight() {
+ synchronized (frameLock) {
+ return videoFrame.getBuffer().getHeight();
+ }
+ }
+
+ void releaseFrame() {
+ synchronized (frameLock) {
+ if (videoFrame != null) {
+ videoFrame.release();
+ videoFrame = null;
+ }
+ }
+ }
+
+ List<Long> getCopyAndResetListOftimeStamps() {
+ synchronized (frameLock) {
+ ArrayList<Long> list = new ArrayList<Long>(timestamps);
+ timestamps.clear();
+ return list;
+ }
+ }
+ }
+
+ static class CameraEvents implements CameraVideoCapturer.CameraEventsHandler {
+ public boolean onCameraOpeningCalled;
+ public boolean onFirstFrameAvailableCalled;
+ private final Object onCameraFreezedLock = new Object();
+ private String onCameraFreezedDescription;
+ private final Object cameraClosedLock = new Object();
+ private boolean cameraClosed = true;
+
+ @Override
+ public void onCameraError(String errorDescription) {
+ Logging.w(TAG, "Camera error: " + errorDescription);
+      synchronized (cameraClosedLock) {
+        cameraClosed = true;
+        cameraClosedLock.notifyAll();
+      }
+ }
+
+ @Override
+ public void onCameraDisconnected() {}
+
+ @Override
+ public void onCameraFreezed(String errorDescription) {
+ synchronized (onCameraFreezedLock) {
+ onCameraFreezedDescription = errorDescription;
+ onCameraFreezedLock.notifyAll();
+ }
+ }
+
+ @Override
+ public void onCameraOpening(String cameraName) {
+ onCameraOpeningCalled = true;
+ synchronized (cameraClosedLock) {
+ cameraClosed = false;
+ }
+ }
+
+ @Override
+ public void onFirstFrameAvailable() {
+ onFirstFrameAvailableCalled = true;
+ }
+
+ @Override
+ public void onCameraClosed() {
+ synchronized (cameraClosedLock) {
+ cameraClosed = true;
+ cameraClosedLock.notifyAll();
+ }
+ }
+
+ public String waitForCameraFreezed() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the camera to freeze");
+ synchronized (onCameraFreezedLock) {
+ while (onCameraFreezedDescription == null) {
+ onCameraFreezedLock.wait();
+ }
+ return onCameraFreezedDescription;
+ }
+ }
+
+ public void waitForCameraClosed() throws InterruptedException {
+ synchronized (cameraClosedLock) {
+ while (!cameraClosed) {
+ Logging.d(TAG, "Waiting for the camera to close.");
+ cameraClosedLock.wait();
+ }
+ }
+ }
+ }
+
+ /**
+   * Class that collects all objects related to a single capturer instance.
+ */
+ static private class CapturerInstance {
+ public CameraVideoCapturer capturer;
+ public CameraEvents cameraEvents;
+ public SurfaceTextureHelper surfaceTextureHelper;
+ public FakeCapturerObserver observer;
+ public List<CaptureFormat> supportedFormats;
+ public CaptureFormat format;
+ }
+
+ /**
+ * Class used for collecting a VideoSource, a VideoTrack and a renderer. The class
+ * is used for testing local rendering from a capturer.
+ */
+ static private class VideoTrackWithRenderer {
+ public SurfaceTextureHelper surfaceTextureHelper;
+ public VideoSource source;
+ public VideoTrack track;
+ public RendererCallbacks rendererCallbacks;
+ public FakeAsyncRenderer fakeAsyncRenderer;
+ }
+
+ public abstract static class TestObjectFactory {
+ final CameraEnumerator cameraEnumerator;
+
+ TestObjectFactory() {
+ cameraEnumerator = getCameraEnumerator();
+ }
+
+ public CameraVideoCapturer createCapturer(
+ String name, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return cameraEnumerator.createCapturer(name, eventsHandler);
+ }
+
+ public @Nullable String getNameOfFrontFacingDevice() {
+ for (String deviceName : cameraEnumerator.getDeviceNames()) {
+ if (cameraEnumerator.isFrontFacing(deviceName)) {
+ return deviceName;
+ }
+ }
+
+ return null;
+ }
+
+ public @Nullable String getNameOfBackFacingDevice() {
+ for (String deviceName : cameraEnumerator.getDeviceNames()) {
+ if (cameraEnumerator.isBackFacing(deviceName)) {
+ return deviceName;
+ }
+ }
+
+ return null;
+ }
+
+ public boolean haveTwoCameras() {
+ return cameraEnumerator.getDeviceNames().length >= 2;
+ }
+
+ public boolean isCapturingToTexture() {
+ // In the future, we plan to only support capturing to texture, so default to true
+ return true;
+ }
+
+ abstract public CameraEnumerator getCameraEnumerator();
+ abstract public Context getAppContext();
+
+ // CameraVideoCapturer API is too slow for some of our tests where we need to open a competing
+ // camera. These methods are used instead.
+ abstract public Object rawOpenCamera(String cameraName);
+ abstract public void rawCloseCamera(Object camera);
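+
+ // For illustration only, a Camera1-based subclass might implement these two methods roughly
+ // as follows (hypothetical sketch; the name-to-index lookup helper is an assumption, not
+ // part of this fixture):
+ //
+ //   @Override
+ //   public Object rawOpenCamera(String cameraName) {
+ //     return android.hardware.Camera.open(getCameraIndexForName(cameraName));
+ //   }
+ //
+ //   @Override
+ //   public void rawCloseCamera(Object camera) {
+ //     ((android.hardware.Camera) camera).release();
+ //   }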
+ }
+
+ private PeerConnectionFactory peerConnectionFactory;
+ private TestObjectFactory testObjectFactory;
+
+ CameraVideoCapturerTestFixtures(TestObjectFactory testObjectFactory) {
+ PeerConnectionFactory.initialize(
+ PeerConnectionFactory.InitializationOptions.builder(testObjectFactory.getAppContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+
+ this.peerConnectionFactory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ this.testObjectFactory = testObjectFactory;
+ }
+
+ public void dispose() {
+ this.peerConnectionFactory.dispose();
+ }
+
+ // Internal helper methods
+ private CapturerInstance createCapturer(String name, boolean initialize) {
+ CapturerInstance instance = new CapturerInstance();
+ instance.cameraEvents = new CameraEvents();
+ instance.capturer = testObjectFactory.createCapturer(name, instance.cameraEvents);
+ instance.surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
+ instance.observer = new FakeCapturerObserver();
+ if (initialize) {
+ instance.capturer.initialize(
+ instance.surfaceTextureHelper, testObjectFactory.getAppContext(), instance.observer);
+ }
+ instance.supportedFormats = testObjectFactory.cameraEnumerator.getSupportedFormats(name);
+ return instance;
+ }
+
+ private CapturerInstance createCapturer(boolean initialize) {
+ String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
+ return createCapturer(name, initialize);
+ }
+
+ private void startCapture(CapturerInstance instance) {
+ startCapture(instance, 0);
+ }
+
+ private void startCapture(CapturerInstance instance, int formatIndex) {
+ final CameraEnumerationAndroid.CaptureFormat format =
+ instance.supportedFormats.get(formatIndex);
+
+ instance.capturer.startCapture(format.width, format.height, format.framerate.max);
+ instance.format = format;
+ }
+
+ private void disposeCapturer(CapturerInstance instance) throws InterruptedException {
+ instance.capturer.stopCapture();
+ instance.cameraEvents.waitForCameraClosed();
+ instance.capturer.dispose();
+ instance.observer.releaseFrame();
+ instance.surfaceTextureHelper.dispose();
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithRenderer(
+ CameraVideoCapturer capturer, VideoSink rendererCallbacks) {
+ VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
+ videoTrackWithRenderer.surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
+ videoTrackWithRenderer.source =
+ peerConnectionFactory.createVideoSource(/* isScreencast= */ false);
+ capturer.initialize(videoTrackWithRenderer.surfaceTextureHelper,
+ testObjectFactory.getAppContext(), videoTrackWithRenderer.source.getCapturerObserver());
+ capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
+ videoTrackWithRenderer.track =
+ peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source);
+ videoTrackWithRenderer.track.addSink(rendererCallbacks);
+ return videoTrackWithRenderer;
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer) {
+ RendererCallbacks rendererCallbacks = new RendererCallbacks();
+ VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturer, rendererCallbacks);
+ videoTrackWithRenderer.rendererCallbacks = rendererCallbacks;
+ return videoTrackWithRenderer;
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithFakeAsyncRenderer(
+ CameraVideoCapturer capturer) {
+ FakeAsyncRenderer fakeAsyncRenderer = new FakeAsyncRenderer();
+ VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturer, fakeAsyncRenderer);
+ videoTrackWithRenderer.fakeAsyncRenderer = fakeAsyncRenderer;
+ return videoTrackWithRenderer;
+ }
+
+ private void disposeVideoTrackWithRenderer(VideoTrackWithRenderer videoTrackWithRenderer) {
+ videoTrackWithRenderer.track.dispose();
+ videoTrackWithRenderer.source.dispose();
+ }
+
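+ // Posts a no-op to the capturer thread and blocks until it runs, i.e. until every task
+ // already queued on the SurfaceTextureHelper handler has been processed.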
+ private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
+ final CountDownLatch barrier = new CountDownLatch(1);
+ capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
+ @Override
+ public void run() {
+ barrier.countDown();
+ }
+ });
+ barrier.await();
+ }
+
+ private void createCapturerAndRender(String name) throws InterruptedException {
+ if (name == null) {
+ Logging.w(TAG, "Skipping video capturer test because device name is null.");
+ return;
+ }
+
+ final CapturerInstance capturerInstance = createCapturer(name, false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ }
+
+ // Test methods
+ public void createCapturerAndDispose() throws InterruptedException {
+ disposeCapturer(createCapturer(true /* initialize */));
+ }
+
+ public void createNonExistingCamera() throws InterruptedException {
+ try {
+ disposeCapturer(createCapturer("non-existing camera", false /* initialize */));
+ } catch (IllegalArgumentException e) {
+ return;
+ }
+
+ fail("Expected illegal argument exception when creating non-existing camera.");
+ }
+
+ public void createCapturerAndRender() throws InterruptedException {
+ String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
+ createCapturerAndRender(name);
+ }
+
+ public void createFrontFacingCapturerAndRender() throws InterruptedException {
+ createCapturerAndRender(testObjectFactory.getNameOfFrontFacingDevice());
+ }
+
+ public void createBackFacingCapturerAndRender() throws InterruptedException {
+ createCapturerAndRender(testObjectFactory.getNameOfBackFacingDevice());
+ }
+
+ public void switchCamera() throws InterruptedException {
+ switchCamera(false /* specifyCameraName */);
+ }
+
+ public void switchCamera(boolean specifyCameraName) throws InterruptedException {
+ if (!testObjectFactory.haveTwoCameras()) {
+ Logging.w(
+ TAG, "Skipping the camera switch test because the device doesn't have two cameras.");
+ return;
+ }
+
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ // Wait for the camera to start so that we can switch it.
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+ // One-element array so the anonymous handler below can store the result despite the
+ // requirement that captured local variables be (effectively) final.
+ final boolean[] cameraSwitchSuccessful = new boolean[1];
+ final CountDownLatch barrier = new CountDownLatch(1);
+ final CameraVideoCapturer.CameraSwitchHandler cameraSwitchHandler =
+ new CameraVideoCapturer.CameraSwitchHandler() {
+ @Override
+ public void onCameraSwitchDone(boolean isFrontCamera) {
+ cameraSwitchSuccessful[0] = true;
+ barrier.countDown();
+ }
+ @Override
+ public void onCameraSwitchError(String errorDescription) {
+ cameraSwitchSuccessful[0] = false;
+ barrier.countDown();
+ }
+ };
+ if (specifyCameraName) {
+ String expectedCameraName = testObjectFactory.cameraEnumerator.getDeviceNames()[1];
+ capturerInstance.capturer.switchCamera(cameraSwitchHandler, expectedCameraName);
+ } else {
+ capturerInstance.capturer.switchCamera(cameraSwitchHandler);
+ }
+ // Wait until the camera has been switched.
+ barrier.await();
+
+ // Check result.
+ assertTrue(cameraSwitchSuccessful[0]);
+ // Ensure that frames are received.
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ }
+
+ public void cameraEventsInvoked() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+ startCapture(capturerInstance);
+ // Make sure the camera is started and the first frame is received, then stop it.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.observer.waitForNextCapturedFrame();
+ disposeCapturer(capturerInstance);
+
+ assertTrue(capturerInstance.cameraEvents.onCameraOpeningCalled);
+ assertTrue(capturerInstance.cameraEvents.onFirstFrameAvailableCalled);
+ }
+
+ public void cameraCallsAfterStop() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+ startCapture(capturerInstance);
+ // Make sure camera is started and then stop it.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.capturer.stopCapture();
+ capturerInstance.observer.releaseFrame();
+
+ // We can't change `capturer` at this point, but we should not crash.
+ capturerInstance.capturer.switchCamera(null /* switchEventsHandler */);
+ capturerInstance.capturer.changeCaptureFormat(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
+
+ disposeCapturer(capturerInstance);
+ }
+
+ public void stopRestartVideoSource() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
+
+ capturerInstance.capturer.stopCapture();
+ assertEquals(MediaSource.State.ENDED, videoTrackWithRenderer.source.state());
+
+ startCapture(capturerInstance);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
+
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ }
+
+ public void startStopWithDifferentResolutions() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+
+ for (int i = 0; i < 3; ++i) {
+ startCapture(capturerInstance, i);
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.observer.waitForNextCapturedFrame();
+
+ // Check the frame size. The actual width and height depend on how the camera is mounted,
+ // so the frame may arrive rotated relative to the requested format.
+ final boolean identicalResolution =
+ (capturerInstance.observer.frameWidth() == capturerInstance.format.width
+ && capturerInstance.observer.frameHeight() == capturerInstance.format.height);
+ final boolean flippedResolution =
+ (capturerInstance.observer.frameWidth() == capturerInstance.format.height
+ && capturerInstance.observer.frameHeight() == capturerInstance.format.width);
+ if (!identicalResolution && !flippedResolution) {
+ fail("Wrong resolution, got: " + capturerInstance.observer.frameWidth() + "x"
+ + capturerInstance.observer.frameHeight() + " expected: "
+ + capturerInstance.format.width + "x" + capturerInstance.format.height + " or "
+ + capturerInstance.format.height + "x" + capturerInstance.format.width);
+ }
+
+ capturerInstance.capturer.stopCapture();
+ capturerInstance.observer.releaseFrame();
+ }
+ disposeCapturer(capturerInstance);
+ }
+
+ public void returnBufferLate() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+ startCapture(capturerInstance);
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+
+ capturerInstance.observer.waitForNextCapturedFrame();
+ capturerInstance.capturer.stopCapture();
+ List<Long> listOfTimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
+ assertTrue(listOfTimestamps.size() >= 1);
+
+ startCapture(capturerInstance, 1);
+ capturerInstance.observer.waitForCapturerToStart();
+ capturerInstance.observer.releaseFrame();
+
+ capturerInstance.observer.waitForNextCapturedFrame();
+ capturerInstance.capturer.stopCapture();
+
+ listOfTimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
+ assertTrue(listOfTimestamps.size() >= 1);
+
+ disposeCapturer(capturerInstance);
+ }
+
+ public void returnBufferLateEndToEnd() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
+ // Wait for at least one frame that has not been returned.
+ assertFalse(videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames().isEmpty());
+
+ capturerInstance.capturer.stopCapture();
+
+ // Dispose everything.
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+
+ // Return the frame(s) from a different thread, to exercise cross-thread release.
+ final List<VideoFrame> pendingFrames =
+ videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
+ final Thread returnThread = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ for (VideoFrame frame : pendingFrames) {
+ frame.release();
+ }
+ }
+ });
+ returnThread.start();
+ returnThread.join();
+ }
+
+ public void cameraFreezedEventOnBufferStarvation() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+ startCapture(capturerInstance);
+ // Make sure camera is started.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ // Since we don't return the buffer, we should get a starvation message if we are
+ // capturing to a texture.
+ assertEquals("Camera failure. Client must return video buffers.",
+ capturerInstance.cameraEvents.waitForCameraFreezed());
+
+ capturerInstance.capturer.stopCapture();
+ disposeCapturer(capturerInstance);
+ }
+
+ public void scaleCameraOutput() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+ final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
+ final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
+ final int frameRate = 30;
+ final int scaledWidth = startWidth / 2;
+ final int scaledHeight = startHeight / 2;
+
+ // Request the captured frames to be scaled.
+ videoTrackWithRenderer.source.adaptOutputFormat(scaledWidth, scaledHeight, frameRate);
+
+ boolean gotExpectedResolution = false;
+ int numberOfInspectedFrames = 0;
+
+ do {
+ videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
+ ++numberOfInspectedFrames;
+
+ gotExpectedResolution = (videoTrackWithRenderer.rendererCallbacks.frameWidth() == scaledWidth
+ && videoTrackWithRenderer.rendererCallbacks.frameHeight() == scaledHeight);
+ } while (!gotExpectedResolution && numberOfInspectedFrames < 30);
+
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+
+ assertTrue(gotExpectedResolution);
+ }
+
+ public void cropCameraOutput() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+ final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
+ final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
+ final int frameRate = 30;
+ final int cropWidth;
+ final int cropHeight;
+ if (startWidth > startHeight) {
+ // Landscape input, request portrait output.
+ cropWidth = 360;
+ cropHeight = 640;
+ } else {
+ // Portrait input, request landscape output.
+ cropWidth = 640;
+ cropHeight = 360;
+ }
+
+ // Request different output orientation than input.
+ videoTrackWithRenderer.source.adaptOutputFormat(
+ cropWidth, cropHeight, cropWidth, cropHeight, frameRate);
+
+ boolean gotExpectedOrientation = false;
+ int numberOfInspectedFrames = 0;
+
+ do {
+ videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
+ ++numberOfInspectedFrames;
+
+ gotExpectedOrientation = (cropWidth > cropHeight)
+ == (videoTrackWithRenderer.rendererCallbacks.frameWidth()
+ > videoTrackWithRenderer.rendererCallbacks.frameHeight());
+ } while (!gotExpectedOrientation && numberOfInspectedFrames < 30);
+
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+
+ assertTrue(gotExpectedOrientation);
+ }
+
+ public void startWhileCameraIsAlreadyOpen() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+ // At this point the camera is not actually opened yet.
+ final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);
+
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ startCapture(capturerInstance);
+
+ if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
+ // The first opened camera client will be evicted.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ } else {
+ assertFalse(capturerInstance.observer.waitForCapturerToStart());
+ }
+
+ testObjectFactory.rawCloseCamera(competingCamera);
+ disposeCapturer(capturerInstance);
+ }
+
+ public void startWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+ // At this point the camera is not actually opened yet.
+ final CapturerInstance capturerInstance = createCapturer(cameraName, false /* initialize */);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening competing camera.");
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening camera.");
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ waitUntilIdle(capturerInstance);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Closing competing camera.");
+ testObjectFactory.rawCloseCamera(competingCamera);
+
+ // Make sure the camera is started and the first frame is received, then stop it.
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Waiting for capture to start.");
+ videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Stopping capture.");
+ disposeCapturer(capturerInstance);
+ }
+
+ public void startWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+ // At this point the camera is not actually opened yet.
+ final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);
+
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ startCapture(capturerInstance);
+ disposeCapturer(capturerInstance);
+
+ testObjectFactory.rawCloseCamera(competingCamera);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java
new file mode 100644
index 0000000000..9721cbd818
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+
+import androidx.annotation.Nullable;
+import androidx.test.filters.SmallTest;
+import java.util.ArrayList;
+import java.util.HashMap;
+import org.junit.Before;
+import org.junit.Test;
+
+/** Unit tests for {@link DefaultVideoEncoderFactory}. */
+public class DefaultVideoEncoderFactoryTest {
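+ // DefaultVideoEncoderFactory is expected to combine the supplied hardware factory with the
+ // built-in software encoder factory; that is why software-only codecs such as AV1 appear in
+ // the expected codec lists below.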
+ static class CustomHardwareVideoEncoderFactory implements VideoEncoderFactory {
+ private ArrayList<VideoCodecInfo> codecs = new ArrayList<>();
+
+ public CustomHardwareVideoEncoderFactory(boolean includeVP8, boolean includeH264High) {
+ if (includeVP8) {
+ codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
+ }
+ codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
+
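+ // "42e01f" denotes H.264 Constrained Baseline, level 3.1; "640c1f" below denotes
+ // H.264 High profile, level 3.1.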
+ HashMap<String, String> baselineParams = new HashMap<String, String>();
+ baselineParams.put("profile-level-id", "42e01f");
+ baselineParams.put("level-asymmetry-allowed", "1");
+ baselineParams.put("packetization-mode", "1");
+ codecs.add(new VideoCodecInfo("H264", baselineParams));
+
+ if (includeH264High) {
+ HashMap<String, String> highParams = new HashMap<String, String>();
+ highParams.put("profile-level-id", "640c1f");
+ highParams.put("level-asymmetry-allowed", "1");
+ highParams.put("packetization-mode", "1");
+ codecs.add(new VideoCodecInfo("H264", highParams));
+ }
+ }
+
+ @Override
+ public @Nullable VideoEncoder createEncoder(VideoCodecInfo info) {
+ return null;
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ return codecs.toArray(new VideoCodecInfo[codecs.size()]);
+ }
+ }
+
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ @SmallTest
+ @Test
+ public void testGetSupportedCodecsWithHardwareH264HighProfile() {
+ VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(true, true);
+ DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
+ VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
+ assertEquals(5, videoCodecs.length);
+ assertEquals("VP8", videoCodecs[0].name);
+ assertEquals("VP9", videoCodecs[1].name);
+ assertEquals("AV1", videoCodecs[2].name);
+ assertEquals("H264", videoCodecs[3].name);
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
+ assertEquals("H264", videoCodecs[4].name);
+ assertEquals("640c1f", videoCodecs[4].params.get("profile-level-id"));
+ }
+
+ @SmallTest
+ @Test
+ public void testGetSupportedCodecsWithoutHardwareH264HighProfile() {
+ VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(true, false);
+ DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
+ VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
+ assertEquals(4, videoCodecs.length);
+ assertEquals("VP8", videoCodecs[0].name);
+ assertEquals("VP9", videoCodecs[1].name);
+ assertEquals("AV1", videoCodecs[2].name);
+ assertEquals("H264", videoCodecs[3].name);
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
+ }
+
+ @SmallTest
+ @Test
+ public void testGetSupportedCodecsWithoutHardwareVP8() {
+ VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(false, true);
+ DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
+ VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
+ assertEquals(5, videoCodecs.length);
+ assertEquals("VP8", videoCodecs[0].name);
+ assertEquals("VP9", videoCodecs[1].name);
+ assertEquals("AV1", videoCodecs[2].name);
+ assertEquals("H264", videoCodecs[3].name);
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
+ assertEquals("H264", videoCodecs[4].name);
+ assertEquals("640c1f", videoCodecs[4].params.get("profile-level-id"));
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java
new file mode 100644
index 0000000000..8b5e95b855
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java
@@ -0,0 +1,366 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.graphics.Bitmap;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.concurrent.CountDownLatch;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+// EmptyActivity is needed for the surface.
+public class EglRendererTest {
+ private final static String TAG = "EglRendererTest";
+ private final static int RENDER_WAIT_MS = 1000;
+ private final static int SURFACE_WAIT_MS = 1000;
+ private final static int TEST_FRAME_WIDTH = 4;
+ private final static int TEST_FRAME_HEIGHT = 4;
+ private final static int REMOVE_FRAME_LISTENER_RACY_NUM_TESTS = 10;
+ // Some arbitrary frames.
+ private final static byte[][][] TEST_FRAMES_DATA = {
+ {
+ new byte[] {
+ -99, -93, -88, -83, -78, -73, -68, -62, -56, -52, -46, -41, -36, -31, -26, -20},
+ new byte[] {110, 113, 116, 118}, new byte[] {31, 45, 59, 73},
+ },
+ {
+ new byte[] {
+ -108, -103, -98, -93, -87, -82, -77, -72, -67, -62, -56, -50, -45, -40, -35, -30},
+ new byte[] {120, 123, 125, -127}, new byte[] {87, 100, 114, 127},
+ },
+ {
+ new byte[] {
+ -117, -112, -107, -102, -97, -92, -87, -81, -75, -71, -65, -60, -55, -50, -44, -39},
+ new byte[] {113, 116, 118, 120}, new byte[] {45, 59, 73, 87},
+ },
+ };
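+ // JavaI420Buffer.wrap() (used in feedFrame() below) requires direct ByteBuffers, hence the
+ // copy of the test data into direct buffers here.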
+ private final static ByteBuffer[][] TEST_FRAMES =
+ copyTestDataToDirectByteBuffers(TEST_FRAMES_DATA);
+
+ private static class TestFrameListener implements EglRenderer.FrameListener {
+ final private ArrayList<Bitmap> bitmaps = new ArrayList<Bitmap>();
+ boolean bitmapReceived;
+ Bitmap storedBitmap;
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrame(Bitmap bitmap) {
+ if (bitmapReceived) {
+ fail("Unexpected bitmap was received.");
+ }
+
+ bitmapReceived = true;
+ storedBitmap = bitmap;
+ notify();
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized boolean waitForBitmap(int timeoutMs) throws InterruptedException {
+ final long endTimeMs = System.currentTimeMillis() + timeoutMs;
+ while (!bitmapReceived) {
+ final long waitTimeMs = endTimeMs - System.currentTimeMillis();
+ if (waitTimeMs < 0) {
+ return false;
+ }
+ wait(waitTimeMs); // Wait at most the time remaining until the deadline.
+ }
+ return true;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized Bitmap resetAndGetBitmap() {
+ bitmapReceived = false;
+ return storedBitmap;
+ }
+ }
+
+ final TestFrameListener testFrameListener = new TestFrameListener();
+
+ EglRenderer eglRenderer;
+ CountDownLatch surfaceReadyLatch = new CountDownLatch(1);
+ int oesTextureId;
+ SurfaceTexture surfaceTexture;
+
+ @Before
+ public void setUp() throws Exception {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ eglRenderer = new EglRenderer("TestRenderer: ");
+ eglRenderer.init(null /* sharedContext */, EglBase.CONFIG_RGBA, new GlRectDrawer());
+ oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ surfaceTexture = new SurfaceTexture(oesTextureId);
+ surfaceTexture.setDefaultBufferSize(1 /* width */, 1 /* height */);
+ eglRenderer.createEglSurface(surfaceTexture);
+ }
+
+ @After
+ public void tearDown() {
+ surfaceTexture.release();
+ GLES20.glDeleteTextures(1 /* n */, new int[] {oesTextureId}, 0 /* offset */);
+ eglRenderer.release();
+ }
+
+ /** Checks the bitmap is not null and the correct size. */
+ private static void checkBitmap(Bitmap bitmap, float scale) {
+ assertNotNull(bitmap);
+ assertEquals((int) (TEST_FRAME_WIDTH * scale), bitmap.getWidth());
+ assertEquals((int) (TEST_FRAME_HEIGHT * scale), bitmap.getHeight());
+ }
+
+ /**
+ * Does linear sampling on U/V plane of test data.
+ *
+ * @param data Plane data to be sampled from.
+ * @param planeWidth Width of the plane data. This is also assumed to be the stride.
+ * @param planeHeight Height of the plane data.
+ * @param x X-coordinate in range [0, 1].
+ * @param y Y-coordinate in range [0, 1].
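+ *
+ * For example, with planeWidth = 2 and x = 0.5, coordX is 1.0: the low index is 0 and the
+ * high index is 1, each with weight 0.5, i.e. an even blend of the two texels, matching
+ * GL_LINEAR filtering.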
+ */
+ private static float linearSample(
+ ByteBuffer plane, int planeWidth, int planeHeight, float x, float y) {
+ final int stride = planeWidth;
+
+ final float coordX = x * planeWidth;
+ final float coordY = y * planeHeight;
+
+ int lowIndexX = (int) Math.floor(coordX - 0.5f);
+ int lowIndexY = (int) Math.floor(coordY - 0.5f);
+ int highIndexX = lowIndexX + 1;
+ int highIndexY = lowIndexY + 1;
+
+ final float highWeightX = coordX - lowIndexX - 0.5f;
+ final float highWeightY = coordY - lowIndexY - 0.5f;
+ final float lowWeightX = 1f - highWeightX;
+ final float lowWeightY = 1f - highWeightY;
+
+ // Clamp on the edges.
+ lowIndexX = Math.max(0, lowIndexX);
+ lowIndexY = Math.max(0, lowIndexY);
+ highIndexX = Math.min(planeWidth - 1, highIndexX);
+ highIndexY = Math.min(planeHeight - 1, highIndexY);
+
+ float lowYValue = (plane.get(lowIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
+ + (plane.get(lowIndexY * stride + highIndexX) & 0xFF) * highWeightX;
+ float highYValue = (plane.get(highIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
+ + (plane.get(highIndexY * stride + highIndexX) & 0xFF) * highWeightX;
+
+ return lowWeightY * lowYValue + highWeightY * highYValue;
+ }
+
+ private static byte saturatedFloatToByte(float c) {
+ return (byte) Math.round(255f * Math.max(0f, Math.min(1f, c)));
+ }
+
+ /**
+ * Converts test data YUV frame to expected RGBA frame. Tries to match the behavior of OpenGL
+ * YUV drawer shader. Does linear sampling on the U- and V-planes.
+ *
+ * @param yuvFrame Array of size 3 containing Y-, U-, V-planes for image of size
+ * (TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT). U- and V-planes should be half the size
+ * of the Y-plane.
+ */
+ private static byte[] convertYUVFrameToRGBA(ByteBuffer[] yuvFrame) {
+ final byte[] argbFrame = new byte[TEST_FRAME_WIDTH * TEST_FRAME_HEIGHT * 4];
+ final int argbStride = TEST_FRAME_WIDTH * 4;
+ final int yStride = TEST_FRAME_WIDTH;
+
+ for (int y = 0; y < TEST_FRAME_HEIGHT; y++) {
+ for (int x = 0; x < TEST_FRAME_WIDTH; x++) {
+ final float yC = ((yuvFrame[0].get(y * yStride + x) & 0xFF) - 16f) / 219f;
+ final float uC = (linearSample(yuvFrame[1], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
+ (x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
+ - 16f)
+ / 224f
+ - 0.5f;
+ final float vC = (linearSample(yuvFrame[2], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
+ (x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
+ - 16f)
+ / 224f
+ - 0.5f;
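+ // 1.403, 0.344, 0.714 and 1.77 are (approximately) the BT.601 YUV-to-RGB conversion
+ // coefficients, applied after the limited-range normalization above.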
+ final float rC = yC + 1.403f * vC;
+ final float gC = yC - 0.344f * uC - 0.714f * vC;
+ final float bC = yC + 1.77f * uC;
+
+ argbFrame[y * argbStride + x * 4 + 0] = saturatedFloatToByte(rC);
+ argbFrame[y * argbStride + x * 4 + 1] = saturatedFloatToByte(gC);
+ argbFrame[y * argbStride + x * 4 + 2] = saturatedFloatToByte(bC);
+ argbFrame[y * argbStride + x * 4 + 3] = (byte) 255;
+ }
+ }
+
+ return argbFrame;
+ }
+
+ /** Checks that the bitmap content matches the test frame with the given index. */
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ private static void checkBitmapContent(Bitmap bitmap, int frame) {
+ checkBitmap(bitmap, 1f);
+
+ byte[] expectedRGBA = convertYUVFrameToRGBA(TEST_FRAMES[frame]);
+ ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(bitmap.getByteCount());
+ bitmap.copyPixelsToBuffer(bitmapBuffer);
+
+ for (int i = 0; i < expectedRGBA.length; i++) {
+ int expected = expectedRGBA[i] & 0xFF;
+ int value = bitmapBuffer.get(i) & 0xFF;
+ // Due to unknown conversion differences check value matches +-1.
+ if (Math.abs(value - expected) > 1) {
+ Logging.d(TAG, "Expected bitmap content: " + Arrays.toString(expectedRGBA));
+ Logging.d(TAG, "Bitmap content: " + Arrays.toString(bitmapBuffer.array()));
+ fail("Frame doesn't match original frame on byte " + i + ". Expected: " + expected
+ + " Result: " + value);
+ }
+ }
+ }
+
+ /** Tells eglRenderer to render test frame with given index. */
+ private void feedFrame(int i) {
+ final VideoFrame.I420Buffer buffer = JavaI420Buffer.wrap(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT,
+ TEST_FRAMES[i][0], TEST_FRAME_WIDTH, TEST_FRAMES[i][1], TEST_FRAME_WIDTH / 2,
+ TEST_FRAMES[i][2], TEST_FRAME_WIDTH / 2, null /* releaseCallback */);
+ final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, 0 /* timestamp */);
+ eglRenderer.onFrame(frame);
+ frame.release();
+ }
+
+ @Test
+ @SmallTest
+ public void testAddFrameListener() throws Exception {
+ eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
+ feedFrame(0);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ assertNull(testFrameListener.resetAndGetBitmap());
+ eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
+ feedFrame(1);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ assertNull(testFrameListener.resetAndGetBitmap());
+ feedFrame(2);
+ // Check that we get no more than two bitmaps.
+ assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ }
+
+ @Test
+ @SmallTest
+ public void testAddFrameListenerBitmap() throws Exception {
+ eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
+ feedFrame(0);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
+ eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
+ feedFrame(1);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
+ }
+
+ @Test
+ @SmallTest
+ public void testAddFrameListenerBitmapScale() throws Exception {
+ for (int i = 0; i < 3; ++i) {
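+ // scale takes the values 0.5, 1.0 and 1.5.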
+ float scale = i * 0.5f + 0.5f;
+ eglRenderer.addFrameListener(testFrameListener, scale);
+ feedFrame(i);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmap(testFrameListener.resetAndGetBitmap(), scale);
+ }
+ }
+
+ /**
+ * Checks that the frame listener will not be called with a frame that was delivered before the
+ * frame listener was added.
+ */
+ @Test
+ @SmallTest
+ public void testFrameListenerNotCalledWithOldFrames() throws Exception {
+ feedFrame(0);
+ eglRenderer.addFrameListener(testFrameListener, 0f);
+ // Check that the old frame does not trigger the frame listener.
+ assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ }
+
+ /** Checks that the frame listener will not be called after it is removed. */
+ @Test
+ @SmallTest
+ public void testRemoveFrameListenerNotRacy() throws Exception {
+ for (int i = 0; i < REMOVE_FRAME_LISTENER_RACY_NUM_TESTS; i++) {
+ feedFrame(0);
+ eglRenderer.addFrameListener(testFrameListener, 0f);
+ eglRenderer.removeFrameListener(testFrameListener);
+ feedFrame(1);
+ }
+ // Check the frame listener hasn't triggered.
+ assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ }
+
+ @Test
+ @SmallTest
+ public void testFrameListenersFpsReduction() throws Exception {
+ // Test that normal frame listeners receive frames while the renderer is paused.
+ eglRenderer.pauseVideo();
+ eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
+ feedFrame(0);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
+
+ // Test that frame listeners with FPS reduction applied receive frames while the renderer is not
+ // paused.
+ eglRenderer.disableFpsReduction();
+ eglRenderer.addFrameListener(
+ testFrameListener, 1f /* scaleFactor */, null, true /* applyFpsReduction */);
+ feedFrame(1);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
+
+ // Test that frame listeners with FPS reduction applied will not receive frames while the
+ // renderer is paused.
+ eglRenderer.pauseVideo();
+ eglRenderer.addFrameListener(
+ testFrameListener, 1f /* scaleFactor */, null, true /* applyFpsReduction */);
+ feedFrame(1);
+ assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ }
+
+ private static ByteBuffer[][] copyTestDataToDirectByteBuffers(byte[][][] testData) {
+ final ByteBuffer[][] result = new ByteBuffer[testData.length][];
+
+ for (int i = 0; i < testData.length; i++) {
+ result[i] = new ByteBuffer[testData[i].length];
+ for (int j = 0; j < testData[i].length; j++) {
+ result[i][j] = ByteBuffer.allocateDirect(testData[i][j].length);
+ result[i][j].put(testData[i][j]);
+ result[i][j].rewind();
+ }
+ }
+ return result;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java
new file mode 100644
index 0000000000..8584ddf464
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import android.os.Environment;
+import androidx.test.filters.SmallTest;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import org.junit.Before;
+import org.junit.Test;
+
+public class FileVideoCapturerTest {
+ public static class MockCapturerObserver implements CapturerObserver {
+ private final ArrayList<VideoFrame> frames = new ArrayList<VideoFrame>();
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ assertTrue(success);
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ // Empty on purpose.
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrameCaptured(VideoFrame frame) {
+ frame.retain();
+ frames.add(frame);
+ notify();
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized ArrayList<VideoFrame> getMinimumFramesBlocking(int minFrames)
+ throws InterruptedException {
+ while (frames.size() < minFrames) {
+ wait();
+ }
+ return new ArrayList<VideoFrame>(frames);
+ }
+ }
+
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testVideoCaptureFromFile() throws InterruptedException, IOException {
+ final int FRAME_WIDTH = 4;
+ final int FRAME_HEIGHT = 4;
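+ // I420 chroma planes are subsampled by two in each dimension; sizes round up for odd
+ // dimensions.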
+ final int FRAME_CHROMA_WIDTH = (FRAME_WIDTH + 1) / 2;
+ final int FRAME_CHROMA_HEIGHT = (FRAME_HEIGHT + 1) / 2;
+ final int FRAME_SIZE_Y = FRAME_WIDTH * FRAME_HEIGHT;
+ final int FRAME_SIZE_CHROMA = FRAME_CHROMA_WIDTH * FRAME_CHROMA_HEIGHT;
+
+ final FileVideoCapturer fileVideoCapturer =
+ new FileVideoCapturer(Environment.getExternalStorageDirectory().getPath()
+ + "/chromium_tests_root/sdk/android/instrumentationtests/src/org/webrtc/"
+ + "capturetestvideo.y4m");
+ final MockCapturerObserver capturerObserver = new MockCapturerObserver();
+ fileVideoCapturer.initialize(
+ null /* surfaceTextureHelper */, null /* applicationContext */, capturerObserver);
+ fileVideoCapturer.startCapture(FRAME_WIDTH, FRAME_HEIGHT, 33 /* fps */);
+
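+ // Each 4x4 I420 frame is 24 bytes (16 Y + 4 U + 4 V), matching the 24-character strings
+ // below. The spelling "THRID" is kept as-is; the strings must match the text embedded in
+ // the test file byte for byte.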
+ final String[] expectedFrames = {
+ "THIS IS JUST SOME TEXT x", "THE SECOND FRAME qwerty.", "HERE IS THE THRID FRAME!"};
+
+ final ArrayList<VideoFrame> frames =
+ capturerObserver.getMinimumFramesBlocking(expectedFrames.length);
+ assertEquals(expectedFrames.length, frames.size());
+
+ fileVideoCapturer.stopCapture();
+ fileVideoCapturer.dispose();
+
+ // Check the content of the frames.
+ for (int i = 0; i < expectedFrames.length; ++i) {
+ final VideoFrame frame = frames.get(i);
+ final VideoFrame.Buffer buffer = frame.getBuffer();
+ assertTrue(buffer instanceof VideoFrame.I420Buffer);
+ final VideoFrame.I420Buffer i420Buffer = (VideoFrame.I420Buffer) buffer;
+
+ assertEquals(FRAME_WIDTH, i420Buffer.getWidth());
+ assertEquals(FRAME_HEIGHT, i420Buffer.getHeight());
+
+ final ByteBuffer dataY = i420Buffer.getDataY();
+ final ByteBuffer dataU = i420Buffer.getDataU();
+ final ByteBuffer dataV = i420Buffer.getDataV();
+
+ assertEquals(FRAME_SIZE_Y, dataY.remaining());
+ assertEquals(FRAME_SIZE_CHROMA, dataU.remaining());
+ assertEquals(FRAME_SIZE_CHROMA, dataV.remaining());
+
+ ByteBuffer frameContents = ByteBuffer.allocate(FRAME_SIZE_Y + 2 * FRAME_SIZE_CHROMA);
+ frameContents.put(dataY);
+ frameContents.put(dataU);
+ frameContents.put(dataV);
+ frameContents.rewind(); // Move back to the beginning.
+
+ assertByteBufferContents(
+ expectedFrames[i].getBytes(Charset.forName("US-ASCII")), frameContents);
+ frame.release();
+ }
+ }
+
+ private static void assertByteBufferContents(byte[] expected, ByteBuffer actual) {
+ assertEquals("Unexpected ByteBuffer size.", expected.length, actual.remaining());
+ for (int i = 0; i < expected.length; i++) {
+ assertEquals("Unexpected byte at index: " + i, expected[i], actual.get());
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java
new file mode 100644
index 0000000000..4cee3bdf71
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java
@@ -0,0 +1,318 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.opengl.GLES20;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.Random;
+import org.junit.Test;
+
+public class GlRectDrawerTest {
+ // Resolution of the test image.
+ private static final int WIDTH = 16;
+ private static final int HEIGHT = 16;
+ // Seed for random pixel creation.
+ private static final int SEED = 42;
+ // When comparing pixels, allow some slack for float arithmetic and integer rounding.
+ private static final float MAX_DIFF = 1.5f;
+
+ // clang-format off
+ private static final float[] IDENTITY_MATRIX = {
+ 1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 0, 0, 1};
+ // clang-format on
+
+ private static float normalizedByte(byte b) {
+ return (b & 0xFF) / 255.0f;
+ }
+
+ private static float saturatedConvert(float c) {
+ return 255.0f * Math.max(0, Math.min(c, 1));
+ }
+
+ // Assert that two RGB ByteBuffers are pixel-perfect identical.
+ private static void assertByteBufferEquals(
+ int width, int height, ByteBuffer actual, ByteBuffer expected) {
+ actual.rewind();
+ expected.rewind();
+ assertEquals(actual.remaining(), width * height * 3);
+ assertEquals(expected.remaining(), width * height * 3);
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final int actualR = actual.get() & 0xFF;
+ final int actualG = actual.get() & 0xFF;
+ final int actualB = actual.get() & 0xFF;
+ final int expectedR = expected.get() & 0xFF;
+ final int expectedG = expected.get() & 0xFF;
+ final int expectedB = expected.get() & 0xFF;
+ if (actualR != expectedR || actualG != expectedG || actualB != expectedB) {
+ fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
+ + "(" + x + ", " + y + "). Expected color (R,G,B): "
+ + "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
+ + " but was: "
+ + "(" + actualR + ", " + actualG + ", " + actualB + ").");
+ }
+ }
+ }
+ }
+
+ // Convert RGBA ByteBuffer to RGB ByteBuffer.
+ private static ByteBuffer stripAlphaChannel(ByteBuffer rgbaBuffer) {
+ rgbaBuffer.rewind();
+ assertEquals(rgbaBuffer.remaining() % 4, 0);
+ final int numberOfPixels = rgbaBuffer.remaining() / 4;
+ final ByteBuffer rgbBuffer = ByteBuffer.allocateDirect(numberOfPixels * 3);
+ while (rgbaBuffer.hasRemaining()) {
+ // Copy RGB.
+ for (int channel = 0; channel < 3; ++channel) {
+ rgbBuffer.put(rgbaBuffer.get());
+ }
+ // Drop alpha.
+ rgbaBuffer.get();
+ }
+ return rgbBuffer;
+ }
+
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ @Test
+ @SmallTest
+ public void testRgbRendering() {
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+ eglBase.makeCurrent();
+
+ // Create RGB byte buffer plane with random content.
+ final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
+ final Random random = new Random(SEED);
+ random.nextBytes(rgbPlane.array());
+
+ // Upload the RGB byte buffer data as a texture.
+ final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
+ GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+ GlUtil.checkNoGLES2Error("glTexImage2D");
+
+ // Draw the RGB frame onto the pixel buffer.
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawRgb(rgbTexture, IDENTITY_MATRIX, WIDTH, HEIGHT, 0 /* viewportX */, 0 /* viewportY */,
+ WIDTH, HEIGHT);
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is pixel perfect to source RGB.
+ assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
+
+ drawer.release();
+ GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
+ eglBase.release();
+ }
+
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ @Test
+ @SmallTest
+ public void testYuvRendering() {
+ // Create EGL base with a pixel buffer as display output.
+ EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+ eglBase.makeCurrent();
+
+ // Create YUV byte buffer planes with random content.
+ final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+ final Random random = new Random(SEED);
+ for (int i = 0; i < 3; ++i) {
+ yuvPlanes[i] = ByteBuffer.allocateDirect(WIDTH * HEIGHT);
+ random.nextBytes(yuvPlanes[i].array());
+ }
+
+ // Generate 3 texture ids for Y/U/V.
+ final int yuvTextures[] = new int[3];
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+
+ // Upload the YUV byte buffer data as textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH, HEIGHT, 0,
+ GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
+ GlUtil.checkNoGLES2Error("glTexImage2D");
+ }
+
+ // Draw the YUV frame onto the pixel buffer.
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawYuv(yuvTextures, IDENTITY_MATRIX, WIDTH, HEIGHT, 0 /* viewportX */,
+ 0 /* viewportY */, WIDTH, HEIGHT);
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Compare the YUV data with the RGBA result.
+ for (int y = 0; y < HEIGHT; ++y) {
+ for (int x = 0; x < WIDTH; ++x) {
+ // YUV color space. Y in [0, 1], UV in [-0.5, 0.5]. The constants are taken from the YUV
+ // fragment shader code in GlGenericDrawer.
+ final float y_luma = normalizedByte(yuvPlanes[0].get());
+ final float u_chroma = normalizedByte(yuvPlanes[1].get());
+ final float v_chroma = normalizedByte(yuvPlanes[2].get());
+ // Expected color in unrounded RGB [0.0f, 255.0f].
+ final float expectedRed =
+ saturatedConvert(1.16438f * y_luma + 1.59603f * v_chroma - 0.874202f);
+ final float expectedGreen = saturatedConvert(
+ 1.16438f * y_luma - 0.391762f * u_chroma - 0.812968f * v_chroma + 0.531668f);
+ final float expectedBlue =
+ saturatedConvert(1.16438f * y_luma + 2.01723f * u_chroma - 1.08563f);
+
+ // Actual color in RGB8888.
+ final int actualRed = data.get() & 0xFF;
+ final int actualGreen = data.get() & 0xFF;
+ final int actualBlue = data.get() & 0xFF;
+ final int actualAlpha = data.get() & 0xFF;
+
+ // Assert rendered image is close to pixel perfect from source YUV.
+ assertTrue(Math.abs(actualRed - expectedRed) < MAX_DIFF);
+ assertTrue(Math.abs(actualGreen - expectedGreen) < MAX_DIFF);
+ assertTrue(Math.abs(actualBlue - expectedBlue) < MAX_DIFF);
+ assertEquals(actualAlpha, 255);
+ }
+ }
+
+ drawer.release();
+ GLES20.glDeleteTextures(3, yuvTextures, 0);
+ eglBase.release();
+ }
+
+ /**
+ * The purpose here is to test GlRectDrawer.oesDraw(). Unfortunately, there is no easy way to
+ * create an OES texture, which is needed for input to oesDraw(). Most of the test is concerned
+ * with creating OES textures in the following way:
+ * - Create SurfaceTexture with help from SurfaceTextureHelper.
+ * - Create an EglBase with the SurfaceTexture as EGLSurface.
+ * - Upload RGB texture with known content.
+ * - Draw the RGB texture onto the EglBase with the SurfaceTexture as target.
+ * - Wait for an OES texture to be produced.
+ * The actual oesDraw() test is this:
+ * - Create an EglBase with a pixel buffer as target.
+ * - Render the OES texture onto the pixel buffer.
+ * - Read back the pixel buffer and compare it with the known RGB data.
+ */
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ @Test
+ @MediumTest
+ public void testOesRendering() throws InterruptedException {
+ /**
+ * Stub class to convert RGB ByteBuffers to OES textures by drawing onto a SurfaceTexture.
+ */
+ class StubOesTextureProducer {
+ private final EglBase eglBase;
+ private final GlRectDrawer drawer;
+ private final int rgbTexture;
+
+ public StubOesTextureProducer(EglBase.Context sharedContext,
+ SurfaceTextureHelper surfaceTextureHelper, int width, int height) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
+ surfaceTextureHelper.setTextureSize(width, height);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglBase.surfaceWidth(), width);
+ assertEquals(eglBase.surfaceHeight(), height);
+
+ drawer = new GlRectDrawer();
+
+ eglBase.makeCurrent();
+ rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+
+ public void draw(ByteBuffer rgbPlane) {
+ eglBase.makeCurrent();
+
+ // Upload RGB data to texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
+ GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+ // Draw the RGB data onto the SurfaceTexture.
+ drawer.drawRgb(rgbTexture, IDENTITY_MATRIX, WIDTH, HEIGHT, 0 /* viewportX */,
+ 0 /* viewportY */, WIDTH, HEIGHT);
+ eglBase.swapBuffers();
+ }
+
+ public void release() {
+ eglBase.makeCurrent();
+ drawer.release();
+ GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
+ eglBase.release();
+ }
+ }
+
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+
+ // Create resources for generating OES textures.
+ final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
+ final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
+ eglBase.getEglBaseContext(), surfaceTextureHelper, WIDTH, HEIGHT);
+ final SurfaceTextureHelperTest.MockTextureListener listener =
+ new SurfaceTextureHelperTest.MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+
+ // Create RGB byte buffer plane with random content.
+ final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
+ final Random random = new Random(SEED);
+ random.nextBytes(rgbPlane.array());
+
+ // Draw the frame and block until an OES texture is delivered.
+ oesProducer.draw(rgbPlane);
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+
+ // Real test starts here.
+ // Draw the OES texture on the pixel buffer.
+ eglBase.makeCurrent();
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawOes(textureBuffer.getTextureId(),
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(textureBuffer.getTransformMatrix()),
+ WIDTH, HEIGHT, 0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is pixel perfect to source RGB.
+ assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
+
+ drawer.release();
+ textureBuffer.release();
+ oesProducer.release();
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
new file mode 100644
index 0000000000..092d617270
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
@@ -0,0 +1,507 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.graphics.Matrix;
+import android.opengl.GLES11Ext;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public class HardwareVideoEncoderTest {
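+ // Each test runs under three parameterizations: I420 buffers without an EGL context,
+ // texture buffers without an EGL context, and texture buffers with a shared EGL context.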
+ @Parameters(name = "textures={0};eglContext={1}")
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(new Object[] {/*textures=*/false, /*eglContext=*/false},
+ new Object[] {/*textures=*/true, /*eglContext=*/false},
+ new Object[] {/*textures=*/true, /*eglContext=*/true});
+ }
+
+ private final boolean useTextures;
+ private final boolean useEglContext;
+
+ public HardwareVideoEncoderTest(boolean useTextures, boolean useEglContext) {
+ this.useTextures = useTextures;
+ this.useEglContext = useEglContext;
+ }
+
+ static final String TAG = "HwVideoEncoderTest";
+
+ private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
+ private static final boolean ENABLE_H264_HIGH_PROFILE = true;
+ private static final VideoEncoder.Settings SETTINGS =
+ new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
+ 30 /* fps */, 1 /* numberOfSimulcastStreams */, true /* automaticResizeOn */,
+ /* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */));
+ private static final int ENCODE_TIMEOUT_MS = 1000;
+ private static final int NUM_TEST_FRAMES = 10;
+ private static final int NUM_ENCODE_TRIES = 100;
+ private static final int ENCODE_RETRY_SLEEP_MS = 1;
+ private static final int PIXEL_ALIGNMENT_REQUIRED = 16;
+ private static final boolean APPLY_ALIGNMENT_TO_ALL_SIMULCAST_LAYERS = false;
+
+ // # Mock classes
+ /**
+ * Mock encoder callback that allows easy verification of the general properties of the encoded
+ * frame such as width and height. Also used from AndroidVideoDecoderInstrumentationTest.
+ */
+ static class MockEncoderCallback implements VideoEncoder.Callback {
+ private BlockingQueue<EncodedImage> frameQueue = new LinkedBlockingQueue<>();
+
+ @Override
+ public void onEncodedFrame(EncodedImage frame, VideoEncoder.CodecSpecificInfo info) {
+ assertNotNull(frame);
+ assertNotNull(info);
+
+ // Make a copy because keeping a reference to the buffer is not allowed.
+ final ByteBuffer bufferCopy = ByteBuffer.allocateDirect(frame.buffer.remaining());
+ bufferCopy.put(frame.buffer);
+ bufferCopy.rewind();
+
+ frameQueue.offer(EncodedImage.builder()
+ .setBuffer(bufferCopy, null)
+ .setEncodedWidth(frame.encodedWidth)
+ .setEncodedHeight(frame.encodedHeight)
+ .setCaptureTimeNs(frame.captureTimeNs)
+ .setFrameType(frame.frameType)
+ .setRotation(frame.rotation)
+ .setQp(frame.qp)
+ .createEncodedImage());
+ }
+
+ public EncodedImage poll() {
+ try {
+ EncodedImage image = frameQueue.poll(ENCODE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+ assertNotNull("Timed out waiting for the frame to be encoded.", image);
+ return image;
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public void assertFrameEncoded(VideoFrame frame) {
+ final VideoFrame.Buffer buffer = frame.getBuffer();
+ final EncodedImage image = poll();
+ assertTrue(image.buffer.capacity() > 0);
+ assertEquals(buffer.getWidth(), image.encodedWidth);
+ assertEquals(buffer.getHeight(), image.encodedHeight);
+ assertEquals(frame.getTimestampNs(), image.captureTimeNs);
+ assertEquals(frame.getRotation(), image.rotation);
+ }
+ }
+
+ /** A common base class for the texture and I420 buffer that implements reference counting. */
+ private static abstract class MockBufferBase implements VideoFrame.Buffer {
+ protected final int width;
+ protected final int height;
+ private final Runnable releaseCallback;
+ private final Object refCountLock = new Object();
+ private int refCount = 1;
+
+ public MockBufferBase(int width, int height, Runnable releaseCallback) {
+ this.width = width;
+ this.height = height;
+ this.releaseCallback = releaseCallback;
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public void retain() {
+ synchronized (refCountLock) {
+ assertTrue("Buffer retained after being destroyed.", refCount > 0);
+ ++refCount;
+ }
+ }
+
+ @Override
+ public void release() {
+ synchronized (refCountLock) {
+ assertTrue("Buffer released too many times.", --refCount >= 0);
+ if (refCount == 0) {
+ releaseCallback.run();
+ }
+ }
+ }
+ }
+
+ private static class MockTextureBuffer
+ extends MockBufferBase implements VideoFrame.TextureBuffer {
+ private final int textureId;
+
+ public MockTextureBuffer(int textureId, int width, int height, Runnable releaseCallback) {
+ super(width, height, releaseCallback);
+ this.textureId = textureId;
+ }
+
+ @Override
+ public VideoFrame.TextureBuffer.Type getType() {
+ return VideoFrame.TextureBuffer.Type.OES;
+ }
+
+ @Override
+ public int getTextureId() {
+ return textureId;
+ }
+
+ @Override
+ public Matrix getTransformMatrix() {
+ return new Matrix();
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ return JavaI420Buffer.allocate(width, height);
+ }
+
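+ // The scaled buffer shares this buffer's texture, so retain the parent and release it
+ // when the returned buffer is released.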
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ retain();
+ return new MockTextureBuffer(textureId, scaleWidth, scaleHeight, this::release);
+ }
+ }
+
+ private static class MockI420Buffer extends MockBufferBase implements VideoFrame.I420Buffer {
+ private final JavaI420Buffer realBuffer;
+
+ public MockI420Buffer(int width, int height, Runnable releaseCallback) {
+ super(width, height, releaseCallback);
+ realBuffer = JavaI420Buffer.allocate(width, height);
+ }
+
+ @Override
+ public ByteBuffer getDataY() {
+ return realBuffer.getDataY();
+ }
+
+ @Override
+ public ByteBuffer getDataU() {
+ return realBuffer.getDataU();
+ }
+
+ @Override
+ public ByteBuffer getDataV() {
+ return realBuffer.getDataV();
+ }
+
+ @Override
+ public int getStrideY() {
+ return realBuffer.getStrideY();
+ }
+
+ @Override
+ public int getStrideU() {
+ return realBuffer.getStrideU();
+ }
+
+ @Override
+ public int getStrideV() {
+ return realBuffer.getStrideV();
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ retain();
+ return this;
+ }
+
+ @Override
+ public void retain() {
+ super.retain();
+ realBuffer.retain();
+ }
+
+ @Override
+ public void release() {
+ super.release();
+ realBuffer.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ return realBuffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ }
+ }
+
+ // # Test fields
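+ // Counts outstanding mock buffers: the generate*Frame() helpers increment the counter and
+ // releaseFrameCallback decrements it, letting tearDown() assert that no frames leaked.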
+ private final Object referencedFramesLock = new Object();
+ private int referencedFrames;
+
+ private Runnable releaseFrameCallback = new Runnable() {
+ @Override
+ public void run() {
+ synchronized (referencedFramesLock) {
+ --referencedFrames;
+ }
+ }
+ };
+
+ private EglBase14 eglBase;
+ private long lastTimestampNs;
+
+ // # Helper methods
+ private VideoEncoderFactory createEncoderFactory(EglBase.Context eglContext) {
+ return new HardwareVideoEncoderFactory(
+ eglContext, ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+ }
+
+ private @Nullable VideoEncoder createEncoder() {
+ VideoEncoderFactory factory =
+ createEncoderFactory(useEglContext ? eglBase.getEglBaseContext() : null);
+ VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
+ return factory.createEncoder(supportedCodecs[0]);
+ }
+
+ private VideoFrame generateI420Frame(int width, int height) {
+ synchronized (referencedFramesLock) {
+ ++referencedFrames;
+ }
+ lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
+ VideoFrame.Buffer buffer = new MockI420Buffer(width, height, releaseFrameCallback);
+ return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
+ }
+
+ private VideoFrame generateTextureFrame(int width, int height) {
+ synchronized (referencedFramesLock) {
+ ++referencedFrames;
+ }
+ final int textureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
+ VideoFrame.Buffer buffer =
+ new MockTextureBuffer(textureId, width, height, releaseFrameCallback);
+ return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
+ }
+
+ private VideoFrame generateFrame(int width, int height) {
+ return useTextures ? generateTextureFrame(width, height) : generateI420Frame(width, height);
+ }
+
+ static VideoCodecStatus testEncodeFrame(
+ VideoEncoder encoder, VideoFrame frame, VideoEncoder.EncodeInfo info) {
+ int numTries = 0;
+
+ // It takes a while for the encoder to become ready, so try until it accepts the frame.
+ while (true) {
+ ++numTries;
+
+ final VideoCodecStatus returnValue = encoder.encode(frame, info);
+ switch (returnValue) {
+ case OK: // Success
+ // Fall through
+ case ERR_SIZE: // Wrong size
+ return returnValue;
+ case NO_OUTPUT:
+ if (numTries >= NUM_ENCODE_TRIES) {
+ fail("encoder.encode keeps returning NO_OUTPUT");
+ }
+ try {
+ Thread.sleep(ENCODE_RETRY_SLEEP_MS); // Try again.
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ break;
+ default:
+ fail("encoder.encode returned: " + returnValue); // Error
+ }
+ }
+ }
+
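+ // Rounds `number` down to the nearest multiple of `alignment`,
+ // e.g. getAlignedNumber(120, 16) == 112.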
+ private static int getAlignedNumber(int number, int alignment) {
+ return (number / alignment) * alignment;
+ }
+
+ public static int getPixelAlignmentRequired() {
+ return PIXEL_ALIGNMENT_REQUIRED;
+ }
+
+ // # Tests
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+
+ eglBase = EglBase.createEgl14(EglBase.CONFIG_PLAIN);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+ lastTimestampNs = System.nanoTime();
+ }
+
+ @After
+ public void tearDown() {
+ eglBase.release();
+ synchronized (referencedFramesLock) {
+ assertEquals("All frames were not released", 0, referencedFrames);
+ }
+ }
+
+ @Test
+ @SmallTest
+ public void testInitialize() {
+ VideoEncoder encoder = createEncoder();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testEncode() {
+ VideoEncoder encoder = createEncoder();
+ MockEncoderCallback callback = new MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ for (int i = 0; i < NUM_TEST_FRAMES; i++) {
+ Log.d(TAG, "Test frame: " + i);
+ VideoFrame frame = generateFrame(SETTINGS.width, SETTINGS.height);
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+ testEncodeFrame(encoder, frame, info);
+
+ callback.assertFrameEncoded(frame);
+ frame.release();
+ }
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testEncodeAlternatingBuffers() {
+ VideoEncoder encoder = createEncoder();
+ MockEncoderCallback callback = new MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ for (int i = 0; i < NUM_TEST_FRAMES; i++) {
+ Log.d(TAG, "Test frame: " + i);
+ VideoFrame frame;
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+
+ frame = generateTextureFrame(SETTINGS.width, SETTINGS.height);
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+
+ frame = generateI420Frame(SETTINGS.width, SETTINGS.height);
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+ }
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testEncodeDifferentSizes() {
+ VideoEncoder encoder = createEncoder();
+ MockEncoderCallback callback = new MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ VideoFrame frame;
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+
+ frame = generateFrame(SETTINGS.width / 2, SETTINGS.height / 2);
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+
+ frame = generateFrame(SETTINGS.width, SETTINGS.height);
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+
+ // Android MediaCodec only guarantees proper operation for 16-pixel-aligned input frames.
+ // Force the input frame size to the greatest multiple of 16 below the original size.
+ frame = generateFrame(getAlignedNumber(SETTINGS.width / 4, PIXEL_ALIGNMENT_REQUIRED),
+ getAlignedNumber(SETTINGS.height / 4, PIXEL_ALIGNMENT_REQUIRED));
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testEncodeAlignmentCheck() {
+ VideoEncoder encoder = createEncoder();
+ MockEncoderCallback callback = new MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ VideoFrame frame;
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+
+ frame = generateFrame(SETTINGS.width / 2, SETTINGS.height / 2);
+ assertEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
+ frame.release();
+
+ // Android MediaCodec only guarantees proper operation for 16-pixel-aligned input frames.
+ // Encoding the following non-aligned input frame should return ERR_SIZE.
+ frame = generateFrame(SETTINGS.width / 4, SETTINGS.height / 4);
+ assertNotEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
+ frame.release();
+
+ // Since the encoder has returned an error, reinitialize it before continuing.
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ frame = generateFrame(getAlignedNumber(SETTINGS.width / 4, PIXEL_ALIGNMENT_REQUIRED),
+ getAlignedNumber(SETTINGS.height / 4, PIXEL_ALIGNMENT_REQUIRED));
+ assertEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
+ frame.release();
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testGetEncoderInfo() {
+ VideoEncoder encoder = createEncoder();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
+ VideoEncoder.EncoderInfo info = encoder.getEncoderInfo();
+ assertEquals(PIXEL_ALIGNMENT_REQUIRED, info.getRequestedResolutionAlignment());
+ assertEquals(
+ APPLY_ALIGNMENT_TO_ALL_SIMULCAST_LAYERS, info.getApplyAlignmentToAllSimulcastLayers());
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/LoggableTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/LoggableTest.java
new file mode 100644
index 0000000000..780eeb6197
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/LoggableTest.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.util.ArrayList;
+import org.junit.Test;
+import org.webrtc.Loggable;
+import org.webrtc.Logging.Severity;
+import org.webrtc.PeerConnectionFactory;
+
+public class LoggableTest {
+ private static final String TAG = "LoggableTest";
+ private static final String NATIVE_FILENAME_TAG = "loggable_test.cc";
+
+ private static class MockLoggable implements Loggable {
+ private ArrayList<String> messages = new ArrayList<>();
+ private ArrayList<Severity> sevs = new ArrayList<>();
+ private ArrayList<String> tags = new ArrayList<>();
+
+ @Override
+ public void onLogMessage(String message, Severity sev, String tag) {
+ messages.add(message);
+ sevs.add(sev);
+ tags.add(tag);
+ }
+
+ public boolean isMessageReceived(String message) {
+ for (int i = 0; i < messages.size(); i++) {
+ if (messages.get(i).contains(message)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public boolean isMessageReceived(String message, Severity sev, String tag) {
+ for (int i = 0; i < messages.size(); i++) {
+ if (messages.get(i).contains(message) && sevs.get(i) == sev && tags.get(i).equals(tag)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+
+ private final MockLoggable mockLoggable = new MockLoggable();
+
+ @Test
+ @SmallTest
+ public void testLoggableSetWithoutError() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @Test
+ @SmallTest
+ public void testMessageIsLoggedCorrectly() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should be logged";
+ Logging.d(TAG, msg);
+ assertTrue(mockLoggable.isMessageReceived(msg, Severity.LS_INFO, TAG));
+ }
+
+ @Test
+ @SmallTest
+ public void testLowSeverityIsFiltered() throws InterruptedException {
+ // Set severity to LS_WARNING to filter out LS_INFO and below.
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_WARNING)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should NOT be logged";
+ Logging.d(TAG, msg);
+ assertFalse(mockLoggable.isMessageReceived(msg));
+ }
+
+ @Test
+ @SmallTest
+ public void testLoggableDoesNotReceiveMessagesAfterUnsetting() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ // Reinitialize without Loggable
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should NOT be logged";
+ Logging.d(TAG, msg);
+ assertFalse(mockLoggable.isMessageReceived(msg));
+ }
+
+ @Test
+ @SmallTest
+ public void testNativeMessageIsLoggedCorrectly() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should be logged";
+ nativeLogInfoTestMessage(msg);
+ assertTrue(mockLoggable.isMessageReceived(msg, Severity.LS_INFO, NATIVE_FILENAME_TAG));
+ }
+
+ @Test
+ @SmallTest
+ public void testNativeLowSeverityIsFiltered() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_WARNING)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should NOT be logged";
+ nativeLogInfoTestMessage(msg);
+ assertFalse(mockLoggable.isMessageReceived(msg));
+ }
+
+ @Test
+ @SmallTest
+ public void testNativeLoggableDoesNotReceiveMessagesAfterUnsetting() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ // Reinitialize without Loggable
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should NOT be logged";
+ nativeLogInfoTestMessage(msg);
+ assertFalse(mockLoggable.isMessageReceived(msg));
+ }
+
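+ // Implemented in loggable_test.cc; logs `message` at LS_INFO severity from the native side
+ // so the tests above can verify that native log output reaches the injected Loggable.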
+ private static native void nativeLogInfoTestMessage(String message);
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java
new file mode 100644
index 0000000000..b646f1f4eb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java
@@ -0,0 +1,411 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.CALLS_REAL_METHODS;
+import static org.mockito.Mockito.mock;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.content.Intent;
+import android.net.ConnectivityManager;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkRequest;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.support.test.InstrumentationRegistry;
+import androidx.annotation.Nullable;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.webrtc.NetworkChangeDetector.ConnectionType;
+import org.webrtc.NetworkChangeDetector.NetworkInformation;
+import org.webrtc.NetworkMonitorAutoDetect.ConnectivityManagerDelegate;
+import org.webrtc.NetworkMonitorAutoDetect.NetworkState;
+import org.webrtc.NetworkMonitorAutoDetect.SimpleNetworkCallback;
+
+/**
+ * Tests for org.webrtc.NetworkMonitor.
+ *
+ * TODO(deadbeef): These tests don't cover the interaction between
+ * NetworkManager.java and androidnetworkmonitor.cc, which is how this
+ * class is used in practice in WebRTC.
+ */
+@SuppressLint("NewApi")
+public class NetworkMonitorTest {
+ private static final long INVALID_NET_ID = -1;
+ private NetworkChangeDetector detector;
+ private String fieldTrialsString = "";
+
+ /**
+ * Listens for alerts fired by the NetworkMonitor when network status changes.
+ */
+ private static class NetworkMonitorTestObserver implements NetworkMonitor.NetworkObserver {
+ private boolean receivedNotification;
+
+ @Override
+ public void onConnectionTypeChanged(ConnectionType connectionType) {
+ receivedNotification = true;
+ }
+
+ public boolean hasReceivedNotification() {
+ return receivedNotification;
+ }
+
+ public void resetHasReceivedNotification() {
+ receivedNotification = false;
+ }
+ }
+
+ /**
+ * Mocks out calls to the ConnectivityManager.
+ */
+ private static class MockConnectivityManagerDelegate extends ConnectivityManagerDelegate {
+ private boolean activeNetworkExists;
+ private int networkType;
+ private int networkSubtype;
+ private int underlyingNetworkTypeForVpn;
+ private int underlyingNetworkSubtypeForVpn;
+
+ MockConnectivityManagerDelegate() {
+ this(new HashSet<>(), "");
+ }
+
+ MockConnectivityManagerDelegate(Set<Network> availableNetworks, String fieldTrialsString) {
+ super((ConnectivityManager) null, availableNetworks, fieldTrialsString);
+ }
+
+ @Override
+ public NetworkState getNetworkState() {
+ return new NetworkState(activeNetworkExists, networkType, networkSubtype,
+ underlyingNetworkTypeForVpn, underlyingNetworkSubtypeForVpn);
+ }
+
+ // Dummy implementations to avoid NullPointerExceptions in default implementations:
+
+ @Override
+ public long getDefaultNetId() {
+ return INVALID_NET_ID;
+ }
+
+ @Override
+ public Network[] getAllNetworks() {
+ return new Network[0];
+ }
+
+ @Override
+ public NetworkState getNetworkState(Network network) {
+ return new NetworkState(false, -1, -1, -1, -1);
+ }
+
+ public void setActiveNetworkExists(boolean networkExists) {
+ activeNetworkExists = networkExists;
+ }
+
+ public void setNetworkType(int networkType) {
+ this.networkType = networkType;
+ }
+
+ public void setNetworkSubtype(int networkSubtype) {
+ this.networkSubtype = networkSubtype;
+ }
+
+ public void setUnderlyingNetworkType(int underlyingNetworkTypeForVpn) {
+ this.underlyingNetworkTypeForVpn = underlyingNetworkTypeForVpn;
+ }
+
+ public void setUnderlyingNetworkSubtype(int underlyingNetworkSubtypeForVpn) {
+ this.underlyingNetworkSubtypeForVpn = underlyingNetworkSubtypeForVpn;
+ }
+ }
+
+ /**
+ * Mocks out calls to the WifiManager.
+ */
+ private static class MockWifiManagerDelegate
+ extends NetworkMonitorAutoDetect.WifiManagerDelegate {
+ private String wifiSSID;
+
+ @Override
+ public String getWifiSSID() {
+ return wifiSSID;
+ }
+
+ public void setWifiSSID(String wifiSSID) {
+ this.wifiSSID = wifiSSID;
+ }
+ }
+
+ // A dummy NetworkMonitorAutoDetect.Observer.
+ private static class TestNetworkMonitorAutoDetectObserver
+ extends NetworkMonitorAutoDetect.Observer {
+ final String fieldTrialsString;
+
+ TestNetworkMonitorAutoDetectObserver(String fieldTrialsString) {
+ this.fieldTrialsString = fieldTrialsString;
+ }
+
+ @Override
+ public void onConnectionTypeChanged(ConnectionType newConnectionType) {}
+
+ @Override
+ public void onNetworkConnect(NetworkInformation networkInfo) {}
+
+ @Override
+ public void onNetworkDisconnect(long networkHandle) {}
+
+ @Override
+ public void onNetworkPreference(List<ConnectionType> types, @NetworkPreference int preference) {
+ }
+
+ // @Override
+ // public String getFieldTrialsString() {
+ // return fieldTrialsString;
+ // }
+ }
+
+ private NetworkMonitorAutoDetect receiver;
+ private MockConnectivityManagerDelegate connectivityDelegate;
+ private MockWifiManagerDelegate wifiDelegate;
+
+ /**
+ * Helper method to create a network monitor and delegates for testing.
+ */
+ private void createTestMonitor() {
+ Context context = InstrumentationRegistry.getTargetContext();
+
+ NetworkMonitor.getInstance().setNetworkChangeDetectorFactory(
+ new NetworkChangeDetectorFactory() {
+ @Override
+ public NetworkChangeDetector create(
+ NetworkChangeDetector.Observer observer, Context context) {
+ detector = new NetworkMonitorAutoDetect(observer, context);
+ return detector;
+ }
+ });
+
+ receiver = NetworkMonitor.createAndSetAutoDetectForTest(context, fieldTrialsString);
+ assertNotNull(receiver);
+
+ connectivityDelegate = new MockConnectivityManagerDelegate();
+ connectivityDelegate.setActiveNetworkExists(true);
+ receiver.setConnectivityManagerDelegateForTests(connectivityDelegate);
+
+ wifiDelegate = new MockWifiManagerDelegate();
+ receiver.setWifiManagerDelegateForTests(wifiDelegate);
+ wifiDelegate.setWifiSSID("foo");
+ }
+
+ private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
+ final NetworkMonitorAutoDetect.NetworkState networkState = receiver.getCurrentNetworkState();
+ return NetworkMonitorAutoDetect.getConnectionType(networkState);
+ }
+
+ @Before
+ public void setUp() {
+ ContextUtils.initialize(InstrumentationRegistry.getTargetContext());
+ createTestMonitor();
+ }
+
+ /**
+ * Tests that the receiver registers for connectivity intents during construction.
+ */
+ @Test
+ @SmallTest
+ public void testNetworkMonitorRegistersInConstructor() throws InterruptedException {
+ Context context = InstrumentationRegistry.getTargetContext();
+
+ NetworkMonitorAutoDetect.Observer observer =
+ new TestNetworkMonitorAutoDetectObserver(fieldTrialsString);
+
+ NetworkMonitorAutoDetect receiver = new NetworkMonitorAutoDetect(observer, context);
+
+ assertTrue(receiver.isReceiverRegisteredForTesting());
+ }
+
+ /**
+ * Tests that when there is an intent indicating a change in network connectivity, it sends a
+ * notification to Java observers.
+ */
+ @Test
+ @MediumTest
+ public void testNetworkMonitorJavaObservers() throws InterruptedException {
+ // Initialize the NetworkMonitor with a connection.
+ Intent connectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+
+ // We shouldn't be re-notified if the connection hasn't actually changed.
+ NetworkMonitorTestObserver observer = new NetworkMonitorTestObserver();
+ NetworkMonitor.addNetworkObserver(observer);
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // We shouldn't be notified if we're connected to non-Wifi and the Wifi SSID changes.
+ wifiDelegate.setWifiSSID("bar");
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // We should be notified when we change to Wifi.
+ connectivityDelegate.setNetworkType(ConnectivityManager.TYPE_WIFI);
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ observer.resetHasReceivedNotification();
+
+ // We should be notified when the Wifi SSID changes.
+ wifiDelegate.setWifiSSID("foo");
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ observer.resetHasReceivedNotification();
+
+ // We shouldn't be re-notified if the Wifi SSID hasn't actually changed.
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // Mimic that connectivity has been lost and ensure that the observer gets the notification.
+ connectivityDelegate.setActiveNetworkExists(false);
+ Intent noConnectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), noConnectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ }
+
+ /**
+ * Tests that ConnectivityManagerDelegate doesn't crash. This test cannot rely on having any
+ * active network connections so it cannot usefully check results, but it can at least check
+ * that the functions don't crash.
+ */
+ @Test
+ @SmallTest
+ public void testConnectivityManagerDelegateDoesNotCrash() {
+ ConnectivityManagerDelegate delegate = new ConnectivityManagerDelegate(
+ InstrumentationRegistry.getTargetContext(), new HashSet<>(), fieldTrialsString);
+ delegate.getNetworkState();
+ Network[] networks = delegate.getAllNetworks();
+ if (networks.length >= 1) {
+ delegate.getNetworkState(networks[0]);
+ delegate.hasInternetCapability(networks[0]);
+ }
+ delegate.getDefaultNetId();
+ }
+
+ /** Tests that ConnectivityManagerDelegate preferentially reads from the cache. */
+ @Test
+ @SmallTest
+ public void testConnectivityManagerDelegatePreferentiallyReadsFromCache() {
+ final Set<Network> availableNetworks = new HashSet<>();
+ ConnectivityManagerDelegate delegate = new ConnectivityManagerDelegate(
+ (ConnectivityManager) InstrumentationRegistry.getTargetContext().getSystemService(
+ Context.CONNECTIVITY_SERVICE),
+ availableNetworks, "getAllNetworksFromCache:true");
+
+ Network[] networks = delegate.getAllNetworks();
+ assertEquals(0, networks.length);
+
+ final Network mockNetwork = mock(Network.class);
+ availableNetworks.add(mockNetwork);
+
+ assertArrayEquals(new Network[] {mockNetwork}, delegate.getAllNetworks());
+ }
+
+ // Tests for field trial parsing.
+
+ @Test
+ @SmallTest
+ public void testConnectivityManager_requestVPN_disabled() {
+ NetworkRequest request =
+ getNetworkRequestForFieldTrials("anyothertext,requestVPN:false,anyothertext");
+ assertTrue(request.equals(new NetworkRequest.Builder()
+ .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+ .build()));
+ }
+
+ @Test
+ @SmallTest
+ public void testConnectivityManager_requestVPN_enabled() {
+ NetworkRequest request = getNetworkRequestForFieldTrials("requestVPN:true");
+ assertTrue(request.equals(new NetworkRequest.Builder()
+ .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+ .removeCapability(NetworkCapabilities.NET_CAPABILITY_NOT_VPN)
+ .build()));
+ }
+
+ @Test
+ @SmallTest
+ public void testConnectivityManager_includeOtherUidNetworks_disabled() {
+ NetworkRequest request = getNetworkRequestForFieldTrials("includeOtherUidNetworks:false");
+ assertTrue(request.equals(new NetworkRequest.Builder()
+ .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+ .build()));
+ }
+
+ @Test
+ @SmallTest
+ public void testConnectivityManager_includeOtherUidNetworks_enabled() {
+ NetworkRequest request = getNetworkRequestForFieldTrials("includeOtherUidNetworks:true");
+ NetworkRequest.Builder builder =
+ new NetworkRequest.Builder().addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
+ builder.setIncludeOtherUidNetworks(true);
+ }
+ assertTrue(request.equals(builder.build()));
+ }
+
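+ // Builds the NetworkRequest that NetworkMonitorAutoDetect would register for the given
+ // field-trials string, a comma-separated list of "key:value" pairs such as "requestVPN:true".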
+ private NetworkRequest getNetworkRequestForFieldTrials(String fieldTrialsString) {
+ return new ConnectivityManagerDelegate(
+ (ConnectivityManager) null, new HashSet<>(), fieldTrialsString)
+ .createNetworkRequest();
+ }
+
+ /**
+ * Tests that NetworkMonitorAutoDetect queryable APIs don't crash. This test cannot rely
+ * on having any active network connections so it cannot usefully check results, but it can at
+ * least check that the functions don't crash.
+ */
+ @Test
+ @SmallTest
+ public void testQueryableAPIsDoNotCrash() {
+ NetworkMonitorAutoDetect.Observer observer =
+ new TestNetworkMonitorAutoDetectObserver(fieldTrialsString);
+ NetworkMonitorAutoDetect ncn =
+ new NetworkMonitorAutoDetect(observer, InstrumentationRegistry.getTargetContext());
+ ncn.getDefaultNetId();
+ }
+
+ /**
+ * Tests startMonitoring and stopMonitoring correctly set the autoDetect and number of observers.
+ */
+ @Test
+ @SmallTest
+ public void testStartStopMonitoring() {
+ NetworkMonitor networkMonitor = NetworkMonitor.getInstance();
+ Context context = ContextUtils.getApplicationContext();
+ networkMonitor.startMonitoring(context, fieldTrialsString);
+ assertEquals(1, networkMonitor.getNumObservers());
+ assertEquals(detector, networkMonitor.getNetworkChangeDetector());
+ networkMonitor.stopMonitoring();
+ assertEquals(0, networkMonitor.getNumObservers());
+ assertNull(networkMonitor.getNetworkChangeDetector());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java
new file mode 100644
index 0000000000..f71bd36063
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java
@@ -0,0 +1,1641 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.annotation.Nullable;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.TreeSet;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.PeerConnection.IceConnectionState;
+import org.webrtc.PeerConnection.IceGatheringState;
+import org.webrtc.PeerConnection.PeerConnectionState;
+import org.webrtc.PeerConnection.SignalingState;
+
+/** End-to-end tests for {@link PeerConnection}. */
+public class PeerConnectionEndToEndTest {
+ private static final String TAG = "PeerConnectionEndToEndTest";
+ private static final int DEFAULT_TIMEOUT_SECONDS = 20;
+ private static final int SHORT_TIMEOUT_SECONDS = 5;
+
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
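+ /**
+ * Records expected events via the expect* methods and, as each callback fires, asserts that it
+ * matches the head of the corresponding expectation queue.
+ * waitForAllExpectationsToBeSatisfied() then polls until every queue has drained.
+ */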
+ private static class ObserverExpectations
+ implements PeerConnection.Observer, VideoSink, DataChannel.Observer, StatsObserver,
+ RTCStatsCollectorCallback, RtpReceiver.Observer {
+ private final String name;
+ private int expectedIceCandidates;
+ private int expectedErrors;
+ private int expectedRenegotiations;
+ private int expectedWidth;
+ private int expectedHeight;
+ private int expectedFramesDelivered;
+ private int expectedTracksAdded;
+ private Queue<SignalingState> expectedSignalingChanges = new ArrayDeque<>();
+ private Queue<IceConnectionState> expectedIceConnectionChanges = new ArrayDeque<>();
+ private Queue<IceConnectionState> expectedStandardizedIceConnectionChanges = new ArrayDeque<>();
+ private Queue<PeerConnectionState> expectedConnectionChanges = new ArrayDeque<>();
+ private Queue<IceGatheringState> expectedIceGatheringChanges = new ArrayDeque<>();
+ private Queue<String> expectedAddStreamLabels = new ArrayDeque<>();
+ private Queue<String> expectedRemoveStreamLabels = new ArrayDeque<>();
+ private final List<IceCandidate> gotIceCandidates = new ArrayList<>();
+ private Map<MediaStream, WeakReference<VideoSink>> videoSinks = new IdentityHashMap<>();
+ private DataChannel dataChannel;
+ private Queue<DataChannel.Buffer> expectedBuffers = new ArrayDeque<>();
+ private Queue<DataChannel.State> expectedStateChanges = new ArrayDeque<>();
+ private Queue<String> expectedRemoteDataChannelLabels = new ArrayDeque<>();
+ private int expectedOldStatsCallbacks;
+ private int expectedNewStatsCallbacks;
+ private List<StatsReport[]> gotStatsReports = new ArrayList<>();
+ private final HashSet<MediaStream> gotRemoteStreams = new HashSet<>();
+ private int expectedFirstAudioPacket;
+ private int expectedFirstVideoPacket;
+
+ public ObserverExpectations(String name) {
+ this.name = name;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void setDataChannel(DataChannel dataChannel) {
+ assertNull(this.dataChannel);
+ this.dataChannel = dataChannel;
+ this.dataChannel.registerObserver(this);
+ assertNotNull(this.dataChannel);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectIceCandidates(int count) {
+ expectedIceCandidates += count;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onIceCandidate(IceCandidate candidate) {
+ Logging.d(TAG, "onIceCandidate: " + candidate.toString());
+ --expectedIceCandidates;
+
+ // We don't assert expectedIceCandidates >= 0 because it's hard to know
+ // how many to expect, in general. We only use expectIceCandidates to
+ // assert a minimal count.
+ synchronized (gotIceCandidates) {
+ gotIceCandidates.add(candidate);
+ gotIceCandidates.notifyAll();
+ }
+ }
+
+ @Override
+ public void onIceCandidateError(IceCandidateErrorEvent event) {}
+
+ @Override
+ public void onIceCandidatesRemoved(IceCandidate[] candidates) {}
+
+ @Override
+ public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {}
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void setExpectedResolution(int width, int height) {
+ expectedWidth = width;
+ expectedHeight = height;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectFramesDelivered(int count) {
+ expectedFramesDelivered += count;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrame(VideoFrame frame) {
+ if (expectedFramesDelivered <= 0) {
+ return;
+ }
+ assertTrue(expectedWidth > 0);
+ assertTrue(expectedHeight > 0);
+ assertEquals(expectedWidth, frame.getRotatedWidth());
+ assertEquals(expectedHeight, frame.getRotatedHeight());
+ --expectedFramesDelivered;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectSignalingChange(SignalingState newState) {
+ expectedSignalingChanges.add(newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onSignalingChange(SignalingState newState) {
+ assertEquals(expectedSignalingChanges.remove(), newState);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectIceConnectionChange(IceConnectionState newState) {
+ expectedIceConnectionChanges.add(newState);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectStandardizedIceConnectionChange(IceConnectionState newState) {
+ expectedStandardizedIceConnectionChanges.add(newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onIceConnectionChange(IceConnectionState newState) {
+ // TODO(bemasc): remove once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ if (newState.equals(IceConnectionState.COMPLETED)) {
+ return;
+ }
+
+ if (expectedIceConnectionChanges.isEmpty()) {
+ Logging.d(TAG, name + "Got an unexpected ICE connection change " + newState);
+ return;
+ }
+
+ assertEquals(expectedIceConnectionChanges.remove(), newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onStandardizedIceConnectionChange(IceConnectionState newState) {
+ if (newState.equals(IceConnectionState.COMPLETED)) {
+ return;
+ }
+
+ if (expectedStandardizedIceConnectionChanges.isEmpty()) {
+ Logging.d(TAG, name + " got an unexpected standardized ICE connection change " + newState);
+ return;
+ }
+
+ assertEquals(expectedStandardizedIceConnectionChanges.remove(), newState);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectConnectionChange(PeerConnectionState newState) {
+ expectedConnectionChanges.add(newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onConnectionChange(PeerConnectionState newState) {
+ if (expectedConnectionChanges.isEmpty()) {
+ Logging.d(TAG, name + " got an unexpected DTLS connection change " + newState);
+ return;
+ }
+
+ assertEquals(expectedConnectionChanges.remove(), newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onIceConnectionReceivingChange(boolean receiving) {
+ Logging.d(TAG, name + " got an ICE connection receiving change " + receiving);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectIceGatheringChange(IceGatheringState newState) {
+ expectedIceGatheringChanges.add(newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onIceGatheringChange(IceGatheringState newState) {
+ // It's fine to get a variable number of GATHERING messages before
+ // COMPLETE fires (depending on how long the test runs), so we don't assert
+ // any particular count.
+ if (newState == IceGatheringState.GATHERING) {
+ return;
+ }
+ if (expectedIceGatheringChanges.isEmpty()) {
+ Logging.d(TAG, name + " got an unexpected ICE gathering change " + newState);
+ return;
+ }
+ assertEquals(expectedIceGatheringChanges.remove(), newState);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectAddStream(String label) {
+ expectedAddStreamLabels.add(label);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onAddStream(MediaStream stream) {
+ assertEquals(expectedAddStreamLabels.remove(), stream.getId());
+ for (AudioTrack track : stream.audioTracks) {
+ assertEquals("audio", track.kind());
+ }
+ for (VideoTrack track : stream.videoTracks) {
+ assertEquals("video", track.kind());
+ track.addSink(this);
+ assertNull(videoSinks.put(stream, new WeakReference<VideoSink>(this)));
+ }
+ gotRemoteStreams.add(stream);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectRemoveStream(String label) {
+ expectedRemoveStreamLabels.add(label);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onRemoveStream(MediaStream stream) {
+ assertEquals(expectedRemoveStreamLabels.remove(), stream.getId());
+ WeakReference<VideoSink> videoSink = videoSinks.remove(stream);
+ assertNotNull(videoSink);
+ assertNotNull(videoSink.get());
+ for (VideoTrack videoTrack : stream.videoTracks) {
+ videoTrack.removeSink(videoSink.get());
+ }
+ gotRemoteStreams.remove(stream);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectDataChannel(String label) {
+ expectedRemoteDataChannelLabels.add(label);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onDataChannel(DataChannel remoteDataChannel) {
+ assertEquals(expectedRemoteDataChannelLabels.remove(), remoteDataChannel.label());
+ setDataChannel(remoteDataChannel);
+ assertEquals(DataChannel.State.CONNECTING, dataChannel.state());
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectRenegotiationNeeded() {
+ ++expectedRenegotiations;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onRenegotiationNeeded() {
+ assertTrue(--expectedRenegotiations >= 0);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectAddTrack(int expectedTracksAdded) {
+ this.expectedTracksAdded = expectedTracksAdded;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) {
+ expectedTracksAdded--;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectMessage(ByteBuffer expectedBuffer, boolean expectedBinary) {
+ expectedBuffers.add(new DataChannel.Buffer(expectedBuffer, expectedBinary));
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onMessage(DataChannel.Buffer buffer) {
+ DataChannel.Buffer expected = expectedBuffers.remove();
+ assertEquals(expected.binary, buffer.binary);
+ assertTrue(expected.data.equals(buffer.data));
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onBufferedAmountChange(long previousAmount) {
+ assertFalse(previousAmount == dataChannel.bufferedAmount());
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onStateChange() {
+ assertEquals(expectedStateChanges.remove(), dataChannel.state());
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectStateChange(DataChannel.State state) {
+ expectedStateChanges.add(state);
+ }
+
+ // Old getStats callback.
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onComplete(StatsReport[] reports) {
+ if (--expectedOldStatsCallbacks < 0) {
+ throw new RuntimeException("Unexpected stats report: " + Arrays.toString(reports));
+ }
+ gotStatsReports.add(reports);
+ }
+
+ // New getStats callback.
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onStatsDelivered(RTCStatsReport report) {
+ if (--expectedNewStatsCallbacks < 0) {
+ throw new RuntimeException("Unexpected stats report: " + report);
+ }
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFirstPacketReceived(MediaStreamTrack.MediaType mediaType) {
+ if (mediaType == MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO) {
+ expectedFirstAudioPacket--;
+ } else {
+ expectedFirstVideoPacket--;
+ }
+ if (expectedFirstAudioPacket < 0 || expectedFirstVideoPacket < 0) {
+ throw new RuntimeException("Unexpected call of onFirstPacketReceived");
+ }
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectFirstPacketReceived() {
+ expectedFirstAudioPacket = 1;
+ expectedFirstVideoPacket = 1;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectOldStatsCallback() {
+ ++expectedOldStatsCallbacks;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectNewStatsCallback() {
+ ++expectedNewStatsCallbacks;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized List<StatsReport[]> takeStatsReports() {
+ List<StatsReport[]> got = gotStatsReports;
+ gotStatsReports = new ArrayList<StatsReport[]>();
+ return got;
+ }
+
+ // Return a set of expectations that haven't been satisfied yet, possibly
+ // empty if no such expectations exist.
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized TreeSet<String> unsatisfiedExpectations() {
+ TreeSet<String> stillWaitingForExpectations = new TreeSet<String>();
+ if (expectedIceCandidates > 0) { // See comment in onIceCandidate.
+ stillWaitingForExpectations.add("expectedIceCandidates");
+ }
+ if (expectedErrors != 0) {
+ stillWaitingForExpectations.add("expectedErrors: " + expectedErrors);
+ }
+ if (expectedSignalingChanges.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedSignalingChanges: " + expectedSignalingChanges.size());
+ }
+ if (expectedIceConnectionChanges.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedIceConnectionChanges: " + expectedIceConnectionChanges.size());
+ }
+ if (expectedIceGatheringChanges.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedIceGatheringChanges: " + expectedIceGatheringChanges.size());
+ }
+ if (expectedAddStreamLabels.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedAddStreamLabels: " + expectedAddStreamLabels.size());
+ }
+ if (expectedRemoveStreamLabels.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedRemoveStreamLabels: " + expectedRemoveStreamLabels.size());
+ }
+ if (expectedFramesDelivered > 0) {
+ stillWaitingForExpectations.add("expectedFramesDelivered: " + expectedFramesDelivered);
+ }
+ if (!expectedBuffers.isEmpty()) {
+ stillWaitingForExpectations.add("expectedBuffers: " + expectedBuffers.size());
+ }
+ if (!expectedStateChanges.isEmpty()) {
+ stillWaitingForExpectations.add("expectedStateChanges: " + expectedStateChanges.size());
+ }
+ if (!expectedRemoteDataChannelLabels.isEmpty()) {
+ stillWaitingForExpectations.add(
+ "expectedRemoteDataChannelLabels: " + expectedRemoteDataChannelLabels.size());
+ }
+ if (expectedOldStatsCallbacks != 0) {
+ stillWaitingForExpectations.add("expectedOldStatsCallbacks: " + expectedOldStatsCallbacks);
+ }
+ if (expectedNewStatsCallbacks != 0) {
+ stillWaitingForExpectations.add("expectedNewStatsCallbacks: " + expectedNewStatsCallbacks);
+ }
+ if (expectedFirstAudioPacket > 0) {
+ stillWaitingForExpectations.add("expectedFirstAudioPacket: " + expectedFirstAudioPacket);
+ }
+ if (expectedFirstVideoPacket > 0) {
+ stillWaitingForExpectations.add("expectedFirstVideoPacket: " + expectedFirstVideoPacket);
+ }
+ if (expectedTracksAdded != 0) {
+ stillWaitingForExpectations.add("expectedAddedTrack: " + expectedTracksAdded);
+ }
+ return stillWaitingForExpectations;
+ }
+
+ public boolean waitForAllExpectationsToBeSatisfied(int timeoutSeconds) {
+ // TODO(fischman): problems with this approach:
+ // - come up with something better than a poll loop
+ // - avoid serializing expectations explicitly; the test is not as robust
+ // as it could be because it must place expectations between wait
+ // statements very precisely (e.g. frame must not arrive before its
+ // expectation, and expectation must not be registered so early as to
+ // stall a wait). Use callbacks to fire off dependent steps instead of
+ // explicitly waiting, so there can be just a single wait at the end of
+ // the test.
+ long endTime = System.currentTimeMillis() + 1000 * timeoutSeconds;
+ TreeSet<String> prev = null;
+ TreeSet<String> stillWaitingForExpectations = unsatisfiedExpectations();
+ while (!stillWaitingForExpectations.isEmpty()) {
+ if (!stillWaitingForExpectations.equals(prev)) {
+ Logging.d(TAG,
+ name + " still waiting at\n " + (new Throwable()).getStackTrace()[1]
+ + "\n for: " + Arrays.toString(stillWaitingForExpectations.toArray()));
+ }
+ if (endTime < System.currentTimeMillis()) {
+ Logging.d(TAG,
+ name + " timed out waiting for: "
+ + Arrays.toString(stillWaitingForExpectations.toArray()));
+ return false;
+ }
+ try {
+ Thread.sleep(10);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ prev = stillWaitingForExpectations;
+ stillWaitingForExpectations = unsatisfiedExpectations();
+ }
+ if (prev == null) {
+ Logging.d(
+ TAG, name + " didn't need to wait at\n " + (new Throwable()).getStackTrace()[1]);
+ }
+ return true;
+ }
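+
+ // Illustrative sketch only, not part of the test: one way to address the
+ // TODO above would be to replace the poll loop with a latch that each
+ // on*() callback counts down once its expectation is met. The field and
+ // method below are hypothetical and assume such wiring exists:
+ //
+ //   private final CountDownLatch allExpectationsLatch = new CountDownLatch(1);
+ //
+ //   public boolean awaitAllExpectations(int timeoutSeconds) throws InterruptedException {
+ //     return allExpectationsLatch.await(timeoutSeconds, TimeUnit.SECONDS);
+ //   }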
+
+ // This method returns a list of all currently gathered ICE candidates, or
+ // waits (via gotIceCandidates.wait()) until at least one candidate has been
+ // gathered.
+ public List<IceCandidate> getAtLeastOneIceCandidate() throws InterruptedException {
+ synchronized (gotIceCandidates) {
+ while (gotIceCandidates.isEmpty()) {
+ gotIceCandidates.wait();
+ }
+ return new ArrayList<IceCandidate>(gotIceCandidates);
+ }
+ }
+ }
+
+ // Sets the expected resolution for an ObserverExpectations once a frame
+ // has been captured.
+ private static class ExpectedResolutionSetter implements VideoSink {
+ private ObserverExpectations observer;
+
+ public ExpectedResolutionSetter(ObserverExpectations observer) {
+ this.observer = observer;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrame(VideoFrame frame) {
+ // Because different camera devices (fake & physical) produce different
+ // resolutions, we only sanity-check that the sizes have been set.
+ assertTrue(frame.getRotatedWidth() > 0);
+ assertTrue(frame.getRotatedHeight() > 0);
+ observer.setExpectedResolution(frame.getRotatedWidth(), frame.getRotatedHeight());
+ frame.retain();
+ }
+ }
+
+ private static class SdpObserverLatch implements SdpObserver {
+ private boolean success;
+ private @Nullable SessionDescription sdp;
+ private @Nullable String error;
+ private CountDownLatch latch = new CountDownLatch(1);
+
+ public SdpObserverLatch() {}
+
+ @Override
+ public void onCreateSuccess(SessionDescription sdp) {
+ this.sdp = sdp;
+ onSetSuccess();
+ }
+
+ @Override
+ public void onSetSuccess() {
+ success = true;
+ latch.countDown();
+ }
+
+ @Override
+ public void onCreateFailure(String error) {
+ onSetFailure(error);
+ }
+
+ @Override
+ public void onSetFailure(String error) {
+ this.error = error;
+ latch.countDown();
+ }
+
+ public boolean await() {
+ try {
+ assertTrue(latch.await(1000, TimeUnit.MILLISECONDS));
+ return getSuccess();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public boolean getSuccess() {
+ return success;
+ }
+
+ public @Nullable SessionDescription getSdp() {
+ return sdp;
+ }
+
+ public @Nullable String getError() {
+ return error;
+ }
+ }
+
+ // Return a weak reference to test that ownership is correctly held by
+ // PeerConnection, not by test code.
+ private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
+ PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
+ String audioTrackId, VideoSink videoSink) {
+ return addTracksToPC(factory, pc, videoSource, streamLabel, videoTrackId, audioTrackId,
+ videoSink, /*useAddStream=*/false);
+ }
+ private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
+ PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
+ String audioTrackId, VideoSink videoSink, boolean useAddStream) {
+ MediaStream lMS = factory.createLocalMediaStream(streamLabel);
+ VideoTrack videoTrack = factory.createVideoTrack(videoTrackId, videoSource);
+ assertNotNull(videoTrack);
+ assertNotNull(videoSink);
+ videoTrack.addSink(videoSink);
+ lMS.addTrack(videoTrack);
+ // Just for fun, let's remove and re-add the track.
+ lMS.removeTrack(videoTrack);
+ lMS.addTrack(videoTrack);
+ lMS.addTrack(
+ factory.createAudioTrack(audioTrackId, factory.createAudioSource(new MediaConstraints())));
+ if (!useAddStream) {
+ // In Unified Plan, addTrack() is the preferred way of adding tracks.
+ for (AudioTrack track : lMS.audioTracks) {
+ pc.addTrack(track, Collections.singletonList(lMS.getId()));
+ }
+ for (VideoTrack track : lMS.videoTracks) {
+ pc.addTrack(track, Collections.singletonList(lMS.getId()));
+ }
+ } else {
+ // Only in Plan B is addStream() supported. Used by a legacy test not yet
+ // updated to Unified Plan.
+ // TODO(https://crbug.com/webrtc/13528): Remove use of addStream().
+ pc.addStream(lMS);
+ }
+ return new WeakReference<MediaStream>(lMS);
+ }
+
+ @Test
+ @MediumTest
+ public void testCompleteSession() throws Exception {
+ Metrics.enable();
+ // Allow loopback interfaces too since our Android devices often don't
+ // have those.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ PeerConnectionFactory factory = PeerConnectionFactory.builder()
+ .setOptions(options)
+ .setVideoEncoderFactory(new SoftwareVideoEncoderFactory())
+ .setVideoDecoderFactory(new SoftwareVideoDecoderFactory())
+ .createPeerConnectionFactory();
+
+ List<PeerConnection.IceServer> iceServers = new ArrayList<>();
+ iceServers.add(
+ PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
+ iceServers.add(PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .createIceServer());
+
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(rtcConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+ PeerConnection answeringPC = factory.createPeerConnection(rtcConfig, answeringExpectations);
+ assertNotNull(answeringPC);
+
+ // We want to use the same camera for offerer & answerer, so create it here
+ // instead of in addTracksToPC.
+ final CameraEnumerator enumerator = new Camera1Enumerator(false /* captureToTexture */);
+ final VideoCapturer videoCapturer =
+ enumerator.createCapturer(enumerator.getDeviceNames()[0], null /* eventsHandler */);
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper", /* sharedContext= */ null);
+ final VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
+ videoCapturer.initialize(surfaceTextureHelper, InstrumentationRegistry.getTargetContext(),
+ videoSource.getCapturerObserver());
+ videoCapturer.startCapture(640, 480, 30);
+
+ offeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> oLMS =
+ addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
+ "offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations));
+
+ offeringExpectations.expectAddTrack(2);
+ answeringExpectations.expectAddTrack(2);
+
+ offeringExpectations.expectRenegotiationNeeded();
+ DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());
+ assertEquals("offeringDC", offeringDC.label());
+
+ offeringExpectations.setDataChannel(offeringDC);
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ answeringExpectations.expectAddStream("offeredMediaStream");
+ // SCTP DataChannels are announced via OPEN messages over the established
+ // connection (not via SDP), so answeringExpectations can only register its
+ // expectation of the channel later, during ICE (below).
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ answeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
+ "answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
+ new ExpectedResolutionSetter(offeringExpectations));
+
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ offeringExpectations.expectIceCandidates(2);
+ answeringExpectations.expectIceCandidates(2);
+
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ offeringExpectations.expectAddStream("answeredMediaStream");
+
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+ // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ //
+ // offeringExpectations.expectIceConnectionChange(
+ // IceConnectionState.COMPLETED);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
+ assertEquals(offeringPC.getRemoteDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getLocalDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getRemoteDescription().type, offerSdp.type);
+
+ assertEquals(offeringPC.getSenders().size(), 2);
+ assertEquals(offeringPC.getReceivers().size(), 2);
+ assertEquals(answeringPC.getSenders().size(), 2);
+ assertEquals(answeringPC.getReceivers().size(), 2);
+
+ offeringExpectations.expectFirstPacketReceived();
+ answeringExpectations.expectFirstPacketReceived();
+
+ for (RtpReceiver receiver : offeringPC.getReceivers()) {
+ receiver.SetObserver(offeringExpectations);
+ }
+
+ for (RtpReceiver receiver : answeringPC.getReceivers()) {
+ receiver.SetObserver(answeringExpectations);
+ }
+
+ // Wait for at least some frames to be delivered at each end (number
+ // chosen arbitrarily).
+ offeringExpectations.expectFramesDelivered(10);
+ answeringExpectations.expectFramesDelivered(10);
+
+ offeringExpectations.expectStateChange(DataChannel.State.OPEN);
+ // See commentary about SCTP DataChannels above for why this is here.
+ answeringExpectations.expectDataChannel("offeringDC");
+ answeringExpectations.expectStateChange(DataChannel.State.OPEN);
+
+ // Wait for at least one ICE candidate from the offering PC and forward all
+ // gathered candidates to the answering PC.
+ for (IceCandidate candidate : offeringExpectations.getAtLeastOneIceCandidate()) {
+ answeringPC.addIceCandidate(candidate);
+ }
+
+ // Wait for at least one ICE candidate from the answering PC and forward all
+ // gathered candidates to the offering PC.
+ for (IceCandidate candidate : answeringExpectations.getAtLeastOneIceCandidate()) {
+ offeringPC.addIceCandidate(candidate);
+ }
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+
+ // Test some of the RtpSender API.
+ RtpSender videoSender = null;
+ RtpSender audioSender = null;
+ for (RtpSender sender : offeringPC.getSenders()) {
+ if (sender.track().kind().equals("video")) {
+ videoSender = sender;
+ } else {
+ audioSender = sender;
+ }
+ }
+ assertNotNull(videoSender);
+ assertNotNull(audioSender);
+
+ // Set a bitrate limit for the outgoing video stream for the offerer.
+ RtpParameters rtpParameters = videoSender.getParameters();
+ assertNotNull(rtpParameters);
+ assertEquals(1, rtpParameters.encodings.size());
+ assertNull(rtpParameters.encodings.get(0).maxBitrateBps);
+ assertNull(rtpParameters.encodings.get(0).minBitrateBps);
+ assertNull(rtpParameters.encodings.get(0).maxFramerate);
+ assertNull(rtpParameters.encodings.get(0).numTemporalLayers);
+ assertNull(rtpParameters.encodings.get(0).scaleResolutionDownBy);
+ assertTrue(rtpParameters.encodings.get(0).rid.isEmpty());
+
+ rtpParameters.encodings.get(0).maxBitrateBps = 300000;
+ rtpParameters.encodings.get(0).minBitrateBps = 100000;
+ rtpParameters.encodings.get(0).maxFramerate = 20;
+ rtpParameters.encodings.get(0).numTemporalLayers = 2;
+ rtpParameters.encodings.get(0).scaleResolutionDownBy = 2.0;
+ assertTrue(videoSender.setParameters(rtpParameters));
+
+ // Create a DTMF sender.
+ DtmfSender dtmfSender = audioSender.dtmf();
+ assertNotNull(dtmfSender);
+ assertTrue(dtmfSender.canInsertDtmf());
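+ // insertDtmf(tones, duration, interToneGap): play the tones "1", "2", "3"
+ // for 300 ms each, with a 100 ms gap between tones (both in milliseconds).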
+ assertTrue(dtmfSender.insertDtmf("123", 300, 100));
+
+ // Verify that we can read back the updated value.
+ rtpParameters = videoSender.getParameters();
+ assertEquals(300000, (int) rtpParameters.encodings.get(0).maxBitrateBps);
+ assertEquals(100000, (int) rtpParameters.encodings.get(0).minBitrateBps);
+ assertEquals(20, (int) rtpParameters.encodings.get(0).maxFramerate);
+ assertEquals(2, (int) rtpParameters.encodings.get(0).numTemporalLayers);
+ assertThat(rtpParameters.encodings.get(0).scaleResolutionDownBy).isEqualTo(2.0);
+
+ // Test send & receive UTF-8 text.
+ answeringExpectations.expectMessage(
+ ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ DataChannel.Buffer buffer =
+ new DataChannel.Buffer(ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ assertTrue(offeringExpectations.dataChannel.send(buffer));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Construct this binary message two different ways to ensure no
+ // shortcuts are taken.
+ ByteBuffer expectedBinaryMessage = ByteBuffer.allocateDirect(5);
+ for (byte i = 1; i < 6; ++i) {
+ expectedBinaryMessage.put(i);
+ }
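+ // flip() switches the buffer from writing to reading: the limit becomes the
+ // current position and the position is reset to zero.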
+ expectedBinaryMessage.flip();
+ offeringExpectations.expectMessage(expectedBinaryMessage, true);
+ assertTrue(answeringExpectations.dataChannel.send(
+ new DataChannel.Buffer(ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}), true)));
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ offeringExpectations.dataChannel.close();
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Test SetBitrate.
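+ // setBitrate(min, current, max) takes bitrates in bps; the second call is
+ // rejected because the values do not satisfy min <= current <= max.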
+ assertTrue(offeringPC.setBitrate(100000, 5000000, 500000000));
+ assertFalse(offeringPC.setBitrate(3, 2, 1));
+
+ // Free the Java-land objects and collect them.
+ shutdownPC(offeringPC, offeringExpectations);
+ offeringPC = null;
+ shutdownPC(answeringPC, answeringExpectations);
+ answeringPC = null;
+ videoCapturer.stopCapture();
+ videoCapturer.dispose();
+ videoSource.dispose();
+ surfaceTextureHelper.dispose();
+ factory.dispose();
+ System.gc();
+ }
+
+ @Test
+ @MediumTest
+ public void testDataChannelOnlySession() throws Exception {
+ // Allow loopback interfaces too since our Android devices often don't
+ // have those.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ PeerConnectionFactory factory =
+ PeerConnectionFactory.builder().setOptions(options).createPeerConnectionFactory();
+
+ List<PeerConnection.IceServer> iceServers = new ArrayList<>();
+ iceServers.add(
+ PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
+ iceServers.add(PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .createIceServer());
+
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(rtcConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+ PeerConnection answeringPC = factory.createPeerConnection(rtcConfig, answeringExpectations);
+ assertNotNull(answeringPC);
+
+ offeringExpectations.expectRenegotiationNeeded();
+ DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());
+ assertEquals("offeringDC", offeringDC.label());
+
+ offeringExpectations.setDataChannel(offeringDC);
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ // SCTP DataChannels are announced via OPEN messages over the established
+ // connection (not via SDP), so answeringExpectations can only register its
+ // expectation of the channel later, during ICE (below).
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ offeringExpectations.expectIceCandidates(2);
+ answeringExpectations.expectIceCandidates(2);
+
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+ // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
+ assertEquals(offeringPC.getRemoteDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getLocalDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getRemoteDescription().type, offerSdp.type);
+
+ offeringExpectations.expectStateChange(DataChannel.State.OPEN);
+ // See commentary about SCTP DataChannels above for why this is here.
+ answeringExpectations.expectDataChannel("offeringDC");
+ answeringExpectations.expectStateChange(DataChannel.State.OPEN);
+
+ // Wait for at least one ICE candidate from the offering PC and forward all
+ // gathered candidates to the answering PC.
+ for (IceCandidate candidate : offeringExpectations.getAtLeastOneIceCandidate()) {
+ answeringPC.addIceCandidate(candidate);
+ }
+
+ // Wait for at least one ICE candidate from the answering PC and forward all
+ // gathered candidates to the offering PC.
+ for (IceCandidate candidate : answeringExpectations.getAtLeastOneIceCandidate()) {
+ offeringPC.addIceCandidate(candidate);
+ }
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+
+ // Test send & receive UTF-8 text.
+ answeringExpectations.expectMessage(
+ ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ DataChannel.Buffer buffer =
+ new DataChannel.Buffer(ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ assertTrue(offeringExpectations.dataChannel.send(buffer));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Construct this binary message two different ways to ensure no
+ // shortcuts are taken.
+ ByteBuffer expectedBinaryMessage = ByteBuffer.allocateDirect(5);
+ for (byte i = 1; i < 6; ++i) {
+ expectedBinaryMessage.put(i);
+ }
+ expectedBinaryMessage.flip();
+ offeringExpectations.expectMessage(expectedBinaryMessage, true);
+ assertTrue(answeringExpectations.dataChannel.send(
+ new DataChannel.Buffer(ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}), true)));
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ offeringExpectations.dataChannel.close();
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Free the Java-land objects and collect them.
+ shutdownPC(offeringPC, offeringExpectations);
+ offeringPC = null;
+ shutdownPC(answeringPC, answeringExpectations);
+ answeringPC = null;
+ factory.dispose();
+ System.gc();
+ }
+
+ // Tests that ICE candidates disallowed by the current ICE transport type, and
+ // thus not signaled to the gathering PeerConnection, can be surfaced later by
+ // changing the configuration to an ICE transport type that allows them, when
+ // RTCConfiguration.surfaceIceCandidatesOnIceTransportTypeChanged is true.
+ @Test
+ @SmallTest
+ public void testSurfaceIceCandidatesWhenIceTransportTypeChanged() throws Exception {
+ // For this test, we only need one PeerConnection to observe the behavior of gathering, and we
+ // create only the offering PC below.
+ //
+ // Allow loopback interfaces too since our Android devices often don't
+ // have those.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ PeerConnectionFactory factory =
+ PeerConnectionFactory.builder().setOptions(options).createPeerConnectionFactory();
+
+ PeerConnection.RTCConfiguration rtcConfig =
+ new PeerConnection.RTCConfiguration(Collections.emptyList());
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+ // NONE prevents any candidates from being signaled to the PC.
+ rtcConfig.iceTransportsType = PeerConnection.IceTransportsType.NONE;
+ // Continual gathering must be enabled to allow candidates to be surfaced
+ // when the ICE transport type changes.
+ rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
+ rtcConfig.surfaceIceCandidatesOnIceTransportTypeChanged = true;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(rtcConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ // Create a data channel and set local description to kick off the ICE candidate gathering.
+ offeringExpectations.expectRenegotiationNeeded();
+ DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());
+ assertEquals("offeringDC", offeringDC.label());
+
+ offeringExpectations.setDataChannel(offeringDC);
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
+
+ // Wait until we satisfy all expectations in the setup.
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Add the expectation of gathering at least one candidate. This expectation
+ // should not be met, because the transport type is NONE.
+ offeringExpectations.expectIceCandidates(1);
+ assertFalse(offeringExpectations.waitForAllExpectationsToBeSatisfied(SHORT_TIMEOUT_SECONDS));
+
+ // Change the transport type; this time the gathering expectation should be
+ // met.
+ rtcConfig.iceTransportsType = PeerConnection.IceTransportsType.ALL;
+ offeringPC.setConfiguration(rtcConfig);
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ }
+
+ @Test
+ @MediumTest
+ public void testTrackRemovalAndAddition() throws Exception {
+ // Allow loopback interfaces too since our Android devices often don't
+ // have those.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ PeerConnectionFactory factory = PeerConnectionFactory.builder()
+ .setOptions(options)
+ .setVideoEncoderFactory(new SoftwareVideoEncoderFactory())
+ .setVideoDecoderFactory(new SoftwareVideoDecoderFactory())
+ .createPeerConnectionFactory();
+
+ List<PeerConnection.IceServer> iceServers = new ArrayList<>();
+ iceServers.add(
+ PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
+
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ // TODO(https://crbug.com/webrtc/13528): Update test not to use Plan B.
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.PLAN_B;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(rtcConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+ PeerConnection answeringPC = factory.createPeerConnection(rtcConfig, answeringExpectations);
+ assertNotNull(answeringPC);
+
+ // We want to use the same camera for offerer & answerer, so create it here
+ // instead of in addTracksToPC.
+ final CameraEnumerator enumerator = new Camera1Enumerator(false /* captureToTexture */);
+ final VideoCapturer videoCapturer =
+ enumerator.createCapturer(enumerator.getDeviceNames()[0], null /* eventsHandler */);
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper", /* sharedContext= */ null);
+ final VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
+ videoCapturer.initialize(surfaceTextureHelper, InstrumentationRegistry.getTargetContext(),
+ videoSource.getCapturerObserver());
+ videoCapturer.startCapture(640, 480, 30);
+
+ // Add offerer media stream.
+ offeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> oLMS =
+ addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
+ "offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations),
+ /*useAddStream=*/true);
+
+ offeringExpectations.expectAddTrack(2);
+ answeringExpectations.expectAddTrack(2);
+ // Create offer.
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ // Set local description for offerer.
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringExpectations.expectIceCandidates(2);
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Set remote description for answerer.
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ answeringExpectations.expectAddStream("offeredMediaStream");
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Add answerer media stream.
+ answeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
+ "answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
+ new ExpectedResolutionSetter(offeringExpectations),
+ /*useAddStream=*/true);
+
+ // Create answer.
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ // Set local description for answerer.
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringExpectations.expectIceCandidates(2);
+ answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Set remote description for offerer.
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ offeringExpectations.expectAddStream("answeredMediaStream");
+
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+ // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ //
+ // offeringExpectations.expectIceConnectionChange(
+ // IceConnectionState.COMPLETED);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Wait for at least one ICE candidate from the offering PC and forward all
+ // gathered candidates to the answering PC.
+ for (IceCandidate candidate : offeringExpectations.getAtLeastOneIceCandidate()) {
+ answeringPC.addIceCandidate(candidate);
+ }
+
+ // Wait for at least one ICE candidate from the answering PC and forward all
+ // gathered candidates to the offering PC.
+ for (IceCandidate candidate : answeringExpectations.getAtLeastOneIceCandidate()) {
+ offeringPC.addIceCandidate(candidate);
+ }
+
+ // Wait for one frame of the correct size to be delivered.
+ // Otherwise we could get a dummy black frame of unexpected size when the
+ // video track is removed.
+ offeringExpectations.expectFramesDelivered(1);
+ answeringExpectations.expectFramesDelivered(1);
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+
+ // Now do another negotiation, removing the video track from one peer.
+ // This previously caused a crash on pc.dispose().
+ // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5128
+ VideoTrack offererVideoTrack = oLMS.get().videoTracks.get(0);
+ // Note that when we call removeTrack, we regain responsibility for
+ // disposing of the track.
+ offeringExpectations.expectRenegotiationNeeded();
+ oLMS.get().removeTrack(offererVideoTrack);
+ negotiate(offeringPC, offeringExpectations, answeringPC, answeringExpectations);
+
+ // Make sure the track was really removed.
+ MediaStream aRMS = answeringExpectations.gotRemoteStreams.iterator().next();
+ assertTrue(aRMS.videoTracks.isEmpty());
+
+ // Add the video track back to test whether the answeringPC creates a new
+ // track for the updated remote description.
+ offeringExpectations.expectRenegotiationNeeded();
+ oLMS.get().addTrack(offererVideoTrack);
+ // The answeringPC sets the updated remote description with a track added, so
+ // the onAddTrack callback is expected to be called once.
+ answeringExpectations.expectAddTrack(1);
+ offeringExpectations.expectAddTrack(0);
+ negotiate(offeringPC, offeringExpectations, answeringPC, answeringExpectations);
+
+ // Finally, remove both the audio and video tracks, which should completely
+ // remove the remote stream. This used to trigger an assert.
+ // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5128
+ offeringExpectations.expectRenegotiationNeeded();
+ oLMS.get().removeTrack(offererVideoTrack);
+ AudioTrack offererAudioTrack = oLMS.get().audioTracks.get(0);
+ offeringExpectations.expectRenegotiationNeeded();
+ oLMS.get().removeTrack(offererAudioTrack);
+
+ answeringExpectations.expectRemoveStream("offeredMediaStream");
+ negotiate(offeringPC, offeringExpectations, answeringPC, answeringExpectations);
+
+ // Make sure the stream was really removed.
+ assertTrue(answeringExpectations.gotRemoteStreams.isEmpty());
+
+ // Free the Java-land objects and collect them.
+ shutdownPC(offeringPC, offeringExpectations);
+ offeringPC = null;
+ shutdownPC(answeringPC, answeringExpectations);
+ answeringPC = null;
+ offererVideoTrack.dispose();
+ offererAudioTrack.dispose();
+ videoCapturer.stopCapture();
+ videoCapturer.dispose();
+ videoSource.dispose();
+ surfaceTextureHelper.dispose();
+ factory.dispose();
+ System.gc();
+ }
+
+ /**
+ * Test that a Java MediaStream is updated when the native stream is.
+ * <p>
+ * Specifically, test that when remote tracks are indicated as being added or
+ * removed from a MediaStream (via "a=ssrc" or "a=msid" in a remote
+ * description), the existing remote MediaStream object is updated.
+ * <p>
+ * This test starts with just an audio track, adds a video track, then
+ * removes it. It only applies remote offers, which is sufficient to test
+ * this functionality and simplifies the test. This means that no media will
+ * actually be sent/received; we're just testing that the Java MediaStream
+ * object gets updated when the native object changes.
+ */
+ @Test
+ @MediumTest
+ public void testRemoteStreamUpdatedWhenTracksAddedOrRemoved() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder()
+ .setVideoEncoderFactory(new SoftwareVideoEncoderFactory())
+ .setVideoDecoderFactory(new SoftwareVideoDecoderFactory())
+ .createPeerConnectionFactory();
+
+ // TODO(https://crbug.com/webrtc/13528): Update test not to use Plan B.
+ PeerConnection.RTCConfiguration planBConfig =
+ new PeerConnection.RTCConfiguration(Collections.emptyList());
+ planBConfig.sdpSemantics = PeerConnection.SdpSemantics.PLAN_B;
+
+ // Use OfferToReceiveAudio/Video to ensure every offer has an audio and
+ // video m= section. This simplifies the test: we don't have to actually
+ // apply the offer to "offeringPC"; it's just used as an SDP factory.
+ MediaConstraints offerConstraints = new MediaConstraints();
+ offerConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
+ offerConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
+
+ // This PeerConnection will only be used to generate offers.
+ ObserverExpectations offeringExpectations = new ObserverExpectations("offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(planBConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations expectations = new ObserverExpectations("PC under test");
+ PeerConnection pcUnderTest = factory.createPeerConnection(planBConfig, expectations);
+ assertNotNull(pcUnderTest);
+
+ // Add offerer media stream with just an audio track.
+ MediaStream localStream = factory.createLocalMediaStream("stream");
+ AudioTrack localAudioTrack =
+ factory.createAudioTrack("audio", factory.createAudioSource(new MediaConstraints()));
+ localStream.addTrack(localAudioTrack);
+ offeringExpectations.expectRenegotiationNeeded();
+ RtpSender audioSender =
+ offeringPC.addTrack(localAudioTrack, Collections.singletonList(localStream.getId()));
+ // Create offer.
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, offerConstraints);
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+
+ // Apply remote offer to PC under test.
+ sdpLatch = new SdpObserverLatch();
+ expectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ expectations.expectAddStream("stream");
+ pcUnderTest.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ // Sanity check that we get one remote stream with one audio track.
+ MediaStream remoteStream = expectations.gotRemoteStreams.iterator().next();
+ assertEquals(remoteStream.audioTracks.size(), 1);
+ assertEquals(remoteStream.videoTracks.size(), 0);
+
+ // Add a video track...
+ final CameraEnumerator enumerator = new Camera1Enumerator(false /* captureToTexture */);
+ final VideoCapturer videoCapturer =
+ enumerator.createCapturer(enumerator.getDeviceNames()[0], null /* eventsHandler */);
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper", /* sharedContext= */ null);
+ final VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
+ videoCapturer.initialize(surfaceTextureHelper, InstrumentationRegistry.getTargetContext(),
+ videoSource.getCapturerObserver());
+ VideoTrack videoTrack = factory.createVideoTrack("video", videoSource);
+ offeringExpectations.expectRenegotiationNeeded();
+ localStream.addTrack(videoTrack);
+ offeringPC.addTrack(videoTrack, Collections.singletonList(localStream.getId()));
+ // ... and create an updated offer.
+ sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, offerConstraints);
+ assertTrue(sdpLatch.await());
+ offerSdp = sdpLatch.getSdp();
+
+ // Apply remote offer with new video track to PC under test.
+ sdpLatch = new SdpObserverLatch();
+ pcUnderTest.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ // The remote stream should now have a video track.
+ assertEquals(remoteStream.audioTracks.size(), 1);
+ assertEquals(remoteStream.videoTracks.size(), 1);
+
+ // Finally, create another offer with the audio track removed.
+ offeringExpectations.expectRenegotiationNeeded();
+ localStream.removeTrack(localAudioTrack);
+ localAudioTrack.dispose();
+ offeringPC.removeTrack(audioSender);
+ sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, offerConstraints);
+ assertTrue(sdpLatch.await());
+ offerSdp = sdpLatch.getSdp();
+
+ // Apply remote offer with just a video track to PC under test.
+ sdpLatch = new SdpObserverLatch();
+ pcUnderTest.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ // The remote stream should no longer have an audio track.
+ assertEquals(remoteStream.audioTracks.size(), 0);
+ assertEquals(remoteStream.videoTracks.size(), 1);
+
+ // Free the Java-land objects. Video capturer and source aren't owned by
+ // the PeerConnection and need to be disposed separately.
+ // TODO(deadbeef): Should all these events really occur on disposal?
+ // "Gathering complete" is especially odd since gathering never started.
+ // Note that this test isn't meant to test these events, but we must
+ // register these expectations or the test will crash.
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CLOSED);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CLOSED);
+ offeringExpectations.expectSignalingChange(SignalingState.CLOSED);
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ offeringPC.dispose();
+ expectations.expectIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectStandardizedIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectSignalingChange(SignalingState.CLOSED);
+ expectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ pcUnderTest.dispose();
+ videoCapturer.dispose();
+ videoSource.dispose();
+ surfaceTextureHelper.dispose();
+ factory.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testRollback() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ PeerConnection.RTCConfiguration config =
+ new PeerConnection.RTCConfiguration(Collections.emptyList());
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection pc = factory.createPeerConnection(config, offeringExpectations);
+
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ pc.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offer = sdpLatch.getSdp();
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ pc.setLocalDescription(sdpLatch, offer);
+ assertTrue(sdpLatch.await());
+
+ SessionDescription rollback = new SessionDescription(SessionDescription.Type.ROLLBACK, "");
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ // TODO(bugs.webrtc.org/11970): determine if triggering ONN (twice even) is correct.
+ offeringExpectations.expectRenegotiationNeeded();
+ offeringExpectations.expectRenegotiationNeeded();
+ pc.setLocalDescription(sdpLatch, rollback);
+ assertTrue(sdpLatch.await());
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ }
+
+ private static void negotiate(PeerConnection offeringPC,
+ ObserverExpectations offeringExpectations, PeerConnection answeringPC,
+ ObserverExpectations answeringExpectations) {
+ // Create offer.
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ // Set local description for offerer.
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Set remote description for answerer.
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Create answer.
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ // Set local description for answerer.
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Set remote description for offerer.
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+ }
+
+ @SuppressWarnings("deprecation") // TODO(sakal): getStats is deprecated
+ private static void shutdownPC(PeerConnection pc, ObserverExpectations expectations) {
+ if (expectations.dataChannel != null) {
+ expectations.dataChannel.unregisterObserver();
+ expectations.dataChannel.dispose();
+ }
+
+ // Call getStats (old implementation) before shutting down PC.
+ expectations.expectOldStatsCallback();
+ assertTrue(pc.getStats(expectations, null /* track */));
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Call the new getStats implementation as well.
+ expectations.expectNewStatsCallback();
+ pc.getStats(expectations);
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ expectations.expectIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectStandardizedIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectConnectionChange(PeerConnectionState.CLOSED);
+ expectations.expectSignalingChange(SignalingState.CLOSED);
+ pc.close();
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Call getStats (old implementation) after calling close(). Should still
+ // work.
+ expectations.expectOldStatsCallback();
+ assertTrue(pc.getStats(expectations, null /* track */));
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ Logging.d(TAG, "FYI stats: ");
+ int reportIndex = -1;
+ for (StatsReport[] reports : expectations.takeStatsReports()) {
+ Logging.d(TAG, " Report #" + (++reportIndex));
+ for (int i = 0; i < reports.length; ++i) {
+ Logging.d(TAG, " " + reports[i].toString());
+ }
+ }
+ assertEquals(1, reportIndex);
+ Logging.d(TAG, "End stats.");
+
+ pc.dispose();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java
new file mode 100644
index 0000000000..8eebfb5878
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import org.junit.Test;
+
+public class PeerConnectionFactoryTest {
+ @SmallTest
+ @Test
+ public void testInitialize() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @SmallTest
+ @Test
+ public void testInitializeTwice() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @SmallTest
+ @Test
+ public void testInitializeTwiceWithTracer() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setEnableInternalTracer(true)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setEnableInternalTracer(true)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @SmallTest
+ @Test
+ public void testInitializeWithTracerAndShutdown() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setEnableInternalTracer(true)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ PeerConnectionFactory.shutdownInternalTracer();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java
new file mode 100644
index 0000000000..7ced991859
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static java.util.Collections.singletonList;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.util.Arrays;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.PeerConnection.TlsCertPolicy;
+
+/** Unit tests for {@link PeerConnection}. */
+public class PeerConnectionTest {
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @Test
+ @SmallTest
+ public void testIceServerChanged() throws Exception {
+ PeerConnection.IceServer iceServer1 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Same as iceServer1.
+ PeerConnection.IceServer iceServer2 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the url.
+ PeerConnection.IceServer iceServer3 =
+ PeerConnection.IceServer.builder("turn:fake.example2.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the username.
+ PeerConnection.IceServer iceServer4 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername2")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the password.
+ PeerConnection.IceServer iceServer5 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword2")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the TLS certificate policy.
+ PeerConnection.IceServer iceServer6 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_INSECURE_NO_CHECK)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the hostname.
+ PeerConnection.IceServer iceServer7 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_INSECURE_NO_CHECK)
+ .setHostname("fakeHostname2")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the TLS ALPN.
+ PeerConnection.IceServer iceServer8 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_INSECURE_NO_CHECK)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol2"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the TLS elliptic curve.
+ PeerConnection.IceServer iceServer9 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_INSECURE_NO_CHECK)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve2"))
+ .createIceServer();
+
+ assertTrue(iceServer1.equals(iceServer2));
+ assertFalse(iceServer1.equals(iceServer3));
+ assertFalse(iceServer1.equals(iceServer4));
+ assertFalse(iceServer1.equals(iceServer5));
+ assertFalse(iceServer1.equals(iceServer6));
+ assertFalse(iceServer1.equals(iceServer7));
+ assertFalse(iceServer1.equals(iceServer8));
+ assertFalse(iceServer1.equals(iceServer9));
+ }
+
+  // TODO(fischman) More test ideas:
+ // - Test that PC.removeStream() works; requires a second
+ // createOffer/createAnswer dance.
+ // - audit each place that uses `constraints` for specifying non-trivial
+ // constraints (and ensure they're honored).
+ // - test error cases
+ // - ensure reasonable coverage of jni code is achieved. Coverage is
+ // extra-important because of all the free-text (class/method names, etc)
+ // in JNI-style programming; make sure no typos!
+ // - Test that shutdown mid-interaction is crash-free.
+
+ // Tests that the JNI glue between Java and C++ does not crash when creating a PeerConnection.
+ @Test
+ @SmallTest
+ public void testCreationWithConfig() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ List<PeerConnection.IceServer> iceServers = Arrays.asList(
+ PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer(),
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .createIceServer());
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(iceServers);
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ // Test configuration options.
+ config.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
+
+ PeerConnection offeringPC =
+ factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+ assertNotNull(offeringPC);
+ }
+
+ @Test
+ @SmallTest
+ public void testCreationWithCertificate() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList());
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ // Test certificate.
+ RtcCertificatePem originalCert = RtcCertificatePem.generateCertificate();
+ config.certificate = originalCert;
+
+ PeerConnection offeringPC =
+ factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+
+ RtcCertificatePem restoredCert = offeringPC.getCertificate();
+ assertEquals(originalCert.privateKey, restoredCert.privateKey);
+ assertEquals(originalCert.certificate, restoredCert.certificate);
+ }
+
+ @Test
+ @SmallTest
+ public void testCreationWithCryptoOptions() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList());
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ assertNull(config.cryptoOptions);
+
+ CryptoOptions cryptoOptions = CryptoOptions.builder()
+ .setEnableGcmCryptoSuites(true)
+ .setEnableAes128Sha1_32CryptoCipher(true)
+ .setEnableEncryptedRtpHeaderExtensions(true)
+ .setRequireFrameEncryption(true)
+ .createCryptoOptions();
+ config.cryptoOptions = cryptoOptions;
+
+ PeerConnection offeringPC =
+ factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+ assertNotNull(offeringPC);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java
new file mode 100644
index 0000000000..8b1cd67051
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.webrtc.RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
+import static org.webrtc.RendererCommon.ScalingType.SCALE_ASPECT_FILL;
+import static org.webrtc.RendererCommon.ScalingType.SCALE_ASPECT_FIT;
+import static org.webrtc.RendererCommon.getDisplaySize;
+import static org.webrtc.RendererCommon.getLayoutMatrix;
+
+import android.graphics.Point;
+import androidx.test.filters.SmallTest;
+import org.junit.Test;
+
+public class RendererCommonTest {
+ @Test
+ @SmallTest
+ public void testDisplaySizeNoFrame() {
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 0, 0));
+ }
+
+ @Test
+ @SmallTest
+ public void testDisplaySizeDegenerateAspectRatio() {
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 1280, 720));
+ }
+
+ @Test
+ @SmallTest
+ public void testZeroDisplaySize() {
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 0, 0));
+ }
+
+ @Test
+ @SmallTest
+ public void testDisplaySizePerfectFit() {
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 720, 1280));
+ }
+
+ @Test
+ @SmallTest
+ public void testLandscapeVideoInPortraitDisplay() {
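+    // For a 16:9 frame on a 720x1280 display: FIT letterboxes to 720x405 (720 / (16 / 9) = 405),
+    // FILL covers the whole display by cropping the frame, and BALANCED compromises between the
+    // two, here yielding 720x720.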
+ assertEquals(new Point(720, 405), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 720, 1280));
+ assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 720, 1280));
+ }
+
+ @Test
+ @SmallTest
+ public void testPortraitVideoInLandscapeDisplay() {
+ assertEquals(new Point(405, 720), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 1280, 720));
+ assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 1280, 720));
+ }
+
+ @Test
+ @SmallTest
+ public void testFourToThreeVideoInSixteenToNineDisplay() {
+ assertEquals(new Point(960, 720), getDisplaySize(SCALE_ASPECT_FIT, 4.0f / 3, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 4.0f / 3, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 4.0f / 3, 1280, 720));
+ }
+
+  // Keep only two rounded decimals to make float comparisons robust.
+ private static double[] round(float[] array) {
+ assertEquals(16, array.length);
+ final double[] doubleArray = new double[16];
+ for (int i = 0; i < 16; ++i) {
+ doubleArray[i] = Math.round(100 * array[i]) / 100.0;
+ }
+ return doubleArray;
+ }
+
+ // Brief summary about matrix transformations:
+ // A coordinate p = [u, v, 0, 1] is transformed by matrix m like this p' = [u', v', 0, 1] = m * p.
+ // OpenGL uses column-major order, so:
+ // u' = u * m[0] + v * m[4] + m[12].
+ // v' = u * m[1] + v * m[5] + m[13].
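+  //
+  // For example, in the mirror case tested below, m[0] = -1, m[5] = 1 and m[12] = 1, giving
+  // u' = -u + 1 = 1 - u while v' = v is unchanged, i.e. a horizontal flip.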
+
+ @Test
+ @SmallTest
+ public void testLayoutMatrixDefault() {
+    final float[] layoutMatrix = getLayoutMatrix(false, 1.0f, 1.0f);
+ // Assert:
+ // u' = u.
+ // v' = v.
+ // clang-format off
+ assertArrayEquals(new double[] {
+ 1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 0, 0, 1}, round(layoutMatrix), 0.0);
+ // clang-format on
+ }
+
+ @Test
+ @SmallTest
+ public void testLayoutMatrixMirror() {
+    final float[] layoutMatrix = getLayoutMatrix(true, 1.0f, 1.0f);
+ // Assert:
+ // u' = 1 - u.
+ // v' = v.
+ // clang-format off
+ assertArrayEquals(new double[] {
+ -1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 1, 0, 0, 1}, round(layoutMatrix), 0.0);
+ // clang-format on
+ }
+
+ @Test
+ @SmallTest
+ public void testLayoutMatrixScale() {
+ // Video has aspect ratio 2, but layout is square. This will cause only the center part of the
+ // video to be visible, i.e. the u coordinate will go from 0.25 to 0.75 instead of from 0 to 1.
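+    // (Derivation sketch: the visible fraction of u is layoutAspect / videoAspect = 0.5, so the
+    // matrix scales u by 0.5 and centers the crop with an offset of (1 - 0.5) / 2 = 0.25.)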
+    final float[] layoutMatrix = getLayoutMatrix(false, 2.0f, 1.0f);
+ // Assert:
+ // u' = 0.25 + 0.5 u.
+ // v' = v.
+ // clang-format off
+ assertArrayEquals(new double[] {
+ 0.5, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0.25, 0, 0, 1}, round(layoutMatrix), 0.0);
+ // clang-format on
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java
new file mode 100644
index 0000000000..4127bb2d4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import androidx.test.filters.SmallTest;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.PeerConnection;
+import org.webrtc.RtcCertificatePem;
+
+/** Tests for RtcCertificatePem.java. */
+public class RtcCertificatePemTest {
+ @Before
+ public void setUp() {
+ System.loadLibrary(TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testConstructor() {
+ RtcCertificatePem original = RtcCertificatePem.generateCertificate();
+ RtcCertificatePem recreated = new RtcCertificatePem(original.privateKey, original.certificate);
+ assertThat(original.privateKey).isEqualTo(recreated.privateKey);
+ assertThat(original.certificate).isEqualTo(recreated.certificate);
+ }
+
+ @Test
+ @SmallTest
+ public void testGenerateCertificateDefaults() {
+ RtcCertificatePem rtcCertificate = RtcCertificatePem.generateCertificate();
+ assertThat(rtcCertificate.privateKey).isNotEmpty();
+ assertThat(rtcCertificate.certificate).isNotEmpty();
+ }
+
+ @Test
+ @SmallTest
+ public void testGenerateCertificateCustomKeyTypeDefaultExpires() {
+ RtcCertificatePem rtcCertificate =
+ RtcCertificatePem.generateCertificate(PeerConnection.KeyType.RSA);
+ assertThat(rtcCertificate.privateKey).isNotEmpty();
+ assertThat(rtcCertificate.certificate).isNotEmpty();
+ }
+
+ @Test
+ @SmallTest
+ public void testGenerateCertificateCustomExpiresDefaultKeyType() {
+ RtcCertificatePem rtcCertificate = RtcCertificatePem.generateCertificate(60 * 60 * 24);
+ assertThat(rtcCertificate.privateKey).isNotEmpty();
+ assertThat(rtcCertificate.certificate).isNotEmpty();
+ }
+
+ @Test
+ @SmallTest
+ public void testGenerateCertificateCustomKeyTypeAndExpires() {
+ RtcCertificatePem rtcCertificate =
+ RtcCertificatePem.generateCertificate(PeerConnection.KeyType.RSA, 60 * 60 * 24);
+ assertThat(rtcCertificate.privateKey).isNotEmpty();
+ assertThat(rtcCertificate.certificate).isNotEmpty();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java
new file mode 100644
index 0000000000..9f315d5dc3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.util.Arrays;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.RtpParameters.DegradationPreference;
+
+/** Unit tests for {@link RtpSender}. */
+public class RtpSenderTest {
+ private PeerConnectionFactory factory;
+ private PeerConnection pc;
+
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+
+ factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList());
+    // RtpTransceiver is part of the Unified Plan semantics.
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+ pc = factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+ }
+
+  /** Tests that the enum values for DegradationPreference stay consistent. */
+ @Test
+ @SmallTest
+ public void testSetDegradationPreference() throws Exception {
+ RtpTransceiver transceiver = pc.addTransceiver(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO);
+ RtpSender sender = transceiver.getSender();
+
+ RtpParameters parameters = sender.getParameters();
+ assertNotNull(parameters);
+ assertNull(parameters.degradationPreference);
+
+ parameters.degradationPreference = DegradationPreference.MAINTAIN_FRAMERATE;
+ assertTrue(sender.setParameters(parameters));
+ parameters = sender.getParameters();
+ assertEquals(DegradationPreference.MAINTAIN_FRAMERATE, parameters.degradationPreference);
+
+ parameters.degradationPreference = DegradationPreference.MAINTAIN_RESOLUTION;
+ assertTrue(sender.setParameters(parameters));
+ parameters = sender.getParameters();
+ assertEquals(DegradationPreference.MAINTAIN_RESOLUTION, parameters.degradationPreference);
+
+ parameters.degradationPreference = DegradationPreference.BALANCED;
+ assertTrue(sender.setParameters(parameters));
+ parameters = sender.getParameters();
+ assertEquals(DegradationPreference.BALANCED, parameters.degradationPreference);
+
+ parameters.degradationPreference = DegradationPreference.DISABLED;
+ assertTrue(sender.setParameters(parameters));
+ parameters = sender.getParameters();
+ assertEquals(DegradationPreference.DISABLED, parameters.degradationPreference);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpTransceiverTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpTransceiverTest.java
new file mode 100644
index 0000000000..a53ff20f1c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpTransceiverTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.RtpParameters.Encoding;
+import org.webrtc.RtpTransceiver.RtpTransceiverInit;
+
+/** Unit tests for {@link RtpTransceiver}. */
+public class RtpTransceiverTest {
+ private PeerConnectionFactory factory;
+ private PeerConnection pc;
+
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+
+ factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList());
+    // RtpTransceiver is part of the Unified Plan semantics.
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+ pc = factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+ }
+
+ /** Test that RIDs get set in the RTP sender when passed in through an RtpTransceiverInit. */
+ @Test
+ @SmallTest
+ public void testSetRidInSimulcast() throws Exception {
+ List<Encoding> encodings = new ArrayList<Encoding>();
+ encodings.add(new Encoding("F", true, null));
+ encodings.add(new Encoding("H", true, null));
+
+ RtpTransceiverInit init = new RtpTransceiverInit(
+ RtpTransceiver.RtpTransceiverDirection.SEND_ONLY, Collections.emptyList(), encodings);
+ RtpTransceiver transceiver =
+ pc.addTransceiver(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO, init);
+
+ RtpSender sender = transceiver.getSender();
+ RtpParameters parameters = sender.getParameters();
+ List<Encoding> sendEncodings = parameters.getEncodings();
+ assertEquals(2, sendEncodings.size());
+ assertEquals("F", sendEncodings.get(0).getRid());
+ assertEquals("H", sendEncodings.get(1).getRid());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java
new file mode 100644
index 0000000000..9781d03999
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -0,0 +1,518 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.opengl.GLES20;
+import android.os.SystemClock;
+import androidx.annotation.Nullable;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.concurrent.CountDownLatch;
+import org.junit.Before;
+import org.junit.Test;
+
+public class SurfaceTextureHelperTest {
+ /**
+ * Mock texture listener with blocking wait functionality.
+ */
+ public static final class MockTextureListener implements VideoSink {
+ private final Object lock = new Object();
+ private @Nullable VideoFrame.TextureBuffer textureBuffer;
+    // Thread on which frames are expected to be received.
+ private final @Nullable Thread expectedThread;
+
+ MockTextureListener() {
+ this.expectedThread = null;
+ }
+
+ MockTextureListener(Thread expectedThread) {
+ this.expectedThread = expectedThread;
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ if (expectedThread != null && Thread.currentThread() != expectedThread) {
+        throw new IllegalStateException("onFrame() called on the wrong thread.");
+ }
+ synchronized (lock) {
+ this.textureBuffer = (VideoFrame.TextureBuffer) frame.getBuffer();
+ textureBuffer.retain();
+ lock.notifyAll();
+ }
+ }
+
+ /** Wait indefinitely for a new textureBuffer. */
+ public VideoFrame.TextureBuffer waitForTextureBuffer() throws InterruptedException {
+ synchronized (lock) {
+ while (true) {
+ final VideoFrame.TextureBuffer textureBufferToReturn = textureBuffer;
+ if (textureBufferToReturn != null) {
+ textureBuffer = null;
+ return textureBufferToReturn;
+ }
+ lock.wait();
+ }
+ }
+ }
+
+ /** Make sure we get no frame in the specified time period. */
+ public void assertNoFrameIsDelivered(final long waitPeriodMs) throws InterruptedException {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = waitPeriodMs;
+ synchronized (lock) {
+ while (textureBuffer == null && timeRemainingMs > 0) {
+ lock.wait(timeRemainingMs);
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = waitPeriodMs - elapsedTimeMs;
+ }
+ assertTrue(textureBuffer == null);
+ }
+ }
+ }
+
+ /** Assert that two integers are close, with difference at most
+ * {@code threshold}. */
+ public static void assertClose(int threshold, int expected, int actual) {
+ if (Math.abs(expected - actual) <= threshold)
+ return;
+ fail("Not close enough, threshold " + threshold + ". Expected: " + expected + " Actual: "
+ + actual);
+ }
+
+ @Before
+ public void setUp() {
+ // Load the JNI library for textureToYuv.
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ /**
+ * Test normal use by receiving three uniform texture frames. Texture frames are returned as early
+ * as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
+ * buffer and reading it back with glReadPixels().
+ */
+ @Test
+ @MediumTest
+ public void testThreeConstantColorFrames() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(width, height);
+ final GlRectDrawer drawer = new GlRectDrawer();
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ surfaceTextureHelper.setTextureSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. `eglOesBase` has the SurfaceTexture in
+ // `surfaceTextureHelper` as the target EGLSurface.
+ final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    assertEquals(width, eglOesBase.surfaceWidth());
+    assertEquals(height, eglOesBase.surfaceHeight());
+
+    final int[] red = new int[] {79, 144, 185};
+    final int[] green = new int[] {66, 210, 162};
+    final int[] blue = new int[] {161, 117, 158};
+ // Draw three frames.
+ for (int i = 0; i < 3; ++i) {
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglOesBase.makeCurrent();
+ GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+
+ // Wait for an OES texture to arrive and draw it onto the pixel buffer.
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+ eglBase.makeCurrent();
+ drawer.drawOes(textureBuffer.getTextureId(),
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(textureBuffer.getTransformMatrix()),
+ width, height, 0, 0, width, height);
+ textureBuffer.release();
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g.
+ // Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is expected constant color.
+ while (rgbaData.hasRemaining()) {
+        assertEquals(red[i], rgbaData.get() & 0xFF);
+        assertEquals(green[i], rgbaData.get() & 0xFF);
+        assertEquals(blue[i], rgbaData.get() & 0xFF);
+        assertEquals(255, rgbaData.get() & 0xFF);
+ }
+ }
+
+ drawer.release();
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+
+ /**
+ * Test disposing the SurfaceTextureHelper while holding a pending texture frame. The pending
+ * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
+ * buffer and reading it back with glReadPixels().
+ */
+ @Test
+ @MediumTest
+ public void testLateReturnFrame() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(width, height);
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ surfaceTextureHelper.setTextureSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. `eglOesBase` has the SurfaceTexture in
+ // `surfaceTextureHelper` as the target EGLSurface.
+ final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    assertEquals(width, eglOesBase.surfaceWidth());
+    assertEquals(height, eglOesBase.surfaceHeight());
+
+ final int red = 79;
+ final int green = 66;
+ final int blue = 161;
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglOesBase.makeCurrent();
+ GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+ eglOesBase.release();
+
+ // Wait for OES texture frame.
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+    // Disconnect while holding the frame.
+ surfaceTextureHelper.dispose();
+
+ // Draw the pending texture frame onto the pixel buffer.
+ eglBase.makeCurrent();
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawOes(textureBuffer.getTextureId(),
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(textureBuffer.getTransformMatrix()),
+ width, height, 0, 0, width, height);
+ drawer.release();
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+ eglBase.release();
+
+ // Assert rendered image is expected constant color.
+ while (rgbaData.hasRemaining()) {
+      assertEquals(red, rgbaData.get() & 0xFF);
+      assertEquals(green, rgbaData.get() & 0xFF);
+      assertEquals(blue, rgbaData.get() & 0xFF);
+      assertEquals(255, rgbaData.get() & 0xFF);
+ }
+ // Late frame return after everything has been disposed and released.
+ textureBuffer.release();
+ }
+
+ /**
+ * Test disposing the SurfaceTextureHelper, but keep trying to produce more texture frames. No
+ * frames should be delivered to the listener.
+ */
+ @Test
+ @MediumTest
+ public void testDispose() throws InterruptedException {
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ // Create EglBase with the SurfaceTexture as target EGLSurface.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+ surfaceTextureHelper.setTextureSize(/* textureWidth= */ 32, /* textureHeight= */ 32);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ // Assert no frame has been received yet.
+ listener.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+ // Draw and wait for one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+ listener.waitForTextureBuffer().release();
+
+ // Dispose - we should not receive any textures after this.
+ surfaceTextureHelper.dispose();
+
+ // Draw one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+    // swapBuffers() should not trigger onTextureFrameAvailable() because dispose() has been
+    // called.
+ // Assert that no OES texture was delivered.
+ listener.assertNoFrameIsDelivered(/* waitPeriodMs= */ 500);
+
+ eglBase.release();
+ }
+
+ /**
+   * Test disposing the SurfaceTextureHelper immediately after it has been set up to use a
+ * shared context. No frames should be delivered to the listener.
+ */
+ @Test
+ @SmallTest
+ public void testDisposeImmediately() {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ surfaceTextureHelper.dispose();
+ }
+
+ /**
+ * Call stopListening(), but keep trying to produce more texture frames. No frames should be
+ * delivered to the listener.
+ */
+ @Test
+ @MediumTest
+ public void testStopListening() throws InterruptedException {
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ surfaceTextureHelper.setTextureSize(/* textureWidth= */ 32, /* textureHeight= */ 32);
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ // Create EglBase with the SurfaceTexture as target EGLSurface.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ // Assert no frame has been received yet.
+ listener.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+ // Draw and wait for one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+ listener.waitForTextureBuffer().release();
+
+ // Stop listening - we should not receive any textures after this.
+ surfaceTextureHelper.stopListening();
+
+ // Draw one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+    // swapBuffers() should not trigger onTextureFrameAvailable() because stopListening() has
+    // been called.
+ // Assert that no OES texture was delivered.
+ listener.assertNoFrameIsDelivered(/* waitPeriodMs= */ 500);
+
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+
+ /**
+   * Test stopListening() immediately after the SurfaceTextureHelper has been set up.
+ */
+ @Test
+ @SmallTest
+ public void testStopListeningImmediately() throws InterruptedException {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper.dispose();
+ }
+
+ /**
+   * Test stopListening() immediately after the SurfaceTextureHelper has been set up on the handler
+ * thread.
+ */
+ @Test
+ @SmallTest
+ public void testStopListeningImmediatelyOnHandlerThread() throws InterruptedException {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ final MockTextureListener listener = new MockTextureListener();
+
+ final CountDownLatch stopListeningBarrier = new CountDownLatch(1);
+ final CountDownLatch stopListeningBarrierDone = new CountDownLatch(1);
+ // Start by posting to the handler thread to keep it occupied.
+ surfaceTextureHelper.getHandler().post(new Runnable() {
+ @Override
+ public void run() {
+ ThreadUtils.awaitUninterruptibly(stopListeningBarrier);
+ surfaceTextureHelper.stopListening();
+ stopListeningBarrierDone.countDown();
+ }
+ });
+
+ // startListening() is asynchronous and will post to the occupied handler thread.
+ surfaceTextureHelper.startListening(listener);
+ // Wait for stopListening() to be called on the handler thread.
+ stopListeningBarrier.countDown();
+ stopListeningBarrierDone.await();
+    // Wait until the handler thread is idle to try to catch a late startListening() call.
+ final CountDownLatch barrier = new CountDownLatch(1);
+ surfaceTextureHelper.getHandler().post(new Runnable() {
+ @Override
+ public void run() {
+ barrier.countDown();
+ }
+ });
+ ThreadUtils.awaitUninterruptibly(barrier);
+    // The previous startListening() call should never have taken place, and it should be OK to
+    // call it again.
+ surfaceTextureHelper.startListening(listener);
+
+ surfaceTextureHelper.dispose();
+ }
+
+ /**
+ * Test calling startListening() with a new listener after stopListening() has been called.
+ */
+ @Test
+ @MediumTest
+ public void testRestartListeningWithNewListener() throws InterruptedException {
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ surfaceTextureHelper.setTextureSize(/* textureWidth= */ 32, /* textureHeight= */ 32);
+ final MockTextureListener listener1 = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener1);
+ // Create EglBase with the SurfaceTexture as target EGLSurface.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ // Assert no frame has been received yet.
+ listener1.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+ // Draw and wait for one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+ listener1.waitForTextureBuffer().release();
+
+ // Stop listening - `listener1` should not receive any textures after this.
+ surfaceTextureHelper.stopListening();
+
+ // Connect different listener.
+ final MockTextureListener listener2 = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener2);
+ // Assert no frame has been received yet.
+ listener2.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+
+ // Draw one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+
+ // Check that `listener2` received the frame, and not `listener1`.
+ listener2.waitForTextureBuffer().release();
+ listener1.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+
+ @Test
+ @MediumTest
+  public void testTextureToYuv() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ surfaceTextureHelper.setTextureSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. `eglBase` has the SurfaceTexture in
+ // `surfaceTextureHelper` as the target EGLSurface.
+
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    assertEquals(width, eglBase.surfaceWidth());
+    assertEquals(height, eglBase.surfaceHeight());
+
+    final int[] red = new int[] {79, 144, 185};
+    final int[] green = new int[] {66, 210, 162};
+    final int[] blue = new int[] {161, 117, 158};
+
+    final int[] ref_y = new int[] {85, 170, 161};
+    final int[] ref_u = new int[] {168, 97, 123};
+    final int[] ref_v = new int[] {127, 106, 138};
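+    // Sanity-check note (not a WebRTC API): the reference values match the BT.601 limited-range
+    // RGB -> YUV conversion, e.g. Y = 0.257 * R + 0.504 * G + 0.098 * B + 16, which for
+    // (79, 66, 161) gives Y ~= 85, with U ~= 168 and V ~= 127 from the corresponding formulas.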
+
+ // Draw three frames.
+ for (int i = 0; i < 3; ++i) {
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglBase.makeCurrent();
+ GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+
+ // Wait for an OES texture to arrive.
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+ final VideoFrame.I420Buffer i420 = textureBuffer.toI420();
+ textureBuffer.release();
+
+ // Memory layout: Lines are 16 bytes. First 16 lines are
+ // the Y data. These are followed by 8 lines with 8 bytes of U
+ // data on the left and 8 bytes of V data on the right.
+ //
+ // Offset
+ // 0 YYYYYYYY YYYYYYYY
+ // 16 YYYYYYYY YYYYYYYY
+ // ...
+ // 240 YYYYYYYY YYYYYYYY
+ // 256 UUUUUUUU VVVVVVVV
+ // 272 UUUUUUUU VVVVVVVV
+ // ...
+ // 368 UUUUUUUU VVVVVVVV
+ // 384 buffer end
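+      //
+      // In general, a tightly packed I420 buffer of size WxH occupies W*H + 2 * (W/2) * (H/2)
+      // = 1.5 * W * H bytes when the strides equal the plane widths; for 16x16 that is 384
+      // bytes, matching the layout above.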
+
+ // Allow off-by-one differences due to different rounding.
+ final ByteBuffer dataY = i420.getDataY();
+ final int strideY = i420.getStrideY();
+ for (int y = 0; y < height; y++) {
+ for (int x = 0; x < width; x++) {
+ assertClose(1, ref_y[i], dataY.get(y * strideY + x) & 0xFF);
+ }
+ }
+
+ final int chromaWidth = width / 2;
+ final int chromaHeight = height / 2;
+
+ final ByteBuffer dataU = i420.getDataU();
+ final ByteBuffer dataV = i420.getDataV();
+ final int strideU = i420.getStrideU();
+ final int strideV = i420.getStrideV();
+ for (int y = 0; y < chromaHeight; y++) {
+ for (int x = 0; x < chromaWidth; x++) {
+ assertClose(1, ref_u[i], dataU.get(y * strideU + x) & 0xFF);
+ assertClose(1, ref_v[i], dataV.get(y * strideV + x) & 0xFF);
+ }
+ }
+ i420.release();
+ }
+
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
new file mode 100644
index 0000000000..4d499789e6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import android.annotation.SuppressLint;
+import android.graphics.Point;
+import android.support.test.InstrumentationRegistry;
+import android.support.test.annotation.UiThreadTest;
+import android.support.test.rule.UiThreadTestRule;
+import android.view.View.MeasureSpec;
+import androidx.test.filters.MediumTest;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class SurfaceViewRendererOnMeasureTest {
+ @Rule public final UiThreadTestRule uiThreadRule = new UiThreadTestRule();
+
+ /**
+ * List with all possible scaling types.
+ */
+ private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
+ RendererCommon.ScalingType.SCALE_ASPECT_FIT, RendererCommon.ScalingType.SCALE_ASPECT_FILL,
+ RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
+
+ /**
+ * List with MeasureSpec modes.
+ */
+ private static final List<Integer> measureSpecModes =
+ Arrays.asList(MeasureSpec.EXACTLY, MeasureSpec.AT_MOST);
+
+ /**
+   * Returns a dummy I420 frame. The chroma planes use (width + 1) / 2 strides and heights,
+   * matching 4:2:0 subsampling for odd dimensions.
+ */
+ static VideoFrame createFrame(int width, int height, int rotationDegree) {
+ final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2};
+ final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2};
+ final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+ for (int i = 0; i < 3; ++i) {
+ yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]);
+ }
+ final VideoFrame.I420Buffer buffer =
+ JavaI420Buffer.wrap(width, height, yuvPlanes[0], yuvStrides[0], yuvPlanes[1], yuvStrides[1],
+ yuvPlanes[2], yuvStrides[2], null /* releaseCallback */);
+ return new VideoFrame(buffer, rotationDegree, 0 /* timestamp */);
+ }
+
+ /**
+ * Assert onMeasure() with given parameters will result in expected measured size.
+ */
+ @SuppressLint("WrongCall")
+ private static void assertMeasuredSize(SurfaceViewRenderer surfaceViewRenderer,
+ RendererCommon.ScalingType scalingType, String frameDimensions, int expectedWidth,
+ int expectedHeight, int widthSpec, int heightSpec) {
+ surfaceViewRenderer.setScalingType(scalingType);
+ surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
+ final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
+ final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
+ if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
+ fail("onMeasure(" + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec)
+ + ")"
+ + " with scaling type " + scalingType + " and frame: " + frameDimensions
+ + " expected measured size " + expectedWidth + "x" + expectedHeight + ", but was "
+ + measuredWidth + "x" + measuredHeight);
+ }
+ }
+
+ /**
+ * Test how SurfaceViewRenderer.onMeasure() behaves when no frame has been delivered.
+ */
+ @Test
+ @UiThreadTest
+ @MediumTest
+ public void testNoFrame() {
+ final SurfaceViewRenderer surfaceViewRenderer =
+ new SurfaceViewRenderer(InstrumentationRegistry.getContext());
+ final String frameDimensions = "null";
+
+ // Test behaviour before SurfaceViewRenderer.init() is called.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
+ zeroMeasureSize);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
+ MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(720, measureSpecMode));
+ }
+ }
+
+ // Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
+ surfaceViewRenderer.init((EglBase.Context) null, null);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
+ zeroMeasureSize);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
+ MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(720, measureSpecMode));
+ }
+ }
+
+ surfaceViewRenderer.release();
+ }
+
+ /**
+ * Test how SurfaceViewRenderer.onMeasure() behaves with a 1280x720 frame.
+ */
+ @Test
+ @UiThreadTest
+ @MediumTest
+ public void testFrame1280x720() throws InterruptedException {
+ final SurfaceViewRenderer surfaceViewRenderer =
+ new SurfaceViewRenderer(InstrumentationRegistry.getContext());
+ /**
+ * Mock renderer events with blocking wait functionality for frame size changes.
+ */
+ class MockRendererEvents implements RendererCommon.RendererEvents {
+ private int frameWidth;
+ private int frameHeight;
+ private int rotation;
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void waitForFrameSize(int frameWidth, int frameHeight, int rotation)
+ throws InterruptedException {
+ while (this.frameWidth != frameWidth || this.frameHeight != frameHeight
+ || this.rotation != rotation) {
+ wait();
+ }
+ }
+
+ @Override
+ public void onFirstFrameRendered() {}
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrameResolutionChanged(
+ int frameWidth, int frameHeight, int rotation) {
+ this.frameWidth = frameWidth;
+ this.frameHeight = frameHeight;
+ this.rotation = rotation;
+ notifyAll();
+ }
+ }
+ final MockRendererEvents rendererEvents = new MockRendererEvents();
+ surfaceViewRenderer.init((EglBase.Context) null, rendererEvents);
+
+    // Test different rotation degrees, but the same rotated size.
+ for (int rotationDegree : new int[] {0, 90, 180, 270}) {
+ final int rotatedWidth = 1280;
+ final int rotatedHeight = 720;
+ final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight);
+ final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth);
+ final VideoFrame frame = createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
+ assertEquals(rotatedWidth, frame.getRotatedWidth());
+ assertEquals(rotatedHeight, frame.getRotatedHeight());
+ final String frameDimensions =
+ unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree;
+ surfaceViewRenderer.onFrame(frame);
+ frame.release();
+ rendererEvents.waitForFrameSize(unrotatedWidth, unrotatedHeight, rotationDegree);
+
+ // Test forcing to zero size.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0,
+ zeroMeasureSize, zeroMeasureSize);
+ }
+ }
+
+ // Test perfect fit.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, rotatedWidth,
+ rotatedHeight, MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
+ }
+ }
+
+ // Force spec size with different aspect ratio than frame aspect ratio.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 720, 1280,
+ MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
+ MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
+ }
+
+ final float videoAspectRatio = (float) rotatedWidth / rotatedHeight;
+ {
+ // Relax both width and height constraints.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ final Point expectedSize =
+ RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
+ expectedSize.y, widthSpec, heightSpec);
+ }
+ }
+ {
+ // Force width to 720, but relax height constraint. This will give the same result as
+ // above, because width is already the limiting factor and will be maxed out.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ final Point expectedSize =
+ RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
+ expectedSize.y, widthSpec, heightSpec);
+ }
+ }
+ {
+ // Force height, but relax width constraint. This will force a bad layout size.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ assertMeasuredSize(
+ surfaceViewRenderer, scalingType, frameDimensions, 720, 1280, widthSpec, heightSpec);
+ }
+ }
+ }
+
+ surfaceViewRenderer.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TestConstants.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TestConstants.java
new file mode 100644
index 0000000000..6c7904c9f3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TestConstants.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+class TestConstants {
+ public static final String NATIVE_LIBRARY = "jingle_peerconnection_instrumentationtests_so";
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TimestampAlignerTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TimestampAlignerTest.java
new file mode 100644
index 0000000000..46cb37e5f1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TimestampAlignerTest.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.test.filters.SmallTest;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TimestampAlignerTest {
+ @BeforeClass
+ public static void setUp() {
+ System.loadLibrary(TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testGetRtcTimeNanos() {
+ TimestampAligner.getRtcTimeNanos();
+ }
+
+ @Test
+ @SmallTest
+ public void testDispose() {
+ final TimestampAligner timestampAligner = new TimestampAligner();
+ timestampAligner.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testTranslateTimestamp() {
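+    // TimestampAligner translates camera timestamps into the rtc::TimeNanos() timebase; this
+    // test only checks that the call completes without crashing.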
+ final TimestampAligner timestampAligner = new TimestampAligner();
+ timestampAligner.translateTimestamp(/* cameraTimeNs= */ 123);
+ timestampAligner.dispose();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java
new file mode 100644
index 0000000000..9c66edd8ef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+
+import android.os.Environment;
+import androidx.test.filters.SmallTest;
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import org.junit.Before;
+import org.junit.Test;
+
+public class VideoFileRendererTest {
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testYuvRenderingToFile() throws InterruptedException, IOException {
+ EglBase eglBase = EglBase.create();
+ final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+ + "/chromium_tests_root/testvideoout.y4m";
+ int frameWidth = 4;
+ int frameHeight = 4;
+ VideoFileRenderer videoFileRenderer =
+ new VideoFileRenderer(videoOutPath, frameWidth, frameHeight, eglBase.getEglBaseContext());
+
+ String[] frames = {
+ "THIS IS JUST SOME TEXT x", "THE SECOND FRAME qwerty.", "HERE IS THE THRID FRAME!"};
+
+ for (String frameStr : frames) {
+ int[] planeSizes = {
+          frameWidth * frameHeight, frameWidth * frameHeight / 4, frameWidth * frameHeight / 4};
+ int[] yuvStrides = {frameWidth, frameWidth / 2, frameWidth / 2};
+
+ ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+ byte[] frameBytes = frameStr.getBytes(Charset.forName("US-ASCII"));
+ int pos = 0;
+ for (int i = 0; i < 3; i++) {
+ yuvPlanes[i] = ByteBuffer.allocateDirect(planeSizes[i]);
+ yuvPlanes[i].put(frameBytes, pos, planeSizes[i]);
+ yuvPlanes[i].rewind();
+ pos += planeSizes[i];
+ }
+
+ VideoFrame.I420Buffer buffer =
+ JavaI420Buffer.wrap(frameWidth, frameHeight, yuvPlanes[0], yuvStrides[0], yuvPlanes[1],
+ yuvStrides[1], yuvPlanes[2], yuvStrides[2], null /* releaseCallback */);
+
+ VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, 0 /* timestampNs */);
+ videoFileRenderer.onFrame(frame);
+ frame.release();
+ }
+ videoFileRenderer.release();
+
+ RandomAccessFile writtenFile = new RandomAccessFile(videoOutPath, "r");
+ try {
+ int length = (int) writtenFile.length();
+ byte[] data = new byte[length];
+ writtenFile.readFully(data);
+ String fileContent = new String(data, Charset.forName("US-ASCII"));
+ String expected = "YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1\n"
+ + "FRAME\n"
+ + "THIS IS JUST SOME TEXT xFRAME\n"
+ + "THE SECOND FRAME qwerty.FRAME\n"
+ + "HERE IS THE THRID FRAME!";
+ assertEquals(expected, fileContent);
+ } finally {
+ writtenFile.close();
+ }
+
+ new File(videoOutPath).delete();
+ }
+}
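A note on why this comparison works: a 4x4 C420 (I420) frame occupies width*height + 2*(width/2)*(height/2) = 16 + 4 + 4 = 24 bytes, and each of the three fixture strings above is exactly 24 ASCII characters. Arbitrary 24-byte text therefore round-trips through VideoFileRenderer byte-for-byte, so the y4m output can be checked as a plain string.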
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java
new file mode 100644
index 0000000000..3668cd71b1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java
@@ -0,0 +1,530 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
+import android.graphics.Matrix;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.webrtc.VideoFrame;
+
+/**
+ * Test VideoFrame buffers in the different supported formats (I420, RGB, OES, NV12, NV21) and
+ * verify toI420() and cropAndScale() behavior. Creating RGB/OES textures involves VideoFrameDrawer
+ * and GlRectDrawer, so we test the full chain I420 -> OES/RGB texture -> I420, with and without
+ * cropping in the middle. Reading textures back to I420 also exercises the YuvConverter code.
+ */
+@RunWith(Parameterized.class)
+public class VideoFrameBufferTest {
+ /**
+ * These tests are parameterized on this enum which represents the different VideoFrame.Buffers.
+ */
+  private enum BufferType { I420_JAVA, I420_NATIVE, RGB_TEXTURE, OES_TEXTURE, NV21, NV12 }
+
+ @Parameters(name = "{0}")
+ public static Collection<BufferType> parameters() {
+ return Arrays.asList(BufferType.values());
+ }
+
+ @BeforeClass
+ public static void setUp() {
+ // Needed for JniCommon.nativeAllocateByteBuffer() to work, which is used from JavaI420Buffer.
+ System.loadLibrary(TestConstants.NATIVE_LIBRARY);
+ }
+
+ private final BufferType bufferType;
+
+ public VideoFrameBufferTest(BufferType bufferType) {
+ this.bufferType = bufferType;
+ }
+
+ /**
+ * Create a VideoFrame.Buffer of the given type with the same pixel content as the given I420
+ * buffer.
+ */
+ private static VideoFrame.Buffer createBufferWithType(
+ BufferType bufferType, VideoFrame.I420Buffer i420Buffer) {
+ VideoFrame.Buffer buffer;
+ switch (bufferType) {
+ case I420_JAVA:
+ buffer = i420Buffer;
+ buffer.retain();
+ assertEquals(VideoFrameBufferType.I420, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.I420, nativeGetBufferType(buffer));
+ return buffer;
+ case I420_NATIVE:
+ buffer = nativeGetNativeI420Buffer(i420Buffer);
+ assertEquals(VideoFrameBufferType.I420, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.I420, nativeGetBufferType(buffer));
+ return buffer;
+ case RGB_TEXTURE:
+ buffer = createRgbTextureBuffer(/* eglContext= */ null, i420Buffer);
+ assertEquals(VideoFrameBufferType.NATIVE, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.NATIVE, nativeGetBufferType(buffer));
+ return buffer;
+ case OES_TEXTURE:
+ buffer = createOesTextureBuffer(/* eglContext= */ null, i420Buffer);
+ assertEquals(VideoFrameBufferType.NATIVE, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.NATIVE, nativeGetBufferType(buffer));
+ return buffer;
+ case NV21:
+ buffer = createNV21Buffer(i420Buffer);
+ assertEquals(VideoFrameBufferType.NATIVE, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.NATIVE, nativeGetBufferType(buffer));
+ return buffer;
+ case NV12:
+ buffer = createNV12Buffer(i420Buffer);
+ assertEquals(VideoFrameBufferType.NATIVE, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.NATIVE, nativeGetBufferType(buffer));
+ return buffer;
+ default:
+ throw new IllegalArgumentException("Unknown buffer type: " + bufferType);
+ }
+ }
+
+ private VideoFrame.Buffer createBufferToTest(VideoFrame.I420Buffer i420Buffer) {
+ return createBufferWithType(this.bufferType, i420Buffer);
+ }
+
+ /**
+ * Creates a 16x16 I420 buffer that varies smoothly and spans all RGB values.
+ */
+ public static VideoFrame.I420Buffer createTestI420Buffer() {
+ final int width = 16;
+ final int height = 16;
+ final int[] yData = new int[] {156, 162, 167, 172, 177, 182, 187, 193, 199, 203, 209, 214, 219,
+ 224, 229, 235, 147, 152, 157, 162, 168, 173, 178, 183, 188, 193, 199, 205, 210, 215, 220,
+ 225, 138, 143, 148, 153, 158, 163, 168, 174, 180, 184, 190, 195, 200, 205, 211, 216, 128,
+ 133, 138, 144, 149, 154, 159, 165, 170, 175, 181, 186, 191, 196, 201, 206, 119, 124, 129,
+ 134, 140, 145, 150, 156, 161, 166, 171, 176, 181, 187, 192, 197, 109, 114, 119, 126, 130,
+ 136, 141, 146, 151, 156, 162, 167, 172, 177, 182, 187, 101, 105, 111, 116, 121, 126, 132,
+ 137, 142, 147, 152, 157, 162, 168, 173, 178, 90, 96, 101, 107, 112, 117, 122, 127, 132, 138,
+ 143, 148, 153, 158, 163, 168, 82, 87, 92, 97, 102, 107, 113, 118, 123, 128, 133, 138, 144,
+ 149, 154, 159, 72, 77, 83, 88, 93, 98, 103, 108, 113, 119, 124, 129, 134, 139, 144, 150, 63,
+ 68, 73, 78, 83, 89, 94, 99, 104, 109, 114, 119, 125, 130, 135, 140, 53, 58, 64, 69, 74, 79,
+ 84, 89, 95, 100, 105, 110, 115, 120, 126, 131, 44, 49, 54, 59, 64, 70, 75, 80, 85, 90, 95,
+ 101, 106, 111, 116, 121, 34, 40, 45, 50, 55, 60, 65, 71, 76, 81, 86, 91, 96, 101, 107, 113,
+ 25, 30, 35, 40, 46, 51, 56, 61, 66, 71, 77, 82, 87, 92, 98, 103, 16, 21, 26, 31, 36, 41, 46,
+ 52, 57, 62, 67, 72, 77, 83, 89, 94};
+ final int[] uData = new int[] {110, 113, 116, 118, 120, 123, 125, 128, 113, 116, 118, 120, 123,
+ 125, 128, 130, 116, 118, 120, 123, 125, 128, 130, 132, 118, 120, 123, 125, 128, 130, 132,
+ 135, 120, 123, 125, 128, 130, 132, 135, 138, 123, 125, 128, 130, 132, 135, 138, 139, 125,
+ 128, 130, 132, 135, 138, 139, 142, 128, 130, 132, 135, 138, 139, 142, 145};
+ final int[] vData = new int[] {31, 45, 59, 73, 87, 100, 114, 127, 45, 59, 73, 87, 100, 114, 128,
+ 141, 59, 73, 87, 100, 114, 127, 141, 155, 73, 87, 100, 114, 127, 141, 155, 168, 87, 100,
+ 114, 128, 141, 155, 168, 182, 100, 114, 128, 141, 155, 168, 182, 197, 114, 127, 141, 155,
+ 168, 182, 196, 210, 127, 141, 155, 168, 182, 196, 210, 224};
+ return JavaI420Buffer.wrap(width, height, toByteBuffer(yData),
+ /* strideY= */ width, toByteBuffer(uData), /* strideU= */ width / 2, toByteBuffer(vData),
+ /* strideV= */ width / 2,
+ /* releaseCallback= */ null);
+ }
+
+ /**
+ * Create an RGB texture buffer available in `eglContext` with the same pixel content as the given
+ * I420 buffer.
+ */
+ public static VideoFrame.TextureBuffer createRgbTextureBuffer(
+ EglBase.Context eglContext, VideoFrame.I420Buffer i420Buffer) {
+ final int width = i420Buffer.getWidth();
+ final int height = i420Buffer.getHeight();
+
+ final HandlerThread renderThread = new HandlerThread("RGB texture thread");
+ renderThread.start();
+ final Handler renderThreadHandler = new Handler(renderThread.getLooper());
+ return ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(eglContext, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ final GlTextureFrameBuffer textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
+ textureFrameBuffer.setSize(width, height);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
+ drawI420Buffer(i420Buffer);
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+
+ final YuvConverter yuvConverter = new YuvConverter();
+ return new TextureBufferImpl(width, height, VideoFrame.TextureBuffer.Type.RGB,
+ textureFrameBuffer.getTextureId(),
+ /* transformMatrix= */ new Matrix(), renderThreadHandler, yuvConverter,
+ /* releaseCallback= */ () -> renderThreadHandler.post(() -> {
+ textureFrameBuffer.release();
+ yuvConverter.release();
+ eglBase.release();
+ renderThread.quit();
+ }));
+ });
+ }
+
+ /**
+ * Create an OES texture buffer available in `eglContext` with the same pixel content as the given
+ * I420 buffer.
+ */
+ public static VideoFrame.TextureBuffer createOesTextureBuffer(
+ EglBase.Context eglContext, VideoFrame.I420Buffer i420Buffer) {
+ final int width = i420Buffer.getWidth();
+ final int height = i420Buffer.getHeight();
+
+ // Create resources for generating OES textures.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test", eglContext);
+ surfaceTextureHelper.setTextureSize(width, height);
+
+ final HandlerThread renderThread = new HandlerThread("OES texture thread");
+ renderThread.start();
+ final Handler renderThreadHandler = new Handler(renderThread.getLooper());
+ final VideoFrame.TextureBuffer oesBuffer =
+ ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
+ // Create EGL base with the SurfaceTexture as display output.
+ final EglBase eglBase = EglBase.create(eglContext, EglBase.CONFIG_PLAIN);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ assertEquals(width, eglBase.surfaceWidth());
+ assertEquals(height, eglBase.surfaceHeight());
+
+ final SurfaceTextureHelperTest.MockTextureListener listener =
+ new SurfaceTextureHelperTest.MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+
+ // Draw the frame and block until an OES texture is delivered.
+ drawI420Buffer(i420Buffer);
+ eglBase.swapBuffers();
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper.dispose();
+
+ return textureBuffer;
+ });
+ renderThread.quit();
+
+ return oesBuffer;
+ }
+
+ /** Create an NV21Buffer with the same pixel content as the given I420 buffer. */
+ public static NV21Buffer createNV21Buffer(VideoFrame.I420Buffer i420Buffer) {
+ final int width = i420Buffer.getWidth();
+ final int height = i420Buffer.getHeight();
+ final int chromaStride = width;
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (height + 1) / 2;
+ final int ySize = width * height;
+
+ final ByteBuffer nv21Buffer = ByteBuffer.allocateDirect(ySize + chromaStride * chromaHeight);
+    // We don't care about the array offset; we only need the backing array of this direct buffer.
+    @SuppressWarnings("ByteBufferBackingArray") final byte[] nv21Data = nv21Buffer.array();
+
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final byte yValue = i420Buffer.getDataY().get(y * i420Buffer.getStrideY() + x);
+ nv21Data[y * width + x] = yValue;
+ }
+ }
+ for (int y = 0; y < chromaHeight; ++y) {
+ for (int x = 0; x < chromaWidth; ++x) {
+ final byte uValue = i420Buffer.getDataU().get(y * i420Buffer.getStrideU() + x);
+ final byte vValue = i420Buffer.getDataV().get(y * i420Buffer.getStrideV() + x);
+ nv21Data[ySize + y * chromaStride + 2 * x + 0] = vValue;
+ nv21Data[ySize + y * chromaStride + 2 * x + 1] = uValue;
+ }
+ }
+ return new NV21Buffer(nv21Data, width, height, /* releaseCallback= */ null);
+ }
+
+ /** Create an NV12Buffer with the same pixel content as the given I420 buffer. */
+ public static NV12Buffer createNV12Buffer(VideoFrame.I420Buffer i420Buffer) {
+ final int width = i420Buffer.getWidth();
+ final int height = i420Buffer.getHeight();
+ final int chromaStride = width;
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (height + 1) / 2;
+ final int ySize = width * height;
+
+ final ByteBuffer nv12Buffer = ByteBuffer.allocateDirect(ySize + chromaStride * chromaHeight);
+
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final byte yValue = i420Buffer.getDataY().get(y * i420Buffer.getStrideY() + x);
+ nv12Buffer.put(y * width + x, yValue);
+ }
+ }
+ for (int y = 0; y < chromaHeight; ++y) {
+ for (int x = 0; x < chromaWidth; ++x) {
+ final byte uValue = i420Buffer.getDataU().get(y * i420Buffer.getStrideU() + x);
+ final byte vValue = i420Buffer.getDataV().get(y * i420Buffer.getStrideV() + x);
+ nv12Buffer.put(ySize + y * chromaStride + 2 * x + 0, uValue);
+ nv12Buffer.put(ySize + y * chromaStride + 2 * x + 1, vValue);
+ }
+ }
+    return new NV12Buffer(width, height, /* stride= */ width, /* sliceHeight= */ height, nv12Buffer,
+        /* releaseCallback= */ null);
+ }
+
+ /** Print the ByteBuffer plane to the StringBuilder. */
+ private static void printPlane(
+ StringBuilder stringBuilder, int width, int height, ByteBuffer plane, int stride) {
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final int value = plane.get(y * stride + x) & 0xFF;
+ if (x != 0) {
+ stringBuilder.append(", ");
+ }
+ stringBuilder.append(value);
+ }
+ stringBuilder.append("\n");
+ }
+ }
+
+ /** Convert the pixel content of an I420 buffer to a string representation. */
+ private static String i420BufferToString(VideoFrame.I420Buffer buffer) {
+ final StringBuilder stringBuilder = new StringBuilder();
+ stringBuilder.append(
+ "I420 buffer with size: " + buffer.getWidth() + "x" + buffer.getHeight() + ".\n");
+ stringBuilder.append("Y-plane:\n");
+ printPlane(stringBuilder, buffer.getWidth(), buffer.getHeight(), buffer.getDataY(),
+ buffer.getStrideY());
+ final int chromaWidth = (buffer.getWidth() + 1) / 2;
+ final int chromaHeight = (buffer.getHeight() + 1) / 2;
+ stringBuilder.append("U-plane:\n");
+ printPlane(stringBuilder, chromaWidth, chromaHeight, buffer.getDataU(), buffer.getStrideU());
+ stringBuilder.append("V-plane:\n");
+ printPlane(stringBuilder, chromaWidth, chromaHeight, buffer.getDataV(), buffer.getStrideV());
+ return stringBuilder.toString();
+ }
+
+ /**
+ * Assert that the given I420 buffers are almost identical, allowing for some difference due to
+ * numerical errors. It has limits for both overall PSNR and maximum individual pixel difference.
+ */
+ public static void assertAlmostEqualI420Buffers(
+ VideoFrame.I420Buffer bufferA, VideoFrame.I420Buffer bufferB) {
+ final int diff = maxDiff(bufferA, bufferB);
+ assertThat("Pixel difference too high: " + diff + "."
+ + "\nBuffer A: " + i420BufferToString(bufferA)
+ + "Buffer B: " + i420BufferToString(bufferB),
+ diff, lessThanOrEqualTo(4));
+ final double psnr = calculatePsnr(bufferA, bufferB);
+ assertThat("PSNR too low: " + psnr + "."
+ + "\nBuffer A: " + i420BufferToString(bufferA)
+ + "Buffer B: " + i420BufferToString(bufferB),
+ psnr, greaterThanOrEqualTo(50.0));
+ }
+
+ /** Returns a flattened list of pixel differences for two ByteBuffer planes. */
+ private static List<Integer> getPixelDiffs(
+ int width, int height, ByteBuffer planeA, int strideA, ByteBuffer planeB, int strideB) {
+ List<Integer> res = new ArrayList<>();
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final int valueA = planeA.get(y * strideA + x) & 0xFF;
+ final int valueB = planeB.get(y * strideB + x) & 0xFF;
+ res.add(Math.abs(valueA - valueB));
+ }
+ }
+ return res;
+ }
+
+ /** Returns a flattened list of pixel differences for two I420 buffers. */
+ private static List<Integer> getPixelDiffs(
+ VideoFrame.I420Buffer bufferA, VideoFrame.I420Buffer bufferB) {
+ assertEquals(bufferA.getWidth(), bufferB.getWidth());
+ assertEquals(bufferA.getHeight(), bufferB.getHeight());
+ final int width = bufferA.getWidth();
+ final int height = bufferA.getHeight();
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (height + 1) / 2;
+ final List<Integer> diffs = getPixelDiffs(width, height, bufferA.getDataY(),
+ bufferA.getStrideY(), bufferB.getDataY(), bufferB.getStrideY());
+ diffs.addAll(getPixelDiffs(chromaWidth, chromaHeight, bufferA.getDataU(), bufferA.getStrideU(),
+ bufferB.getDataU(), bufferB.getStrideU()));
+ diffs.addAll(getPixelDiffs(chromaWidth, chromaHeight, bufferA.getDataV(), bufferA.getStrideV(),
+ bufferB.getDataV(), bufferB.getStrideV()));
+ return diffs;
+ }
+
+ /** Returns the maximum pixel difference from any of the Y/U/V planes in the given buffers. */
+ private static int maxDiff(VideoFrame.I420Buffer bufferA, VideoFrame.I420Buffer bufferB) {
+ return Collections.max(getPixelDiffs(bufferA, bufferB));
+ }
+
+ /**
+ * Returns the PSNR given a sum of squared error and the number of measurements that were added.
+ */
+ private static double sseToPsnr(long sse, int count) {
+ if (sse == 0) {
+ return Double.POSITIVE_INFINITY;
+ }
+ final double meanSquaredError = (double) sse / (double) count;
+ final double maxPixelValue = 255.0;
+ return 10.0 * Math.log10(maxPixelValue * maxPixelValue / meanSquaredError);
+ }
+
+ /** Returns the PSNR of the given I420 buffers. */
+ private static double calculatePsnr(
+ VideoFrame.I420Buffer bufferA, VideoFrame.I420Buffer bufferB) {
+ final List<Integer> pixelDiffs = getPixelDiffs(bufferA, bufferB);
+ long sse = 0;
+ for (int pixelDiff : pixelDiffs) {
+ sse += pixelDiff * pixelDiff;
+ }
+ return sseToPsnr(sse, pixelDiffs.size());
+ }
+
+ /**
+ * Convert an int array to a byte array and make sure the values are within the range [0, 255].
+ */
+ private static byte[] toByteArray(int[] array) {
+ final byte[] res = new byte[array.length];
+ for (int i = 0; i < array.length; ++i) {
+ final int value = array[i];
+ assertThat(value, greaterThanOrEqualTo(0));
+ assertThat(value, lessThanOrEqualTo(255));
+ res[i] = (byte) value;
+ }
+ return res;
+ }
+
+ /** Convert a byte array to a direct ByteBuffer. */
+ private static ByteBuffer toByteBuffer(int[] array) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(array.length);
+ buffer.put(toByteArray(array));
+ buffer.rewind();
+ return buffer;
+ }
+
+ /**
+ * Draw an I420 buffer on the currently bound frame buffer, allocating and releasing any
+ * resources necessary.
+ */
+ private static void drawI420Buffer(VideoFrame.I420Buffer i420Buffer) {
+ final GlRectDrawer drawer = new GlRectDrawer();
+ final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
+ videoFrameDrawer.drawFrame(
+ new VideoFrame(i420Buffer, /* rotation= */ 0, /* timestampNs= */ 0), drawer);
+ videoFrameDrawer.release();
+ drawer.release();
+ }
+
+ /**
+ * Helper function that tests cropAndScale() with the given cropping and scaling parameters, and
+ * compares the pixel content against a reference I420 buffer.
+ */
+ private void testCropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ final VideoFrame.I420Buffer referenceI420Buffer = createTestI420Buffer();
+ final VideoFrame.Buffer bufferToTest = createBufferToTest(referenceI420Buffer);
+
+ final VideoFrame.Buffer croppedReferenceBuffer = referenceI420Buffer.cropAndScale(
+ cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ referenceI420Buffer.release();
+ final VideoFrame.I420Buffer croppedReferenceI420Buffer = croppedReferenceBuffer.toI420();
+ croppedReferenceBuffer.release();
+
+ final VideoFrame.Buffer croppedBufferToTest =
+ bufferToTest.cropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ bufferToTest.release();
+
+ final VideoFrame.I420Buffer croppedOutputI420Buffer = croppedBufferToTest.toI420();
+ croppedBufferToTest.release();
+
+ assertAlmostEqualI420Buffers(croppedReferenceI420Buffer, croppedOutputI420Buffer);
+ croppedReferenceI420Buffer.release();
+ croppedOutputI420Buffer.release();
+ }
+
+  /** Test calling toI420() and comparing the pixel content against the I420 reference. */
+  @Test
+  @SmallTest
+  public void testToI420() {
+ final VideoFrame.I420Buffer referenceI420Buffer = createTestI420Buffer();
+ final VideoFrame.Buffer bufferToTest = createBufferToTest(referenceI420Buffer);
+
+ final VideoFrame.I420Buffer outputI420Buffer = bufferToTest.toI420();
+ bufferToTest.release();
+
+ assertEquals(VideoFrameBufferType.I420, nativeGetBufferType(outputI420Buffer));
+ assertAlmostEqualI420Buffers(referenceI420Buffer, outputI420Buffer);
+ referenceI420Buffer.release();
+ outputI420Buffer.release();
+ }
+
+  /** Test pure 2x downscaling with no cropping. */
+  @Test
+  @SmallTest
+  public void testScale2x() {
+    testCropAndScale(/* cropX= */ 0, /* cropY= */ 0, /* cropWidth= */ 16, /* cropHeight= */ 16,
+        /* scaleWidth= */ 8, /* scaleHeight= */ 8);
+  }
+
+  /** Test cropping in the X direction only, with no scaling. */
+  @Test
+  @SmallTest
+  public void testCropX() {
+    testCropAndScale(/* cropX= */ 8, /* cropY= */ 0, /* cropWidth= */ 8, /* cropHeight= */ 16,
+        /* scaleWidth= */ 8, /* scaleHeight= */ 16);
+  }
+
+  /** Test cropping in the Y direction only, with no scaling. */
+  @Test
+  @SmallTest
+  public void testCropY() {
+    testCropAndScale(/* cropX= */ 0, /* cropY= */ 8, /* cropWidth= */ 16, /* cropHeight= */ 8,
+        /* scaleWidth= */ 16, /* scaleHeight= */ 8);
+  }
+
+  /** Test center crop, with no scaling. */
+  @Test
+  @SmallTest
+  public void testCenterCrop() {
+    testCropAndScale(/* cropX= */ 4, /* cropY= */ 4, /* cropWidth= */ 8, /* cropHeight= */ 8,
+        /* scaleWidth= */ 8, /* scaleHeight= */ 8);
+  }
+
+  /** Test non-center crop at the bottom-right corner, with no scaling. */
+  @Test
+  @SmallTest
+  public void testRightBottomCornerCrop() {
+    testCropAndScale(/* cropX= */ 8, /* cropY= */ 8, /* cropWidth= */ 8, /* cropHeight= */ 8,
+        /* scaleWidth= */ 8, /* scaleHeight= */ 8);
+  }
+
+  /** Test combined cropping and scaling. */
+  @Test
+  @SmallTest
+  public void testCropAndScale() {
+    testCropAndScale(/* cropX= */ 4, /* cropY= */ 4, /* cropWidth= */ 12, /* cropHeight= */ 12,
+        /* scaleWidth= */ 8, /* scaleHeight= */ 8);
+  }
+
+ @VideoFrameBufferType private static native int nativeGetBufferType(VideoFrame.Buffer buffer);
+
+  /** Returns a copy of the given I420Buffer, wrapped in a WrappedNativeI420Buffer. */
+ private static native VideoFrame.Buffer nativeGetNativeI420Buffer(
+ VideoFrame.I420Buffer i420Buffer);
+}
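For reference, sseToPsnr() above is the standard peak signal-to-noise ratio for 8-bit samples, PSNR = 10 * log10(255^2 / MSE) dB, where MSE is the mean squared error over all Y, U and V samples. The 50 dB floor used by assertAlmostEqualI420Buffers() therefore allows an MSE of at most about 0.65, i.e. well under one 8-bit quantization step per sample on average.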
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoTrackTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoTrackTest.java
new file mode 100644
index 0000000000..8d7894c048
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoTrackTest.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import org.junit.Before;
+import org.junit.Test;
+
+/** Unit tests for {@link VideoTrack}. */
+public class VideoTrackTest {
+ private PeerConnectionFactory factory;
+ private VideoSource videoSource;
+ private VideoTrack videoTrack;
+
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+
+ factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ videoSource = factory.createVideoSource(/* isScreencast= */ false);
+ videoTrack = factory.createVideoTrack("video", videoSource);
+ }
+
+ @Test
+ @SmallTest
+ public void testAddingNullVideoSink() {
+ try {
+ videoTrack.addSink(/* sink= */ null);
+ fail("Should have thrown an IllegalArgumentException.");
+ } catch (IllegalArgumentException e) {
+ // Expected path.
+ }
+ }
+
+ @Test
+ @SmallTest
+ public void testRemovingNullVideoSink() {
+ videoTrack.removeSink(/* sink= */ null);
+ }
+
+ @Test
+ @SmallTest
+  public void testRemovingNonExistentVideoSink() {
+ final VideoSink videoSink = new VideoSink() {
+ @Override
+ public void onFrame(VideoFrame frame) {}
+ };
+ videoTrack.removeSink(videoSink);
+ }
+
+ @Test
+ @SmallTest
+ public void testAddingSameVideoSinkMultipleTimes() {
+ class FrameCounter implements VideoSink {
+ private int count;
+
+ public int getCount() {
+ return count;
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ count += 1;
+ }
+ }
+ final FrameCounter frameCounter = new FrameCounter();
+
+ final VideoFrame videoFrame = new VideoFrame(
+ JavaI420Buffer.allocate(/* width= */ 32, /* height= */ 32), /* rotation= */ 0,
+ /* timestampNs= */ 0);
+
+ videoTrack.addSink(frameCounter);
+ videoTrack.addSink(frameCounter);
+ videoSource.getCapturerObserver().onFrameCaptured(videoFrame);
+
+ // Even though we called addSink() multiple times, we should only get one frame out.
+    assertEquals(1, frameCounter.getCount());
+ }
+
+ @Test
+ @SmallTest
+ public void testAddingAndRemovingVideoSink() {
+ final VideoFrame videoFrame = new VideoFrame(
+ JavaI420Buffer.allocate(/* width= */ 32, /* height= */ 32), /* rotation= */ 0,
+ /* timestampNs= */ 0);
+
+ final VideoSink failSink = new VideoSink() {
+ @Override
+ public void onFrame(VideoFrame frame) {
+ fail("onFrame() should not be called on removed sink");
+ }
+ };
+ videoTrack.addSink(failSink);
+ videoTrack.removeSink(failSink);
+ videoSource.getCapturerObserver().onFrameCaptured(videoFrame);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java
new file mode 100644
index 0000000000..b1badd5773
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import org.junit.Test;
+import org.webrtc.PeerConnectionFactory;
+
+// This test is intended to run on ARM and catch LoadLibrary errors when we load the WebRTC
+// JNI. It doesn't set up any calls, since ARM emulators are too slow for that, but instantiating
+// a peer connection isn't timing-sensitive, so we can at least do that.
+public class WebRtcJniBootTest {
+ @Test
+ @SmallTest
+ public void testJniLoadsWithoutError() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ PeerConnectionFactory.builder().createPeerConnectionFactory();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java
new file mode 100644
index 0000000000..7c58e9554f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import org.junit.Before;
+import org.junit.Test;
+
+public class YuvHelperTest {
+ private static final int TEST_WIDTH = 3;
+ private static final int TEST_HEIGHT = 3;
+ private static final int TEST_CHROMA_WIDTH = 2;
+ private static final int TEST_CHROMA_HEIGHT = 2;
+
+ private static final int TEST_I420_STRIDE_Y = 3;
+  private static final int TEST_I420_STRIDE_U = 2;
+  private static final int TEST_I420_STRIDE_V = 4;
+
+ private static final ByteBuffer TEST_I420_Y = getTestY();
+ private static final ByteBuffer TEST_I420_U = getTestU();
+ private static final ByteBuffer TEST_I420_V = getTestV();
+
+ private static ByteBuffer getTestY() {
+ final ByteBuffer testY = ByteBuffer.allocateDirect(TEST_HEIGHT * TEST_I420_STRIDE_Y);
+ testY.put(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9});
+ return testY;
+ }
+
+ private static ByteBuffer getTestU() {
+    final ByteBuffer testU = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * TEST_I420_STRIDE_U);
+ testU.put(new byte[] {51, 52, 53, 54});
+ return testU;
+ }
+
+ private static ByteBuffer getTestV() {
+    final ByteBuffer testV = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * TEST_I420_STRIDE_V);
+ testV.put(new byte[] {101, 102, 103, 104, 105, 106, 107, 108});
+ return testV;
+ }
+
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ @SmallTest
+ @Test
+ public void testCopyPlane() {
+ final int dstStride = TEST_WIDTH;
+ final ByteBuffer dst = ByteBuffer.allocateDirect(TEST_HEIGHT * dstStride);
+
+ YuvHelper.copyPlane(TEST_I420_Y, TEST_I420_STRIDE_Y, dst, dstStride, TEST_WIDTH, TEST_HEIGHT);
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}, dst);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420Copy() {
+ final int dstStrideY = TEST_WIDTH;
+ final int dstStrideU = TEST_CHROMA_WIDTH;
+ final int dstStrideV = TEST_CHROMA_WIDTH;
+ final ByteBuffer dstY = ByteBuffer.allocateDirect(TEST_HEIGHT * dstStrideY);
+ final ByteBuffer dstU = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * dstStrideU);
+ final ByteBuffer dstV = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * dstStrideV);
+
+    YuvHelper.I420Copy(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV,
+ TEST_WIDTH, TEST_HEIGHT);
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}, dstY);
+ assertByteBufferContentEquals(new byte[] {51, 52, 53, 54}, dstU);
+ assertByteBufferContentEquals(new byte[] {101, 102, 105, 106}, dstV);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420CopyTight() {
+ final ByteBuffer dst = ByteBuffer.allocateDirect(
+ TEST_WIDTH * TEST_HEIGHT + TEST_CHROMA_WIDTH * TEST_CHROMA_HEIGHT * 2);
+
+    YuvHelper.I420Copy(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT);
+
+ assertByteBufferContentEquals(
+ new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 51, 52, 53, 54, 101, 102, 105, 106}, dst);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420CopyStride() {
+ final int dstStrideY = 4;
+ final int dstSliceHeightY = 4;
+ final int dstStrideU = dstStrideY / 2;
+ final int dstSliceHeightU = dstSliceHeightY / 2;
+ final int dstSize = dstStrideY * dstStrideY * 3 / 2;
+
+ final ByteBuffer dst = ByteBuffer.allocateDirect(dstSize);
+    YuvHelper.I420Copy(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT, dstStrideY, dstSliceHeightY,
+ dstStrideU, dstSliceHeightU);
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 0, 4, 5, 6, 0, 7, 8, 9, 0, 0, 0, 0, 0, 51,
+ 52, 53, 54, 101, 102, 105, 106},
+ dst);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420ToNV12() {
+ final int dstStrideY = TEST_WIDTH;
+ final int dstStrideUV = TEST_CHROMA_WIDTH * 2;
+ final ByteBuffer dstY = ByteBuffer.allocateDirect(TEST_HEIGHT * dstStrideY);
+    final ByteBuffer dstUV = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * dstStrideUV);
+
+    YuvHelper.I420ToNV12(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dstY, dstStrideY, dstUV, dstStrideUV, TEST_WIDTH,
+ TEST_HEIGHT);
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}, dstY);
+ assertByteBufferContentEquals(new byte[] {51, 101, 52, 102, 53, 105, 54, 106}, dstUV);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420ToNV12Tight() {
+ final ByteBuffer dst = ByteBuffer.allocateDirect(
+ TEST_WIDTH * TEST_HEIGHT + TEST_CHROMA_WIDTH * TEST_CHROMA_HEIGHT * 2);
+
+    YuvHelper.I420ToNV12(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT);
+
+ assertByteBufferContentEquals(
+ new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 51, 101, 52, 102, 53, 105, 54, 106}, dst);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420ToNV12Stride() {
+ final int dstStrideY = 4;
+ final int dstSliceHeightY = 4;
+ final int dstSize = dstStrideY * dstStrideY * 3 / 2;
+
+ final ByteBuffer dst = ByteBuffer.allocateDirect(dstSize);
+    YuvHelper.I420ToNV12(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT, dstStrideY, dstSliceHeightY);
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 0, 4, 5, 6, 0, 7, 8, 9, 0, 0, 0, 0, 0, 51,
+ 101, 52, 102, 53, 105, 54, 106},
+ dst);
+ }
+
+ private static void assertByteBufferContentEquals(byte[] expected, ByteBuffer test) {
+ assertTrue(
+ "ByteBuffer is too small. Expected " + expected.length + " but was " + test.capacity(),
+ test.capacity() >= expected.length);
+ for (int i = 0; i < expected.length; i++) {
+ assertEquals("Unexpected ByteBuffer contents at index: " + i, expected[i], test.get(i));
+ }
+ }
+
+ @SmallTest
+ @Test
+ public void testI420Rotate90() {
+ final int dstStrideY = TEST_HEIGHT;
+ final int dstStrideU = TEST_CHROMA_HEIGHT;
+ final int dstStrideV = TEST_CHROMA_HEIGHT;
+ final ByteBuffer dstY = ByteBuffer.allocateDirect(TEST_WIDTH * dstStrideY);
+ final ByteBuffer dstU = ByteBuffer.allocateDirect(TEST_CHROMA_WIDTH * dstStrideU);
+ final ByteBuffer dstV = ByteBuffer.allocateDirect(TEST_CHROMA_WIDTH * dstStrideV);
+
+    YuvHelper.I420Rotate(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV,
+ TEST_WIDTH, TEST_HEIGHT, 90);
+
+ assertByteBufferContentEquals(new byte[] {7, 4, 1, 8, 5, 2, 9, 6, 3}, dstY);
+ assertByteBufferContentEquals(new byte[] {53, 51, 54, 52}, dstU);
+ assertByteBufferContentEquals(new byte[] {105, 101, 106, 102}, dstV);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420Rotate90Tight() {
+ final ByteBuffer dst = ByteBuffer.allocateDirect(
+ TEST_WIDTH * TEST_HEIGHT + TEST_CHROMA_WIDTH * TEST_CHROMA_HEIGHT * 2);
+
+    YuvHelper.I420Rotate(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT, 90);
+
+ assertByteBufferContentEquals(
+ new byte[] {7, 4, 1, 8, 5, 2, 9, 6, 3, 53, 51, 54, 52, 105, 101, 106, 102}, dst);
+ }
+}
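A note on the buffer sizes in the "tight" tests above: a tightly packed I420/NV12 destination for the 3x3 test frame needs width*height + 2 * ((width+1)/2) * ((height+1)/2) = 9 + 4 + 4 = 17 bytes, which matches the 17-element expected arrays. The strided variants pad each Y row out to dstStrideY (and each chroma row to dstStrideY/2), which is where the interleaved zeros in their expected output come from.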
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m
new file mode 100644
index 0000000000..ecc695a09a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m
@@ -0,0 +1,5 @@
+YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1
+FRAME
+THIS IS JUST SOME TEXT xFRAME
+THE SECOND FRAME qwerty.FRAME
+HERE IS THE THRID FRAME!
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/video_frame_buffer_test.cc b/third_party/libwebrtc/sdk/android/instrumentationtests/video_frame_buffer_test.cc
new file mode 100644
index 0000000000..686b232f6d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/video_frame_buffer_test.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/i420_buffer.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_frame.h"
+#include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
+
+namespace webrtc {
+namespace jni {
+
+JNI_FUNCTION_DECLARATION(jint,
+ VideoFrameBufferTest_nativeGetBufferType,
+ JNIEnv* jni,
+ jclass,
+ jobject video_frame_buffer) {
+ const JavaParamRef<jobject> j_video_frame_buffer(video_frame_buffer);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ JavaToNativeFrameBuffer(jni, j_video_frame_buffer);
+ return static_cast<jint>(buffer->type());
+}
+
+JNI_FUNCTION_DECLARATION(jobject,
+ VideoFrameBufferTest_nativeGetNativeI420Buffer,
+ JNIEnv* jni,
+ jclass,
+ jobject i420_buffer) {
+ const JavaParamRef<jobject> j_i420_buffer(i420_buffer);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ JavaToNativeFrameBuffer(jni, j_i420_buffer);
+  const I420BufferInterface* input_buffer = buffer->GetI420();
+  RTC_DCHECK(input_buffer != nullptr);
+  rtc::scoped_refptr<I420Buffer> output_buffer = I420Buffer::Copy(*input_buffer);
+  return WrapI420Buffer(jni, output_buffer).Release();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/DEPS b/third_party/libwebrtc/sdk/android/native_api/DEPS
new file mode 100644
index 0000000000..020e1cbf09
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+modules/audio_device/include/audio_device.h",
+ "+system_wrappers/include",
+]
diff --git a/third_party/libwebrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc b/third_party/libwebrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc
new file mode 100644
index 0000000000..2be7f7d7fb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc
@@ -0,0 +1,155 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
+
+#include <stdlib.h>
+
+#include <memory>
+#include <utility>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ref_count.h"
+
+#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+#include "sdk/android/src/jni/audio_device/aaudio_player.h"
+#include "sdk/android/src/jni/audio_device/aaudio_recorder.h"
+#endif
+
+#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
+#include "sdk/android/src/jni/audio_device/opensles_player.h"
+#include "sdk/android/src/jni/audio_device/opensles_recorder.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+namespace {
+
+void GetDefaultAudioParameters(JNIEnv* env,
+ jobject application_context,
+ AudioParameters* input_parameters,
+ AudioParameters* output_parameters) {
+ const JavaParamRef<jobject> j_context(application_context);
+ const ScopedJavaLocalRef<jobject> j_audio_manager =
+ jni::GetAudioManager(env, j_context);
+ const int input_sample_rate = jni::GetDefaultSampleRate(env, j_audio_manager);
+ const int output_sample_rate =
+ jni::GetDefaultSampleRate(env, j_audio_manager);
+ jni::GetAudioParameters(env, j_context, j_audio_manager, input_sample_rate,
+ output_sample_rate, false /* use_stereo_input */,
+ false /* use_stereo_output */, input_parameters,
+ output_parameters);
+}
+
+} // namespace
+
+#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
+ JNIEnv* env,
+ jobject application_context) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ // Get default audio input/output parameters.
+ AudioParameters input_parameters;
+ AudioParameters output_parameters;
+ GetDefaultAudioParameters(env, application_context, &input_parameters,
+ &output_parameters);
+ // Create ADM from AAudioRecorder and AAudioPlayer.
+ return CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::kAndroidAAudioAudio, false /* use_stereo_input */,
+ false /* use_stereo_output */,
+ jni::kLowLatencyModeDelayEstimateInMilliseconds,
+ std::make_unique<jni::AAudioRecorder>(input_parameters),
+ std::make_unique<jni::AAudioPlayer>(output_parameters));
+}
+#endif
+
+rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
+ JNIEnv* env,
+ jobject application_context) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ // Get default audio input/output parameters.
+ const JavaParamRef<jobject> j_context(application_context);
+ const ScopedJavaLocalRef<jobject> j_audio_manager =
+ jni::GetAudioManager(env, j_context);
+ AudioParameters input_parameters;
+ AudioParameters output_parameters;
+ GetDefaultAudioParameters(env, application_context, &input_parameters,
+ &output_parameters);
+ // Create ADM from AudioRecord and AudioTrack.
+ auto audio_input = std::make_unique<jni::AudioRecordJni>(
+ env, input_parameters, jni::kHighLatencyModeDelayEstimateInMilliseconds,
+ jni::AudioRecordJni::CreateJavaWebRtcAudioRecord(env, j_context,
+ j_audio_manager));
+ auto audio_output = std::make_unique<jni::AudioTrackJni>(
+ env, output_parameters,
+ jni::AudioTrackJni::CreateJavaWebRtcAudioTrack(env, j_context,
+ j_audio_manager));
+ return CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::kAndroidJavaAudio, false /* use_stereo_input */,
+ false /* use_stereo_output */,
+ jni::kHighLatencyModeDelayEstimateInMilliseconds, std::move(audio_input),
+ std::move(audio_output));
+}
+
+rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
+ JNIEnv* env,
+ jobject application_context) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ // Get default audio input/output parameters.
+ AudioParameters input_parameters;
+ AudioParameters output_parameters;
+ GetDefaultAudioParameters(env, application_context, &input_parameters,
+ &output_parameters);
+ // Create ADM from OpenSLESRecorder and OpenSLESPlayer.
+ rtc::scoped_refptr<jni::OpenSLEngineManager> engine_manager(
+ new jni::OpenSLEngineManager());
+ auto audio_input =
+ std::make_unique<jni::OpenSLESRecorder>(input_parameters, engine_manager);
+ auto audio_output = std::make_unique<jni::OpenSLESPlayer>(
+ output_parameters, std::move(engine_manager));
+ return CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::kAndroidOpenSLESAudio, false /* use_stereo_input */,
+ false /* use_stereo_output */,
+ jni::kLowLatencyModeDelayEstimateInMilliseconds, std::move(audio_input),
+ std::move(audio_output));
+}
+
+rtc::scoped_refptr<AudioDeviceModule>
+CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
+ jobject application_context) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ // Get default audio input/output parameters.
+ const JavaParamRef<jobject> j_context(application_context);
+ const ScopedJavaLocalRef<jobject> j_audio_manager =
+ jni::GetAudioManager(env, j_context);
+ AudioParameters input_parameters;
+ AudioParameters output_parameters;
+ GetDefaultAudioParameters(env, application_context, &input_parameters,
+ &output_parameters);
+ // Create ADM from AudioRecord and OpenSLESPlayer.
+ auto audio_input = std::make_unique<jni::AudioRecordJni>(
+ env, input_parameters, jni::kLowLatencyModeDelayEstimateInMilliseconds,
+ jni::AudioRecordJni::CreateJavaWebRtcAudioRecord(env, j_context,
+ j_audio_manager));
+
+ rtc::scoped_refptr<jni::OpenSLEngineManager> engine_manager(
+ new jni::OpenSLEngineManager());
+ auto audio_output = std::make_unique<jni::OpenSLESPlayer>(
+ output_parameters, std::move(engine_manager));
+ return CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio,
+ false /* use_stereo_input */, false /* use_stereo_output */,
+ jni::kLowLatencyModeDelayEstimateInMilliseconds, std::move(audio_input),
+ std::move(audio_output));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/audio_device_module/audio_device_android.h b/third_party/libwebrtc/sdk/android/native_api/audio_device_module/audio_device_android.h
new file mode 100644
index 0000000000..a093f8c895
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/audio_device_module/audio_device_android.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_AUDIO_DEVICE_MODULE_AUDIO_DEVICE_ANDROID_H_
+#define SDK_ANDROID_NATIVE_API_AUDIO_DEVICE_MODULE_AUDIO_DEVICE_ANDROID_H_
+
+#include <jni.h>
+
+#include "modules/audio_device/include/audio_device.h"
+
+namespace webrtc {
+
+#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
+ JNIEnv* env,
+ jobject application_context);
+#endif
+
+rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
+ JNIEnv* env,
+ jobject application_context);
+
+rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
+ JNIEnv* env,
+ jobject application_context);
+
+rtc::scoped_refptr<AudioDeviceModule>
+CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
+ jobject application_context);
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_AUDIO_DEVICE_MODULE_AUDIO_DEVICE_ANDROID_H_
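As a rough usage sketch: `env` and `application_context` are assumed to be supplied by the embedding app, and `CreateAdmForApp` is a hypothetical helper name; the returned module is then typically handed to the native PeerConnectionFactory, wiring that depends on the embedder.

    // Hedged sketch: pick one of the factory functions declared above.
    #include "sdk/android/native_api/audio_device_module/audio_device_android.h"

    rtc::scoped_refptr<webrtc::AudioDeviceModule> CreateAdmForApp(
        JNIEnv* env, jobject application_context) {
      // Java AudioRecord/AudioTrack based ADM; the OpenSL ES, AAudio and mixed
      // variants above are drop-in alternatives with different latency profiles.
      return webrtc::CreateJavaAudioDeviceModule(env, application_context);
    }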
diff --git a/third_party/libwebrtc/sdk/android/native_api/base/init.cc b/third_party/libwebrtc/sdk/android/native_api/base/init.cc
new file mode 100644
index 0000000000..176aa89ece
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/base/init.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/base/init.h"
+
+#include "rtc_base/checks.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+
+void InitAndroid(JavaVM* jvm) {
+ RTC_CHECK_GE(jni::InitGlobalJniVariables(jvm), 0);
+ InitClassLoader(jni::GetEnv());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/base/init.h b/third_party/libwebrtc/sdk/android/native_api/base/init.h
new file mode 100644
index 0000000000..d6a0ec1509
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/base/init.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_BASE_INIT_H_
+#define SDK_ANDROID_NATIVE_API_BASE_INIT_H_
+
+#include <jni.h>
+
+namespace webrtc {
+
+// Initializes global state needed by the WebRTC Android native API.
+void InitAndroid(JavaVM* jvm);
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_BASE_INIT_H_
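A minimal sketch of the intended call site, assuming the shared library is loaded from Java via System.loadLibrary():

    // Hedged sketch: InitAndroid() should run exactly once, from JNI_OnLoad.
    #include <jni.h>
    #include "sdk/android/native_api/base/init.h"

    extern "C" JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* jvm, void* /* reserved */) {
      webrtc::InitAndroid(jvm);  // Sets up JNI globals and the class loader.
      return JNI_VERSION_1_6;
    }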
diff --git a/third_party/libwebrtc/sdk/android/native_api/codecs/wrapper.cc b/third_party/libwebrtc/sdk/android/native_api/codecs/wrapper.cc
new file mode 100644
index 0000000000..c3f2095335
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/codecs/wrapper.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/codecs/wrapper.h"
+
+#include <memory>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+#include "sdk/android/src/jni/video_decoder_factory_wrapper.h"
+#include "sdk/android/src/jni/video_encoder_factory_wrapper.h"
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+
+namespace webrtc {
+
+SdpVideoFormat JavaToNativeVideoCodecInfo(JNIEnv* jni, jobject codec_info) {
+ return jni::VideoCodecInfoToSdpVideoFormat(jni,
+ JavaParamRef<jobject>(codec_info));
+}
+
+std::unique_ptr<VideoDecoderFactory> JavaToNativeVideoDecoderFactory(
+ JNIEnv* jni,
+ jobject decoder_factory) {
+ return std::make_unique<jni::VideoDecoderFactoryWrapper>(
+ jni, JavaParamRef<jobject>(decoder_factory));
+}
+
+std::unique_ptr<VideoEncoderFactory> JavaToNativeVideoEncoderFactory(
+ JNIEnv* jni,
+ jobject encoder_factory) {
+ return std::make_unique<jni::VideoEncoderFactoryWrapper>(
+ jni, JavaParamRef<jobject>(encoder_factory));
+}
+
+std::vector<VideoEncoder::ResolutionBitrateLimits>
+JavaToNativeResolutionBitrateLimits(JNIEnv* jni,
+ const jobjectArray j_bitrate_limits_array) {
+ return jni::JavaToNativeResolutionBitrateLimits(
+ jni, JavaParamRef<jobjectArray>(j_bitrate_limits_array));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/codecs/wrapper.h b/third_party/libwebrtc/sdk/android/native_api/codecs/wrapper.h
new file mode 100644
index 0000000000..04201699bc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/codecs/wrapper.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_CODECS_WRAPPER_H_
+#define SDK_ANDROID_NATIVE_API_CODECS_WRAPPER_H_
+
+#include <jni.h>
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+
+namespace webrtc {
+
+// Creates an instance of webrtc::SdpVideoFormat from Java VideoCodecInfo.
+SdpVideoFormat JavaToNativeVideoCodecInfo(JNIEnv* jni, jobject codec_info);
+
+// Creates an instance of webrtc::VideoDecoderFactory from Java
+// VideoDecoderFactory.
+std::unique_ptr<VideoDecoderFactory> JavaToNativeVideoDecoderFactory(
+ JNIEnv* jni,
+ jobject decoder_factory);
+
+// Creates an instance of webrtc::VideoEncoderFactory from Java
+// VideoEncoderFactory.
+std::unique_ptr<VideoEncoderFactory> JavaToNativeVideoEncoderFactory(
+ JNIEnv* jni,
+ jobject encoder_factory);
+
+// Creates a vector of VideoEncoder::ResolutionBitrateLimits from a Java array
+// of ResolutionBitrateLimits.
+std::vector<VideoEncoder::ResolutionBitrateLimits>
+JavaToNativeResolutionBitrateLimits(JNIEnv* jni,
+ jobjectArray j_bitrate_limits_array);
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_CODECS_WRAPPER_H_
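For illustration, a hedged sketch of bridging a Java-supplied factory into native code; `WrapEncoderFactory` is a hypothetical helper, and the jobject is assumed to arrive through some JNI entry point:

    #include "sdk/android/native_api/codecs/wrapper.h"

    std::unique_ptr<webrtc::VideoEncoderFactory> WrapEncoderFactory(
        JNIEnv* jni, jobject j_encoder_factory) {
      // The returned wrapper forwards factory calls to the Java object.
      return webrtc::JavaToNativeVideoEncoderFactory(jni, j_encoder_factory);
    }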
diff --git a/third_party/libwebrtc/sdk/android/native_api/jni/class_loader.cc b/third_party/libwebrtc/sdk/android/native_api/jni/class_loader.cc
new file mode 100644
index 0000000000..1789d78c85
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/jni/class_loader.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/jni/class_loader.h"
+
+#include <algorithm>
+#include <string>
+
+#include "rtc_base/checks.h"
+#include "sdk/android/generated_native_api_jni/WebRtcClassLoader_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+// Abort the process if `jni` has a Java exception pending. This macro uses the
+// comma operator to execute ExceptionDescribe and ExceptionClear ignoring their
+// return values and sending "" to the error stream.
+#define CHECK_EXCEPTION(jni) \
+ RTC_CHECK(!jni->ExceptionCheck()) \
+ << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
+
+namespace webrtc {
+
+namespace {
+
+class ClassLoader {
+ public:
+ explicit ClassLoader(JNIEnv* env)
+ : class_loader_(jni::Java_WebRtcClassLoader_getClassLoader(env)) {
+ class_loader_class_ = reinterpret_cast<jclass>(
+ env->NewGlobalRef(env->FindClass("java/lang/ClassLoader")));
+ CHECK_EXCEPTION(env);
+ load_class_method_ =
+ env->GetMethodID(class_loader_class_, "loadClass",
+ "(Ljava/lang/String;)Ljava/lang/Class;");
+ CHECK_EXCEPTION(env);
+ }
+
+ ScopedJavaLocalRef<jclass> FindClass(JNIEnv* env, const char* c_name) {
+ // ClassLoader.loadClass expects a classname with components separated by
+ // dots instead of the slashes that JNIEnv::FindClass expects.
+ std::string name(c_name);
+ std::replace(name.begin(), name.end(), '/', '.');
+ ScopedJavaLocalRef<jstring> j_name = NativeToJavaString(env, name);
+ const jclass clazz = static_cast<jclass>(env->CallObjectMethod(
+ class_loader_.obj(), load_class_method_, j_name.obj()));
+ CHECK_EXCEPTION(env);
+ return ScopedJavaLocalRef<jclass>(env, clazz);
+ }
+
+ private:
+ ScopedJavaGlobalRef<jobject> class_loader_;
+ jclass class_loader_class_;
+ jmethodID load_class_method_;
+};
+
+static ClassLoader* g_class_loader = nullptr;
+
+} // namespace
+
+void InitClassLoader(JNIEnv* env) {
+ RTC_CHECK(g_class_loader == nullptr);
+ g_class_loader = new ClassLoader(env);
+}
+
+ScopedJavaLocalRef<jclass> GetClass(JNIEnv* env, const char* name) {
+ // The class loader will be null in the JNI code called from the ClassLoader
+ // ctor when we are bootstrapping ourselves.
+ return (g_class_loader == nullptr)
+ ? ScopedJavaLocalRef<jclass>(env, env->FindClass(name))
+ : g_class_loader->FindClass(env, name);
+}
+
+} // namespace webrtc
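
A minimal sketch of the intended call order, assuming the embedding library defines its own JNI_OnLoad; this is illustrative and not part of the patch.

#include <jni.h>

#include "sdk/android/native_api/jni/class_loader.h"

extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved) {
  JNIEnv* env = nullptr;
  if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK)
    return -1;
  // Capture the application ClassLoader while a Java frame is on the stack,
  // so that later GetClass() calls work from natively created threads too.
  webrtc::InitClassLoader(env);
  return JNI_VERSION_1_6;
}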
diff --git a/third_party/libwebrtc/sdk/android/native_api/jni/class_loader.h b/third_party/libwebrtc/sdk/android/native_api/jni/class_loader.h
new file mode 100644
index 0000000000..2d102fe4a2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/jni/class_loader.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Android's FindClass() is tricky because the app-specific ClassLoader is not
+// consulted when there is no app-specific frame on the stack (i.e. when called
+// from a thread created from native C++ code). These helper functions provide a
+// workaround for this.
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+
+#ifndef SDK_ANDROID_NATIVE_API_JNI_CLASS_LOADER_H_
+#define SDK_ANDROID_NATIVE_API_JNI_CLASS_LOADER_H_
+
+#include <jni.h>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+
+// This method should be called from JNI_OnLoad and before any calls to
+// FindClass. This is normally called by InitAndroid.
+void InitClassLoader(JNIEnv* env);
+
+// This function is identical to JNIEnv::FindClass except that it works from any
+// thread. This function loads and returns a local reference to the class with
+// the given name. The name argument is a fully-qualified class name. For
+// example, the fully-qualified class name for the java.lang.String class is:
+// "java/lang/String". This function will be used from the JNI generated code
+// and should rarely be used manually.
+ScopedJavaLocalRef<jclass> GetClass(JNIEnv* env, const char* name);
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_JNI_CLASS_LOADER_H_
diff --git a/third_party/libwebrtc/sdk/android/native_api/jni/java_types.cc b/third_party/libwebrtc/sdk/android/native_api/jni/java_types.cc
new file mode 100644
index 0000000000..af02c10f4c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/jni/java_types.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/jni/java_types.h"
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "sdk/android/generated_external_classes_jni/ArrayList_jni.h"
+#include "sdk/android/generated_external_classes_jni/Boolean_jni.h"
+#include "sdk/android/generated_external_classes_jni/Double_jni.h"
+#include "sdk/android/generated_external_classes_jni/Enum_jni.h"
+#include "sdk/android/generated_external_classes_jni/Integer_jni.h"
+#include "sdk/android/generated_external_classes_jni/Iterable_jni.h"
+#include "sdk/android/generated_external_classes_jni/Iterator_jni.h"
+#include "sdk/android/generated_external_classes_jni/LinkedHashMap_jni.h"
+#include "sdk/android/generated_external_classes_jni/Long_jni.h"
+#include "sdk/android/generated_external_classes_jni/Map_jni.h"
+#include "sdk/android/generated_native_api_jni/JniHelper_jni.h"
+
+namespace webrtc {
+
+Iterable::Iterable(JNIEnv* jni, const JavaRef<jobject>& iterable)
+ : jni_(jni), iterable_(jni, iterable) {}
+
+Iterable::Iterable(Iterable&& other) = default;
+
+Iterable::~Iterable() = default;
+
+// Creates an iterator representing the end of any collection.
+Iterable::Iterator::Iterator() = default;
+
+// Creates an iterator pointing to the beginning of the specified collection.
+Iterable::Iterator::Iterator(JNIEnv* jni, const JavaRef<jobject>& iterable)
+ : jni_(jni) {
+ iterator_ = JNI_Iterable::Java_Iterable_iterator(jni, iterable);
+ RTC_CHECK(!iterator_.is_null());
+ // Start at the first element in the collection.
+ ++(*this);
+}
+
+// Move constructor - necessary to be able to return iterator types from
+// functions.
+Iterable::Iterator::Iterator(Iterator&& other)
+ : jni_(std::move(other.jni_)),
+ iterator_(std::move(other.iterator_)),
+ value_(std::move(other.value_)) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+}
+
+Iterable::Iterator::~Iterator() = default;
+
+// Advances the iterator one step.
+Iterable::Iterator& Iterable::Iterator::operator++() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (AtEnd()) {
+ // Can't move past the end.
+ return *this;
+ }
+ bool has_next = JNI_Iterator::Java_Iterator_hasNext(jni_, iterator_);
+ if (!has_next) {
+ iterator_ = nullptr;
+ value_ = nullptr;
+ return *this;
+ }
+
+ value_ = JNI_Iterator::Java_Iterator_next(jni_, iterator_);
+ return *this;
+}
+
+void Iterable::Iterator::Remove() {
+ JNI_Iterator::Java_Iterator_remove(jni_, iterator_);
+}
+
+// Provides a way to compare the iterator with itself and with the end iterator.
+// Note: all other comparison results are undefined, just like for C++ input
+// iterators.
+bool Iterable::Iterator::operator==(const Iterable::Iterator& other) {
+ // Two different active iterators should never be compared.
+ RTC_DCHECK(this == &other || AtEnd() || other.AtEnd());
+ return AtEnd() == other.AtEnd();
+}
+
+ScopedJavaLocalRef<jobject>& Iterable::Iterator::operator*() {
+ RTC_CHECK(!AtEnd());
+ return value_;
+}
+
+bool Iterable::Iterator::AtEnd() const {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return jni_ == nullptr || IsNull(jni_, iterator_);
+}
+
+bool IsNull(JNIEnv* jni, const JavaRef<jobject>& obj) {
+ return jni->IsSameObject(obj.obj(), nullptr);
+}
+
+std::string GetJavaEnumName(JNIEnv* jni, const JavaRef<jobject>& j_enum) {
+ return JavaToStdString(jni, JNI_Enum::Java_Enum_name(jni, j_enum));
+}
+
+Iterable GetJavaMapEntrySet(JNIEnv* jni, const JavaRef<jobject>& j_map) {
+ return Iterable(jni, JNI_Map::Java_Map_entrySet(jni, j_map));
+}
+
+ScopedJavaLocalRef<jobject> GetJavaMapEntryKey(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_entry) {
+ return jni::Java_JniHelper_getKey(jni, j_entry);
+}
+
+ScopedJavaLocalRef<jobject> GetJavaMapEntryValue(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_entry) {
+ return jni::Java_JniHelper_getValue(jni, j_entry);
+}
+
+int64_t JavaToNativeLong(JNIEnv* env, const JavaRef<jobject>& j_long) {
+ return JNI_Long::Java_Long_longValue(env, j_long);
+}
+
+absl::optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
+ const JavaRef<jobject>& boolean) {
+ if (IsNull(jni, boolean))
+ return absl::nullopt;
+ return JNI_Boolean::Java_Boolean_booleanValue(jni, boolean);
+}
+
+absl::optional<double> JavaToNativeOptionalDouble(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_double) {
+ if (IsNull(jni, j_double))
+ return absl::nullopt;
+ return JNI_Double::Java_Double_doubleValue(jni, j_double);
+}
+
+absl::optional<int32_t> JavaToNativeOptionalInt(
+ JNIEnv* jni,
+ const JavaRef<jobject>& integer) {
+ if (IsNull(jni, integer))
+ return absl::nullopt;
+ return JNI_Integer::Java_Integer_intValue(jni, integer);
+}
+
+// Given a (UTF-16) jstring, returns a new UTF-8 native string.
+std::string JavaToNativeString(JNIEnv* jni, const JavaRef<jstring>& j_string) {
+ const ScopedJavaLocalRef<jbyteArray> j_byte_array =
+ jni::Java_JniHelper_getStringBytes(jni, j_string);
+
+ const size_t len = jni->GetArrayLength(j_byte_array.obj());
+ CHECK_EXCEPTION(jni) << "error during GetArrayLength";
+ std::string str(len, '\0');
+ jni->GetByteArrayRegion(j_byte_array.obj(), 0, len,
+ reinterpret_cast<jbyte*>(&str[0]));
+ CHECK_EXCEPTION(jni) << "error during GetByteArrayRegion";
+ return str;
+}
+
+std::map<std::string, std::string> JavaToNativeStringMap(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_map) {
+ return JavaToNativeMap<std::string, std::string>(
+ jni, j_map,
+ [](JNIEnv* env, JavaRef<jobject> const& key,
+ JavaRef<jobject> const& value) {
+ return std::make_pair(
+ JavaToNativeString(env, static_java_ref_cast<jstring>(env, key)),
+ JavaToNativeString(env, static_java_ref_cast<jstring>(env, value)));
+ });
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaBoolean(JNIEnv* env, bool b) {
+ return JNI_Boolean::Java_Boolean_ConstructorJLB_Z(env, b);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaDouble(JNIEnv* env, double d) {
+ return JNI_Double::Java_Double_ConstructorJLD_D(env, d);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaInteger(JNIEnv* jni, int32_t i) {
+ return JNI_Integer::Java_Integer_ConstructorJLI_I(jni, i);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaLong(JNIEnv* env, int64_t u) {
+ return JNI_Long::Java_Long_ConstructorJLLO_J(env, u);
+}
+
+ScopedJavaLocalRef<jstring> NativeToJavaString(JNIEnv* env, const char* str) {
+ jstring j_str = env->NewStringUTF(str);
+ CHECK_EXCEPTION(env) << "error during NewStringUTF";
+ return ScopedJavaLocalRef<jstring>(env, j_str);
+}
+
+ScopedJavaLocalRef<jstring> NativeToJavaString(JNIEnv* jni,
+ const std::string& str) {
+ return NativeToJavaString(jni, str.c_str());
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaDouble(
+ JNIEnv* jni,
+ const absl::optional<double>& optional_double) {
+ return optional_double ? NativeToJavaDouble(jni, *optional_double) : nullptr;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaInteger(
+ JNIEnv* jni,
+ const absl::optional<int32_t>& optional_int) {
+ return optional_int ? NativeToJavaInteger(jni, *optional_int) : nullptr;
+}
+
+ScopedJavaLocalRef<jstring> NativeToJavaString(
+ JNIEnv* jni,
+ const absl::optional<std::string>& str) {
+ return str ? NativeToJavaString(jni, *str) : nullptr;
+}
+
+ScopedJavaLocalRef<jbyteArray> NativeToJavaByteArray(
+ JNIEnv* env,
+ rtc::ArrayView<int8_t> container) {
+ ScopedJavaLocalRef<jbyteArray> jarray(env,
+ env->NewByteArray(container.size()));
+ int8_t* array_ptr =
+ env->GetByteArrayElements(jarray.obj(), /*isCopy=*/nullptr);
+ memcpy(array_ptr, container.data(), container.size() * sizeof(int8_t));
+ env->ReleaseByteArrayElements(jarray.obj(), array_ptr, /*mode=*/0);
+ return jarray;
+}
+
+ScopedJavaLocalRef<jintArray> NativeToJavaIntArray(
+ JNIEnv* env,
+ rtc::ArrayView<int32_t> container) {
+ ScopedJavaLocalRef<jintArray> jarray(env, env->NewIntArray(container.size()));
+ int32_t* array_ptr =
+ env->GetIntArrayElements(jarray.obj(), /*isCopy=*/nullptr);
+ memcpy(array_ptr, container.data(), container.size() * sizeof(int32_t));
+ env->ReleaseIntArrayElements(jarray.obj(), array_ptr, /*mode=*/0);
+ return jarray;
+}
+
+std::vector<int8_t> JavaToNativeByteArray(JNIEnv* env,
+ const JavaRef<jbyteArray>& jarray) {
+ int8_t* array_ptr =
+ env->GetByteArrayElements(jarray.obj(), /*isCopy=*/nullptr);
+ size_t array_length = env->GetArrayLength(jarray.obj());
+ std::vector<int8_t> container(array_ptr, array_ptr + array_length);
+ env->ReleaseByteArrayElements(jarray.obj(), array_ptr, /*mode=*/JNI_ABORT);
+ return container;
+}
+
+std::vector<int32_t> JavaToNativeIntArray(JNIEnv* env,
+ const JavaRef<jintArray>& jarray) {
+ int32_t* array_ptr =
+ env->GetIntArrayElements(jarray.obj(), /*isCopy=*/nullptr);
+ size_t array_length = env->GetArrayLength(jarray.obj());
+ std::vector<int32_t> container(array_ptr, array_ptr + array_length);
+ env->ReleaseIntArrayElements(jarray.obj(), array_ptr, /*mode=*/JNI_ABORT);
+ return container;
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaBooleanArray(
+ JNIEnv* env,
+ const std::vector<bool>& container) {
+ return NativeToJavaObjectArray(env, container, java_lang_Boolean_clazz(env),
+ &NativeToJavaBoolean);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaDoubleArray(
+ JNIEnv* env,
+ const std::vector<double>& container) {
+ ScopedJavaLocalRef<jobject> (*convert_function)(JNIEnv*, double) =
+ &NativeToJavaDouble;
+ return NativeToJavaObjectArray(env, container, java_lang_Double_clazz(env),
+ convert_function);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaIntegerArray(
+ JNIEnv* env,
+ const std::vector<int32_t>& container) {
+ ScopedJavaLocalRef<jobject> (*convert_function)(JNIEnv*, int32_t) =
+ &NativeToJavaInteger;
+ return NativeToJavaObjectArray(env, container, java_lang_Integer_clazz(env),
+ convert_function);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaLongArray(
+ JNIEnv* env,
+ const std::vector<int64_t>& container) {
+ return NativeToJavaObjectArray(env, container, java_lang_Long_clazz(env),
+ &NativeToJavaLong);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaStringArray(
+ JNIEnv* env,
+ const std::vector<std::string>& container) {
+ ScopedJavaLocalRef<jstring> (*convert_function)(JNIEnv*, const std::string&) =
+ &NativeToJavaString;
+ return NativeToJavaObjectArray(
+ env, container,
+ static_cast<jclass>(jni::Java_JniHelper_getStringClass(env).obj()),
+ convert_function);
+}
+
+JavaListBuilder::JavaListBuilder(JNIEnv* env)
+ : env_(env), j_list_(JNI_ArrayList::Java_ArrayList_ConstructorJUALI(env)) {}
+
+JavaListBuilder::~JavaListBuilder() = default;
+
+void JavaListBuilder::add(const JavaRef<jobject>& element) {
+ JNI_ArrayList::Java_ArrayList_addZ_JUE(env_, j_list_, element);
+}
+
+JavaMapBuilder::JavaMapBuilder(JNIEnv* env)
+ : env_(env),
+ j_map_(JNI_LinkedHashMap::Java_LinkedHashMap_ConstructorJULIHM(env)) {}
+
+JavaMapBuilder::~JavaMapBuilder() = default;
+
+void JavaMapBuilder::put(const JavaRef<jobject>& key,
+ const JavaRef<jobject>& value) {
+ JNI_Map::Java_Map_put(env_, j_map_, key, value);
+}
+
+jlong NativeToJavaPointer(void* ptr) {
+ static_assert(sizeof(intptr_t) <= sizeof(jlong),
+ "Time to rethink the use of jlongs");
+ // Going through intptr_t to be obvious about the definedness of the
+ // conversion from pointer to integral type. intptr_t to jlong is a standard
+ // widening by the static_assert above.
+ jlong ret = reinterpret_cast<intptr_t>(ptr);
+ RTC_DCHECK(reinterpret_cast<void*>(ret) == ptr);
+ return ret;
+}
+
+// Given a List of (UTF-16) jstrings, returns a new vector of UTF-8 native
+// strings.
+std::vector<std::string> JavaToStdVectorStrings(JNIEnv* jni,
+ const JavaRef<jobject>& list) {
+ std::vector<std::string> converted_list;
+ if (!list.is_null()) {
+ for (const JavaRef<jobject>& str : Iterable(jni, list)) {
+ converted_list.push_back(JavaToStdString(
+ jni, JavaParamRef<jstring>(static_cast<jstring>(str.obj()))));
+ }
+ }
+ return converted_list;
+}
+
+} // namespace webrtc
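
As a usage sketch (not part of the patch), the string helpers above compose as follows; `j_map` stands for a Java Map<String, String> received over JNI.

#include <map>
#include <string>

#include "sdk/android/native_api/jni/java_types.h"

void CopyMapExample(JNIEnv* env, const webrtc::JavaRef<jobject>& j_map) {
  // Java Map<String, String> -> std::map<std::string, std::string>.
  std::map<std::string, std::string> native_map =
      webrtc::JavaToNativeStringMap(env, j_map);
  // And back; the Java side receives a LinkedHashMap, so the iteration order
  // of the std::map (sorted by key) is preserved.
  webrtc::ScopedJavaLocalRef<jobject> j_copy =
      webrtc::NativeToJavaStringMap(env, native_map);
}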
diff --git a/third_party/libwebrtc/sdk/android/native_api/jni/java_types.h b/third_party/libwebrtc/sdk/android/native_api/jni/java_types.h
new file mode 100644
index 0000000000..1008737d90
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/jni/java_types.h
@@ -0,0 +1,366 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Android's FindClass() is tricky because the app-specific ClassLoader is not
+// consulted when there is no app-specific frame on the stack (i.e. when called
+// from a thread created from native C++ code). These helper functions provide a
+// workaround for this.
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+
+#ifndef SDK_ANDROID_NATIVE_API_JNI_JAVA_TYPES_H_
+#define SDK_ANDROID_NATIVE_API_JNI_JAVA_TYPES_H_
+
+#include <jni.h>
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/checks.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+// Abort the process if `jni` has a Java exception pending.
+// This macro uses the comma operator to execute ExceptionDescribe
+// and ExceptionClear ignoring their return values and sending ""
+// to the error stream.
+#define CHECK_EXCEPTION(jni) \
+ RTC_CHECK(!jni->ExceptionCheck()) \
+ << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
+
+namespace webrtc {
+
+// ---------------
+// -- Utilities --
+// ---------------
+
+// Provides a convenient way to iterate over a Java Iterable using the
+// C++ range-for loop.
+// E.g. for (jobject value : Iterable(jni, j_iterable)) { ... }
+// Note: Since Java iterators cannot be duplicated, the iterator class is not
+// copyable to prevent creating multiple C++ iterators that refer to the same
+// Java iterator.
+class Iterable {
+ public:
+ Iterable(JNIEnv* jni, const JavaRef<jobject>& iterable);
+ Iterable(Iterable&& other);
+
+ ~Iterable();
+
+ Iterable(const Iterable&) = delete;
+ Iterable& operator=(const Iterable&) = delete;
+
+ class Iterator {
+ public:
+ // Creates an iterator representing the end of any collection.
+ Iterator();
+ // Creates an iterator pointing to the beginning of the specified
+ // collection.
+ Iterator(JNIEnv* jni, const JavaRef<jobject>& iterable);
+
+ // Move constructor - necessary to be able to return iterator types from
+ // functions.
+ Iterator(Iterator&& other);
+
+ ~Iterator();
+
+ Iterator(const Iterator&) = delete;
+ Iterator& operator=(const Iterator&) = delete;
+
+ // Move assignment should not be used.
+ Iterator& operator=(Iterator&&) = delete;
+
+ // Advances the iterator one step.
+ Iterator& operator++();
+
+ // Removes the element the iterator is pointing to. Must still advance the
+ // iterator afterwards.
+ void Remove();
+
+ // Provides a way to compare the iterator with itself and with the end
+ // iterator.
+ // Note: all other comparison results are undefined, just like for C++ input
+ // iterators.
+ bool operator==(const Iterator& other);
+ bool operator!=(const Iterator& other) { return !(*this == other); }
+ ScopedJavaLocalRef<jobject>& operator*();
+
+ private:
+ bool AtEnd() const;
+
+ JNIEnv* jni_ = nullptr;
+ ScopedJavaLocalRef<jobject> iterator_;
+ ScopedJavaLocalRef<jobject> value_;
+ SequenceChecker thread_checker_;
+ };
+
+ Iterable::Iterator begin() { return Iterable::Iterator(jni_, iterable_); }
+ Iterable::Iterator end() { return Iterable::Iterator(); }
+
+ private:
+ JNIEnv* jni_;
+ ScopedJavaLocalRef<jobject> iterable_;
+};
+
+// Returns true if `obj` == null in Java.
+bool IsNull(JNIEnv* jni, const JavaRef<jobject>& obj);
+
+// Returns the name of a Java enum.
+std::string GetJavaEnumName(JNIEnv* jni, const JavaRef<jobject>& j_enum);
+
+Iterable GetJavaMapEntrySet(JNIEnv* jni, const JavaRef<jobject>& j_map);
+ScopedJavaLocalRef<jobject> GetJavaMapEntryKey(JNIEnv* jni,
+ const JavaRef<jobject>& j_entry);
+ScopedJavaLocalRef<jobject> GetJavaMapEntryValue(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_entry);
+
+// --------------------------------------------------------
+// -- Methods for converting Java types to native types. --
+// --------------------------------------------------------
+
+int64_t JavaToNativeLong(JNIEnv* env, const JavaRef<jobject>& j_long);
+
+absl::optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
+ const JavaRef<jobject>& boolean);
+absl::optional<double> JavaToNativeOptionalDouble(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_double);
+absl::optional<int32_t> JavaToNativeOptionalInt(
+ JNIEnv* jni,
+ const JavaRef<jobject>& integer);
+
+// Given a (UTF-16) jstring, returns a new UTF-8 native string.
+std::string JavaToNativeString(JNIEnv* jni, const JavaRef<jstring>& j_string);
+
+template <typename T, typename Convert>
+std::vector<T> JavaToNativeVector(JNIEnv* env,
+ const JavaRef<jobjectArray>& j_container,
+ Convert convert) {
+ std::vector<T> container;
+ const size_t size = env->GetArrayLength(j_container.obj());
+ container.reserve(size);
+ for (size_t i = 0; i < size; ++i) {
+ container.emplace_back(convert(
+ env, ScopedJavaLocalRef<jobject>(
+ env, env->GetObjectArrayElement(j_container.obj(), i))));
+ }
+ CHECK_EXCEPTION(env) << "Error during JavaToNativeVector";
+ return container;
+}
+
+template <typename T, typename Java_T = jobject, typename Convert>
+std::vector<T> JavaListToNativeVector(JNIEnv* env,
+ const JavaRef<jobject>& j_list,
+ Convert convert) {
+ std::vector<T> native_list;
+ if (!j_list.is_null()) {
+ for (ScopedJavaLocalRef<jobject>& j_item : Iterable(env, j_list)) {
+ native_list.emplace_back(
+ convert(env, static_java_ref_cast<Java_T>(env, j_item)));
+ }
+ CHECK_EXCEPTION(env) << "Error during JavaListToNativeVector";
+ }
+ return native_list;
+}
+
+template <typename Key, typename T, typename Convert>
+std::map<Key, T> JavaToNativeMap(JNIEnv* env,
+ const JavaRef<jobject>& j_map,
+ Convert convert) {
+ std::map<Key, T> container;
+ for (auto const& j_entry : GetJavaMapEntrySet(env, j_map)) {
+ container.emplace(convert(env, GetJavaMapEntryKey(env, j_entry),
+ GetJavaMapEntryValue(env, j_entry)));
+ }
+ return container;
+}
+
+// Converts Map<String, String> to std::map<std::string, std::string>.
+std::map<std::string, std::string> JavaToNativeStringMap(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_map);
+
+// --------------------------------------------------------
+// -- Methods for converting native types to Java types. --
+// --------------------------------------------------------
+
+ScopedJavaLocalRef<jobject> NativeToJavaBoolean(JNIEnv* env, bool b);
+ScopedJavaLocalRef<jobject> NativeToJavaDouble(JNIEnv* env, double d);
+ScopedJavaLocalRef<jobject> NativeToJavaInteger(JNIEnv* jni, int32_t i);
+ScopedJavaLocalRef<jobject> NativeToJavaLong(JNIEnv* env, int64_t u);
+ScopedJavaLocalRef<jstring> NativeToJavaString(JNIEnv* jni, const char* str);
+ScopedJavaLocalRef<jstring> NativeToJavaString(JNIEnv* jni,
+ const std::string& str);
+
+ScopedJavaLocalRef<jobject> NativeToJavaDouble(
+ JNIEnv* jni,
+ const absl::optional<double>& optional_double);
+ScopedJavaLocalRef<jobject> NativeToJavaInteger(
+ JNIEnv* jni,
+ const absl::optional<int32_t>& optional_int);
+ScopedJavaLocalRef<jstring> NativeToJavaString(
+ JNIEnv* jni,
+ const absl::optional<std::string>& str);
+
+// Helper function for converting std::vector<T> into a Java array.
+template <typename T, typename Convert>
+ScopedJavaLocalRef<jobjectArray> NativeToJavaObjectArray(
+ JNIEnv* env,
+ const std::vector<T>& container,
+ jclass clazz,
+ Convert convert) {
+ ScopedJavaLocalRef<jobjectArray> j_container(
+ env, env->NewObjectArray(container.size(), clazz, nullptr));
+ int i = 0;
+ for (const T& element : container) {
+ env->SetObjectArrayElement(j_container.obj(), i,
+ convert(env, element).obj());
+ ++i;
+ }
+ return j_container;
+}
+
+ScopedJavaLocalRef<jbyteArray> NativeToJavaByteArray(
+ JNIEnv* env,
+ rtc::ArrayView<int8_t> container);
+ScopedJavaLocalRef<jintArray> NativeToJavaIntArray(
+ JNIEnv* env,
+ rtc::ArrayView<int32_t> container);
+
+std::vector<int8_t> JavaToNativeByteArray(JNIEnv* env,
+ const JavaRef<jbyteArray>& jarray);
+std::vector<int32_t> JavaToNativeIntArray(JNIEnv* env,
+ const JavaRef<jintArray>& jarray);
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaBooleanArray(
+ JNIEnv* env,
+ const std::vector<bool>& container);
+ScopedJavaLocalRef<jobjectArray> NativeToJavaDoubleArray(
+ JNIEnv* env,
+ const std::vector<double>& container);
+ScopedJavaLocalRef<jobjectArray> NativeToJavaIntegerArray(
+ JNIEnv* env,
+ const std::vector<int32_t>& container);
+ScopedJavaLocalRef<jobjectArray> NativeToJavaLongArray(
+ JNIEnv* env,
+ const std::vector<int64_t>& container);
+ScopedJavaLocalRef<jobjectArray> NativeToJavaStringArray(
+ JNIEnv* env,
+ const std::vector<std::string>& container);
+
+// This is a helper class for NativeToJavaList(). Use that function instead of
+// using this class directly.
+class JavaListBuilder {
+ public:
+ explicit JavaListBuilder(JNIEnv* env);
+ ~JavaListBuilder();
+ void add(const JavaRef<jobject>& element);
+ ScopedJavaLocalRef<jobject> java_list() { return j_list_; }
+
+ private:
+ JNIEnv* env_;
+ ScopedJavaLocalRef<jobject> j_list_;
+};
+
+template <typename C, typename Convert>
+ScopedJavaLocalRef<jobject> NativeToJavaList(JNIEnv* env,
+ const C& container,
+ Convert convert) {
+ JavaListBuilder builder(env);
+ for (const auto& e : container)
+ builder.add(convert(env, e));
+ return builder.java_list();
+}
+
+// This is a helper class for NativeToJavaMap(). Use that function instead of
+// using this class directly.
+class JavaMapBuilder {
+ public:
+ explicit JavaMapBuilder(JNIEnv* env);
+ ~JavaMapBuilder();
+ void put(const JavaRef<jobject>& key, const JavaRef<jobject>& value);
+ ScopedJavaLocalRef<jobject> GetJavaMap() { return j_map_; }
+
+ private:
+ JNIEnv* env_;
+ ScopedJavaLocalRef<jobject> j_map_;
+};
+
+template <typename C, typename Convert>
+ScopedJavaLocalRef<jobject> NativeToJavaMap(JNIEnv* env,
+ const C& container,
+ Convert convert) {
+ JavaMapBuilder builder(env);
+ for (const auto& e : container) {
+ const auto key_value_pair = convert(env, e);
+ builder.put(key_value_pair.first, key_value_pair.second);
+ }
+ return builder.GetJavaMap();
+}
+
+template <typename C>
+ScopedJavaLocalRef<jobject> NativeToJavaStringMap(JNIEnv* env,
+ const C& container) {
+ JavaMapBuilder builder(env);
+ for (const auto& e : container) {
+ const auto key_value_pair = std::make_pair(
+ NativeToJavaString(env, e.first), NativeToJavaString(env, e.second));
+ builder.put(key_value_pair.first, key_value_pair.second);
+ }
+ return builder.GetJavaMap();
+}
+
+// Return a `jlong` that will correctly convert back to `ptr`. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong NativeToJavaPointer(void* ptr);
+
+// ------------------------
+// -- Deprecated methods --
+// ------------------------
+
+// Deprecated. Use JavaToNativeString.
+inline std::string JavaToStdString(JNIEnv* jni,
+ const JavaRef<jstring>& j_string) {
+ return JavaToNativeString(jni, j_string);
+}
+
+// Deprecated. Use scoped jobjects instead.
+inline std::string JavaToStdString(JNIEnv* jni, jstring j_string) {
+ return JavaToStdString(jni, JavaParamRef<jstring>(j_string));
+}
+
+// Deprecated. Use JavaListToNativeVector<std::string, jstring> instead.
+// Given a List of (UTF-16) jstrings
+// return a new vector of UTF-8 native strings.
+std::vector<std::string> JavaToStdVectorStrings(JNIEnv* jni,
+ const JavaRef<jobject>& list);
+
+// Deprecated. Use JavaToNativeStringMap instead.
+// Parses Map<String, String> to std::map<std::string, std::string>.
+inline std::map<std::string, std::string> JavaToStdMapStrings(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_map) {
+ return JavaToNativeStringMap(jni, j_map);
+}
+
+// Deprecated. Use scoped jobjects instead.
+inline std::map<std::string, std::string> JavaToStdMapStrings(JNIEnv* jni,
+ jobject j_map) {
+ return JavaToStdMapStrings(jni, JavaParamRef<jobject>(j_map));
+}
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_JNI_JAVA_TYPES_H_
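
A sketch of how the template helpers are meant to be used (illustrative, not part of the patch), assuming a Java List<Integer> argument:

#include <vector>

#include "sdk/android/native_api/jni/java_types.h"

std::vector<int32_t> JavaIntegerListToVector(
    JNIEnv* env, const webrtc::JavaRef<jobject>& j_list) {
  // The lambda converts each boxed Integer; a null element falls back to 0
  // here purely for illustration.
  return webrtc::JavaListToNativeVector<int32_t>(
      env, j_list, [](JNIEnv* e, const webrtc::JavaRef<jobject>& j_int) {
        return webrtc::JavaToNativeOptionalInt(e, j_int).value_or(0);
      });
}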
diff --git a/third_party/libwebrtc/sdk/android/native_api/jni/jni_int_wrapper.h b/third_party/libwebrtc/sdk/android/native_api/jni/jni_int_wrapper.h
new file mode 100644
index 0000000000..23da7f2ce4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/jni/jni_int_wrapper.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Originally this class is from Chromium.
+// https://cs.chromium.org/chromium/src/base/android/jni_int_wrapper.h.
+
+#ifndef SDK_ANDROID_NATIVE_API_JNI_JNI_INT_WRAPPER_H_
+#define SDK_ANDROID_NATIVE_API_JNI_JNI_INT_WRAPPER_H_
+
+// Wrapper used to receive int when calling Java from native. The wrapper
+// disallows automatic conversion of anything besides int32_t to a jint.
+// Checking is only done in debugging builds.
+
+#ifdef NDEBUG
+
+typedef jint JniIntWrapper;
+
+// This inline is sufficiently trivial that it does not change the
+// final code generated by g++.
+inline jint as_jint(JniIntWrapper wrapper) {
+ return wrapper;
+}
+
+#else
+
+class JniIntWrapper {
+ public:
+ JniIntWrapper() : i_(0) {}
+ JniIntWrapper(int32_t i) : i_(i) {} // NOLINT(runtime/explicit)
+ explicit JniIntWrapper(const JniIntWrapper& ji) : i_(ji.i_) {}
+
+ jint as_jint() const { return i_; }
+
+ // If you get an "invokes a deleted function" error at the lines below it is
+ // because you used an implicit conversion to convert e.g. a long to an
+ // int32_t when calling Java. We disallow this. If you want a lossy
+ // conversion, please use an explicit conversion in your C++ code.
+ JniIntWrapper(uint32_t) = delete; // NOLINT(runtime/explicit)
+ JniIntWrapper(uint64_t) = delete; // NOLINT(runtime/explicit)
+ JniIntWrapper(int64_t) = delete; // NOLINT(runtime/explicit)
+
+ private:
+ const jint i_;
+};
+
+inline jint as_jint(const JniIntWrapper& wrapper) {
+ return wrapper.as_jint();
+}
+
+#endif // NDEBUG
+
+#endif // SDK_ANDROID_NATIVE_API_JNI_JNI_INT_WRAPPER_H_
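
What the wrapper buys in debug builds, as an illustrative sketch; CallJavaWithInt is a stand-in for a generated call stub and is not part of the patch.

#include <jni.h>

#include <cstdint>

#include "sdk/android/native_api/jni/jni_int_wrapper.h"

void CallJavaWithInt(JniIntWrapper i);  // Hypothetical generated stub.

void Example() {
  CallJavaWithInt(42);  // OK: int32_t converts implicitly.
  int64_t big = int64_t{1} << 40;
  // CallJavaWithInt(big);  // Compile error in debug builds: deleted ctor.
  CallJavaWithInt(static_cast<int32_t>(big));  // Lossy, but explicit.
}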
diff --git a/third_party/libwebrtc/sdk/android/native_api/jni/jvm.cc b/third_party/libwebrtc/sdk/android/native_api/jni/jvm.cc
new file mode 100644
index 0000000000..3356cbeb6f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/jni/jvm.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/jni/jvm.h"
+
+#include "sdk/android/src/jni/jvm.h"
+
+namespace webrtc {
+
+JNIEnv* AttachCurrentThreadIfNeeded() {
+ return jni::AttachCurrentThreadIfNeeded();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/jni/jvm.h b/third_party/libwebrtc/sdk/android/native_api/jni/jvm.h
new file mode 100644
index 0000000000..00bce6734d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/jni/jvm.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_JNI_JVM_H_
+#define SDK_ANDROID_NATIVE_API_JNI_JVM_H_
+
+#include <jni.h>
+
+namespace webrtc {
+// Returns a JNI environment usable on this thread.
+JNIEnv* AttachCurrentThreadIfNeeded();
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_JNI_JVM_H_
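
A minimal sketch (not part of the patch) of pairing this with the class loader helpers on a thread that was created natively:

#include "sdk/android/native_api/jni/class_loader.h"
#include "sdk/android/native_api/jni/jvm.h"

void RunOnNativeThread() {
  // Attaches the thread to the JVM on first use; subsequent calls are cheap.
  JNIEnv* env = webrtc::AttachCurrentThreadIfNeeded();
  // Resolves through the stored application ClassLoader, so this works even
  // though there is no Java frame on this thread's stack.
  webrtc::ScopedJavaLocalRef<jclass> clazz =
      webrtc::GetClass(env, "java/lang/String");
}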
diff --git a/third_party/libwebrtc/sdk/android/native_api/jni/scoped_java_ref.h b/third_party/libwebrtc/sdk/android/native_api/jni/scoped_java_ref.h
new file mode 100644
index 0000000000..a2be447de2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/jni/scoped_java_ref.h
@@ -0,0 +1,226 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Originally these classes are from Chromium.
+// https://cs.chromium.org/chromium/src/base/android/scoped_java_ref.h.
+
+#ifndef SDK_ANDROID_NATIVE_API_JNI_SCOPED_JAVA_REF_H_
+#define SDK_ANDROID_NATIVE_API_JNI_SCOPED_JAVA_REF_H_
+
+#include <jni.h>
+
+#include <utility>
+
+#include "sdk/android/native_api/jni/jvm.h"
+
+namespace webrtc {
+
+// Generic base class for ScopedJavaLocalRef and ScopedJavaGlobalRef. Useful
+// for allowing functions to accept a reference without having to mandate
+// whether it is a local or global type.
+template <typename T>
+class JavaRef;
+
+// Template specialization of JavaRef, which acts as the base class for all
+// other JavaRef<> template types. This allows you to e.g. pass JavaRef<jstring>
+// into a function taking const JavaRef<jobject>&.
+template <>
+class JavaRef<jobject> {
+ public:
+ JavaRef(const JavaRef&) = delete;
+ JavaRef& operator=(const JavaRef&) = delete;
+
+ jobject obj() const { return obj_; }
+ bool is_null() const {
+ // This is not valid for weak references. For weak references you would need
+ // env->IsSameObject(obj_, nullptr), but even that should be avoided, since
+ // it does not prevent the object from being freed immediately afterwards.
+ // Consequently, do not use this check on weak references; first make a
+ // ScopedJavaLocalRef or ScopedJavaGlobalRef from the weak reference and
+ // check that for null instead.
+ return obj_ == nullptr;
+ }
+
+ protected:
+ constexpr JavaRef() : obj_(nullptr) {}
+ explicit JavaRef(jobject obj) : obj_(obj) {}
+ jobject obj_;
+};
+
+template <typename T>
+class JavaRef : public JavaRef<jobject> {
+ public:
+ JavaRef(const JavaRef&) = delete;
+ JavaRef& operator=(const JavaRef&) = delete;
+
+ T obj() const { return static_cast<T>(obj_); }
+
+ protected:
+ JavaRef() : JavaRef<jobject>(nullptr) {}
+ explicit JavaRef(T obj) : JavaRef<jobject>(obj) {}
+};
+
+// Holds a local reference to a JNI method parameter.
+// Method parameters should not be deleted, and so this class exists purely to
+// wrap them as a JavaRef<T> in the JNI binding generator. Do not create
+// instances manually.
+template <typename T>
+class JavaParamRef : public JavaRef<T> {
+ public:
+ // Assumes that `obj` is a parameter passed to a JNI method from Java.
+ // Does not assume ownership as parameters should not be deleted.
+ explicit JavaParamRef(T obj) : JavaRef<T>(obj) {}
+ JavaParamRef(JNIEnv*, T obj) : JavaRef<T>(obj) {}
+
+ JavaParamRef(const JavaParamRef&) = delete;
+ JavaParamRef& operator=(const JavaParamRef&) = delete;
+};
+
+// Holds a local reference to a Java object. The local reference is scoped
+// to the lifetime of this object.
+// Instances of this class may hold onto any JNIEnv passed into them until
+// destroyed. Since a JNIEnv is only suitable for use on a single thread,
+// objects of this class must be created, used, and destroyed on that same
+// thread.
+// Consequently, this class should only be used as a stack-based object and
+// from a single thread. If you wish the reference to outlive the current
+// callstack (e.g. as a class member) or to pass it across threads, use a
+// ScopedJavaGlobalRef instead.
+template <typename T>
+class ScopedJavaLocalRef : public JavaRef<T> {
+ public:
+ ScopedJavaLocalRef() = default;
+ ScopedJavaLocalRef(std::nullptr_t) {} // NOLINT(runtime/explicit)
+
+ ScopedJavaLocalRef(JNIEnv* env, const JavaRef<T>& other) : env_(env) {
+ Reset(other.obj(), OwnershipPolicy::RETAIN);
+ }
+ // Allow constructing e.g. ScopedJavaLocalRef<jobject> from
+ // ScopedJavaLocalRef<jstring>.
+ template <typename G>
+ ScopedJavaLocalRef(ScopedJavaLocalRef<G>&& other) : env_(other.env()) {
+ Reset(other.Release(), OwnershipPolicy::ADOPT);
+ }
+ ScopedJavaLocalRef(const ScopedJavaLocalRef& other) : env_(other.env_) {
+ Reset(other.obj(), OwnershipPolicy::RETAIN);
+ }
+
+ // Assumes that `obj` is a reference to a Java object and takes
+ // ownership of this reference. This should preferably not be used
+ // outside of JNI helper functions.
+ ScopedJavaLocalRef(JNIEnv* env, T obj) : JavaRef<T>(obj), env_(env) {}
+
+ ~ScopedJavaLocalRef() {
+ if (obj_ != nullptr)
+ env_->DeleteLocalRef(obj_);
+ }
+
+ void operator=(const ScopedJavaLocalRef& other) {
+ Reset(other.obj(), OwnershipPolicy::RETAIN);
+ }
+ void operator=(ScopedJavaLocalRef&& other) {
+ Reset(other.Release(), OwnershipPolicy::ADOPT);
+ }
+
+ // Releases the reference to the caller. The caller *must* delete the
+ // reference when it is done with it. Note that calling a Java method
+ // is *not* a transfer of ownership and Release() should not be used.
+ T Release() {
+ T obj = static_cast<T>(obj_);
+ obj_ = nullptr;
+ return obj;
+ }
+
+ JNIEnv* env() const { return env_; }
+
+ private:
+ using JavaRef<T>::obj_;
+
+ enum OwnershipPolicy {
+ // The scoped object takes ownership of an object by taking over an existing
+ // ownership claim.
+ ADOPT,
+ // The scoped object will retain the object and any initial ownership is
+ // not changed.
+ RETAIN
+ };
+
+ void Reset(T obj, OwnershipPolicy policy) {
+ if (obj_ != nullptr)
+ env_->DeleteLocalRef(obj_);
+ obj_ = (obj != nullptr && policy == OwnershipPolicy::RETAIN)
+ ? env_->NewLocalRef(obj)
+ : obj;
+ }
+
+ JNIEnv* const env_ = AttachCurrentThreadIfNeeded();
+};
+
+// Holds a global reference to a Java object. The global reference is scoped
+// to the lifetime of this object. This class does not hold onto any JNIEnv*
+// passed to it, hence it is safe to use across threads (within the constraints
+// imposed by the underlying Java object that it references).
+template <typename T>
+class ScopedJavaGlobalRef : public JavaRef<T> {
+ public:
+ using JavaRef<T>::obj_;
+
+ ScopedJavaGlobalRef() = default;
+ explicit constexpr ScopedJavaGlobalRef(std::nullptr_t) {}
+ ScopedJavaGlobalRef(JNIEnv* env, const JavaRef<T>& other)
+ : JavaRef<T>(static_cast<T>(env->NewGlobalRef(other.obj()))) {}
+ explicit ScopedJavaGlobalRef(const ScopedJavaLocalRef<T>& other)
+ : ScopedJavaGlobalRef(other.env(), other) {}
+ ScopedJavaGlobalRef(ScopedJavaGlobalRef&& other)
+ : JavaRef<T>(other.Release()) {}
+
+ ~ScopedJavaGlobalRef() {
+ if (obj_ != nullptr)
+ AttachCurrentThreadIfNeeded()->DeleteGlobalRef(obj_);
+ }
+
+ ScopedJavaGlobalRef(const ScopedJavaGlobalRef&) = delete;
+ ScopedJavaGlobalRef& operator=(const ScopedJavaGlobalRef&) = delete;
+
+ void operator=(const JavaRef<T>& other) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ if (obj_ != nullptr) {
+ env->DeleteGlobalRef(obj_);
+ }
+ obj_ = other.is_null() ? nullptr : env->NewGlobalRef(other.obj());
+ }
+
+ void operator=(std::nullptr_t) {
+ if (obj_ != nullptr) {
+ AttachCurrentThreadIfNeeded()->DeleteGlobalRef(obj_);
+ }
+ obj_ = nullptr;
+ }
+
+ // Releases the reference to the caller. The caller *must* delete the
+ // reference when it is done with it. Note that calling a Java method
+ // is *not* a transfer of ownership and Release() should not be used.
+ T Release() {
+ T obj = static_cast<T>(obj_);
+ obj_ = nullptr;
+ return obj;
+ }
+};
+
+template <typename T>
+inline ScopedJavaLocalRef<T> static_java_ref_cast(JNIEnv* env,
+ JavaRef<jobject> const& ref) {
+ ScopedJavaLocalRef<jobject> owned_ref(env, ref);
+ return ScopedJavaLocalRef<T>(env, static_cast<T>(owned_ref.Release()));
+}
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_JNI_SCOPED_JAVA_REF_H_
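
A sketch contrasting the two scoped reference types (illustrative, not part of the patch): the global ref may live in a member and cross threads, while local refs stay on the creating thread.

#include <jni.h>

#include "sdk/android/native_api/jni/scoped_java_ref.h"

class JavaObjectHolder {
 public:
  JavaObjectHolder(JNIEnv* env, const webrtc::JavaRef<jobject>& j_obj)
      : j_obj_(env, j_obj) {}  // Takes a NewGlobalRef under the hood.

  void Use(JNIEnv* env) {
    // Pin a local ref for the duration of this call; it is deleted when
    // `local` goes out of scope.
    webrtc::ScopedJavaLocalRef<jobject> local(env, j_obj_);
  }

 private:
  webrtc::ScopedJavaGlobalRef<jobject> j_obj_;
};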
diff --git a/third_party/libwebrtc/sdk/android/native_api/network_monitor/network_monitor.cc b/third_party/libwebrtc/sdk/android/native_api/network_monitor/network_monitor.cc
new file mode 100644
index 0000000000..38be7fdef7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/network_monitor/network_monitor.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/network_monitor/network_monitor.h"
+
+#include <memory>
+
+#include "sdk/android/src/jni/android_network_monitor.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::NetworkMonitorFactory> CreateAndroidNetworkMonitorFactory(
+ JNIEnv* env,
+ jobject application_context) {
+ return std::make_unique<jni::AndroidNetworkMonitorFactory>(
+ env, JavaParamRef<jobject>(application_context));
+}
+
+std::unique_ptr<rtc::NetworkMonitorFactory>
+CreateAndroidNetworkMonitorFactory() {
+ return std::make_unique<jni::AndroidNetworkMonitorFactory>();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/network_monitor/network_monitor.h b/third_party/libwebrtc/sdk/android/native_api/network_monitor/network_monitor.h
new file mode 100644
index 0000000000..45ecd75543
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/network_monitor/network_monitor.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_NETWORK_MONITOR_NETWORK_MONITOR_H_
+#define SDK_ANDROID_NATIVE_API_NETWORK_MONITOR_NETWORK_MONITOR_H_
+
+#include <jni.h>
+
+#include <memory>
+
+#include "rtc_base/network_monitor_factory.h"
+
+namespace webrtc {
+
+// Creates an Android-specific network monitor, which is capable of detecting
+// network changes as soon as they occur, requesting a cellular interface
+// (dependent on permissions), and binding sockets to network interfaces (more
+// reliable than binding to IP addresses on Android).
+std::unique_ptr<rtc::NetworkMonitorFactory> CreateAndroidNetworkMonitorFactory(
+ JNIEnv* env,
+ jobject application_context);
+
+// Deprecated. Pass in application context instead.
+std::unique_ptr<rtc::NetworkMonitorFactory>
+CreateAndroidNetworkMonitorFactory();
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_NETWORK_MONITOR_NETWORK_MONITOR_H_
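
A sketch (not part of the patch) of where the factory is typically installed, assuming the standard PeerConnectionFactoryDependencies plumbing:

#include <jni.h>

#include "api/peer_connection_interface.h"
#include "sdk/android/native_api/network_monitor/network_monitor.h"

void InstallNetworkMonitor(JNIEnv* env,
                           jobject application_context,
                           webrtc::PeerConnectionFactoryDependencies& deps) {
  // The factory consults Android's connectivity services via JNI, so it
  // needs the application context.
  deps.network_monitor_factory =
      webrtc::CreateAndroidNetworkMonitorFactory(env, application_context);
}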
diff --git a/third_party/libwebrtc/sdk/android/native_api/peerconnection/peer_connection_factory.cc b/third_party/libwebrtc/sdk/android/native_api/peerconnection/peer_connection_factory.cc
new file mode 100644
index 0000000000..87ab991cf4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/peerconnection/peer_connection_factory.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/native_api/peerconnection/peer_connection_factory.h"
+
+#include <jni.h>
+
+#include <memory>
+#include <utility>
+
+#include "sdk/android/src/jni/pc/peer_connection_factory.h"
+
+namespace webrtc {
+
+jobject NativeToJavaPeerConnectionFactory(
+ JNIEnv* jni,
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread) {
+ return webrtc::jni::NativeToJavaPeerConnectionFactory(
+ jni, pcf, std::move(socket_factory), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/peerconnection/peer_connection_factory.h b/third_party/libwebrtc/sdk/android/native_api/peerconnection/peer_connection_factory.h
new file mode 100644
index 0000000000..959eb797ce
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/peerconnection/peer_connection_factory.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_PEERCONNECTION_PEER_CONNECTION_FACTORY_H_
+#define SDK_ANDROID_NATIVE_API_PEERCONNECTION_PEER_CONNECTION_FACTORY_H_
+
+#include <jni.h>
+
+#include <memory>
+
+#include "api/peer_connection_interface.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// Creates a Java PeerConnectionFactory that wraps the specified `pcf`.
+jobject NativeToJavaPeerConnectionFactory(
+ JNIEnv* jni,
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread);
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_PEERCONNECTION_PEER_CONNECTION_FACTORY_H_
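
A sketch of handing a natively built factory to Java (illustrative, not part of the patch); it assumes `pcf` was created on these same threads, whose ownership moves into the Java wrapper:

#include <memory>
#include <utility>

#include "rtc_base/thread.h"
#include "sdk/android/native_api/peerconnection/peer_connection_factory.h"

jobject WrapFactoryForJava(
    JNIEnv* jni,
    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
    std::unique_ptr<rtc::SocketFactory> socket_factory,
    std::unique_ptr<rtc::Thread> network_thread,
    std::unique_ptr<rtc::Thread> worker_thread,
    std::unique_ptr<rtc::Thread> signaling_thread) {
  // The returned jobject is a local reference to the Java
  // PeerConnectionFactory; the threads are now owned by that wrapper.
  return webrtc::NativeToJavaPeerConnectionFactory(
      jni, std::move(pcf), std::move(socket_factory),
      std::move(network_thread), std::move(worker_thread),
      std::move(signaling_thread));
}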
diff --git a/third_party/libwebrtc/sdk/android/native_api/stacktrace/stacktrace.cc b/third_party/libwebrtc/sdk/android/native_api/stacktrace/stacktrace.cc
new file mode 100644
index 0000000000..96e03e0af1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/stacktrace/stacktrace.cc
@@ -0,0 +1,286 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/stacktrace/stacktrace.h"
+
+#include <dlfcn.h>
+#include <errno.h>
+#include <linux/futex.h>
+#include <sys/ptrace.h>
+#include <sys/ucontext.h>
+#include <syscall.h>
+#include <ucontext.h>
+#include <unistd.h>
+#include <unwind.h>
+#include <atomic>
+
+// ptrace.h is polluting the namespace. Clean up to avoid conflicts with rtc.
+#if defined(DS)
+#undef DS
+#endif
+
+#include "absl/base/attributes.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+
+namespace {
+
+// Maximum stack trace depth we allow before aborting.
+constexpr size_t kMaxStackSize = 100;
+// Signal that will be used to interrupt threads. SIGURG ("Urgent condition on
+// socket") is chosen because Android does not set up a specific handler for
+// this signal.
+constexpr int kSignal = SIGURG;
+
+// Note: This class is only meant for use within this file, and for the
+// simplified use case of a single Wait() and a single Signal(), followed by
+// discarding the object (never reused).
+// This is a replacement of rtc::Event that is async-safe and doesn't use
+// pthread api. This is necessary since signal handlers cannot allocate memory
+// or use pthread api. This class is ported from Chromium.
+class AsyncSafeWaitableEvent {
+ public:
+ AsyncSafeWaitableEvent() {
+ std::atomic_store_explicit(&futex_, 0, std::memory_order_release);
+ }
+
+ ~AsyncSafeWaitableEvent() {}
+
+ // Returns false in the event of an error and errno is set to indicate the
+ // cause of the error.
+ bool Wait() {
+ // futex() can wake up spuriously if this memory address was previously used
+ // for a pthread mutex. So, also check the condition.
+ while (true) {
+ int res = syscall(SYS_futex, &futex_, FUTEX_WAIT | FUTEX_PRIVATE_FLAG, 0,
+ nullptr, nullptr, 0);
+ if (std::atomic_load_explicit(&futex_, std::memory_order_acquire) != 0)
+ return true;
+ if (res != 0)
+ return false;
+ }
+ }
+
+ void Signal() {
+ std::atomic_store_explicit(&futex_, 1, std::memory_order_release);
+ syscall(SYS_futex, &futex_, FUTEX_WAKE | FUTEX_PRIVATE_FLAG, 1, nullptr,
+ nullptr, 0);
+ }
+
+ private:
+ std::atomic<int> futex_;
+};
+
+// Struct to store the arguments to the signal handler.
+struct SignalHandlerOutputState {
+ // This function is called iteratively for each stack trace element and stores
+ // the element in the array from `unwind_output_state`.
+ static _Unwind_Reason_Code UnwindBacktrace(
+ struct _Unwind_Context* unwind_context,
+ void* unwind_output_state);
+
+ // This event is signalled when signal handler is done executing.
+ AsyncSafeWaitableEvent signal_handler_finish_event;
+ // Running counter of array index below.
+ size_t stack_size_counter = 0;
+ // Array storing the stack trace.
+ uintptr_t addresses[kMaxStackSize];
+};
+
+// This function is called iteratively for each stack trace element and stores
+// the element in the array from `unwind_output_state`.
+_Unwind_Reason_Code SignalHandlerOutputState::UnwindBacktrace(
+ struct _Unwind_Context* unwind_context,
+ void* unwind_output_state) {
+ SignalHandlerOutputState* const output_state =
+ static_cast<SignalHandlerOutputState*>(unwind_output_state);
+
+ // Abort if output state is corrupt.
+ if (output_state == nullptr)
+ return _URC_END_OF_STACK;
+
+ // Avoid overflowing the stack trace array.
+ if (output_state->stack_size_counter >= kMaxStackSize)
+ return _URC_END_OF_STACK;
+
+ // Store the instruction pointer in the array. Subtract 2 since we want to get
+ // the call instruction pointer, not the return address which is the
+ // instruction after.
+ output_state->addresses[output_state->stack_size_counter] =
+ _Unwind_GetIP(unwind_context) - 2;
+ ++output_state->stack_size_counter;
+
+ return _URC_NO_REASON;
+}
+
+class GlobalStackUnwinder {
+ public:
+ static GlobalStackUnwinder& Get() {
+ static GlobalStackUnwinder* const instance = new GlobalStackUnwinder();
+ return *instance;
+ }
+ const char* CaptureRawStacktrace(int pid,
+ int tid,
+ SignalHandlerOutputState* params);
+
+ private:
+ GlobalStackUnwinder() { current_output_state_.store(nullptr); }
+
+ // Temporarily installed signal handler.
+ static void SignalHandler(int signum, siginfo_t* info, void* ptr);
+
+ Mutex mutex_;
+
+ // Accessed by signal handler.
+ static std::atomic<SignalHandlerOutputState*> current_output_state_;
+ // A signal handler mustn't use locks.
+ static_assert(std::atomic<SignalHandlerOutputState*>::is_always_lock_free);
+};
+
+std::atomic<SignalHandlerOutputState*>
+ GlobalStackUnwinder::current_output_state_;
+
+// This signal handler is executed on the interrupted thread.
+void GlobalStackUnwinder::SignalHandler(int signum,
+ siginfo_t* info,
+ void* ptr) {
+ // This should have been set by the thread requesting the stack trace.
+ SignalHandlerOutputState* signal_handler_output_state =
+ current_output_state_.load();
+ if (signal_handler_output_state != nullptr) {
+ _Unwind_Backtrace(&SignalHandlerOutputState::UnwindBacktrace,
+ signal_handler_output_state);
+ signal_handler_output_state->signal_handler_finish_event.Signal();
+ }
+}
+
+// Temporarily change the signal handler to a function that records a raw stack
+// trace and interrupt the given tid. This function will block until the output
+// thread stack trace has been stored in `params`. The return value is an error
+// string on failure and null on success.
+const char* GlobalStackUnwinder::CaptureRawStacktrace(
+ int pid,
+ int tid,
+ SignalHandlerOutputState* params) {
+ // This function is under a global lock since we are changing the signal
+ // handler and using global state for the output. The lock is to ensure only
+ // one thread at a time gets captured. The lock also means we need to be very
+ // careful with what statements we put in this function, and we should even
+ // avoid logging here.
+ struct sigaction act;
+ struct sigaction old_act;
+ memset(&act, 0, sizeof(act));
+ act.sa_sigaction = &SignalHandler;
+ act.sa_flags = SA_RESTART | SA_SIGINFO;
+ sigemptyset(&act.sa_mask);
+
+ MutexLock lock(&mutex_);
+ current_output_state_.store(params);
+
+ if (sigaction(kSignal, &act, &old_act) != 0)
+ return "Failed to change signal action";
+
+ // Interrupt the thread which will execute SignalHandler() on the given
+ // thread.
+ if (tgkill(pid, tid, kSignal) != 0)
+ return "Failed to interrupt thread";
+
+ // Wait until the thread is done recording its stack trace.
+ if (!params->signal_handler_finish_event.Wait())
+ return "Failed to wait for thread to finish stack trace";
+
+ // Restore previous signal handler.
+ sigaction(kSignal, &old_act, /* oldact= */ nullptr);
+
+ return nullptr;
+}
+
+// Translate addresses into symbolic information using dladdr().
+std::vector<StackTraceElement> FormatStackTrace(
+ const SignalHandlerOutputState& params) {
+ std::vector<StackTraceElement> stack_trace;
+ for (size_t i = 0; i < params.stack_size_counter; ++i) {
+ const uintptr_t address = params.addresses[i];
+
+ Dl_info dl_info = {};
+ if (!dladdr(reinterpret_cast<void*>(address), &dl_info)) {
+ RTC_LOG(LS_WARNING)
+ << "Could not translate address to symbolic information for address "
+ << address << " at stack depth " << i;
+ continue;
+ }
+
+ StackTraceElement stack_trace_element;
+ stack_trace_element.shared_object_path = dl_info.dli_fname;
+ stack_trace_element.relative_address = static_cast<uint32_t>(
+ address - reinterpret_cast<uintptr_t>(dl_info.dli_fbase));
+ stack_trace_element.symbol_name = dl_info.dli_sname;
+
+ stack_trace.push_back(stack_trace_element);
+ }
+
+ return stack_trace;
+}
+
+} // namespace
+
+std::vector<StackTraceElement> GetStackTrace(int tid) {
+ // Only a thread itself can unwind its stack, so we will interrupt the given
+ // tid with a custom signal handler in order to unwind its stack. The stack
+ // will be recorded to `params` through the use of the global pointer
+ // `GlobalStackUnwinder::current_output_state_`.
+ SignalHandlerOutputState params;
+
+ const char* error_string =
+ GlobalStackUnwinder::Get().CaptureRawStacktrace(getpid(), tid, &params);
+ if (error_string != nullptr) {
+ RTC_LOG(LS_ERROR) << error_string << ". tid: " << tid
+ << ". errno: " << errno;
+ return {};
+ }
+ if (params.stack_size_counter >= kMaxStackSize) {
+ RTC_LOG(LS_WARNING) << "Stack trace for thread " << tid << " was truncated";
+ }
+ return FormatStackTrace(params);
+}
+
+std::vector<StackTraceElement> GetStackTrace() {
+ SignalHandlerOutputState params;
+ _Unwind_Backtrace(&SignalHandlerOutputState::UnwindBacktrace, &params);
+ if (params.stack_size_counter >= kMaxStackSize) {
+ RTC_LOG(LS_WARNING) << "Stack trace was truncated";
+ }
+ return FormatStackTrace(params);
+}
+
+std::string StackTraceToString(
+ const std::vector<StackTraceElement>& stack_trace) {
+ rtc::StringBuilder string_builder;
+
+ for (size_t i = 0; i < stack_trace.size(); ++i) {
+ const StackTraceElement& stack_trace_element = stack_trace[i];
+ string_builder.AppendFormat(
+ "#%02zu pc %08x %s", i,
+ static_cast<uint32_t>(stack_trace_element.relative_address),
+ stack_trace_element.shared_object_path);
+ // The symbol name is only available for unstripped .so files.
+ if (stack_trace_element.symbol_name != nullptr)
+ string_builder.AppendFormat(" %s", stack_trace_element.symbol_name);
+
+ string_builder.AppendFormat("\n");
+ }
+
+ return string_builder.Release();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/stacktrace/stacktrace.h b/third_party/libwebrtc/sdk/android/native_api/stacktrace/stacktrace.h
new file mode 100644
index 0000000000..4cae1a58dd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/stacktrace/stacktrace.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_STACKTRACE_STACKTRACE_H_
+#define SDK_ANDROID_NATIVE_API_STACKTRACE_STACKTRACE_H_
+
+#include <string>
+#include <vector>
+
+namespace webrtc {
+
+struct StackTraceElement {
+ // Pathname of shared object (.so file) that contains address.
+ const char* shared_object_path;
+ // Execution address relative to the .so base address. This matches the
+ // addresses you get with "nm", "objdump", and "ndk-stack", as long as the
+ // code is compiled as position-independent code. Android has required
+ // position-independent code since Lollipop.
+ uint32_t relative_address;
+ // Name of symbol whose definition overlaps the address. This value is null
+ // when symbol names are stripped.
+ const char* symbol_name;
+};
+
+// Utility to unwind stack for a given thread on Android ARM devices. This works
+// on top of unwind.h and unwinds native (C++) stack traces only.
+std::vector<StackTraceElement> GetStackTrace(int tid);
+
+// Unwind the stack of the current thread.
+std::vector<StackTraceElement> GetStackTrace();
+
+// Get a string representation of the stack trace in a format ndk-stack accepts.
+std::string StackTraceToString(
+ const std::vector<StackTraceElement>& stack_trace);
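+
+// A minimal usage sketch (illustrative only; the logging line assumes the
+// caller includes "rtc_base/logging.h"):
+//
+//   std::vector<StackTraceElement> trace = GetStackTrace();
+//   RTC_LOG(LS_INFO) << StackTraceToString(trace);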
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_STACKTRACE_STACKTRACE_H_
diff --git a/third_party/libwebrtc/sdk/android/native_api/video/video_source.cc b/third_party/libwebrtc/sdk/android/native_api/video/video_source.cc
new file mode 100644
index 0000000000..e967c2a465
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/video/video_source.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/video/video_source.h"
+
+#include "sdk/android/src/jni/android_video_track_source.h"
+#include "sdk/android/src/jni/native_capturer_observer.h"
+
+namespace webrtc {
+
+namespace {
+
+// Hides the full jni::AndroidVideoTrackSource interface and provides an
+// instance of NativeCapturerObserver associated with the video source. Does
+// not extend AndroidVideoTrackSource in order to avoid diamond inheritance on
+// VideoTrackSourceInterface.
+class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface {
+ public:
+ JavaVideoTrackSourceImpl(JNIEnv* env,
+ rtc::Thread* signaling_thread,
+ bool is_screencast,
+ bool align_timestamps)
+ : android_video_track_source_(
+ rtc::make_ref_counted<jni::AndroidVideoTrackSource>(
+ signaling_thread,
+ env,
+ is_screencast,
+ align_timestamps)),
+ native_capturer_observer_(jni::CreateJavaNativeCapturerObserver(
+ env,
+ android_video_track_source_)) {}
+
+ ScopedJavaLocalRef<jobject> GetJavaVideoCapturerObserver(
+ JNIEnv* env) override {
+ return ScopedJavaLocalRef<jobject>(env, native_capturer_observer_);
+ }
+
+ // Delegate VideoTrackSourceInterface methods to android_video_track_source_.
+ void RegisterObserver(ObserverInterface* observer) override {
+ android_video_track_source_->RegisterObserver(observer);
+ }
+
+ void UnregisterObserver(ObserverInterface* observer) override {
+ android_video_track_source_->UnregisterObserver(observer);
+ }
+
+ SourceState state() const override {
+ return android_video_track_source_->state();
+ }
+
+ bool remote() const override { return android_video_track_source_->remote(); }
+
+ void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override {
+ // The method is declared private in the implementation, so we have to
+ // access it through the interface...
+ static_cast<VideoTrackSourceInterface*>(android_video_track_source_.get())
+ ->AddOrUpdateSink(sink, wants);
+ }
+
+ void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override {
+ // The method is declared private in the implementation, so we have to
+ // access it through the interface...
+ static_cast<VideoTrackSourceInterface*>(android_video_track_source_.get())
+ ->RemoveSink(sink);
+ }
+
+ bool is_screencast() const override {
+ return android_video_track_source_->is_screencast();
+ }
+
+ absl::optional<bool> needs_denoising() const override {
+ return android_video_track_source_->needs_denoising();
+ }
+
+ bool GetStats(Stats* stats) override {
+ // The method is declared private in the implementation, so we have to
+ // access it through the interface...
+ return static_cast<VideoTrackSourceInterface*>(
+ android_video_track_source_.get())
+ ->GetStats(stats);
+ }
+
+ private:
+ // Encoded sinks not implemented for JavaVideoTrackSourceImpl.
+ bool SupportsEncodedOutput() const override { return false; }
+ void GenerateKeyFrame() override {}
+ void AddEncodedSink(
+ rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
+ void RemoveEncodedSink(
+ rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
+
+ rtc::scoped_refptr<jni::AndroidVideoTrackSource> android_video_track_source_;
+ ScopedJavaGlobalRef<jobject> native_capturer_observer_;
+};
+
+} // namespace
+
+rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
+ JNIEnv* jni,
+ rtc::Thread* signaling_thread,
+ bool is_screencast,
+ bool align_timestamps) {
+ return rtc::make_ref_counted<JavaVideoTrackSourceImpl>(
+ jni, signaling_thread, is_screencast, align_timestamps);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/video/video_source.h b/third_party/libwebrtc/sdk/android/native_api/video/video_source.h
new file mode 100644
index 0000000000..d46f3e8f53
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/video/video_source.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_VIDEO_VIDEO_SOURCE_H_
+#define SDK_ANDROID_NATIVE_API_VIDEO_VIDEO_SOURCE_H_
+
+#include <jni.h>
+
+#include "api/media_stream_interface.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+
+// Interface for a class that implements VideoTrackSourceInterface and
+// provides a Java object that can be used to feed frames to the source.
+class JavaVideoTrackSourceInterface : public VideoTrackSourceInterface {
+ public:
+ // Returns a CapturerObserver object that can be used to feed frames to the
+ // video source.
+ virtual ScopedJavaLocalRef<jobject> GetJavaVideoCapturerObserver(
+ JNIEnv* env) = 0;
+};
+
+// Creates an instance of JavaVideoTrackSourceInterface.
+rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
+ JNIEnv* env,
+ rtc::Thread* signaling_thread,
+ bool is_screencast,
+ bool align_timestamps);
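+
+// Usage sketch (illustrative only; `env` and `signaling_thread` are assumed
+// to be supplied by the embedding application):
+//
+//   rtc::scoped_refptr<JavaVideoTrackSourceInterface> source =
+//       CreateJavaVideoSource(env, signaling_thread,
+//                             /*is_screencast=*/false,
+//                             /*align_timestamps=*/true);
+//   ScopedJavaLocalRef<jobject> j_observer =
+//       source->GetJavaVideoCapturerObserver(env);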
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_VIDEO_VIDEO_SOURCE_H_
diff --git a/third_party/libwebrtc/sdk/android/native_api/video/wrapper.cc b/third_party/libwebrtc/sdk/android/native_api/video/wrapper.cc
new file mode 100644
index 0000000000..8faddc3b26
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/video/wrapper.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/video/wrapper.h"
+
+#include <memory>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/video_frame.h"
+#include "sdk/android/src/jni/video_sink.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> JavaToNativeVideoSink(
+ JNIEnv* jni,
+ jobject video_sink) {
+ return std::make_unique<jni::VideoSinkWrapper>(
+ jni, JavaParamRef<jobject>(video_sink));
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
+ const VideoFrame& frame) {
+ return jni::NativeToJavaVideoFrame(jni, frame);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_api/video/wrapper.h b/third_party/libwebrtc/sdk/android/native_api/video/wrapper.h
new file mode 100644
index 0000000000..e32cf34806
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_api/video/wrapper.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_NATIVE_API_VIDEO_WRAPPER_H_
+#define SDK_ANDROID_NATIVE_API_VIDEO_WRAPPER_H_
+
+#include <jni.h>
+#include <memory>
+
+#include "api/media_stream_interface.h"
+#include "api/video/video_frame.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+
+// Creates an instance of rtc::VideoSinkInterface<VideoFrame> from a Java
+// VideoSink.
+std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> JavaToNativeVideoSink(
+ JNIEnv* jni,
+ jobject video_sink);
+
+// Creates a Java VideoFrame object from a native VideoFrame. The returned
+// object has to be released by calling its release() method.
+ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
+ const VideoFrame& frame);
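+
+// Illustrative only (assumes a valid JNIEnv* `jni`, a Java VideoSink `j_sink`
+// and a native VideoFrame `frame`):
+//
+//   std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> sink =
+//       JavaToNativeVideoSink(jni, j_sink);
+//   sink->OnFrame(frame);  // Forwards the frame to the Java sink.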
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_API_VIDEO_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/DEPS b/third_party/libwebrtc/sdk/android/native_unittests/DEPS
new file mode 100644
index 0000000000..7825103fb4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "+modules/audio_device/include/audio_device.h",
+ "+modules/audio_device/include/mock_audio_transport.h",
+ "+system_wrappers/include",
+]
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/android_network_monitor_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/android_network_monitor_unittest.cc
new file mode 100644
index 0000000000..9aec62d630
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/android_network_monitor_unittest.cc
@@ -0,0 +1,330 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/android_network_monitor.h"
+
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+namespace test {
+static const uint32_t kTestIpv4Address = 0xC0A80011; // 192.168.0.17
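+// (0xC0 = 192, 0xA8 = 168, 0x00 = 0, 0x11 = 17.)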
+// The following two IPv6 addresses differ only in the last 64 bits.
+static const char kTestIpv6Address1[] = "2a00:8a00:a000:1190:0000:0001:000:252";
+static const char kTestIpv6Address2[] = "2a00:8a00:a000:1190:0000:0002:000:253";
+
+jni::NetworkInformation CreateNetworkInformation(
+ const std::string& interface_name,
+ jni::NetworkHandle network_handle,
+ const rtc::IPAddress& ip_address) {
+ jni::NetworkInformation net_info;
+ net_info.interface_name = interface_name;
+ net_info.handle = network_handle;
+ net_info.type = jni::NETWORK_WIFI;
+ net_info.ip_addresses.push_back(ip_address);
+ return net_info;
+}
+
+rtc::IPAddress GetIpAddressFromIpv6String(const std::string& str) {
+ rtc::IPAddress ipv6;
+ RTC_CHECK(rtc::IPFromString(str, &ipv6));
+ return ipv6;
+}
+
+class AndroidNetworkMonitorTest : public ::testing::Test {
+ public:
+ AndroidNetworkMonitorTest() {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> context = test::GetAppContextForTest(env);
+ network_monitor_ = std::make_unique<jni::AndroidNetworkMonitor>(
+ env, context, field_trials_);
+ }
+
+ void SetUp() override {
+ // Reset network monitor states.
+ network_monitor_->Stop();
+ }
+
+ void TearDown() override {
+ // The network monitor must be stopped, before it is destructed.
+ network_monitor_->Stop();
+ }
+
+ void Disconnect(jni::NetworkHandle handle) {
+ network_monitor_->OnNetworkDisconnected_n(handle);
+ }
+
+ protected:
+ test::ScopedKeyValueConfig field_trials_;
+ rtc::AutoThread main_thread_;
+ std::unique_ptr<jni::AndroidNetworkMonitor> network_monitor_;
+};
+
+TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIpv4Address) {
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ std::vector<jni::NetworkInformation> net_infos(1, net_info);
+ network_monitor_->SetNetworkInfos(net_infos);
+
+ auto network_handle =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv4_address, "");
+
+ ASSERT_TRUE(network_handle.has_value());
+ EXPECT_EQ(ipv4_handle, *network_handle);
+}
+
+TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingFullIpv6Address) {
+ jni::NetworkHandle ipv6_handle = 200;
+ rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ rtc::IPAddress ipv6_address2 = GetIpAddressFromIpv6String(kTestIpv6Address2);
+ // Set up an IPv6 network.
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1);
+ std::vector<jni::NetworkInformation> net_infos(1, net_info);
+ network_monitor_->SetNetworkInfos(net_infos);
+
+ auto network_handle1 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address1, "");
+ auto network_handle2 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address2, "");
+
+ ASSERT_TRUE(network_handle1.has_value());
+ EXPECT_EQ(ipv6_handle, *network_handle1);
+ EXPECT_FALSE(network_handle2.has_value());
+}
+
+TEST_F(AndroidNetworkMonitorTest,
+ TestFindNetworkHandleIgnoringIpv6TemporaryPart) {
+ ScopedKeyValueConfig field_trials(
+ field_trials_,
+ "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart/Enabled/");
+ // Start() updates the states introduced by the field trial.
+ network_monitor_->Start();
+ jni::NetworkHandle ipv6_handle = 200;
+ rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ rtc::IPAddress ipv6_address2 = GetIpAddressFromIpv6String(kTestIpv6Address2);
+ // Set up an IPv6 network.
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1);
+ std::vector<jni::NetworkInformation> net_infos(1, net_info);
+ network_monitor_->SetNetworkInfos(net_infos);
+
+ auto network_handle1 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address1, "");
+ auto network_handle2 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address2, "");
+
+ ASSERT_TRUE(network_handle1.has_value());
+ EXPECT_EQ(ipv6_handle, *network_handle1);
+ ASSERT_TRUE(network_handle2.has_value());
+ EXPECT_EQ(ipv6_handle, *network_handle2);
+}
+
+TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIfName) {
+ // Start() updates the states introduced by the field trial.
+ network_monitor_->Start();
+ jni::NetworkHandle ipv6_handle = 200;
+ rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
+
+ // Set up an IPv6 network.
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1);
+ std::vector<jni::NetworkInformation> net_infos(1, net_info);
+ network_monitor_->SetNetworkInfos(net_infos);
+
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+
+ // Search using ip address only...
+ auto network_handle1 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv4_address, "");
+
+ // Search using ip address AND if_name (for typical ipv4 over ipv6 tunnel).
+ auto network_handle2 = network_monitor_->FindNetworkHandleFromAddressOrName(
+ ipv4_address, "v4-wlan0");
+
+ ASSERT_FALSE(network_handle1.has_value());
+ ASSERT_TRUE(network_handle2.has_value());
+ EXPECT_EQ(ipv6_handle, *network_handle2);
+}
+
+TEST_F(AndroidNetworkMonitorTest, TestUnderlyingVpnType) {
+ ScopedKeyValueConfig field_trials(field_trials_,
+ "WebRTC-BindUsingInterfaceName/Enabled/");
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info.type = jni::NETWORK_VPN;
+ net_info.underlying_type_for_vpn = jni::NETWORK_WIFI;
+ network_monitor_->SetNetworkInfos({net_info});
+
+ EXPECT_EQ(
+ rtc::ADAPTER_TYPE_WIFI,
+ network_monitor_->GetInterfaceInfo("v4-wlan0").underlying_type_for_vpn);
+}
+
+// Verify that Disconnect makes interface unavailable.
+TEST_F(AndroidNetworkMonitorTest, Disconnect) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info.type = jni::NETWORK_WIFI;
+ network_monitor_->SetNetworkInfos({net_info});
+
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_TRUE(network_monitor_
+ ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0")
+ .has_value());
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_WIFI);
+
+ // Check that values are reset on disconnect().
+ Disconnect(ipv4_handle);
+ EXPECT_FALSE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_FALSE(
+ network_monitor_
+ ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0")
+ .has_value());
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_UNKNOWN);
+}
+
+// Verify that Stop() resets all caches.
+TEST_F(AndroidNetworkMonitorTest, Reset) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info.type = jni::NETWORK_WIFI;
+ network_monitor_->SetNetworkInfos({net_info});
+
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_TRUE(network_monitor_
+ ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0")
+ .has_value());
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_WIFI);
+
+ // Check that values are reset on Stop().
+ network_monitor_->Stop();
+ EXPECT_FALSE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_FALSE(
+ network_monitor_
+ ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0")
+ .has_value());
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_UNKNOWN);
+}
+
+TEST_F(AndroidNetworkMonitorTest, DuplicateIfname) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info1 =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info1.type = jni::NETWORK_WIFI;
+
+ jni::NetworkHandle ipv6_handle = 101;
+ rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ jni::NetworkInformation net_info2 =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address);
+ net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
+
+ network_monitor_->SetNetworkInfos({net_info1, net_info2});
+
+ // The most recently added network determines the interface info.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_CELLULAR);
+
+ // But both IP addresses are still searchable.
+ EXPECT_EQ(
+ *network_monitor_->FindNetworkHandleFromAddressOrName(ipv4_address, ""),
+ ipv4_handle);
+ EXPECT_EQ(
+ *network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address, ""),
+ ipv6_handle);
+}
+
+TEST_F(AndroidNetworkMonitorTest, DuplicateIfnameDisconnectOwner) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info1 =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info1.type = jni::NETWORK_WIFI;
+
+ jni::NetworkHandle ipv6_handle = 101;
+ rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ jni::NetworkInformation net_info2 =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address);
+ net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
+
+ network_monitor_->SetNetworkInfos({net_info1, net_info2});
+
+ // The most recently added network determines the interface info.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_CELLULAR);
+
+ Disconnect(ipv6_handle);
+
+ // We should now find ipv4_handle.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_WIFI);
+}
+
+TEST_F(AndroidNetworkMonitorTest, DuplicateIfnameDisconnectNonOwner) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info1 =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info1.type = jni::NETWORK_WIFI;
+
+ jni::NetworkHandle ipv6_handle = 101;
+ rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ jni::NetworkInformation net_info2 =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address);
+ net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
+
+ network_monitor_->SetNetworkInfos({net_info1, net_info2});
+
+ // The most recently added network determines the interface info.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_CELLULAR);
+
+ Disconnect(ipv4_handle);
+
+ // We should still find the ipv6 network.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_CELLULAR);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.cc b/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.cc
new file mode 100644
index 0000000000..07b3c04faf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/native_unittests/application_context_provider.h"
+
+#include "sdk/android/generated_native_unittests_jni/ApplicationContextProvider_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace test {
+
+ScopedJavaLocalRef<jobject> GetAppContextForTest(JNIEnv* jni) {
+ return ScopedJavaLocalRef<jobject>(
+ jni::Java_ApplicationContextProvider_getApplicationContextForTest(jni));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.h b/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.h
new file mode 100644
index 0000000000..8aace02c32
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
+#define SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
+
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace test {
+
+ScopedJavaLocalRef<jobject> GetAppContextForTest(JNIEnv* jni);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/audio_device/audio_device_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
new file mode 100644
index 0000000000..7d582d49db
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
@@ -0,0 +1,1161 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_device/include/audio_device.h"
+
+#include <list>
+#include <memory>
+#include <numeric>
+
+#include "api/scoped_refptr.h"
+#include "modules/audio_device/include/mock_audio_transport.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_native_unittests_jni/BuildInfo_jni.h"
+#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+
+using std::cout;
+using std::endl;
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::Gt;
+using ::testing::Invoke;
+using ::testing::NiceMock;
+using ::testing::NotNull;
+using ::testing::Return;
+
+// #define ENABLE_DEBUG_PRINTF
+#ifdef ENABLE_DEBUG_PRINTF
+#define PRINTD(...) fprintf(stderr, __VA_ARGS__);
+#else
+#define PRINTD(...) ((void)0)
+#endif
+#define PRINT(...) fprintf(stderr, __VA_ARGS__);
+
+namespace webrtc {
+
+namespace jni {
+
+// Number of callbacks (input or output) the test waits for before we set
+// an event indicating that the test was OK.
+static const size_t kNumCallbacks = 10;
+// Max amount of time we wait for an event to be set while counting callbacks.
+static const int kTestTimeOutInMilliseconds = 10 * 1000;
+// Average number of audio callbacks per second assuming 10ms packet size.
+static const size_t kNumCallbacksPerSecond = 100;
+// Play out a test file during this time (unit is in seconds).
+static const int kFilePlayTimeInSec = 5;
+static const size_t kBitsPerSample = 16;
+static const size_t kBytesPerSample = kBitsPerSample / 8;
+// Run the full-duplex test during this time (unit is in seconds).
+// Note that the first `kNumIgnoreFirstCallbacks` callbacks are ignored.
+static const int kFullDuplexTimeInSec = 5;
+// Wait for the callback sequence to stabilize by ignoring this number of
+// initial callbacks (avoids initial FIFO access).
+// Only used in the RunPlayoutAndRecordingInFullDuplex test.
+static const size_t kNumIgnoreFirstCallbacks = 50;
+// Sets the number of impulses per second in the latency test.
+static const int kImpulseFrequencyInHz = 1;
+// Length of round-trip latency measurements. Number of transmitted impulses
+// is kImpulseFrequencyInHz * kMeasureLatencyTimeInSec - 1.
+static const int kMeasureLatencyTimeInSec = 11;
+// Utilized in round-trip latency measurements to avoid capturing noise samples.
+static const int kImpulseThreshold = 1000;
+static const char kTag[] = "[..........] ";
+
+enum TransportType {
+ kPlayout = 0x1,
+ kRecording = 0x2,
+};
+
+// Interface for processing the audio stream. Real implementations can e.g.
+// run audio in loopback, read audio from a file or perform latency
+// measurements.
+class AudioStreamInterface {
+ public:
+ virtual void Write(const void* source, size_t num_frames) = 0;
+ virtual void Read(void* destination, size_t num_frames) = 0;
+
+ protected:
+ virtual ~AudioStreamInterface() {}
+};
+
+// Reads audio samples from a PCM file where the file is stored in memory at
+// construction.
+class FileAudioStream : public AudioStreamInterface {
+ public:
+ FileAudioStream(size_t num_callbacks,
+ const std::string& file_name,
+ int sample_rate)
+ : file_size_in_bytes_(0), sample_rate_(sample_rate), file_pos_(0) {
+ file_size_in_bytes_ = test::GetFileSize(file_name);
+ EXPECT_GE(file_size_in_callbacks(), num_callbacks)
+ << "Size of test file is not large enough to last during the test.";
+ const size_t num_16bit_samples =
+ test::GetFileSize(file_name) / kBytesPerSample;
+ file_.reset(new int16_t[num_16bit_samples]);
+ FILE* audio_file = fopen(file_name.c_str(), "rb");
+ EXPECT_NE(audio_file, nullptr);
+ size_t num_samples_read =
+ fread(file_.get(), sizeof(int16_t), num_16bit_samples, audio_file);
+ EXPECT_EQ(num_samples_read, num_16bit_samples);
+ fclose(audio_file);
+ }
+
+ // AudioStreamInterface::Write() is not implemented.
+ void Write(const void* source, size_t num_frames) override {}
+
+ // Read samples from file stored in memory (at construction) and copy
+ // `num_frames` (<=> 10ms) to the `destination` byte buffer.
+ void Read(void* destination, size_t num_frames) override {
+ memcpy(destination, static_cast<int16_t*>(&file_[file_pos_]),
+ num_frames * sizeof(int16_t));
+ file_pos_ += num_frames;
+ }
+
+ int file_size_in_seconds() const {
+ return static_cast<int>(file_size_in_bytes_ /
+ (kBytesPerSample * sample_rate_));
+ }
+ size_t file_size_in_callbacks() const {
+ return file_size_in_seconds() * kNumCallbacksPerSecond;
+ }
+
+ private:
+ size_t file_size_in_bytes_;
+ int sample_rate_;
+ std::unique_ptr<int16_t[]> file_;
+ size_t file_pos_;
+};
+
+// Simple first in first out (FIFO) class that wraps a list of 16-bit audio
+// buffers of fixed size and allows Write and Read operations. The idea is to
+// store recorded audio buffers (using Write) and then read (using Read) these
+// stored buffers with as short a delay as possible when the audio layer needs
+// data to play out. The number of buffers in the FIFO will stabilize under
+// normal conditions since there will be a balance between Write and Read calls.
+// The container is a std::list and access is protected with a lock since both
+// sides (playout and recording) are driven by their own threads.
+class FifoAudioStream : public AudioStreamInterface {
+ public:
+ explicit FifoAudioStream(size_t frames_per_buffer)
+ : frames_per_buffer_(frames_per_buffer),
+ bytes_per_buffer_(frames_per_buffer_ * sizeof(int16_t)),
+ fifo_(new AudioBufferList),
+ largest_size_(0),
+ total_written_elements_(0),
+ write_count_(0) {
+ EXPECT_NE(fifo_.get(), nullptr);
+ }
+
+ ~FifoAudioStream() { Flush(); }
+
+ // Allocate new memory, copy `num_frames` samples from `source` into memory
+ // and add a pointer to the memory location to the end of the list.
+ // Increases the size of the FIFO by one element.
+ void Write(const void* source, size_t num_frames) override {
+ ASSERT_EQ(num_frames, frames_per_buffer_);
+ PRINTD("+");
+ if (write_count_++ < kNumIgnoreFirstCallbacks) {
+ return;
+ }
+ int16_t* memory = new int16_t[frames_per_buffer_];
+ memcpy(static_cast<int16_t*>(&memory[0]), source, bytes_per_buffer_);
+ MutexLock lock(&lock_);
+ fifo_->push_back(memory);
+ const size_t size = fifo_->size();
+ if (size > largest_size_) {
+ largest_size_ = size;
+ PRINTD("(%zu)", largest_size_);
+ }
+ total_written_elements_ += size;
+ }
+
+ // Take the data buffer at the front of the list, copy `num_frames` of stored
+ // data into `destination`, and delete the consumed memory allocation.
+ // Decreases the size of the FIFO by one element.
+ void Read(void* destination, size_t num_frames) override {
+ ASSERT_EQ(num_frames, frames_per_buffer_);
+ PRINTD("-");
+ MutexLock lock(&lock_);
+ if (fifo_->empty()) {
+ memset(destination, 0, bytes_per_buffer_);
+ } else {
+ int16_t* memory = fifo_->front();
+ fifo_->pop_front();
+ memcpy(destination, static_cast<int16_t*>(&memory[0]), bytes_per_buffer_);
+ delete memory;
+ }
+ }
+
+ size_t size() const { return fifo_->size(); }
+
+ size_t largest_size() const { return largest_size_; }
+
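+ // Average number of buffers in the FIFO, sampled at each Write() call after
+ // the ignored warm-up period; the 0.5 term rounds to the nearest integer.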
+ size_t average_size() const {
+ return (total_written_elements_ == 0)
+ ? 0.0
+ : 0.5 + static_cast<float>(total_written_elements_) /
+ (write_count_ - kNumIgnoreFirstCallbacks);
+ }
+
+ private:
+ void Flush() {
+ for (auto it = fifo_->begin(); it != fifo_->end(); ++it) {
+ delete *it;
+ }
+ fifo_->clear();
+ }
+
+ using AudioBufferList = std::list<int16_t*>;
+ Mutex lock_;
+ const size_t frames_per_buffer_;
+ const size_t bytes_per_buffer_;
+ std::unique_ptr<AudioBufferList> fifo_;
+ size_t largest_size_;
+ size_t total_written_elements_;
+ size_t write_count_;
+};
+
+// Inserts periodic impulses and measures the latency between the time of
+// transmission and time of receiving the same impulse.
+// Usage requires special hardware called an Audio Loopback Dongle.
+// See http://source.android.com/devices/audio/loopback.html for details.
+class LatencyMeasuringAudioStream : public AudioStreamInterface {
+ public:
+ explicit LatencyMeasuringAudioStream(size_t frames_per_buffer)
+ : frames_per_buffer_(frames_per_buffer),
+ bytes_per_buffer_(frames_per_buffer_ * sizeof(int16_t)),
+ play_count_(0),
+ rec_count_(0),
+ pulse_time_(0) {}
+
+ // Insert periodic impulses in first two samples of `destination`.
+ void Read(void* destination, size_t num_frames) override {
+ ASSERT_EQ(num_frames, frames_per_buffer_);
+ if (play_count_ == 0) {
+ PRINT("[");
+ }
+ play_count_++;
+ memset(destination, 0, bytes_per_buffer_);
+ if (play_count_ % (kNumCallbacksPerSecond / kImpulseFrequencyInHz) == 0) {
+ if (pulse_time_ == 0) {
+ pulse_time_ = rtc::TimeMillis();
+ }
+ PRINT(".");
+ const int16_t impulse = std::numeric_limits<int16_t>::max();
+ int16_t* ptr16 = static_cast<int16_t*>(destination);
+ for (size_t i = 0; i < 2; ++i) {
+ ptr16[i] = impulse;
+ }
+ }
+ }
+
+ // Detect received impulses in `source`, derive time between transmission and
+ // detection and add the calculated delay to list of latencies.
+ void Write(const void* source, size_t num_frames) override {
+ ASSERT_EQ(num_frames, frames_per_buffer_);
+ rec_count_++;
+ if (pulse_time_ == 0) {
+ // Avoid detecting a new impulse response until a new impulse has been
+ // transmitted (which sets `pulse_time_` to a value larger than zero).
+ return;
+ }
+ const int16_t* ptr16 = static_cast<const int16_t*>(source);
+ std::vector<int16_t> vec(ptr16, ptr16 + num_frames);
+ // Find max value in the audio buffer.
+ int max = *std::max_element(vec.begin(), vec.end());
+ // Find index (element position in vector) of the max element.
+ int index_of_max =
+ std::distance(vec.begin(), std::find(vec.begin(), vec.end(), max));
+ if (max > kImpulseThreshold) {
+ PRINTD("(%d,%d)", max, index_of_max);
+ int64_t now_time = rtc::TimeMillis();
+ int extra_delay = IndexToMilliseconds(static_cast<double>(index_of_max));
+ PRINTD("[%d]", static_cast<int>(now_time - pulse_time_));
+ PRINTD("[%d]", extra_delay);
+ // Total latency is the difference between transmit time and detection
+ // time plus the extra delay within the buffer in which we detected the
+ // received impulse. It is transmitted at sample 0 but can be received
+ // at sample N where N > 0. The term `extra_delay` accounts for N and it
+ // is a value between 0 and 10ms.
+ latencies_.push_back(now_time - pulse_time_ + extra_delay);
+ pulse_time_ = 0;
+ } else {
+ PRINTD("-");
+ }
+ }
+
+ size_t num_latency_values() const { return latencies_.size(); }
+
+ int min_latency() const {
+ if (latencies_.empty())
+ return 0;
+ return *std::min_element(latencies_.begin(), latencies_.end());
+ }
+
+ int max_latency() const {
+ if (latencies_.empty())
+ return 0;
+ return *std::max_element(latencies_.begin(), latencies_.end());
+ }
+
+ int average_latency() const {
+ if (latencies_.empty())
+ return 0;
+ return 0.5 + static_cast<double>(
+ std::accumulate(latencies_.begin(), latencies_.end(), 0)) /
+ latencies_.size();
+ }
+
+ void PrintResults() const {
+ PRINT("] ");
+ for (auto it = latencies_.begin(); it != latencies_.end(); ++it) {
+ PRINT("%d ", *it);
+ }
+ PRINT("\n");
+ PRINT("%s[min, max, avg]=[%d, %d, %d] ms\n", kTag, min_latency(),
+ max_latency(), average_latency());
+ }
+
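+ // Each audio buffer covers 10 ms, so a sample offset of `index` within the
+ // buffer maps to 10 * index / frames_per_buffer_ milliseconds (rounded).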
+ int IndexToMilliseconds(double index) const {
+ return static_cast<int>(10.0 * (index / frames_per_buffer_) + 0.5);
+ }
+
+ private:
+ const size_t frames_per_buffer_;
+ const size_t bytes_per_buffer_;
+ size_t play_count_;
+ size_t rec_count_;
+ int64_t pulse_time_;
+ std::vector<int> latencies_;
+};
+
+// Mocks the AudioTransport object and proxies actions for the two callbacks
+// (RecordedDataIsAvailable and NeedMorePlayData) to different implementations
+// of AudioStreamInterface.
+class MockAudioTransportAndroid : public test::MockAudioTransport {
+ public:
+ explicit MockAudioTransportAndroid(int type)
+ : num_callbacks_(0),
+ type_(type),
+ play_count_(0),
+ rec_count_(0),
+ audio_stream_(nullptr) {}
+
+ virtual ~MockAudioTransportAndroid() {}
+
+ // Set default actions of the mock object. We are delegating to fake
+ // implementations (of AudioStreamInterface) here.
+ void HandleCallbacks(rtc::Event* test_is_done,
+ AudioStreamInterface* audio_stream,
+ int num_callbacks) {
+ test_is_done_ = test_is_done;
+ audio_stream_ = audio_stream;
+ num_callbacks_ = num_callbacks;
+ if (play_mode()) {
+ ON_CALL(*this, NeedMorePlayData(_, _, _, _, _, _, _, _))
+ .WillByDefault(
+ Invoke(this, &MockAudioTransportAndroid::RealNeedMorePlayData));
+ }
+ if (rec_mode()) {
+ ON_CALL(*this, RecordedDataIsAvailable(_, _, _, _, _, _, _, _, _, _))
+ .WillByDefault(Invoke(
+ this, &MockAudioTransportAndroid::RealRecordedDataIsAvailable));
+ }
+ }
+
+ int32_t RealRecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ const uint32_t& newMicLevel) {
+ EXPECT_TRUE(rec_mode()) << "No test is expecting these callbacks.";
+ rec_count_++;
+ // Process the recorded audio stream if an AudioStreamInterface
+ // implementation exists.
+ if (audio_stream_) {
+ audio_stream_->Write(audioSamples, nSamples);
+ }
+ if (ReceivedEnoughCallbacks()) {
+ test_is_done_->Set();
+ }
+ return 0;
+ }
+
+ int32_t RealNeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut, // NOLINT
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) {
+ EXPECT_TRUE(play_mode()) << "No test is expecting these callbacks.";
+ play_count_++;
+ nSamplesOut = nSamples;
+ // Read (possibly processed) audio stream samples to be played out if an
+ // AudioStreamInterface implementation exists.
+ if (audio_stream_) {
+ audio_stream_->Read(audioSamples, nSamples);
+ }
+ if (ReceivedEnoughCallbacks()) {
+ test_is_done_->Set();
+ }
+ return 0;
+ }
+
+ bool ReceivedEnoughCallbacks() {
+ bool recording_done = false;
+ if (rec_mode())
+ recording_done = rec_count_ >= num_callbacks_;
+ else
+ recording_done = true;
+
+ bool playout_done = false;
+ if (play_mode())
+ playout_done = play_count_ >= num_callbacks_;
+ else
+ playout_done = true;
+
+ return recording_done && playout_done;
+ }
+
+ bool play_mode() const { return type_ & kPlayout; }
+ bool rec_mode() const { return type_ & kRecording; }
+
+ private:
+ rtc::Event* test_is_done_;
+ size_t num_callbacks_;
+ int type_;
+ size_t play_count_;
+ size_t rec_count_;
+ AudioStreamInterface* audio_stream_;
+ std::unique_ptr<LatencyMeasuringAudioStream> latency_audio_stream_;
+};
+
+// AudioDeviceTest test fixture.
+class AudioDeviceTest : public ::testing::Test {
+ protected:
+ AudioDeviceTest() {
+ // One-time initialization of JVM and application context. Ensures that we
+ // can make calls between C++ and Java. Initializes both Java and OpenSL ES
+ // implementations.
+ // Creates an audio device using a default audio layer.
+ jni_ = AttachCurrentThreadIfNeeded();
+ context_ = test::GetAppContextForTest(jni_);
+ audio_device_ = CreateJavaAudioDeviceModule(jni_, context_.obj());
+ EXPECT_NE(audio_device_.get(), nullptr);
+ EXPECT_EQ(0, audio_device_->Init());
+ audio_manager_ = GetAudioManager(jni_, context_);
+ UpdateParameters();
+ }
+ virtual ~AudioDeviceTest() { EXPECT_EQ(0, audio_device_->Terminate()); }
+
+ int total_delay_ms() const { return 10; }
+
+ void UpdateParameters() {
+ int input_sample_rate = GetDefaultSampleRate(jni_, audio_manager_);
+ int output_sample_rate = GetDefaultSampleRate(jni_, audio_manager_);
+ bool stereo_playout_is_available;
+ bool stereo_record_is_available;
+ audio_device_->StereoPlayoutIsAvailable(&stereo_playout_is_available);
+ audio_device_->StereoRecordingIsAvailable(&stereo_record_is_available);
+ GetAudioParameters(jni_, context_, audio_manager_, input_sample_rate,
+ output_sample_rate, stereo_playout_is_available,
+ stereo_record_is_available, &input_parameters_,
+ &output_parameters_);
+ }
+
+ void SetActiveAudioLayer(AudioDeviceModule::AudioLayer audio_layer) {
+ audio_device_ = CreateAudioDevice(audio_layer);
+ EXPECT_NE(audio_device_.get(), nullptr);
+ EXPECT_EQ(0, audio_device_->Init());
+ UpdateParameters();
+ }
+
+ int playout_sample_rate() const { return output_parameters_.sample_rate(); }
+ int record_sample_rate() const { return input_parameters_.sample_rate(); }
+ size_t playout_channels() const { return output_parameters_.channels(); }
+ size_t record_channels() const { return input_parameters_.channels(); }
+ size_t playout_frames_per_10ms_buffer() const {
+ return output_parameters_.frames_per_10ms_buffer();
+ }
+ size_t record_frames_per_10ms_buffer() const {
+ return input_parameters_.frames_per_10ms_buffer();
+ }
+
+ rtc::scoped_refptr<AudioDeviceModule> audio_device() const {
+ return audio_device_;
+ }
+
+ rtc::scoped_refptr<AudioDeviceModule> CreateAudioDevice(
+ AudioDeviceModule::AudioLayer audio_layer) {
+#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+ if (audio_layer == AudioDeviceModule::kAndroidAAudioAudio) {
+ return rtc::scoped_refptr<AudioDeviceModule>(
+ CreateAAudioAudioDeviceModule(jni_, context_.obj()));
+ }
+#endif
+ if (audio_layer == AudioDeviceModule::kAndroidJavaAudio) {
+ return rtc::scoped_refptr<AudioDeviceModule>(
+ CreateJavaAudioDeviceModule(jni_, context_.obj()));
+ } else if (audio_layer == AudioDeviceModule::kAndroidOpenSLESAudio) {
+ return rtc::scoped_refptr<AudioDeviceModule>(
+ CreateOpenSLESAudioDeviceModule(jni_, context_.obj()));
+ } else if (audio_layer ==
+ AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio) {
+ return rtc::scoped_refptr<AudioDeviceModule>(
+ CreateJavaInputAndOpenSLESOutputAudioDeviceModule(jni_,
+ context_.obj()));
+ } else {
+ return nullptr;
+ }
+ }
+
+ // Returns file name relative to the resource root given a sample rate.
+ std::string GetFileName(int sample_rate) {
+ EXPECT_TRUE(sample_rate == 48000 || sample_rate == 44100);
+ char fname[64];
+ snprintf(fname, sizeof(fname), "audio_device/audio_short%d",
+ sample_rate / 1000);
+ std::string file_name(webrtc::test::ResourcePath(fname, "pcm"));
+ EXPECT_TRUE(test::FileExists(file_name));
+#ifdef ENABLE_PRINTF
+ PRINT("file name: %s\n", file_name.c_str());
+ const size_t bytes = test::GetFileSize(file_name);
+ PRINT("file size: %zu [bytes]\n", bytes);
+ PRINT("file size: %zu [samples]\n", bytes / kBytesPerSample);
+ const int seconds =
+ static_cast<int>(bytes / (sample_rate * kBytesPerSample));
+ PRINT("file size: %d [secs]\n", seconds);
+ PRINT("file size: %zu [callbacks]\n", seconds * kNumCallbacksPerSecond);
+#endif
+ return file_name;
+ }
+
+ AudioDeviceModule::AudioLayer GetActiveAudioLayer() const {
+ AudioDeviceModule::AudioLayer audio_layer;
+ EXPECT_EQ(0, audio_device()->ActiveAudioLayer(&audio_layer));
+ return audio_layer;
+ }
+
+ int TestDelayOnAudioLayer(
+ const AudioDeviceModule::AudioLayer& layer_to_test) {
+ rtc::scoped_refptr<AudioDeviceModule> audio_device;
+ audio_device = CreateAudioDevice(layer_to_test);
+ EXPECT_NE(audio_device.get(), nullptr);
+ uint16_t playout_delay;
+ EXPECT_EQ(0, audio_device->PlayoutDelay(&playout_delay));
+ return playout_delay;
+ }
+
+ AudioDeviceModule::AudioLayer TestActiveAudioLayer(
+ const AudioDeviceModule::AudioLayer& layer_to_test) {
+ rtc::scoped_refptr<AudioDeviceModule> audio_device;
+ audio_device = CreateAudioDevice(layer_to_test);
+ EXPECT_NE(audio_device.get(), nullptr);
+ AudioDeviceModule::AudioLayer active;
+ EXPECT_EQ(0, audio_device->ActiveAudioLayer(&active));
+ return active;
+ }
+
+ // One way to ensure that the engine object is valid is to create an
+ // SL Engine interface since it exposes creation methods of all the OpenSL ES
+ // object types and it is only supported on the engine object. This method
+ // also verifies that the engine interface supports at least one interface.
+ // Note that the test below is not a full test of the SLEngineItf object
+ // but only a simple sanity test to check that the global engine object is OK.
+ void ValidateSLEngine(SLObjectItf engine_object) {
+ EXPECT_NE(nullptr, engine_object);
+ // Get the SL Engine interface which is exposed by the engine object.
+ SLEngineItf engine;
+ SLresult result =
+ (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine);
+ EXPECT_EQ(result, SL_RESULT_SUCCESS) << "GetInterface() on engine failed";
+ // Ensure that the SL Engine interface exposes at least one interface.
+ SLuint32 object_id = SL_OBJECTID_ENGINE;
+ SLuint32 num_supported_interfaces = 0;
+ result = (*engine)->QueryNumSupportedInterfaces(engine, object_id,
+ &num_supported_interfaces);
+ EXPECT_EQ(result, SL_RESULT_SUCCESS)
+ << "QueryNumSupportedInterfaces() failed";
+ EXPECT_GE(num_supported_interfaces, 1u);
+ }
+
+ // Volume control is currently only supported for the Java output audio layer.
+ // For OpenSL ES, the internal stream volume is always at the maximum level
+ // and there is no need for this test to set it to max.
+ bool AudioLayerSupportsVolumeControl() const {
+ return GetActiveAudioLayer() == AudioDeviceModule::kAndroidJavaAudio;
+ }
+
+ void SetMaxPlayoutVolume() {
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ uint32_t max_volume;
+ EXPECT_EQ(0, audio_device()->MaxSpeakerVolume(&max_volume));
+ EXPECT_EQ(0, audio_device()->SetSpeakerVolume(max_volume));
+ }
+
+ void DisableBuiltInAECIfAvailable() {
+ if (audio_device()->BuiltInAECIsAvailable()) {
+ EXPECT_EQ(0, audio_device()->EnableBuiltInAEC(false));
+ }
+ }
+
+ void StartPlayout() {
+ EXPECT_FALSE(audio_device()->PlayoutIsInitialized());
+ EXPECT_FALSE(audio_device()->Playing());
+ EXPECT_EQ(0, audio_device()->InitPlayout());
+ EXPECT_TRUE(audio_device()->PlayoutIsInitialized());
+ EXPECT_EQ(0, audio_device()->StartPlayout());
+ EXPECT_TRUE(audio_device()->Playing());
+ }
+
+ void StopPlayout() {
+ EXPECT_EQ(0, audio_device()->StopPlayout());
+ EXPECT_FALSE(audio_device()->Playing());
+ EXPECT_FALSE(audio_device()->PlayoutIsInitialized());
+ }
+
+ void StartRecording() {
+ EXPECT_FALSE(audio_device()->RecordingIsInitialized());
+ EXPECT_FALSE(audio_device()->Recording());
+ EXPECT_EQ(0, audio_device()->InitRecording());
+ EXPECT_TRUE(audio_device()->RecordingIsInitialized());
+ EXPECT_EQ(0, audio_device()->StartRecording());
+ EXPECT_TRUE(audio_device()->Recording());
+ }
+
+ void StopRecording() {
+ EXPECT_EQ(0, audio_device()->StopRecording());
+ EXPECT_FALSE(audio_device()->Recording());
+ }
+
+ int GetMaxSpeakerVolume() const {
+ uint32_t max_volume(0);
+ EXPECT_EQ(0, audio_device()->MaxSpeakerVolume(&max_volume));
+ return max_volume;
+ }
+
+ int GetMinSpeakerVolume() const {
+ uint32_t min_volume(0);
+ EXPECT_EQ(0, audio_device()->MinSpeakerVolume(&min_volume));
+ return min_volume;
+ }
+
+ int GetSpeakerVolume() const {
+ uint32_t volume(0);
+ EXPECT_EQ(0, audio_device()->SpeakerVolume(&volume));
+ return volume;
+ }
+
+ JNIEnv* jni_;
+ ScopedJavaLocalRef<jobject> context_;
+ rtc::Event test_is_done_;
+ rtc::scoped_refptr<AudioDeviceModule> audio_device_;
+ ScopedJavaLocalRef<jobject> audio_manager_;
+ AudioParameters output_parameters_;
+ AudioParameters input_parameters_;
+};
+
+TEST_F(AudioDeviceTest, ConstructDestruct) {
+ // Uses the test fixture to create and destroy the audio device module.
+}
+
+// Verify that it is possible to explicitly create the supported types of
+// ADMs. These tests override the default selection of the native audio layer
+// by ignoring whether the device supports low-latency output or not.
+TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForCombinedJavaOpenSLCombo) {
+ AudioDeviceModule::AudioLayer expected_layer =
+ AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio;
+ AudioDeviceModule::AudioLayer active_layer =
+ TestActiveAudioLayer(expected_layer);
+ EXPECT_EQ(expected_layer, active_layer);
+}
+
+TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForJavaInBothDirections) {
+ AudioDeviceModule::AudioLayer expected_layer =
+ AudioDeviceModule::kAndroidJavaAudio;
+ AudioDeviceModule::AudioLayer active_layer =
+ TestActiveAudioLayer(expected_layer);
+ EXPECT_EQ(expected_layer, active_layer);
+}
+
+TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForOpenSLInBothDirections) {
+ AudioDeviceModule::AudioLayer expected_layer =
+ AudioDeviceModule::kAndroidOpenSLESAudio;
+ AudioDeviceModule::AudioLayer active_layer =
+ TestActiveAudioLayer(expected_layer);
+ EXPECT_EQ(expected_layer, active_layer);
+}
+
+// TODO(bugs.webrtc.org/8914)
+// TODO(phensman): Add test for AAudio/Java combination when this combination
+// is supported.
+#if !defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+#define MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections \
+ DISABLED_CorrectAudioLayerIsUsedForAAudioInBothDirections
+#else
+#define MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections \
+ CorrectAudioLayerIsUsedForAAudioInBothDirections
+#endif
+TEST_F(AudioDeviceTest,
+ MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections) {
+ AudioDeviceModule::AudioLayer expected_layer =
+ AudioDeviceModule::kAndroidAAudioAudio;
+ AudioDeviceModule::AudioLayer active_layer =
+ TestActiveAudioLayer(expected_layer);
+ EXPECT_EQ(expected_layer, active_layer);
+}
+
+// The Android ADM supports two different delay reporting modes. One for the
+// low-latency output path (in combination with OpenSL ES), and one for the
+// high-latency output path (Java backends in both directions). These two tests
+// verify that the audio device reports a correct delay estimate given the
+// selected audio layer. Note that this delay estimate will only be utilized
+// if the HW AEC is disabled.
+// Delay should be 75 ms in high latency and 25 ms in low latency.
+TEST_F(AudioDeviceTest, UsesCorrectDelayEstimateForHighLatencyOutputPath) {
+ EXPECT_EQ(75, TestDelayOnAudioLayer(AudioDeviceModule::kAndroidJavaAudio));
+}
+
+TEST_F(AudioDeviceTest, UsesCorrectDelayEstimateForLowLatencyOutputPath) {
+ EXPECT_EQ(25,
+ TestDelayOnAudioLayer(
+ AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio));
+}
+
+TEST_F(AudioDeviceTest, InitTerminate) {
+ // Initialization is part of the test fixture.
+ EXPECT_TRUE(audio_device()->Initialized());
+ EXPECT_EQ(0, audio_device()->Terminate());
+ EXPECT_FALSE(audio_device()->Initialized());
+}
+
+TEST_F(AudioDeviceTest, Devices) {
+ // Device enumeration is not supported. Verify fixed values only.
+ EXPECT_EQ(1, audio_device()->PlayoutDevices());
+ EXPECT_EQ(1, audio_device()->RecordingDevices());
+}
+
+TEST_F(AudioDeviceTest, IsAcousticEchoCancelerSupported) {
+ PRINT("%sAcoustic Echo Canceler support: %s\n", kTag,
+ audio_device()->BuiltInAECIsAvailable() ? "Yes" : "No");
+}
+
+TEST_F(AudioDeviceTest, IsNoiseSuppressorSupported) {
+ PRINT("%sNoise Suppressor support: %s\n", kTag,
+ audio_device()->BuiltInNSIsAvailable() ? "Yes" : "No");
+}
+
+// Verify that playout side is configured for mono by default.
+TEST_F(AudioDeviceTest, UsesMonoPlayoutByDefault) {
+ EXPECT_EQ(1u, output_parameters_.channels());
+}
+
+// Verify that recording side is configured for mono by default.
+TEST_F(AudioDeviceTest, UsesMonoRecordingByDefault) {
+ EXPECT_EQ(1u, input_parameters_.channels());
+}
+
+TEST_F(AudioDeviceTest, SpeakerVolumeShouldBeAvailable) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ bool available;
+ EXPECT_EQ(0, audio_device()->SpeakerVolumeIsAvailable(&available));
+ EXPECT_TRUE(available);
+}
+
+TEST_F(AudioDeviceTest, MaxSpeakerVolumeIsPositive) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ StartPlayout();
+ EXPECT_GT(GetMaxSpeakerVolume(), 0);
+ StopPlayout();
+}
+
+TEST_F(AudioDeviceTest, MinSpeakerVolumeIsZero) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ EXPECT_EQ(GetMinSpeakerVolume(), 0);
+}
+
+TEST_F(AudioDeviceTest, DefaultSpeakerVolumeIsWithinMinMax) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ const int default_volume = GetSpeakerVolume();
+ EXPECT_GE(default_volume, GetMinSpeakerVolume());
+ EXPECT_LE(default_volume, GetMaxSpeakerVolume());
+}
+
+TEST_F(AudioDeviceTest, SetSpeakerVolumeActuallySetsVolume) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ const int default_volume = GetSpeakerVolume();
+ const int max_volume = GetMaxSpeakerVolume();
+ EXPECT_EQ(0, audio_device()->SetSpeakerVolume(max_volume));
+ int new_volume = GetSpeakerVolume();
+ EXPECT_EQ(new_volume, max_volume);
+ EXPECT_EQ(0, audio_device()->SetSpeakerVolume(default_volume));
+}
+
+// Tests that playout can be initiated, started and stopped. No audio callback
+// is registered in this test.
+TEST_F(AudioDeviceTest, StartStopPlayout) {
+ StartPlayout();
+ StopPlayout();
+ StartPlayout();
+ StopPlayout();
+}
+
+// Tests that recording can be initiated, started and stopped. No audio callback
+// is registered in this test.
+TEST_F(AudioDeviceTest, StartStopRecording) {
+ StartRecording();
+ StopRecording();
+ StartRecording();
+ StopRecording();
+}
+
+// Verify that calling StopPlayout() will leave us in an uninitialized state
+// which will require a new call to InitPlayout(). This test does not call
+// StartPlayout() while being uninitialized since doing so will hit a
+// RTC_DCHECK and death tests are not supported on Android.
+TEST_F(AudioDeviceTest, StopPlayoutRequiresInitToRestart) {
+ EXPECT_EQ(0, audio_device()->InitPlayout());
+ EXPECT_EQ(0, audio_device()->StartPlayout());
+ EXPECT_EQ(0, audio_device()->StopPlayout());
+ EXPECT_FALSE(audio_device()->PlayoutIsInitialized());
+}
+
+// Verify that calling StopRecording() will leave us in an uninitialized state
+// which will require a new call to InitRecording(). This test does not call
+// StartRecording() while being uninitialized since doing so will hit a
+// RTC_DCHECK and death tests are not supported on Android.
+TEST_F(AudioDeviceTest, StopRecordingRequiresInitToRestart) {
+ EXPECT_EQ(0, audio_device()->InitRecording());
+ EXPECT_EQ(0, audio_device()->StartRecording());
+ EXPECT_EQ(0, audio_device()->StopRecording());
+ EXPECT_FALSE(audio_device()->RecordingIsInitialized());
+}
+
+// Start playout and verify that the native audio layer starts asking for real
+// audio samples to play out using the NeedMorePlayData callback.
+TEST_F(AudioDeviceTest, StartPlayoutVerifyCallbacks) {
+ MockAudioTransportAndroid mock(kPlayout);
+ mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks);
+ EXPECT_CALL(mock, NeedMorePlayData(playout_frames_per_10ms_buffer(),
+ kBytesPerSample, playout_channels(),
+ playout_sample_rate(), NotNull(), _, _, _))
+ .Times(AtLeast(kNumCallbacks));
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartPlayout();
+ test_is_done_.Wait(kTestTimeOutInMilliseconds);
+ StopPlayout();
+}
+
+// Start recording and verify that the native audio layer starts feeding real
+// audio samples via the RecordedDataIsAvailable callback.
+TEST_F(AudioDeviceTest, StartRecordingVerifyCallbacks) {
+ MockAudioTransportAndroid mock(kRecording);
+ mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks);
+ EXPECT_CALL(
+ mock, RecordedDataIsAvailable(NotNull(), record_frames_per_10ms_buffer(),
+ kBytesPerSample, record_channels(),
+ record_sample_rate(), _, 0, 0, false, _, _))
+ .Times(AtLeast(kNumCallbacks));
+
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartRecording();
+ test_is_done_.Wait(kTestTimeOutInMilliseconds);
+ StopRecording();
+}
+
+// Start playout and recording (full-duplex audio) and verify that audio is
+// active in both directions.
+TEST_F(AudioDeviceTest, StartPlayoutAndRecordingVerifyCallbacks) {
+ MockAudioTransportAndroid mock(kPlayout | kRecording);
+ mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks);
+ EXPECT_CALL(mock, NeedMorePlayData(playout_frames_per_10ms_buffer(),
+ kBytesPerSample, playout_channels(),
+ playout_sample_rate(), NotNull(), _, _, _))
+ .Times(AtLeast(kNumCallbacks));
+ EXPECT_CALL(
+ mock, RecordedDataIsAvailable(NotNull(), record_frames_per_10ms_buffer(),
+ kBytesPerSample, record_channels(),
+ record_sample_rate(), _, 0, 0, false, _, _))
+ .Times(AtLeast(kNumCallbacks));
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartPlayout();
+ StartRecording();
+ test_is_done_.Wait(kTestTimeOutInMilliseconds);
+ StopRecording();
+ StopPlayout();
+}
+
+// Start playout and read audio from an external PCM file when the audio layer
+// asks for data to play out. Real audio is played out in this test but it does
+// not contain any explicit verification that the audio quality is perfect.
+TEST_F(AudioDeviceTest, RunPlayoutWithFileAsSource) {
+ // TODO(henrika): extend test when stereo output is supported.
+ EXPECT_EQ(1u, playout_channels());
+ NiceMock<MockAudioTransportAndroid> mock(kPlayout);
+ const int num_callbacks = kFilePlayTimeInSec * kNumCallbacksPerSecond;
+ std::string file_name = GetFileName(playout_sample_rate());
+ std::unique_ptr<FileAudioStream> file_audio_stream(
+ new FileAudioStream(num_callbacks, file_name, playout_sample_rate()));
+ mock.HandleCallbacks(&test_is_done_, file_audio_stream.get(), num_callbacks);
+ // SetMaxPlayoutVolume();
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartPlayout();
+ test_is_done_.Wait(kTestTimeOutInMilliseconds);
+ StopPlayout();
+}
+
+// It should be possible to create an OpenSL engine object if OpenSL ES based
+// audio is requested in any direction.
+TEST_F(AudioDeviceTest, TestCreateOpenSLEngine) {
+ // Verify that the global (singleton) OpenSL Engine can be acquired.
+ OpenSLEngineManager engine_manager;
+ SLObjectItf engine_object = engine_manager.GetOpenSLEngine();
+ EXPECT_NE(nullptr, engine_object);
+ // Perform a simple sanity check of the created engine object.
+ ValidateSLEngine(engine_object);
+}
+
+// The audio device module only supports the same sample rate in both directions.
+// In addition, in full-duplex low-latency mode (OpenSL ES), both input and
+// output must use the same native buffer size to allow for usage of the fast
+// audio track in Android.
+TEST_F(AudioDeviceTest, VerifyAudioParameters) {
+ EXPECT_EQ(output_parameters_.sample_rate(), input_parameters_.sample_rate());
+ SetActiveAudioLayer(AudioDeviceModule::kAndroidOpenSLESAudio);
+ EXPECT_EQ(output_parameters_.frames_per_buffer(),
+ input_parameters_.frames_per_buffer());
+}
+
+TEST_F(AudioDeviceTest, ShowAudioParameterInfo) {
+ const bool low_latency_out = false;
+ const bool low_latency_in = false;
+ PRINT("PLAYOUT:\n");
+ PRINT("%saudio layer: %s\n", kTag,
+ low_latency_out ? "Low latency OpenSL" : "Java/JNI based AudioTrack");
+ PRINT("%ssample rate: %d Hz\n", kTag, output_parameters_.sample_rate());
+ PRINT("%schannels: %zu\n", kTag, output_parameters_.channels());
+ PRINT("%sframes per buffer: %zu <=> %.2f ms\n", kTag,
+ output_parameters_.frames_per_buffer(),
+ output_parameters_.GetBufferSizeInMilliseconds());
+ PRINT("RECORD: \n");
+ PRINT("%saudio layer: %s\n", kTag,
+ low_latency_in ? "Low latency OpenSL" : "Java/JNI based AudioRecord");
+ PRINT("%ssample rate: %d Hz\n", kTag, input_parameters_.sample_rate());
+ PRINT("%schannels: %zu\n", kTag, input_parameters_.channels());
+ PRINT("%sframes per buffer: %zu <=> %.2f ms\n", kTag,
+ input_parameters_.frames_per_buffer(),
+ input_parameters_.GetBufferSizeInMilliseconds());
+}
+
+// Add device-specific information to the test for logging purposes.
+TEST_F(AudioDeviceTest, ShowDeviceInfo) {
+ std::string model =
+ JavaToNativeString(jni_, Java_BuildInfo_getDeviceModel(jni_));
+ std::string brand = JavaToNativeString(jni_, Java_BuildInfo_getBrand(jni_));
+ std::string manufacturer =
+ JavaToNativeString(jni_, Java_BuildInfo_getDeviceManufacturer(jni_));
+
+ PRINT("%smodel: %s\n", kTag, model.c_str());
+ PRINT("%sbrand: %s\n", kTag, brand.c_str());
+ PRINT("%smanufacturer: %s\n", kTag, manufacturer.c_str());
+}
+
+// Add Android build information to the test for logging purposes.
+TEST_F(AudioDeviceTest, ShowBuildInfo) {
+ std::string release =
+ JavaToNativeString(jni_, Java_BuildInfo_getBuildRelease(jni_));
+ std::string build_id =
+ JavaToNativeString(jni_, Java_BuildInfo_getAndroidBuildId(jni_));
+ std::string build_type =
+ JavaToNativeString(jni_, Java_BuildInfo_getBuildType(jni_));
+ int sdk = Java_BuildInfo_getSdkVersion(jni_);
+
+ PRINT("%sbuild release: %s\n", kTag, release.c_str());
+ PRINT("%sbuild id: %s\n", kTag, build_id.c_str());
+ PRINT("%sbuild type: %s\n", kTag, build_type.c_str());
+ PRINT("%sSDK version: %d\n", kTag, sdk);
+}
+
+// Basic test of the AudioParameters class using default construction where
+// all members are set to zero.
+TEST_F(AudioDeviceTest, AudioParametersWithDefaultConstruction) {
+ AudioParameters params;
+ EXPECT_FALSE(params.is_valid());
+ EXPECT_EQ(0, params.sample_rate());
+ EXPECT_EQ(0U, params.channels());
+ EXPECT_EQ(0U, params.frames_per_buffer());
+ EXPECT_EQ(0U, params.frames_per_10ms_buffer());
+ EXPECT_EQ(0U, params.GetBytesPerFrame());
+ EXPECT_EQ(0U, params.GetBytesPerBuffer());
+ EXPECT_EQ(0U, params.GetBytesPer10msBuffer());
+ EXPECT_EQ(0.0f, params.GetBufferSizeInMilliseconds());
+}
+
+// Basic test of the AudioParameters class using non-default construction.
+TEST_F(AudioDeviceTest, AudioParametersWithNonDefaultConstruction) {
+ const int kSampleRate = 48000;
+ const size_t kChannels = 1;
+ const size_t kFramesPerBuffer = 480;
+ const size_t kFramesPer10msBuffer = 480;
+ const size_t kBytesPerFrame = 2;
+ const float kBufferSizeInMs = 10.0f;
+ AudioParameters params(kSampleRate, kChannels, kFramesPerBuffer);
+ EXPECT_TRUE(params.is_valid());
+ EXPECT_EQ(kSampleRate, params.sample_rate());
+ EXPECT_EQ(kChannels, params.channels());
+ EXPECT_EQ(kFramesPerBuffer, params.frames_per_buffer());
+ EXPECT_EQ(static_cast<size_t>(kSampleRate / 100),
+ params.frames_per_10ms_buffer());
+ EXPECT_EQ(kBytesPerFrame, params.GetBytesPerFrame());
+ EXPECT_EQ(kBytesPerFrame * kFramesPerBuffer, params.GetBytesPerBuffer());
+ EXPECT_EQ(kBytesPerFrame * kFramesPer10msBuffer,
+ params.GetBytesPer10msBuffer());
+ EXPECT_EQ(kBufferSizeInMs, params.GetBufferSizeInMilliseconds());
+}
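+
+// Worked example of the arithmetic verified above: with 48000 Hz mono audio
+// and 2 bytes per frame (16-bit PCM), a 10 ms buffer holds 48000 / 100 = 480
+// frames, i.e. 480 * 2 = 960 bytes, which is what GetBytesPer10msBuffer()
+// returns for this configuration.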
+
+// Start playout and recording and store recorded data in an intermediate FIFO
+// buffer from which the playout side then reads its samples in the same order
+// as they were stored. Under ideal circumstances, a callback sequence would
+// look like: ...+-+-+-+-+-+-+-..., where '+' means 'packet recorded' and '-'
+// means 'packet played'. Under such conditions, the FIFO would only contain
+// one packet on average. However, under more realistic conditions, the size
+// of the FIFO will vary more due to an imbalance between the two sides.
+// This test tries to verify that the device maintains a balanced callback
+// sequence by running in loopback for kFullDuplexTimeInSec seconds while
+// measuring the size (max and average) of the FIFO. The size of the FIFO is
+// increased by the recording side and decreased by the playout side.
+// TODO(henrika): tune the final test parameters after running tests on several
+// different devices.
+// Disabling this test on bots since it is difficult to come up with a robust
+// test condition that works as intended on all of them. The main issue is
+// that, when swarming is used, an initial latency can build up when the two
+// sides start at different times. Hence, the test can fail even if audio
+// works as intended. Keeping the test so it can be enabled manually.
+// http://bugs.webrtc.org/7744
+TEST_F(AudioDeviceTest, DISABLED_RunPlayoutAndRecordingInFullDuplex) {
+ EXPECT_EQ(record_channels(), playout_channels());
+ EXPECT_EQ(record_sample_rate(), playout_sample_rate());
+ NiceMock<MockAudioTransportAndroid> mock(kPlayout | kRecording);
+ std::unique_ptr<FifoAudioStream> fifo_audio_stream(
+ new FifoAudioStream(playout_frames_per_10ms_buffer()));
+ mock.HandleCallbacks(&test_is_done_, fifo_audio_stream.get(),
+ kFullDuplexTimeInSec * kNumCallbacksPerSecond);
+ SetMaxPlayoutVolume();
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartRecording();
+ StartPlayout();
+ test_is_done_.Wait(
+ std::max(kTestTimeOutInMilliseconds, 1000 * kFullDuplexTimeInSec));
+ StopPlayout();
+ StopRecording();
+
+ // These thresholds are set rather high to accommodate differences in
+ // hardware across devices, so this test can be used in swarming.
+ // See http://bugs.webrtc.org/6464
+ EXPECT_LE(fifo_audio_stream->average_size(), 60u);
+ EXPECT_LE(fifo_audio_stream->largest_size(), 70u);
+}
+
+// Measures loopback latency and reports the min, max and average values for
+// a full duplex audio session.
+// The latency is measured like so:
+// - Insert impulses periodically on the output side.
+// - Detect the impulses on the input side.
+// - Measure the time difference between the transmit time and receive time.
+// - Store time differences in a vector and calculate min, max and average.
+// This test requires a special hardware called Audio Loopback Dongle.
+// See http://source.android.com/devices/audio/loopback.html for details.
+TEST_F(AudioDeviceTest, DISABLED_MeasureLoopbackLatency) {
+ EXPECT_EQ(record_channels(), playout_channels());
+ EXPECT_EQ(record_sample_rate(), playout_sample_rate());
+ NiceMock<MockAudioTransportAndroid> mock(kPlayout | kRecording);
+ std::unique_ptr<LatencyMeasuringAudioStream> latency_audio_stream(
+ new LatencyMeasuringAudioStream(playout_frames_per_10ms_buffer()));
+ mock.HandleCallbacks(&test_is_done_, latency_audio_stream.get(),
+ kMeasureLatencyTimeInSec * kNumCallbacksPerSecond);
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ SetMaxPlayoutVolume();
+ DisableBuiltInAECIfAvailable();
+ StartRecording();
+ StartPlayout();
+ test_is_done_.Wait(
+ std::max(kTestTimeOutInMilliseconds, 1000 * kMeasureLatencyTimeInSec));
+ StopPlayout();
+ StopRecording();
+ // Verify that the correct number of transmitted impulses are detected.
+ EXPECT_EQ(latency_audio_stream->num_latency_values(),
+ static_cast<size_t>(
+ kImpulseFrequencyInHz * kMeasureLatencyTimeInSec - 1));
+ latency_audio_stream->PrintResults();
+}
+
+TEST(JavaAudioDeviceTest, TestRunningTwoAdmsSimultaneously) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> context = test::GetAppContextForTest(jni);
+
+ // Create and start the first ADM.
+ rtc::scoped_refptr<AudioDeviceModule> adm_1 =
+ CreateJavaAudioDeviceModule(jni, context.obj());
+ EXPECT_EQ(0, adm_1->Init());
+ EXPECT_EQ(0, adm_1->InitRecording());
+ EXPECT_EQ(0, adm_1->StartRecording());
+
+ // Create and start a second ADM. Expect this to fail due to the microphone
+ // already being in use.
+ rtc::scoped_refptr<AudioDeviceModule> adm_2 =
+ CreateJavaAudioDeviceModule(jni, context.obj());
+ int32_t err = adm_2->Init();
+ err |= adm_2->InitRecording();
+ err |= adm_2->StartRecording();
+ EXPECT_NE(0, err);
+
+ // Stop and terminate the second ADM.
+ adm_2->StopRecording();
+ adm_2->Terminate();
+
+ // Stop first ADM.
+ EXPECT_EQ(0, adm_1->StopRecording());
+ EXPECT_EQ(0, adm_1->Terminate());
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/codecs/DEPS b/third_party/libwebrtc/sdk/android/native_unittests/codecs/DEPS
new file mode 100644
index 0000000000..fb2c30fab1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/codecs/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+media/base/media_constants.h",
+]
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/codecs/wrapper_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/codecs/wrapper_unittest.cc
new file mode 100644
index 0000000000..c858095d05
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/codecs/wrapper_unittest.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "absl/memory/memory.h"
+#include "media/base/media_constants.h"
+#include "sdk/android/generated_native_unittests_jni/CodecsWrapperTestHelper_jni.h"
+#include "sdk/android/native_api/codecs/wrapper.h"
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+TEST(JavaCodecsWrapperTest, JavaToNativeVideoCodecInfo) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_video_codec_info =
+ jni::Java_CodecsWrapperTestHelper_createTestVideoCodecInfo(env);
+
+ const SdpVideoFormat video_format =
+ JavaToNativeVideoCodecInfo(env, j_video_codec_info.obj());
+
+ EXPECT_EQ(cricket::kH264CodecName, video_format.name);
+ const auto it =
+ video_format.parameters.find(cricket::kH264FmtpProfileLevelId);
+ ASSERT_NE(it, video_format.parameters.end());
+ EXPECT_EQ(cricket::kH264ProfileLevelConstrainedBaseline, it->second);
+}
+
+TEST(JavaCodecsWrapperTest, JavaToNativeResolutionBitrateLimits) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_fake_encoder =
+ jni::Java_CodecsWrapperTestHelper_createFakeVideoEncoder(env);
+
+ auto encoder = jni::JavaToNativeVideoEncoder(env, j_fake_encoder);
+ ASSERT_TRUE(encoder);
+
+ // Check that the bitrate limits are correctly passed from Java to native.
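+ // (The expected values below mirror what FakeVideoEncoder returns from
+ // getResolutionBitrateLimits(); see FakeVideoEncoder.java.)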
+ const std::vector<VideoEncoder::ResolutionBitrateLimits> bitrate_limits =
+ encoder->GetEncoderInfo().resolution_bitrate_limits;
+ ASSERT_EQ(bitrate_limits.size(), 1u);
+ EXPECT_EQ(bitrate_limits[0].frame_size_pixels, 640 * 360);
+ EXPECT_EQ(bitrate_limits[0].min_start_bitrate_bps, 300000);
+ EXPECT_EQ(bitrate_limits[0].min_bitrate_bps, 200000);
+ EXPECT_EQ(bitrate_limits[0].max_bitrate_bps, 1000000);
+}
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/java_types_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/java_types_unittest.cc
new file mode 100644
index 0000000000..4e7a6ed7ca
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/java_types_unittest.cc
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <vector>
+
+#include "sdk/android/generated_native_unittests_jni/JavaTypesTestHelper_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+TEST(JavaTypesTest, TestJavaToNativeStringMap) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_map =
+ jni::Java_JavaTypesTestHelper_createTestStringMap(env);
+
+ std::map<std::string, std::string> output = JavaToNativeStringMap(env, j_map);
+
+ std::map<std::string, std::string> expected{
+ {"one", "1"}, {"two", "2"}, {"three", "3"},
+ };
+ EXPECT_EQ(expected, output);
+}
+
+TEST(JavaTypesTest, TestNativeToJavaToNativeIntArray) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+
+ std::vector<int32_t> test_data{1, 20, 300};
+
+ ScopedJavaLocalRef<jintArray> array = NativeToJavaIntArray(env, test_data);
+ EXPECT_EQ(test_data, JavaToNativeIntArray(env, array));
+}
+
+TEST(JavaTypesTest, TestNativeToJavaToNativeByteArray) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+
+ std::vector<int8_t> test_data{1, 20, 30};
+
+ ScopedJavaLocalRef<jbyteArray> array = NativeToJavaByteArray(env, test_data);
+ EXPECT_EQ(test_data, JavaToNativeByteArray(env, array));
+}
+
+TEST(JavaTypesTest, TestNativeToJavaToNativeIntArrayLeakTest) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+
+ std::vector<int32_t> test_data{1, 20, 300};
+
+ for (int i = 0; i < 2000; i++) {
+ ScopedJavaLocalRef<jintArray> array = NativeToJavaIntArray(env, test_data);
+ EXPECT_EQ(test_data, JavaToNativeIntArray(env, array));
+ }
+}
+
+TEST(JavaTypesTest, TestNativeToJavaToNativeByteArrayLeakTest) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+
+ std::vector<int8_t> test_data{1, 20, 30};
+
+ for (int i = 0; i < 2000; i++) {
+ ScopedJavaLocalRef<jbyteArray> array =
+ NativeToJavaByteArray(env, test_data);
+ EXPECT_EQ(test_data, JavaToNativeByteArray(env, array));
+ }
+}
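+
+// Note: the 2000 iterations in the two leak tests above are intended to
+// exhaust the JNI local reference table (commonly capped at 512 entries on
+// Android) if a local reference were leaked on each pass, so a leak would
+// abort the test rather than pass silently.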
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/ApplicationContextProvider.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/ApplicationContextProvider.java
new file mode 100644
index 0000000000..e10d34710d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/ApplicationContextProvider.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+public class ApplicationContextProvider {
+ @CalledByNative
+ public static Context getApplicationContextForTest() {
+ return ContextUtils.getApplicationContext();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/BuildInfo.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/BuildInfo.java
new file mode 100644
index 0000000000..0440ae4209
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/BuildInfo.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.Build;
+import org.webrtc.CalledByNative;
+
+public final class BuildInfo {
+ public static String getDevice() {
+ return Build.DEVICE;
+ }
+
+ @CalledByNative
+ public static String getDeviceModel() {
+ return Build.MODEL;
+ }
+
+ public static String getProduct() {
+ return Build.PRODUCT;
+ }
+
+ @CalledByNative
+ public static String getBrand() {
+ return Build.BRAND;
+ }
+
+ @CalledByNative
+ public static String getDeviceManufacturer() {
+ return Build.MANUFACTURER;
+ }
+
+ @CalledByNative
+ public static String getAndroidBuildId() {
+ return Build.ID;
+ }
+
+ @CalledByNative
+ public static String getBuildType() {
+ return Build.TYPE;
+ }
+
+ @CalledByNative
+ public static String getBuildRelease() {
+ return Build.VERSION.RELEASE;
+ }
+
+ @CalledByNative
+ public static int getSdkVersion() {
+ return Build.VERSION.SDK_INT;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java
new file mode 100644
index 0000000000..70151d3b78
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class CodecsWrapperTestHelper {
+ @CalledByNative
+ public static VideoCodecInfo createTestVideoCodecInfo() {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(
+ VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
+
+ VideoCodecInfo codec_info = new VideoCodecInfo("H264", params);
+ return codec_info;
+ }
+
+ @CalledByNative
+ public static VideoEncoder createFakeVideoEncoder() {
+ return new FakeVideoEncoder();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/FakeVideoEncoder.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/FakeVideoEncoder.java
new file mode 100644
index 0000000000..513f145518
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/FakeVideoEncoder.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoEncoder;
+
+/**
+ * An implementation of VideoEncoder used for testing the functionality of
+ * VideoEncoderWrapper.
+ */
+class FakeVideoEncoder implements VideoEncoder {
+ @Override
+ public VideoCodecStatus initEncode(Settings settings, Callback encodeCallback) {
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus encode(VideoFrame frame, EncodeInfo info) {
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate) {
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public ScalingSettings getScalingSettings() {
+ return ScalingSettings.OFF;
+ }
+
+ @Override
+ public ResolutionBitrateLimits[] getResolutionBitrateLimits() {
+ ResolutionBitrateLimits[] resolution_bitrate_limits = {
+ new ResolutionBitrateLimits(/* frameSizePixels = */ 640 * 360,
+ /* minStartBitrateBps = */ 300000,
+ /* minBitrateBps = */ 200000,
+ /* maxBitrateBps = */ 1000000)};
+
+ return resolution_bitrate_limits;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return "FakeVideoEncoder";
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaTypesTestHelper.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaTypesTestHelper.java
new file mode 100644
index 0000000000..6695ef79af
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaTypesTestHelper.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class JavaTypesTestHelper {
+ @CalledByNative
+ public static Map<String, String> createTestStringMap() {
+ Map<String, String> testMap = new HashMap<String, String>();
+ testMap.put("one", "1");
+ testMap.put("two", "2");
+ testMap.put("three", "3");
+ return testMap;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaVideoSourceTestHelper.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaVideoSourceTestHelper.java
new file mode 100644
index 0000000000..2803acb450
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaVideoSourceTestHelper.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class JavaVideoSourceTestHelper {
+ @CalledByNative
+ public static void startCapture(CapturerObserver observer, boolean success) {
+ observer.onCapturerStarted(success);
+ }
+
+ @CalledByNative
+ public static void stopCapture(CapturerObserver observer) {
+ observer.onCapturerStopped();
+ }
+
+ @CalledByNative
+ public static void deliverFrame(
+ int width, int height, int rotation, long timestampNs, CapturerObserver observer) {
+ observer.onFrameCaptured(
+ new VideoFrame(JavaI420Buffer.allocate(width, height), rotation, timestampNs));
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java
new file mode 100644
index 0000000000..445a6733ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import org.webrtc.PeerConnectionFactory.InitializationOptions;
+
+public class PeerConnectionFactoryInitializationHelper {
+ private static class MockLoader implements NativeLibraryLoader {
+ @Override
+ public boolean load(String name) {
+ return true;
+ }
+ }
+
+ @CalledByNative
+ public static void initializeFactoryForTests() {
+ Context ctx = ContextUtils.getApplicationContext();
+ InitializationOptions options = InitializationOptions.builder(ctx)
+ .setNativeLibraryLoader(new MockLoader())
+ .createInitializationOptions();
+
+ PeerConnectionFactory.initialize(options);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/DEPS b/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/DEPS
new file mode 100644
index 0000000000..ed77eb5d6d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/DEPS
@@ -0,0 +1,6 @@
+include_rules = [
+ "+logging/rtc_event_log/rtc_event_log_factory.h",
+ "+media/base",
+ "+media/engine",
+ "+modules/audio_processing/include/audio_processing.h",
+]
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc
new file mode 100644
index 0000000000..8bb6e33e65
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/native_api/peerconnection/peer_connection_factory.h"
+
+#include <memory>
+
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "media/base/media_engine.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/generated_native_unittests_jni/PeerConnectionFactoryInitializationHelper_jni.h"
+#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
+#include "sdk/android/native_api/jni/jvm.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+// Creates a native peer connection factory that will be wrapped by the Java one.
+rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> CreateTestPCF(
+ JNIEnv* jni,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread) {
+ // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+ // ThreadManager only WrapCurrentThread()s the thread where it is first
+ // created. Since the semantics around when auto-wrapping happens in
+ // webrtc/rtc_base/ are convoluted, we simply wrap here to avoid having to
+ // think about ramifications of auto-wrapping there.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ PeerConnectionFactoryDependencies pcf_deps;
+ pcf_deps.network_thread = network_thread;
+ pcf_deps.worker_thread = worker_thread;
+ pcf_deps.signaling_thread = signaling_thread;
+ pcf_deps.task_queue_factory = CreateDefaultTaskQueueFactory();
+ pcf_deps.call_factory = CreateCallFactory();
+ pcf_deps.event_log_factory =
+ std::make_unique<RtcEventLogFactory>(pcf_deps.task_queue_factory.get());
+
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = pcf_deps.task_queue_factory.get();
+ media_deps.adm =
+ CreateJavaAudioDeviceModule(jni, GetAppContextForTest(jni).obj());
+ media_deps.video_encoder_factory =
+ std::make_unique<webrtc::InternalEncoderFactory>();
+ media_deps.video_decoder_factory =
+ std::make_unique<webrtc::InternalDecoderFactory>();
+ SetMediaEngineDefaults(&media_deps);
+ pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ RTC_LOG(LS_INFO) << "Media engine created: " << pcf_deps.media_engine.get();
+
+ auto factory = CreateModularPeerConnectionFactory(std::move(pcf_deps));
+ RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << factory.get();
+ RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+ "WebRTC/libjingle init likely failed on this device";
+
+ return factory;
+}
+
+TEST(PeerConnectionFactoryTest, NativeToJavaPeerConnectionFactory) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ RTC_LOG(LS_INFO) << "Initializing java peer connection factory.";
+ jni::Java_PeerConnectionFactoryInitializationHelper_initializeFactoryForTests(
+ jni);
+ RTC_LOG(LS_INFO) << "Java peer connection factory initialized.";
+
+ auto socket_server = std::make_unique<rtc::PhysicalSocketServer>();
+
+ // Create threads.
+ auto network_thread = std::make_unique<rtc::Thread>(socket_server.get());
+ network_thread->SetName("network_thread", nullptr);
+ RTC_CHECK(network_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
+ worker_thread->SetName("worker_thread", nullptr);
+ RTC_CHECK(worker_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
+ signaling_thread->SetName("signaling_thread", NULL);
+ RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory =
+ CreateTestPCF(jni, network_thread.get(), worker_thread.get(),
+ signaling_thread.get());
+
+ jobject java_factory = NativeToJavaPeerConnectionFactory(
+ jni, factory, std::move(socket_server), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread));
+
+ RTC_LOG(LS_INFO) << java_factory;
+
+ EXPECT_NE(java_factory, nullptr);
+}
+
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
new file mode 100644
index 0000000000..5cbd4aafe1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
@@ -0,0 +1,275 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/stacktrace/stacktrace.h"
+
+#include <dlfcn.h>
+
+#include <atomic>
+#include <memory>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/event.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/inline.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+// A simple atomic spin event. Implemented with std::atomic_flag, since the C++
+// standard guarantees that that type is implemented with actual atomic
+// instructions (as opposed to e.g. with a mutex). Uses sequentially consistent
+// memory order since this is a test, where simplicity trumps performance.
+class SimpleSpinEvent {
+ public:
+ // Initialize the event to its blocked state.
+ SimpleSpinEvent() {
+ static_cast<void>(blocked_.test_and_set(std::memory_order_seq_cst));
+ }
+
+ // Busy-wait for the event to become unblocked, and block it behind us as we
+ // leave.
+ void Wait() {
+ bool was_blocked;
+ do {
+ // Check if the event was blocked, and set it to blocked.
+ was_blocked = blocked_.test_and_set(std::memory_order_seq_cst);
+ } while (was_blocked);
+ }
+
+ // Unblock the event.
+ void Set() { blocked_.clear(std::memory_order_seq_cst); }
+
+ private:
+ std::atomic_flag blocked_;
+};
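+
+// Usage sketch (illustrative only): Wait() spins until a peer calls Set(),
+// and leaves the event blocked again on return, like an auto-reset event:
+//   SimpleSpinEvent ev;
+//   std::thread t([&] { ev.Wait(); /* unblocked here */ });
+//   ev.Set();
+//   t.join();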
+
+// Returns the execution address relative to the .so base address. This matches
+// the addresses we get from GetStacktrace().
+RTC_NO_INLINE uint32_t GetCurrentRelativeExecutionAddress() {
+ void* pc = __builtin_return_address(0);
+ Dl_info dl_info = {};
+ const bool success = dladdr(pc, &dl_info);
+ EXPECT_TRUE(success);
+ return static_cast<uint32_t>(reinterpret_cast<uintptr_t>(pc) -
+ reinterpret_cast<uintptr_t>(dl_info.dli_fbase));
+}
+
+// Returns true if any of the stack trace element is inside the specified
+// region.
+bool StackTraceContainsRange(const std::vector<StackTraceElement>& stack_trace,
+ uintptr_t pc_low,
+ uintptr_t pc_high) {
+ for (const StackTraceElement& stack_trace_element : stack_trace) {
+ if (pc_low <= stack_trace_element.relative_address &&
+ pc_high >= stack_trace_element.relative_address) {
+ return true;
+ }
+ }
+ return false;
+}
+
+class DeadlockInterface {
+ public:
+ virtual ~DeadlockInterface() {}
+
+ // This function should deadlock until Release() is called.
+ virtual void Deadlock() = 0;
+
+ // This function should release the thread stuck in Deadlock().
+ virtual void Release() = 0;
+};
+
+struct ThreadParams {
+ volatile int tid;
+ // Signaled when the deadlock region is entered.
+ SimpleSpinEvent deadlock_start_event;
+ DeadlockInterface* volatile deadlock_impl;
+ // Defines the address range within which the deadlock will occur.
+ volatile uint32_t deadlock_region_start_address;
+ volatile uint32_t deadlock_region_end_address;
+ // Signaled when the deadlock is done.
+ rtc::Event deadlock_done_event;
+};
+
+class RtcEventDeadlock : public DeadlockInterface {
+ private:
+ void Deadlock() override { event.Wait(rtc::Event::kForever); }
+ void Release() override { event.Set(); }
+
+ rtc::Event event;
+};
+
+class RtcCriticalSectionDeadlock : public DeadlockInterface {
+ public:
+ RtcCriticalSectionDeadlock()
+ : mutex_lock_(std::make_unique<MutexLock>(&mutex_)) {}
+
+ private:
+ void Deadlock() override { MutexLock lock(&mutex_); }
+
+ void Release() override { mutex_lock_.reset(); }
+
+ Mutex mutex_;
+ std::unique_ptr<MutexLock> mutex_lock_;
+};
+
+class SpinDeadlock : public DeadlockInterface {
+ public:
+ SpinDeadlock() : is_deadlocked_(true) {}
+
+ private:
+ void Deadlock() override {
+ while (is_deadlocked_) {
+ }
+ }
+
+ void Release() override { is_deadlocked_ = false; }
+
+ std::atomic<bool> is_deadlocked_;
+};
+
+class SleepDeadlock : public DeadlockInterface {
+ private:
+ void Deadlock() override { sleep(1000000); }
+
+ void Release() override {
+ // The interrupt itself will break free from the sleep.
+ }
+};
+
+void TestStacktrace(std::unique_ptr<DeadlockInterface> deadlock_impl) {
+ // Set params that will be sent to other thread.
+ ThreadParams params;
+ params.deadlock_impl = deadlock_impl.get();
+
+ // Spawn thread.
+ auto thread = rtc::PlatformThread::SpawnJoinable(
+ [&params] {
+ params.tid = gettid();
+ params.deadlock_region_start_address =
+ GetCurrentRelativeExecutionAddress();
+ params.deadlock_start_event.Set();
+ params.deadlock_impl->Deadlock();
+ params.deadlock_region_end_address =
+ GetCurrentRelativeExecutionAddress();
+ params.deadlock_done_event.Set();
+ },
+ "StacktraceTest");
+
+ // Wait until the thread has entered the deadlock region, and take a very
+ // brief nap to give it time to reach the actual deadlock.
+ params.deadlock_start_event.Wait();
+ SleepMs(1);
+
+ // Acquire the stack trace of the thread which should now be deadlocking.
+ std::vector<StackTraceElement> stack_trace = GetStackTrace(params.tid);
+
+ // Release the deadlock so that the thread can continue.
+ deadlock_impl->Release();
+
+ // Wait until the thread has left the deadlock.
+ params.deadlock_done_event.Wait(rtc::Event::kForever);
+
+ // Assert that the stack trace contains the deadlock region.
+ EXPECT_TRUE(StackTraceContainsRange(stack_trace,
+ params.deadlock_region_start_address,
+ params.deadlock_region_end_address))
+ << "Deadlock region: ["
+ << rtc::ToHex(params.deadlock_region_start_address) << ", "
+ << rtc::ToHex(params.deadlock_region_end_address)
+ << "] not contained in: " << StackTraceToString(stack_trace);
+}
+
+class LookoutLogSink final : public rtc::LogSink {
+ public:
+ explicit LookoutLogSink(std::string look_for)
+ : look_for_(std::move(look_for)) {}
+ void OnLogMessage(const std::string& message) override {
+ OnLogMessage(absl::string_view(message));
+ }
+ void OnLogMessage(absl::string_view message) override {
+ if (message.find(look_for_) != std::string::npos) {
+ when_found_.Set();
+ }
+ }
+ rtc::Event& WhenFound() { return when_found_; }
+
+ private:
+ const std::string look_for_;
+ rtc::Event when_found_;
+};
+
+} // namespace
+
+TEST(Stacktrace, TestCurrentThread) {
+ const uint32_t start_addr = GetCurrentRelativeExecutionAddress();
+ const std::vector<StackTraceElement> stack_trace = GetStackTrace();
+ const uint32_t end_addr = GetCurrentRelativeExecutionAddress();
+ EXPECT_TRUE(StackTraceContainsRange(stack_trace, start_addr, end_addr))
+ << "Caller region: [" << rtc::ToHex(start_addr) << ", "
+ << rtc::ToHex(end_addr)
+ << "] not contained in: " << StackTraceToString(stack_trace);
+}
+
+TEST(Stacktrace, TestSpinLock) {
+ TestStacktrace(std::make_unique<SpinDeadlock>());
+}
+
+TEST(Stacktrace, TestSleep) {
+ TestStacktrace(std::make_unique<SleepDeadlock>());
+}
+
+// Stack traces originating from kernel space do not include user space stack
+// frames on 32-bit ARM.
+#ifdef WEBRTC_ARCH_ARM64
+
+TEST(Stacktrace, TestRtcEvent) {
+ TestStacktrace(std::make_unique<RtcEventDeadlock>());
+}
+
+TEST(Stacktrace, TestRtcCriticalSection) {
+ TestStacktrace(std::make_unique<RtcCriticalSectionDeadlock>());
+}
+
+#endif
+
+TEST(Stacktrace, TestRtcEventDeadlockDetection) {
+ // Start looking for the expected log output.
+ LookoutLogSink sink(/*look_for=*/"Probable deadlock");
+ rtc::LogMessage::AddLogToStream(&sink, rtc::LS_WARNING);
+
+ // Start a thread that waits for an event.
+ rtc::Event ev;
+ auto thread = rtc::PlatformThread::SpawnJoinable(
+ [&ev] { ev.Wait(rtc::Event::kForever); },
+ "TestRtcEventDeadlockDetection");
+
+ // The message should appear after 3 sec. We'll wait up to 10 sec in an
+ // attempt to not be flaky.
+ EXPECT_TRUE(sink.WhenFound().Wait(10000));
+
+ // Unblock the thread and shut it down.
+ ev.Set();
+ thread.Finalize();
+ rtc::LogMessage::RemoveLogToStream(&sink);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/test_jni_onload.cc b/third_party/libwebrtc/sdk/android/native_unittests/test_jni_onload.cc
new file mode 100644
index 0000000000..dafe49c474
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/test_jni_onload.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "rtc_base/checks.h"
+#include "sdk/android/native_api/base/init.h"
+#include "sdk/android/native_api/jni/java_types.h"
+
+// This is called by the VM when the shared library is first loaded.
+JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved) {
+ webrtc::InitAndroid(vm);
+ return JNI_VERSION_1_4;
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/video/video_source_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/video/video_source_unittest.cc
new file mode 100644
index 0000000000..3c4eed1fc3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/video/video_source_unittest.cc
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "api/video/video_sink_interface.h"
+#include "sdk/android/generated_native_unittests_jni/JavaVideoSourceTestHelper_jni.h"
+#include "sdk/android/native_api/video/video_source.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+class TestVideoSink : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ void OnFrame(const VideoFrame& frame) { frames_.push_back(frame); }
+
+ std::vector<VideoFrame> GetFrames() {
+ std::vector<VideoFrame> temp = frames_;
+ frames_.clear();
+ return temp;
+ }
+
+ private:
+ std::vector<VideoFrame> frames_;
+};
+} // namespace
+
+TEST(JavaVideoSourceTest, CreateJavaVideoSource) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+
+ ASSERT_NE(nullptr, video_track_source);
+ EXPECT_NE(nullptr,
+ video_track_source->GetJavaVideoCapturerObserver(env).obj());
+}
+
+TEST(JavaVideoSourceTest, OnFrameCapturedFrameIsDeliveredToSink) {
+ TestVideoSink test_video_sink;
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+ video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ true /* success */);
+ const int width = 20;
+ const int height = 32;
+ const int rotation = 180;
+ const int64_t timestamp = 987654321;
+ jni::Java_JavaVideoSourceTestHelper_deliverFrame(
+ env, width, height, rotation, timestamp,
+ video_track_source->GetJavaVideoCapturerObserver(env));
+
+ std::vector<VideoFrame> frames = test_video_sink.GetFrames();
+ ASSERT_EQ(1u, frames.size());
+ webrtc::VideoFrame frame = frames[0];
+ EXPECT_EQ(width, frame.width());
+ EXPECT_EQ(height, frame.height());
+ EXPECT_EQ(rotation, frame.rotation());
+}
+
+TEST(JavaVideoSourceTest,
+ OnFrameCapturedFrameIsDeliveredToSinkWithPreservedTimestamp) {
+ TestVideoSink test_video_sink;
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, false /* align_timestamps */);
+ video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ true /* success */);
+ const int width = 20;
+ const int height = 32;
+ const int rotation = 180;
+ const int64_t timestamp = 987654321;
+ jni::Java_JavaVideoSourceTestHelper_deliverFrame(
+ env, width, height, rotation, timestamp,
+ video_track_source->GetJavaVideoCapturerObserver(env));
+
+ std::vector<VideoFrame> frames = test_video_sink.GetFrames();
+ ASSERT_EQ(1u, frames.size());
+ webrtc::VideoFrame frame = frames[0];
+ EXPECT_EQ(width, frame.width());
+ EXPECT_EQ(height, frame.height());
+ EXPECT_EQ(rotation, frame.rotation());
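+ // The Java helper delivers `timestamp` in nanoseconds while timestamp_us()
+ // is in microseconds, hence the division by 1000.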
+ EXPECT_EQ(timestamp / 1000, frame.timestamp_us());
+}
+
+TEST(JavaVideoSourceTest, CapturerStartedSuccessStateBecomesLive) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ true /* success */);
+
+ EXPECT_EQ(VideoTrackSourceInterface::SourceState::kLive,
+ video_track_source->state());
+}
+
+TEST(JavaVideoSourceTest, CapturerStartedFailureStateBecomesEnded) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ false /* success */);
+
+ EXPECT_EQ(VideoTrackSourceInterface::SourceState::kEnded,
+ video_track_source->state());
+}
+
+TEST(JavaVideoSourceTest, CapturerStoppedStateBecomesEnded) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ true /* success */);
+ jni::Java_JavaVideoSourceTestHelper_stopCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env));
+
+ EXPECT_EQ(VideoTrackSourceInterface::SourceState::kEnded,
+ video_track_source->state());
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java
new file mode 100644
index 0000000000..ad40898e4c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java
@@ -0,0 +1,684 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaFormat;
+import android.os.SystemClock;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.ThreadUtils.ThreadChecker;
+
+/**
+ * Android hardware video decoder.
+ */
+@SuppressWarnings("deprecation")
+// Cannot support API 16 without using deprecated methods.
+// TODO(sakal): Rename to MediaCodecVideoDecoder once the deprecated implementation is removed.
+class AndroidVideoDecoder implements VideoDecoder, VideoSink {
+ private static final String TAG = "AndroidVideoDecoder";
+
+ // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
+ private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
+ private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
+ private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
+ private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
+ private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
+ private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
+
+ // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
+ // this timeout.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+
+ // WebRTC queues input frames quickly at the beginning of the call. Wait for input buffers with a
+ // long timeout (500 ms) to prevent this from causing the codec to return an error.
+ private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
+
+ // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
+ // If this timeout is exceeded, the output thread will unblock and check if the decoder is still
+ // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
+ // MediaCodec.
+ private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
+
+ private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
+ private final String codecName;
+ private final VideoCodecMimeType codecType;
+
+ private static class FrameInfo {
+ final long decodeStartTimeMs;
+ final int rotation;
+
+ FrameInfo(long decodeStartTimeMs, int rotation) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.rotation = rotation;
+ }
+ }
+
+ private final BlockingDeque<FrameInfo> frameInfos;
+ private int colorFormat;
+
+ // Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats
+ // those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder
+ // thread and is immutable while the codec is running.
+ @Nullable private Thread outputThread;
+
+ // Checker that ensures work is run on the output thread.
+ private ThreadChecker outputThreadChecker;
+
+ // Checker that ensures work is run on the decoder thread. The decoder thread is owned by the
+ // caller and must be used to call initDecode, decode, and release.
+ private ThreadChecker decoderThreadChecker;
+
+ private volatile boolean running;
+ @Nullable private volatile Exception shutdownException;
+
+ // Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decoder
+ // thread or the output thread. Accesses should be protected with this lock.
+ private final Object dimensionLock = new Object();
+ private int width;
+ private int height;
+ private int stride;
+ private int sliceHeight;
+
+ // Whether the decoder has finished the first frame. The codec may not change output dimensions
+ // after delivering the first frame. Only accessed on the output thread while the decoder is
+ // running.
+ private boolean hasDecodedFirstFrame;
+ // Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed
+ // on the decoder thread.
+ private boolean keyFrameRequired;
+
+ private final @Nullable EglBase.Context sharedContext;
+ // Valid and immutable while the decoder is running.
+ @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+ @Nullable private Surface surface;
+
+ private static class DecodedTextureMetadata {
+ final long presentationTimestampUs;
+ final Integer decodeTimeMs;
+
+ DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) {
+ this.presentationTimestampUs = presentationTimestampUs;
+ this.decodeTimeMs = decodeTimeMs;
+ }
+ }
+
+ // Metadata for the last frame rendered to the texture.
+ private final Object renderedTextureMetadataLock = new Object();
+ @Nullable private DecodedTextureMetadata renderedTextureMetadata;
+
+ // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid
+ // and immutable while the decoder is running.
+ @Nullable private Callback callback;
+
+ // Valid and immutable while the decoder is running.
+ @Nullable private MediaCodecWrapper codec;
+
+ AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) {
+ if (!isSupportedColorFormat(colorFormat)) {
+ throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
+ }
+ Logging.d(TAG,
+ "ctor name: " + codecName + " type: " + codecType + " color format: " + colorFormat
+ + " context: " + sharedContext);
+ this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
+ this.codecName = codecName;
+ this.codecType = codecType;
+ this.colorFormat = colorFormat;
+ this.sharedContext = sharedContext;
+ this.frameInfos = new LinkedBlockingDeque<>();
+ }
+
+ @Override
+ public VideoCodecStatus initDecode(Settings settings, Callback callback) {
+ this.decoderThreadChecker = new ThreadChecker();
+
+ this.callback = callback;
+ if (sharedContext != null) {
+ surfaceTextureHelper = createSurfaceTextureHelper();
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ surfaceTextureHelper.startListening(this);
+ }
+ return initDecodeInternal(settings.width, settings.height);
+ }
+
+ // Internal variant is used when restarting the codec due to reconfiguration.
+ private VideoCodecStatus initDecodeInternal(int width, int height) {
+ decoderThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG,
+ "initDecodeInternal name: " + codecName + " type: " + codecType + " width: " + width
+ + " height: " + height);
+ if (outputThread != null) {
+ Logging.e(TAG, "initDecodeInternal called while the codec is already running");
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ // Note: it is not necessary to initialize dimensions under the lock, since the output thread
+ // is not running.
+ this.width = width;
+ this.height = height;
+
+ stride = width;
+ sliceHeight = height;
+ hasDecodedFirstFrame = false;
+ keyFrameRequired = true;
+
+ try {
+ codec = mediaCodecWrapperFactory.createByCodecName(codecName);
+ } catch (IOException | IllegalArgumentException | IllegalStateException e) {
+ Logging.e(TAG, "Cannot create media decoder " + codecName);
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
+ if (sharedContext == null) {
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ }
+ codec.configure(format, surface, null, 0);
+ codec.start();
+ } catch (IllegalStateException | IllegalArgumentException e) {
+ Logging.e(TAG, "initDecode failed", e);
+ release();
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+ running = true;
+ outputThread = createOutputThread();
+ outputThread.start();
+
+ Logging.d(TAG, "initDecodeInternal done");
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
+ decoderThreadChecker.checkIsOnValidThread();
+ if (codec == null || callback == null) {
+      Logging.d(TAG, "decode uninitialized, codec: " + (codec != null) + ", callback: " + callback);
+ return VideoCodecStatus.UNINITIALIZED;
+ }
+
+ if (frame.buffer == null) {
+ Logging.e(TAG, "decode() - no input data");
+ return VideoCodecStatus.ERR_PARAMETER;
+ }
+
+ int size = frame.buffer.remaining();
+ if (size == 0) {
+ Logging.e(TAG, "decode() - input buffer empty");
+ return VideoCodecStatus.ERR_PARAMETER;
+ }
+
+ // Load dimensions from shared memory under the dimension lock.
+ final int width;
+ final int height;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ }
+
+ // Check if the resolution changed and reset the codec if necessary.
+ if (frame.encodedWidth * frame.encodedHeight > 0
+ && (frame.encodedWidth != width || frame.encodedHeight != height)) {
+ VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight);
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ }
+
+ if (keyFrameRequired) {
+ // Need to process a key frame first.
+ if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
+ Logging.e(TAG, "decode() - key frame required first");
+ return VideoCodecStatus.NO_OUTPUT;
+ }
+ }
+
+ int index;
+ try {
+ index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueInputBuffer failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ if (index < 0) {
+ // Decoder is falling behind. No input buffers available.
+ // The decoder can't simply drop frames; it might lose a key frame.
+ Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind");
+ return VideoCodecStatus.ERROR;
+ }
+
+ ByteBuffer buffer;
+ try {
+ buffer = codec.getInputBuffer(index);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+
+ if (buffer.capacity() < size) {
+ Logging.e(TAG, "decode() - HW buffer too small");
+ return VideoCodecStatus.ERROR;
+ }
+ buffer.put(frame.buffer);
+
+ frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
+ try {
+ codec.queueInputBuffer(index, 0 /* offset */, size,
+ TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "queueInputBuffer failed", e);
+ frameInfos.pollLast();
+ return VideoCodecStatus.ERROR;
+ }
+ if (keyFrameRequired) {
+ keyFrameRequired = false;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return codecName;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ // TODO(sakal): This is not called on the correct thread but is still called synchronously.
+ // Re-enable the check once this is called on the correct thread.
+ // decoderThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "release");
+ VideoCodecStatus status = releaseInternal();
+ if (surface != null) {
+ releaseSurface();
+ surface = null;
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper.dispose();
+ surfaceTextureHelper = null;
+ }
+ synchronized (renderedTextureMetadataLock) {
+ renderedTextureMetadata = null;
+ }
+ callback = null;
+ frameInfos.clear();
+ return status;
+ }
+
+ // Internal variant is used when restarting the codec due to reconfiguration.
+ private VideoCodecStatus releaseInternal() {
+ if (!running) {
+ Logging.d(TAG, "release: Decoder is not running.");
+ return VideoCodecStatus.OK;
+ }
+ try {
+ // The outputThread actually stops and releases the codec once running is false.
+ running = false;
+ if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ // Log an exception to capture the stack trace and turn it into a TIMEOUT error.
+ Logging.e(TAG, "Media decoder release timeout", new RuntimeException());
+ return VideoCodecStatus.TIMEOUT;
+ }
+ if (shutdownException != null) {
+ // Log the exception and turn it into an error. Wrap the exception in a new exception to
+ // capture both the output thread's stack trace and this thread's stack trace.
+ Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException));
+ shutdownException = null;
+ return VideoCodecStatus.ERROR;
+ }
+ } finally {
+ codec = null;
+ outputThread = null;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
+ decoderThreadChecker.checkIsOnValidThread();
+ VideoCodecStatus status = releaseInternal();
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ return initDecodeInternal(newWidth, newHeight);
+ }
+
+ private Thread createOutputThread() {
+ return new Thread("AndroidVideoDecoder.outputThread") {
+ @Override
+ public void run() {
+ outputThreadChecker = new ThreadChecker();
+ while (running) {
+ deliverDecodedFrame();
+ }
+ releaseCodecOnOutputThread();
+ }
+ };
+ }
+
+ // Visible for testing.
+ protected void deliverDecodedFrame() {
+ outputThreadChecker.checkIsOnValidThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ // Block until an output buffer is available (up to 100 milliseconds). If the timeout is
+ // exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
+ // thread's loop. Blocking here prevents the output thread from busy-waiting while the codec
+ // is idle.
+ int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
+ if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ reformat(codec.getOutputFormat());
+ return;
+ }
+
+ if (index < 0) {
+ Logging.v(TAG, "dequeueOutputBuffer returned " + index);
+ return;
+ }
+
+ FrameInfo frameInfo = frameInfos.poll();
+ Integer decodeTimeMs = null;
+ int rotation = 0;
+ if (frameInfo != null) {
+ decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
+ rotation = frameInfo.rotation;
+ }
+
+ hasDecodedFirstFrame = true;
+
+ if (surfaceTextureHelper != null) {
+ deliverTextureFrame(index, info, rotation, decodeTimeMs);
+ } else {
+ deliverByteFrame(index, info, rotation, decodeTimeMs);
+ }
+
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "deliverDecodedFrame failed", e);
+ }
+ }
+
+ private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
+ final int rotation, final Integer decodeTimeMs) {
+ // Load dimensions from shared memory under the dimension lock.
+ final int width;
+ final int height;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ }
+
+ synchronized (renderedTextureMetadataLock) {
+ if (renderedTextureMetadata != null) {
+ codec.releaseOutputBuffer(index, false);
+ return; // We are still waiting for texture for the previous frame, drop this one.
+ }
+ surfaceTextureHelper.setTextureSize(width, height);
+ surfaceTextureHelper.setFrameRotation(rotation);
+ renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
+ codec.releaseOutputBuffer(index, /* render= */ true);
+ }
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ final VideoFrame newFrame;
+ final Integer decodeTimeMs;
+ final long timestampNs;
+ synchronized (renderedTextureMetadataLock) {
+ if (renderedTextureMetadata == null) {
+ throw new IllegalStateException(
+            "Rendered texture metadata was null in onFrame.");
+ }
+ timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000;
+ decodeTimeMs = renderedTextureMetadata.decodeTimeMs;
+ renderedTextureMetadata = null;
+ }
+    // Replace the frame's timestamp with the presentation timestamp saved when the buffer was
+    // rendered.
+ final VideoFrame frameWithModifiedTimeStamp =
+ new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
+ callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);
+ }
+
+ private void deliverByteFrame(
+ int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
+ // Load dimensions from shared memory under the dimension lock.
+ int width;
+ int height;
+ int stride;
+ int sliceHeight;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ stride = this.stride;
+ sliceHeight = this.sliceHeight;
+ }
+
+ // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
+ // bytes for each of the U and V channels.
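+    // (For illustration: a 320x240 frame needs at least 320 * 240 * 3 / 2 = 115200 bytes.)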
+ if (info.size < width * height * 3 / 2) {
+ Logging.e(TAG, "Insufficient output buffer size: " + info.size);
+ return;
+ }
+
+ if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
+ // Some codecs (Exynos) report an incorrect stride. Correct it here.
+ // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
+ // 2 * size / (3 * height).
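+      // (Hypothetical numbers for illustration: info.size = 115200 with height = 240 gives
+      // stride = 2 * 115200 / (3 * 240) = 320.)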
+ stride = info.size * 2 / (height * 3);
+ }
+
+ ByteBuffer buffer = codec.getOutputBuffer(index);
+ buffer.position(info.offset);
+ buffer.limit(info.offset + info.size);
+ buffer = buffer.slice();
+
+ final VideoFrame.Buffer frameBuffer;
+ if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
+ frameBuffer = copyI420Buffer(buffer, stride, sliceHeight, width, height);
+ } else {
+ // All other supported color formats are NV12.
+ frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height);
+ }
+ codec.releaseOutputBuffer(index, /* render= */ false);
+
+ long presentationTimeNs = info.presentationTimeUs * 1000;
+ VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
+
+ // Note that qp is parsed on the C++ side.
+ callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
+ frame.release();
+ }
+
+ private VideoFrame.Buffer copyNV12ToI420Buffer(
+ ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+ // toI420 copies the buffer.
+ return new NV12Buffer(width, height, stride, sliceHeight, buffer, null /* releaseCallback */)
+ .toI420();
+ }
+
+ private VideoFrame.Buffer copyI420Buffer(
+ ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+ if (stride % 2 != 0) {
+ throw new AssertionError("Stride is not divisible by two: " + stride);
+ }
+
+ // Note that the case with odd `sliceHeight` is handled in a special way.
+ // The chroma height contained in the payload is rounded down instead of
+ // up, making it one row less than what we expect in WebRTC. Therefore, we
+ // have to duplicate the last chroma rows for this case. Also, the offset
+ // between the Y plane and the U plane is unintuitive for this case. See
+ // http://bugs.webrtc.org/6651 for more info.
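+    // Worked example with hypothetical numbers: height = 5, sliceHeight = 5 (odd). The payload
+    // then carries height / 2 = 2 chroma rows, while the I420 buffer allocated below needs
+    // (height + 1) / 2 = 3, so the second chroma row is duplicated into the third by the copies
+    // that follow.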
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
+
+ final int uvStride = stride / 2;
+
+ final int yPos = 0;
+ final int yEnd = yPos + stride * height;
+ final int uPos = yPos + stride * sliceHeight;
+ final int uEnd = uPos + uvStride * chromaHeight;
+ final int vPos = uPos + uvStride * sliceHeight / 2;
+ final int vEnd = vPos + uvStride * chromaHeight;
+
+ VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height);
+
+ buffer.limit(yEnd);
+ buffer.position(yPos);
+ copyPlane(
+ buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height);
+
+ buffer.limit(uEnd);
+ buffer.position(uPos);
+ copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(),
+ chromaWidth, chromaHeight);
+ if (sliceHeight % 2 == 1) {
+ buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+ ByteBuffer dataU = frameBuffer.getDataU();
+ dataU.position(frameBuffer.getStrideU() * chromaHeight); // Seek to beginning of last row.
+ dataU.put(buffer); // Copy the last row.
+ }
+
+ buffer.limit(vEnd);
+ buffer.position(vPos);
+ copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(),
+ chromaWidth, chromaHeight);
+ if (sliceHeight % 2 == 1) {
+ buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+ ByteBuffer dataV = frameBuffer.getDataV();
+ dataV.position(frameBuffer.getStrideV() * chromaHeight); // Seek to beginning of last row.
+ dataV.put(buffer); // Copy the last row.
+ }
+
+ return frameBuffer;
+ }
+
+ private void reformat(MediaFormat format) {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Decoder format changed: " + format.toString());
+ final int newWidth;
+ final int newHeight;
+ if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) {
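+      // The crop keys hold inclusive pixel indices, hence the "1 +" below: a 640x480 frame would
+      // report crop-right 639 and crop-bottom 479 (hypothetical values for illustration).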
+ newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT)
+ - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT);
+ newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM)
+ - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP);
+ } else {
+ newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+ newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+ }
+ // Compare to existing width, height, and save values under the dimension lock.
+ synchronized (dimensionLock) {
+ if (newWidth != width || newHeight != height) {
+ if (hasDecodedFirstFrame) {
+ stopOnOutputThread(new RuntimeException("Unexpected size change. "
+ + "Configured " + width + "*" + height + ". "
+ + "New " + newWidth + "*" + newHeight));
+ return;
+ } else if (newWidth <= 0 || newHeight <= 0) {
+ Logging.w(TAG,
+ "Unexpected format dimensions. Configured " + width + "*" + height + ". "
+ + "New " + newWidth + "*" + newHeight + ". Skip it");
+ return;
+ }
+ width = newWidth;
+ height = newHeight;
+ }
+ }
+
+ // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
+ // color format updates.
+ if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+ colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+ if (!isSupportedColorFormat(colorFormat)) {
+ stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
+ return;
+ }
+ }
+
+ // Save stride and sliceHeight under the dimension lock.
+ synchronized (dimensionLock) {
+ if (format.containsKey(MEDIA_FORMAT_KEY_STRIDE)) {
+ stride = format.getInteger(MEDIA_FORMAT_KEY_STRIDE);
+ }
+ if (format.containsKey(MEDIA_FORMAT_KEY_SLICE_HEIGHT)) {
+ sliceHeight = format.getInteger(MEDIA_FORMAT_KEY_SLICE_HEIGHT);
+ }
+ Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+ stride = Math.max(width, stride);
+ sliceHeight = Math.max(height, sliceHeight);
+ }
+ }
+
+ private void releaseCodecOnOutputThread() {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Releasing MediaCodec on output thread");
+ try {
+ codec.stop();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder stop failed", e);
+ }
+ try {
+ codec.release();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+      // Propagate exceptions caught during release back to the thread that called release().
+ shutdownException = e;
+ }
+ Logging.d(TAG, "Release on output thread done");
+ }
+
+ private void stopOnOutputThread(Exception e) {
+ outputThreadChecker.checkIsOnValidThread();
+ running = false;
+ shutdownException = e;
+ }
+
+ private boolean isSupportedColorFormat(int colorFormat) {
+ for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) {
+ if (supported == colorFormat) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ // Visible for testing.
+ protected SurfaceTextureHelper createSurfaceTextureHelper() {
+ return SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
+ }
+
+ // Visible for testing.
+ // TODO(sakal): Remove once Robolectric commit fa991a0 has been rolled to WebRTC.
+ protected void releaseSurface() {
+ surface.release();
+ }
+
+ // Visible for testing.
+ protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) {
+ return JavaI420Buffer.allocate(width, height);
+ }
+
+ // Visible for testing.
+ protected void copyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+ YuvHelper.copyPlane(src, srcStride, dst, dstStride, width, height);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java
new file mode 100644
index 0000000000..3b5f5d2931
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
+class BaseBitrateAdjuster implements BitrateAdjuster {
+ protected int targetBitrateBps;
+ protected double targetFramerateFps;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ this.targetBitrateBps = targetBitrateBps;
+ this.targetFramerateFps = targetFramerateFps;
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ // No op.
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return targetBitrateBps;
+ }
+
+ @Override
+ public double getAdjustedFramerateFps() {
+ return targetFramerateFps;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java
new file mode 100644
index 0000000000..bfa08bad89
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Object that adjusts the bitrate of a hardware codec. */
+interface BitrateAdjuster {
+ /**
+ * Sets the target bitrate in bits per second and framerate in frames per second.
+ */
+ void setTargets(int targetBitrateBps, double targetFramerateFps);
+
+ /**
+   * Reports the size of an encoded frame to the bitrate adjuster. Call
+   * getAdjustedBitrateBps afterwards to read the updated bitrate.
+ */
+ void reportEncodedFrame(int size);
+
+ /** Gets the current bitrate. */
+ int getAdjustedBitrateBps();
+
+ /** Gets the current framerate. */
+ double getAdjustedFramerateFps();
+}
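+
+// Illustrative usage sketch (hypothetical caller code, for documentation only):
+//
+//   BitrateAdjuster adjuster = new BaseBitrateAdjuster();
+//   adjuster.setTargets(300000 /* bps */, 30.0 /* fps */);
+//   adjuster.reportEncodedFrame(encodedFrameSizeBytes);
+//   int bitrateBps = adjuster.getAdjustedBitrateBps(); // Still 300000: the base
+//   double fps = adjuster.getAdjustedFramerateFps();   // class adjusts nothing.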
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java
new file mode 100644
index 0000000000..9b410ceaef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @CalledByNative is used by the JNI generator to create the necessary JNI
+ * bindings and expose this method to native code.
+ */
+@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
+@Retention(RetentionPolicy.CLASS)
+public @interface CalledByNative {
+ /*
+ * If present, tells which inner class the method belongs to.
+ */
+ public String value() default "";
+}
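+
+// Illustrative usage sketch (hypothetical class, for documentation only):
+//
+//   class Outer {
+//     @CalledByNative
+//     void onNativeEvent() {}
+//
+//     static class Inner {
+//       // Methods of an inner class name that class in the annotation value:
+//       @CalledByNative("Inner")
+//       void onNativeEvent() {}
+//     }
+//   }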
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java
new file mode 100644
index 0000000000..8a00a7fadb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @CalledByNativeUnchecked is used to generate JNI bindings that do not check for exceptions.
+ * It only makes sense to use this annotation on methods that declare a throws... spec.
+ * However, note that the exception received on the native side may be an 'unchecked' exception
+ * (RuntimeException) such as NullPointerException, so the native code should differentiate these
+ * cases. Usage of this should be very rare; where possible, handle exceptions on the Java side
+ * and use a return value to indicate success / failure.
+ */
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.CLASS)
+public @interface CalledByNativeUnchecked {
+ /*
+ * If present, tells which inner class the method belongs to.
+ */
+ public String value() default "";
+}
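+
+// Illustrative usage sketch (hypothetical method, for documentation only). The
+// generated binding does not check for the exception, so it reaches the native
+// caller, which must handle or rethrow it:
+//
+//   @CalledByNativeUnchecked
+//   void parse(String json) throws JSONException { ... }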
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java
new file mode 100644
index 0000000000..a54f7201b2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.os.Handler;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+@SuppressWarnings("deprecation")
+class Camera1Session implements CameraSession {
+ private static final String TAG = "Camera1Session";
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+
+ private static final Histogram camera1StartTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
+ private static final Histogram camera1StopTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
+ private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
+ "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
+
+ private static enum SessionState { RUNNING, STOPPED }
+
+ private final Handler cameraThreadHandler;
+ private final Events events;
+ private final boolean captureToTexture;
+ private final Context applicationContext;
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final int cameraId;
+ private final Camera camera;
+ private final Camera.CameraInfo info;
+ private final CaptureFormat captureFormat;
+ // Used only for stats. Only used on the camera thread.
+ private final long constructionTimeNs; // Construction time of this class.
+
+ private SessionState state;
+ private boolean firstFrameReported;
+
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ public static void create(final CreateSessionCallback callback, final Events events,
+ final boolean captureToTexture, final Context applicationContext,
+ final SurfaceTextureHelper surfaceTextureHelper, final String cameraName,
+ final int width, final int height, final int framerate) {
+ final long constructionTimeNs = System.nanoTime();
+ Logging.d(TAG, "Open camera " + cameraName);
+ events.onCameraOpening();
+
+ final int cameraId;
+ try {
+ cameraId = Camera1Enumerator.getCameraIndex(cameraName);
+ } catch (IllegalArgumentException e) {
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ final Camera camera;
+ try {
+ camera = Camera.open(cameraId);
+ } catch (RuntimeException e) {
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ if (camera == null) {
+ callback.onFailure(
+ FailureType.ERROR, "Camera.open returned null for camera id = " + cameraId);
+ return;
+ }
+
+ try {
+ camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
+ } catch (IOException | RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ final Camera.CameraInfo info = new Camera.CameraInfo();
+ Camera.getCameraInfo(cameraId, info);
+
+ final CaptureFormat captureFormat;
+ try {
+ final Camera.Parameters parameters = camera.getParameters();
+ captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
+ final Size pictureSize = findClosestPictureSize(parameters, width, height);
+ updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
+ } catch (RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ if (!captureToTexture) {
+ final int frameSize = captureFormat.frameSize();
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+ camera.addCallbackBuffer(buffer.array());
+ }
+ }
+
+ // Calculate orientation manually and send it as CVO instead.
+ try {
+ camera.setDisplayOrientation(0 /* degrees */);
+ } catch (RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
+ surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
+ }
+
+ private static void updateCameraParameters(Camera camera, Camera.Parameters parameters,
+ CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) {
+ final List<String> focusModes = parameters.getSupportedFocusModes();
+
+ parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
+ parameters.setPreviewSize(captureFormat.width, captureFormat.height);
+ parameters.setPictureSize(pictureSize.width, pictureSize.height);
+ if (!captureToTexture) {
+ parameters.setPreviewFormat(captureFormat.imageFormat);
+ }
+
+ if (parameters.isVideoStabilizationSupported()) {
+ parameters.setVideoStabilization(true);
+ }
+ if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ }
+ camera.setParameters(parameters);
+ }
+
+ private static CaptureFormat findClosestCaptureFormat(
+ Camera.Parameters parameters, int width, int height, int framerate) {
+ // Find closest supported format for `width` x `height` @ `framerate`.
+ final List<CaptureFormat.FramerateRange> supportedFramerates =
+ Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
+ Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
+
+ final CaptureFormat.FramerateRange fpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
+
+ final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
+ CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
+
+ return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
+ }
+
+ private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) {
+ return CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
+ }
+
+ private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera,
+ Camera.CameraInfo info, CaptureFormat captureFormat, long constructionTimeNs) {
+ Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
+
+ this.cameraThreadHandler = new Handler();
+ this.events = events;
+ this.captureToTexture = captureToTexture;
+ this.applicationContext = applicationContext;
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.cameraId = cameraId;
+ this.camera = camera;
+ this.info = info;
+ this.captureFormat = captureFormat;
+ this.constructionTimeNs = constructionTimeNs;
+
+ surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+
+ startCapturing();
+ }
+
+ @Override
+ public void stop() {
+ Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
+ checkIsOnCameraThread();
+ if (state != SessionState.STOPPED) {
+ final long stopStartTime = System.nanoTime();
+ stopInternal();
+ final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+ camera1StopTimeMsHistogram.addSample(stopTimeMs);
+ }
+ }
+
+ private void startCapturing() {
+ Logging.d(TAG, "Start capturing");
+ checkIsOnCameraThread();
+
+ state = SessionState.RUNNING;
+
+ camera.setErrorCallback(new Camera.ErrorCallback() {
+ @Override
+ public void onError(int error, Camera camera) {
+ String errorMessage;
+ if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
+ errorMessage = "Camera server died!";
+ } else {
+ errorMessage = "Camera error: " + error;
+ }
+ Logging.e(TAG, errorMessage);
+ stopInternal();
+ if (error == Camera.CAMERA_ERROR_EVICTED) {
+ events.onCameraDisconnected(Camera1Session.this);
+ } else {
+ events.onCameraError(Camera1Session.this, errorMessage);
+ }
+ }
+ });
+
+ if (captureToTexture) {
+ listenForTextureFrames();
+ } else {
+ listenForBytebufferFrames();
+ }
+ try {
+ camera.startPreview();
+ } catch (RuntimeException e) {
+ stopInternal();
+ events.onCameraError(this, e.getMessage());
+ }
+ }
+
+ private void stopInternal() {
+ Logging.d(TAG, "Stop internal");
+ checkIsOnCameraThread();
+ if (state == SessionState.STOPPED) {
+ Logging.d(TAG, "Camera is already stopped");
+ return;
+ }
+
+ state = SessionState.STOPPED;
+ surfaceTextureHelper.stopListening();
+ // Note: stopPreview or other driver code might deadlock. Deadlock in
+ // Camera._stopPreview(Native Method) has been observed on
+ // Nexus 5 (hammerhead), OS version LMY48I.
+ camera.stopPreview();
+ camera.release();
+ events.onCameraClosed(this);
+ Logging.d(TAG, "Stop done");
+ }
+
+ private void listenForTextureFrames() {
+ surfaceTextureHelper.startListening((VideoFrame frame) -> {
+ checkIsOnCameraThread();
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+ return;
+ }
+
+ if (!firstFrameReported) {
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera1StartTimeMsHistogram.addSample(startTimeMs);
+ firstFrameReported = true;
+ }
+
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ final VideoFrame modifiedFrame =
+ new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+ (TextureBufferImpl) frame.getBuffer(),
+ /* mirror= */ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT,
+ /* rotation= */ 0),
+ /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+ events.onFrameCaptured(Camera1Session.this, modifiedFrame);
+ modifiedFrame.release();
+ });
+ }
+
+ private void listenForBytebufferFrames() {
+ camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
+ @Override
+ public void onPreviewFrame(final byte[] data, Camera callbackCamera) {
+ checkIsOnCameraThread();
+
+ if (callbackCamera != camera) {
+ Logging.e(TAG, "Callback from a different camera. This should never happen.");
+ return;
+ }
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
+ return;
+ }
+
+ final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+
+ if (!firstFrameReported) {
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera1StartTimeMsHistogram.addSample(startTimeMs);
+ firstFrameReported = true;
+ }
+
+ VideoFrame.Buffer frameBuffer = new NV21Buffer(
+ data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
+ if (state == SessionState.RUNNING) {
+ camera.addCallbackBuffer(data);
+ }
+ }));
+ final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
+ events.onFrameCaptured(Camera1Session.this, frame);
+ frame.release();
+ }
+ });
+ }
+
+ private int getFrameOrientation() {
+ int rotation = CameraSession.getDeviceOrientation(applicationContext);
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+ rotation = 360 - rotation;
+ }
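+    // Example with hypothetical values: a back-facing sensor mounted at info.orientation = 90 on
+    // a device rotated to 270 degrees yields (90 + (360 - 270)) % 360 = 180.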
+ return (info.orientation + rotation) % 360;
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java
new file mode 100644
index 0000000000..d5ee80c73e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java
@@ -0,0 +1,428 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Handler;
+import android.util.Range;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+class Camera2Session implements CameraSession {
+ private static final String TAG = "Camera2Session";
+
+ private static final Histogram camera2StartTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
+ private static final Histogram camera2StopTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
+ private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
+ "WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
+
+ private static enum SessionState { RUNNING, STOPPED }
+
+ private final Handler cameraThreadHandler;
+ private final CreateSessionCallback callback;
+ private final Events events;
+ private final Context applicationContext;
+ private final CameraManager cameraManager;
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final String cameraId;
+ private final int width;
+ private final int height;
+ private final int framerate;
+
+ // Initialized at start
+ private CameraCharacteristics cameraCharacteristics;
+ private int cameraOrientation;
+ private boolean isCameraFrontFacing;
+ private int fpsUnitFactor;
+ private CaptureFormat captureFormat;
+
+ // Initialized when camera opens
+ @Nullable private CameraDevice cameraDevice;
+ @Nullable private Surface surface;
+
+ // Initialized when capture session is created
+ @Nullable private CameraCaptureSession captureSession;
+
+ // State
+ private SessionState state = SessionState.RUNNING;
+ private boolean firstFrameReported;
+
+ // Used only for stats. Only used on the camera thread.
+ private final long constructionTimeNs; // Construction time of this class.
+
+ private class CameraStateCallback extends CameraDevice.StateCallback {
+ private String getErrorDescription(int errorCode) {
+ switch (errorCode) {
+ case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
+ return "Camera device has encountered a fatal error.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
+ return "Camera device could not be opened due to a device policy.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
+ return "Camera device is in use already.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
+ return "Camera service has encountered a fatal error.";
+ case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
+ return "Camera device could not be opened because"
+ + " there are too many other open camera devices.";
+ default:
+ return "Unknown camera error: " + errorCode;
+ }
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice camera) {
+ checkIsOnCameraThread();
+ final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
+ state = SessionState.STOPPED;
+ stopInternal();
+ if (startFailure) {
+ callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
+ } else {
+ events.onCameraDisconnected(Camera2Session.this);
+ }
+ }
+
+ @Override
+ public void onError(CameraDevice camera, int errorCode) {
+ checkIsOnCameraThread();
+ reportError(getErrorDescription(errorCode));
+ }
+
+ @Override
+ public void onOpened(CameraDevice camera) {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Camera opened.");
+ cameraDevice = camera;
+
+ surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ try {
+ camera.createCaptureSession(
+ Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to create capture session. " + e);
+ return;
+ }
+ }
+
+ @Override
+ public void onClosed(CameraDevice camera) {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Camera device closed.");
+ events.onCameraClosed(Camera2Session.this);
+ }
+ }
+
+ private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
+ @Override
+ public void onConfigureFailed(CameraCaptureSession session) {
+ checkIsOnCameraThread();
+ session.close();
+ reportError("Failed to configure capture session.");
+ }
+
+ @Override
+ public void onConfigured(CameraCaptureSession session) {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "Camera capture session configured.");
+ captureSession = session;
+ try {
+ /*
+ * The viable options for video capture requests are:
+ * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
+ * post-processing.
+ * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
+ * quality.
+ */
+ final CaptureRequest.Builder captureRequestBuilder =
+ cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+ // Set auto exposure fps range.
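+        // CaptureFormat stores framerate ranges in thousandths of fps (see
+        // Camera2Enumerator.convertFramerates below); dividing by fpsUnitFactor restores
+        // whichever unit this particular camera itself reports.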
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
+ new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
+ captureFormat.framerate.max / fpsUnitFactor));
+ captureRequestBuilder.set(
+ CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
+ chooseStabilizationMode(captureRequestBuilder);
+ chooseFocusMode(captureRequestBuilder);
+
+ captureRequestBuilder.addTarget(surface);
+ session.setRepeatingRequest(
+ captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to start capture request. " + e);
+ return;
+ }
+
+ surfaceTextureHelper.startListening((VideoFrame frame) -> {
+ checkIsOnCameraThread();
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+ return;
+ }
+
+ if (!firstFrameReported) {
+ firstFrameReported = true;
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera2StartTimeMsHistogram.addSample(startTimeMs);
+ }
+
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ // Also, undo camera orientation, we report it as rotation instead.
+ final VideoFrame modifiedFrame =
+ new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+ (TextureBufferImpl) frame.getBuffer(),
+ /* mirror= */ isCameraFrontFacing,
+ /* rotation= */ -cameraOrientation),
+ /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+ events.onFrameCaptured(Camera2Session.this, modifiedFrame);
+ modifiedFrame.release();
+ });
+ Logging.d(TAG, "Camera device successfully started.");
+ callback.onDone(Camera2Session.this);
+ }
+
+ // Prefers optical stabilization over software stabilization if available. Only enables one of
+ // the stabilization modes at a time because having both enabled can cause strange results.
+ private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
+ final int[] availableOpticalStabilization = cameraCharacteristics.get(
+ CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
+ if (availableOpticalStabilization != null) {
+ for (int mode : availableOpticalStabilization) {
+ if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
+ captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+ CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+ CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
+ Logging.d(TAG, "Using optical stabilization.");
+ return;
+ }
+ }
+ }
+ // If no optical mode is available, try software.
+ final int[] availableVideoStabilization = cameraCharacteristics.get(
+ CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
+ if (availableVideoStabilization != null) {
+ for (int mode : availableVideoStabilization) {
+ if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
+ captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+ CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+ CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
+ Logging.d(TAG, "Using video stabilization.");
+ return;
+ }
+ }
+ }
+ Logging.d(TAG, "Stabilization not available.");
+ }
+
+ private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
+ final int[] availableFocusModes =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+ for (int mode : availableFocusModes) {
+ if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
+ captureRequestBuilder.set(
+ CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
+ Logging.d(TAG, "Using continuous video auto-focus.");
+ return;
+ }
+ }
+ Logging.d(TAG, "Auto-focus is not available.");
+ }
+ }
+
+ private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
+ @Override
+ public void onCaptureFailed(
+ CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
+ Logging.d(TAG, "Capture failed: " + failure);
+ }
+ }
+
+ public static void create(CreateSessionCallback callback, Events events,
+ Context applicationContext, CameraManager cameraManager,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
+ int framerate) {
+ new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
+ cameraId, width, height, framerate);
+ }
+
+ private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
+ CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
+ int width, int height, int framerate) {
+ Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
+
+ constructionTimeNs = System.nanoTime();
+
+ this.cameraThreadHandler = new Handler();
+ this.callback = callback;
+ this.events = events;
+ this.applicationContext = applicationContext;
+ this.cameraManager = cameraManager;
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.cameraId = cameraId;
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+
+ start();
+ }
+
+ private void start() {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "start");
+
+ try {
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+ } catch (CameraAccessException | IllegalArgumentException e) {
+ reportError("getCameraCharacteristics(): " + e.getMessage());
+ return;
+ }
+ cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_FRONT;
+
+ findCaptureFormat();
+
+ if (captureFormat == null) {
+ // findCaptureFormat reports an error already.
+ return;
+ }
+
+ openCamera();
+ }
+
+ private void findCaptureFormat() {
+ checkIsOnCameraThread();
+
+ Range<Integer>[] fpsRanges =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
+ List<CaptureFormat.FramerateRange> framerateRanges =
+ Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
+ List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
+ Logging.d(TAG, "Available preview sizes: " + sizes);
+ Logging.d(TAG, "Available fps ranges: " + framerateRanges);
+
+ if (framerateRanges.isEmpty() || sizes.isEmpty()) {
+ reportError("No supported capture formats.");
+ return;
+ }
+
+ final CaptureFormat.FramerateRange bestFpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
+
+ final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
+ CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
+
+ captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
+ Logging.d(TAG, "Using capture format: " + captureFormat);
+ }
+
+ @SuppressLint("MissingPermission")
+ private void openCamera() {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Opening camera " + cameraId);
+ events.onCameraOpening();
+
+ try {
+ cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
+ } catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
+ reportError("Failed to open camera: " + e);
+ return;
+ }
+ }
+
+ @Override
+ public void stop() {
+ Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
+ checkIsOnCameraThread();
+ if (state != SessionState.STOPPED) {
+ final long stopStartTime = System.nanoTime();
+ state = SessionState.STOPPED;
+ stopInternal();
+ final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+ camera2StopTimeMsHistogram.addSample(stopTimeMs);
+ }
+ }
+
+ private void stopInternal() {
+ Logging.d(TAG, "Stop internal");
+ checkIsOnCameraThread();
+
+ surfaceTextureHelper.stopListening();
+
+ if (captureSession != null) {
+ captureSession.close();
+ captureSession = null;
+ }
+ if (surface != null) {
+ surface.release();
+ surface = null;
+ }
+ if (cameraDevice != null) {
+ cameraDevice.close();
+ cameraDevice = null;
+ }
+
+ Logging.d(TAG, "Stop done");
+ }
+
+ private void reportError(String error) {
+ checkIsOnCameraThread();
+ Logging.e(TAG, "Error: " + error);
+
+ final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
+ state = SessionState.STOPPED;
+ stopInternal();
+ if (startFailure) {
+ callback.onFailure(FailureType.ERROR, error);
+ } else {
+ events.onCameraError(this, error);
+ }
+ }
+
+ private int getFrameOrientation() {
+ int rotation = CameraSession.getDeviceOrientation(applicationContext);
+ if (!isCameraFrontFacing) {
+ rotation = 360 - rotation;
+ }
+ return (cameraOrientation + rotation) % 360;
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java
new file mode 100644
index 0000000000..1922a529e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java
@@ -0,0 +1,458 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.Looper;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+abstract class CameraCapturer implements CameraVideoCapturer {
+ enum SwitchState {
+ IDLE, // No switch requested.
+ PENDING, // Waiting for previous capture session to open.
+ IN_PROGRESS, // Waiting for new switched capture session to start.
+ }
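+  // Sketch of the transitions, inferred from the session callbacks below: a switch requested
+  // while a session is running goes IDLE -> IN_PROGRESS, returning to IDLE when the new session
+  // reports done or fails; a switch requested while a session is still opening goes
+  // IDLE -> PENDING, and the switch is re-issued from onDone once that session opens.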
+
+ private static final String TAG = "CameraCapturer";
+  private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+  private static final int OPEN_CAMERA_DELAY_MS = 500;
+  private static final int OPEN_CAMERA_TIMEOUT = 10000;
+
+ private final CameraEnumerator cameraEnumerator;
+ private final CameraEventsHandler eventsHandler;
+ private final Handler uiThreadHandler;
+
+ @Nullable
+ private final CameraSession.CreateSessionCallback createSessionCallback =
+ new CameraSession.CreateSessionCallback() {
+ @Override
+ public void onDone(CameraSession session) {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "Create session done. Switch state: " + switchState);
+ uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
+ synchronized (stateLock) {
+ capturerObserver.onCapturerStarted(true /* success */);
+ sessionOpening = false;
+ currentSession = session;
+ cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
+ firstFrameObserved = false;
+ stateLock.notifyAll();
+
+ if (switchState == SwitchState.IN_PROGRESS) {
+ switchState = SwitchState.IDLE;
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
+ switchEventsHandler = null;
+ }
+ } else if (switchState == SwitchState.PENDING) {
+ String selectedCameraName = pendingCameraName;
+ pendingCameraName = null;
+ switchState = SwitchState.IDLE;
+ switchCameraInternal(switchEventsHandler, selectedCameraName);
+ }
+ }
+ }
+
+ @Override
+ public void onFailure(CameraSession.FailureType failureType, String error) {
+ checkIsOnCameraThread();
+ uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
+ synchronized (stateLock) {
+ capturerObserver.onCapturerStarted(false /* success */);
+ openAttemptsRemaining--;
+
+ if (openAttemptsRemaining <= 0) {
+            Logging.w(TAG, "Opening camera failed, giving up: " + error);
+ sessionOpening = false;
+ stateLock.notifyAll();
+
+ if (switchState != SwitchState.IDLE) {
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(error);
+ switchEventsHandler = null;
+ }
+ switchState = SwitchState.IDLE;
+ }
+
+ if (failureType == CameraSession.FailureType.DISCONNECTED) {
+ eventsHandler.onCameraDisconnected();
+ } else {
+ eventsHandler.onCameraError(error);
+ }
+ } else {
+ Logging.w(TAG, "Opening camera failed, retry: " + error);
+ createSessionInternal(OPEN_CAMERA_DELAY_MS);
+ }
+ }
+ }
+ };
+
+ @Nullable
+ private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
+ @Override
+ public void onCameraOpening() {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (currentSession != null) {
+ Logging.w(TAG, "onCameraOpening while session was open.");
+ return;
+ }
+ eventsHandler.onCameraOpening(cameraName);
+ }
+ }
+
+ @Override
+ public void onCameraError(CameraSession session, String error) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onCameraError from another session: " + error);
+ return;
+ }
+ eventsHandler.onCameraError(error);
+ stopCapture();
+ }
+ }
+
+ @Override
+ public void onCameraDisconnected(CameraSession session) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onCameraDisconnected from another session.");
+ return;
+ }
+ eventsHandler.onCameraDisconnected();
+ stopCapture();
+ }
+ }
+
+ @Override
+ public void onCameraClosed(CameraSession session) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession && currentSession != null) {
+ Logging.d(TAG, "onCameraClosed from another session.");
+ return;
+ }
+ eventsHandler.onCameraClosed();
+ }
+ }
+
+ @Override
+ public void onFrameCaptured(CameraSession session, VideoFrame frame) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onFrameCaptured from another session.");
+ return;
+ }
+ if (!firstFrameObserved) {
+ eventsHandler.onFirstFrameAvailable();
+ firstFrameObserved = true;
+ }
+ cameraStatistics.addFrame();
+ capturerObserver.onFrameCaptured(frame);
+ }
+ }
+ };
+
+ private final Runnable openCameraTimeoutRunnable = new Runnable() {
+ @Override
+ public void run() {
+ eventsHandler.onCameraError("Camera failed to start within timeout.");
+ }
+ };
+
+ // Initialized in initialize()
+ // ---------------------------
+ private Handler cameraThreadHandler;
+ private Context applicationContext;
+ private org.webrtc.CapturerObserver capturerObserver;
+ private SurfaceTextureHelper surfaceHelper;
+
+ private final Object stateLock = new Object();
+ private boolean sessionOpening; /* guarded by stateLock */
+ @Nullable private CameraSession currentSession; /* guarded by stateLock */
+ private String cameraName; /* guarded by stateLock */
+ private String pendingCameraName; /* guarded by stateLock */
+ private int width; /* guarded by stateLock */
+ private int height; /* guarded by stateLock */
+ private int framerate; /* guarded by stateLock */
+ private int openAttemptsRemaining; /* guarded by stateLock */
+ private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
+ @Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
+ // Valid from onDone call until stopCapture, otherwise null.
+ @Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
+ private boolean firstFrameObserved; /* guarded by stateLock */
+
+ public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
+ CameraEnumerator cameraEnumerator) {
+ if (eventsHandler == null) {
+ eventsHandler = new CameraEventsHandler() {
+ @Override
+ public void onCameraError(String errorDescription) {}
+ @Override
+ public void onCameraDisconnected() {}
+ @Override
+ public void onCameraFreezed(String errorDescription) {}
+ @Override
+ public void onCameraOpening(String cameraName) {}
+ @Override
+ public void onFirstFrameAvailable() {}
+ @Override
+ public void onCameraClosed() {}
+ };
+ }
+
+ this.eventsHandler = eventsHandler;
+ this.cameraEnumerator = cameraEnumerator;
+ this.cameraName = cameraName;
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+ uiThreadHandler = new Handler(Looper.getMainLooper());
+
+ if (deviceNames.isEmpty()) {
+ throw new RuntimeException("No cameras attached.");
+ }
+ if (!deviceNames.contains(this.cameraName)) {
+ throw new IllegalArgumentException(
+ "Camera name " + this.cameraName + " does not match any known camera device.");
+ }
+ }
+
+ @Override
+ public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ org.webrtc.CapturerObserver capturerObserver) {
+ this.applicationContext = applicationContext;
+ this.capturerObserver = capturerObserver;
+ this.surfaceHelper = surfaceTextureHelper;
+ this.cameraThreadHandler = surfaceTextureHelper.getHandler();
+ }
+
+ @Override
+ public void startCapture(int width, int height, int framerate) {
+ Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
+ if (applicationContext == null) {
+ throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
+ }
+
+ synchronized (stateLock) {
+ if (sessionOpening || currentSession != null) {
+ Logging.w(TAG, "Session already open");
+ return;
+ }
+
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+
+ sessionOpening = true;
+ openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
+ createSessionInternal(0);
+ }
+ }
+
+ private void createSessionInternal(int delayMs) {
+ uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
+ cameraThreadHandler.postDelayed(new Runnable() {
+ @Override
+ public void run() {
+ createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
+ surfaceHelper, cameraName, width, height, framerate);
+ }
+ }, delayMs);
+ }
+
+ @Override
+ public void stopCapture() {
+ Logging.d(TAG, "Stop capture");
+
+ synchronized (stateLock) {
+ while (sessionOpening) {
+ Logging.d(TAG, "Stop capture: Waiting for session to open");
+ try {
+ stateLock.wait();
+ } catch (InterruptedException e) {
+ Logging.w(TAG, "Stop capture interrupted while waiting for the session to open.");
+ Thread.currentThread().interrupt();
+ return;
+ }
+ }
+
+ if (currentSession != null) {
+ Logging.d(TAG, "Stop capture: Nulling session");
+ cameraStatistics.release();
+ cameraStatistics = null;
+ final CameraSession oldSession = currentSession;
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ oldSession.stop();
+ }
+ });
+ currentSession = null;
+ capturerObserver.onCapturerStopped();
+ } else {
+ Logging.d(TAG, "Stop capture: No session open");
+ }
+ }
+
+ Logging.d(TAG, "Stop capture done");
+ }
+
+ @Override
+ public void changeCaptureFormat(int width, int height, int framerate) {
+ Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
+ synchronized (stateLock) {
+ stopCapture();
+ startCapture(width, height, framerate);
+ }
+ }
+
+ @Override
+ public void dispose() {
+ Logging.d(TAG, "dispose");
+ stopCapture();
+ }
+
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
+ Logging.d(TAG, "switchCamera");
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+
+ if (deviceNames.size() < 2) {
+ reportCameraSwitchError("No camera to switch to.", switchEventsHandler);
+ return;
+ }
+
+ int cameraNameIndex = deviceNames.indexOf(cameraName);
+ String nextCameraName = deviceNames.get((cameraNameIndex + 1) % deviceNames.size());
+ switchCameraInternal(switchEventsHandler, nextCameraName);
+ }
+ });
+ }
+
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler, final String cameraName) {
+ Logging.d(TAG, "switchCamera");
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ switchCameraInternal(switchEventsHandler, cameraName);
+ }
+ });
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return false;
+ }
+
+ public void printStackTrace() {
+ Thread cameraThread = null;
+ if (cameraThreadHandler != null) {
+ cameraThread = cameraThreadHandler.getLooper().getThread();
+ }
+ if (cameraThread != null) {
+ StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
+ if (cameraStackTrace.length > 0) {
+ Logging.d(TAG, "CameraCapturer stack trace:");
+ for (StackTraceElement traceElem : cameraStackTrace) {
+ Logging.d(TAG, traceElem.toString());
+ }
+ }
+ }
+ }
+
+ private void reportCameraSwitchError(
+ String error, @Nullable CameraSwitchHandler switchEventsHandler) {
+ Logging.e(TAG, error);
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(error);
+ }
+ }
+
+ private void switchCameraInternal(
+ @Nullable final CameraSwitchHandler switchEventsHandler, final String selectedCameraName) {
+ Logging.d(TAG, "switchCamera internal");
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+
+ if (!deviceNames.contains(selectedCameraName)) {
+ reportCameraSwitchError("Attempted to switch to unknown camera device " + selectedCameraName,
+ switchEventsHandler);
+ return;
+ }
+
+ synchronized (stateLock) {
+ if (switchState != SwitchState.IDLE) {
+ reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
+ return;
+ }
+ if (!sessionOpening && currentSession == null) {
+ reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
+ return;
+ }
+
+ this.switchEventsHandler = switchEventsHandler;
+ if (sessionOpening) {
+ switchState = SwitchState.PENDING;
+ pendingCameraName = selectedCameraName;
+ return;
+ } else {
+ switchState = SwitchState.IN_PROGRESS;
+ }
+
+ Logging.d(TAG, "switchCamera: Stopping session");
+ cameraStatistics.release();
+ cameraStatistics = null;
+ final CameraSession oldSession = currentSession;
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ oldSession.stop();
+ }
+ });
+ currentSession = null;
+
+ cameraName = selectedCameraName;
+
+ sessionOpening = true;
+ openAttemptsRemaining = 1;
+ createSessionInternal(0);
+ }
+ Logging.d(TAG, "switchCamera done");
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ Logging.e(TAG, "Check is on camera thread failed.");
+ throw new RuntimeException("Not on camera thread.");
+ }
+ }
+
+ protected String getCameraName() {
+ synchronized (stateLock) {
+ return cameraName;
+ }
+ }
+
+ protected abstract void createCameraSession(
+ CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
+ Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
+ int width, int height, int framerate);
+}
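For context, the switch/retry state machine above is driven entirely through the public CameraVideoCapturer API. A minimal usage sketch, not part of this patch; `appContext`, `eglBase`, and `observer` are placeholders the application supplies:

    // Pick a front-facing device via the enumerator, then run the capturer.
    CameraEnumerator enumerator = new Camera2Enumerator(appContext);
    String deviceName = enumerator.getDeviceNames()[0];
    for (String name : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(name)) {
        deviceName = name;
        break;
      }
    }
    CameraVideoCapturer capturer =
        enumerator.createCapturer(deviceName, /* eventsHandler= */ null);
    SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
    capturer.initialize(helper, appContext, observer);
    capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);
    // switchCamera() posts to the camera thread; while a session is still opening,
    // the request is parked in SwitchState.PENDING and replayed once onDone() fires.
    capturer.switchCamera(/* switchEventsHandler= */ null);
    // ...
    capturer.stopCapture();  // Blocks until any pending session open has resolved.
    capturer.dispose();

Note that stopCapture() may block waiting for a session to open, so it should not be called while holding locks the camera thread needs.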
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java
new file mode 100644
index 0000000000..8d137854d8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Matrix;
+import android.view.Surface;
+import android.view.WindowManager;
+
+interface CameraSession {
+ enum FailureType { ERROR, DISCONNECTED }
+
+ // Callbacks are fired on the camera thread.
+ interface CreateSessionCallback {
+ void onDone(CameraSession session);
+ void onFailure(FailureType failureType, String error);
+ }
+
+ // Events are fired on the camera thread.
+ interface Events {
+ void onCameraOpening();
+ void onCameraError(CameraSession session, String error);
+ void onCameraDisconnected(CameraSession session);
+ void onCameraClosed(CameraSession session);
+ void onFrameCaptured(CameraSession session, VideoFrame frame);
+ }
+
+ /**
+ * Stops the capture. Waits until no more calls to the capture observer will be made.
+ */
+ void stop();
+
+ static int getDeviceOrientation(Context context) {
+ final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+ switch (wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ return 90;
+ case Surface.ROTATION_180:
+ return 180;
+ case Surface.ROTATION_270:
+ return 270;
+ case Surface.ROTATION_0:
+ default:
+ return 0;
+ }
+ }
+
+ static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
+ TextureBufferImpl buffer, boolean mirror, int rotation) {
+ final Matrix transformMatrix = new Matrix();
+ // Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
+ transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
+ if (mirror) {
+ transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
+ }
+ transformMatrix.preRotate(rotation);
+ transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
+
+ // The width and height are not affected by rotation since Camera2Session has set them to the
+ // value they should be after undoing the rotation.
+ return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
+ }
+}
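The helper above composes translate(0.5, 0.5) · mirror · rotate · translate(-0.5, -0.5), so the mirror and rotation pivot on the texture center rather than the origin. An illustrative sketch of the same arithmetic with plain floats (assuming Android's texture-coordinate convention; android.graphics.Matrix is not needed for the demonstration):

    // Applies the same center-pivot transform to a single texture coordinate.
    static float[] transformTexCoord(float u, float v, boolean mirror, int rotationDegrees) {
      double x = u - 0.5;                        // preTranslate(-0.5, -0.5) applies first...
      double y = v - 0.5;
      double r = Math.toRadians(rotationDegrees);
      double xr = x * Math.cos(r) - y * Math.sin(r);  // ...then preRotate(rotation)...
      double yr = x * Math.sin(r) + y * Math.cos(r);
      if (mirror) {
        xr = -xr;                                // ...then preScale(-1, 1) for mirroring...
      }
      // ...and finally preTranslate(0.5, 0.5) moves the pivot back.
      return new float[] {(float) (xr + 0.5), (float) (yr + 0.5)};
    }
    // transformTexCoord(0f, 0f, false, 90) is approximately (1, 0): the corner orbits
    // the center (0.5, 0.5) instead of swinging around the (0, 0) origin.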
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
new file mode 100644
index 0000000000..96a15bbfe1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
+ * bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
+ * target bitrate by unacceptable margins.
+ */
+class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
+ // Change the bitrate at most once every three seconds.
+ private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
+ // Maximum bitrate adjustment scale - no more than 4 times.
+ private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
+ // Amount of adjustment steps to reach maximum scale.
+ private static final int BITRATE_ADJUSTMENT_STEPS = 20;
+
+ private static final double BITS_PER_BYTE = 8.0;
+
+ // How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
+ private double deviationBytes;
+ private double timeSinceLastAdjustmentMs;
+ private int bitrateAdjustmentScaleExp;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
+ // Rescale the accumulator level if the accumulator max decreases
+ deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
+ }
+ super.setTargets(targetBitrateBps, targetFramerateFps);
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ if (targetFramerateFps == 0) {
+ return;
+ }
+
+ // Accumulate the difference between actual and expected frame sizes.
+ double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFramerateFps;
+ deviationBytes += (size - expectedBytesPerFrame);
+ timeSinceLastAdjustmentMs += 1000.0 / targetFramerateFps;
+
+ // Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
+ // shortfall of the target.
+ double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
+
+ // Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
+ // bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
+ double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
+ deviationBytes = Math.min(deviationBytes, deviationCap);
+ deviationBytes = Math.max(deviationBytes, -deviationCap);
+
+ // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
+ // from the target value.
+ if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
+ return;
+ }
+
+ if (deviationBytes > deviationThresholdBytes) {
+ // Encoder generates too high bitrate - need to reduce the scale.
+ int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
+ // Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
+ // This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = deviationThresholdBytes;
+ } else if (deviationBytes < -deviationThresholdBytes) {
+ // Encoder generates too low bitrate - need to increase the scale.
+ int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
+ // Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
+ // This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = -deviationThresholdBytes;
+ }
+ timeSinceLastAdjustmentMs = 0;
+ }
+
+ private double getBitrateAdjustmentScale() {
+ return Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
+ (double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS);
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return (int) (targetBitrateBps * getBitrateAdjustmentScale());
+ }
+}
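The scale applied on top of the target bitrate is exponential: each clamped adjustment moves bitrateAdjustmentScaleExp, and getAdjustedBitrateBps() multiplies the target by 4^(exp / 20), bounded to [1/4, 4] by the clamping above. A small worked sketch of those scale values, assuming nothing beyond the constants in this file:

    // Demonstrates the adjustment curve 4^(exp / 20) used by getBitrateAdjustmentScale().
    public final class BitrateScaleDemo {
      public static void main(String[] args) {
        for (int exp : new int[] {-20, -10, 0, 10, 20}) {
          double scale = Math.pow(4.0, exp / 20.0);
          System.out.printf("exp=%3d -> scale=%.3f%n", exp, scale);
        }
        // Prints 0.250, 0.500, 1.000, 2.000, 4.000: the twenty steps span a 4x range
        // in either direction, so a single step changes the bitrate by about 7%.
      }
    }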
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java
new file mode 100644
index 0000000000..254a17c750
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java
@@ -0,0 +1,365 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.GLException;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import androidx.annotation.Nullable;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+class EglBase10Impl implements EglBase10 {
+ private static final String TAG = "EglBase10Impl";
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ @Nullable private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGL wrapper for an actual EGLContext.
+ private static class Context implements EglBase10.Context {
+ private final EGL10 egl;
+ private final EGLContext eglContext;
+ private final EGLConfig eglContextConfig;
+
+ @Override
+ public EGLContext getRawContext() {
+ return eglContext;
+ }
+
+ @Override
+ public long getNativeEglContext() {
+ EGLContext previousContext = egl.eglGetCurrentContext();
+ EGLDisplay currentDisplay = egl.eglGetCurrentDisplay();
+ EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL10.EGL_DRAW);
+ EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL10.EGL_READ);
+ EGLSurface tempEglSurface = null;
+
+ if (currentDisplay == EGL10.EGL_NO_DISPLAY) {
+ currentDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ }
+
+ try {
+ if (previousContext != eglContext) {
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE};
+ tempEglSurface =
+ egl.eglCreatePbufferSurface(currentDisplay, eglContextConfig, surfaceAttribs);
+ if (!egl.eglMakeCurrent(currentDisplay, tempEglSurface, tempEglSurface, eglContext)) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to make temporary EGL surface active: " + egl.eglGetError());
+ }
+ }
+
+ return nativeGetCurrentNativeEGLContext();
+ } finally {
+ if (tempEglSurface != null) {
+ egl.eglMakeCurrent(
+ currentDisplay, previousDrawSurface, previousReadSurface, previousContext);
+ egl.eglDestroySurface(currentDisplay, tempEglSurface);
+ }
+ }
+ }
+
+ public Context(EGL10 egl, EGLContext eglContext, EGLConfig eglContextConfig) {
+ this.egl = egl;
+ this.eglContext = eglContext;
+ this.eglContextConfig = eglContextConfig;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) {
+ this.egl = (EGL10) EGLContext.getEGL();
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(egl, eglDisplay, configAttributes);
+ final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
+ Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
+ }
+
+ @Override
+ public void createSurface(Surface surface) {
+ /**
+ * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+ * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+ * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+ */
+ class FakeSurfaceHolder implements SurfaceHolder {
+ private final Surface surface;
+
+ FakeSurfaceHolder(Surface surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ public void addCallback(Callback callback) {}
+
+ @Override
+ public void removeCallback(Callback callback) {}
+
+ @Override
+ public boolean isCreating() {
+ return false;
+ }
+
+ @Deprecated
+ @Override
+ public void setType(int i) {}
+
+ @Override
+ public void setFixedSize(int i, int i2) {}
+
+ @Override
+ public void setSizeFromLayout() {}
+
+ @Override
+ public void setFormat(int i) {}
+
+ @Override
+ public void setKeepScreenOn(boolean b) {}
+
+ @Nullable
+ @Override
+ public Canvas lockCanvas() {
+ return null;
+ }
+
+ @Nullable
+ @Override
+ public Canvas lockCanvas(Rect rect) {
+ return null;
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas) {}
+
+ @Nullable
+ @Override
+ public Rect getSurfaceFrame() {
+ return null;
+ }
+
+ @Override
+ public Surface getSurface() {
+ return surface;
+ }
+ }
+
+ createSurfaceInternal(new FakeSurfaceHolder(surface));
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+
+ @Override
+ public org.webrtc.EglBase.Context getEglBaseContext() {
+ return new Context(egl, eglContext, eglConfig);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int[] widthArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int[] heightArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ egl.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new GLException(egl.eglGetError(),
+ "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new GLException(egl.eglGetError(),
+ "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ egl.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ @Override
+ public void swapBuffers(long timeStampNs) {
+ // Setting presentation time is not supported for EGL 1.0.
+ swapBuffers();
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGL10 egl, EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new GLException(
+ egl.eglGetError(), "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private EGLContext createEglContext(@Nullable EGLContext sharedContext, EGLDisplay eglDisplay,
+ EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ }
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglContext;
+ }
+
+ private static native long nativeGetCurrentNativeEGLContext();
+}
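For orientation, the lifecycle this class implements is usually driven through the EglBase factory rather than instantiated directly. A minimal sketch, assuming the EGL 1.0 path is wanted explicitly and eliding the GLES drawing itself:

    EglBase eglBase = EglBase.createEgl10(EglBase.CONFIG_PLAIN);  // Force the EGL10 implementation.
    eglBase.createDummyPbufferSurface();  // 1x1 pbuffer so the context can become current.
    eglBase.makeCurrent();
    // ... issue GLES calls on this thread ...
    eglBase.detachCurrent();  // Lets another thread make the context current.
    eglBase.release();        // Destroys surface, context, and display; the object is unusable afterwards.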
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java
new file mode 100644
index 0000000000..caf45b091e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLException;
+import android.os.Build;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
+class EglBase14Impl implements EglBase14 {
+ private static final String TAG = "EglBase14Impl";
+ private EGLContext eglContext;
+ @Nullable private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
+ public static class Context implements EglBase14.Context {
+ private final EGLContext egl14Context;
+
+ @Override
+ public EGLContext getRawContext() {
+ return egl14Context;
+ }
+
+ @Override
+ public long getNativeEglContext() {
+ return egl14Context.getNativeHandle();
+ }
+
+ public Context(android.opengl.EGLContext eglContext) {
+ this.egl14Context = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ // `sharedContext` may be null.
+ public EglBase14Impl(EGLContext sharedContext, int[] configAttributes) {
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
+ Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
+ }
+
+ // Create EGLSurface from the Android Surface.
+ @Override
+ public void createSurface(Surface surface) {
+ createSurfaceInternal(surface);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either Surface or SurfaceTexture.
+ private void createSurfaceInternal(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+
+ @Override
+ public Context getEglBaseContext() {
+ return new Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL14.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int[] widthArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int[] heightArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ synchronized (EglBase.lock) {
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ }
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ @Override
+ public void swapBuffers(long timeStampNs) {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ // See
+ // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+ EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new GLException(EGL14.eglGetError(),
+ "Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ throw new GLException(EGL14.eglGetError(),
+ "Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(@Nullable EGLContext sharedContext,
+ EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL14.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ }
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ return eglContext;
+ }
+}
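The one behavioral difference from the EGL 1.0 implementation is swapBuffers(long): it forwards the timestamp through eglPresentationTimeANDROID, which matters when the window surface wraps a MediaCodec encoder input. A sketch under that assumption, where `inputSurface` comes from MediaCodec.createInputSurface() and `frame` is a captured VideoFrame:

    EglBase14 eglBase = EglBase.createEgl14(EglBase.CONFIG_RECORDABLE);
    eglBase.createSurface(inputSurface);  // Window surface over the codec's input surface.
    eglBase.makeCurrent();
    // ... draw the frame with GLES ...
    eglBase.swapBuffers(frame.getTimestampNs());  // Tags the buffer with the capture timestamp.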
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java
new file mode 100644
index 0000000000..fe9481e182
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Empty class for use in libjingle_peerconnection_java because all targets require at least one
+ * Java file.
+ */
+class Empty {}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
new file mode 100644
index 0000000000..e28b7b5a26
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
+ * hardware codecs that assume the framerate never changes.
+ */
+class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
+ private static final int DEFAULT_FRAMERATE_FPS = 30;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ // Keep frame rate unchanged and adjust bit rate.
+ this.targetFramerateFps = DEFAULT_FRAMERATE_FPS;
+ this.targetBitrateBps = (int) (targetBitrateBps * DEFAULT_FRAMERATE_FPS / targetFramerateFps);
+ }
+}
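The compensation is plain proportionality: the codec is always told DEFAULT_FRAMERATE_FPS, so the bitrate is rescaled by 30 / targetFramerateFps to keep the per-frame byte budget unchanged. A worked sketch (callable from within org.webrtc, since the class is package-private):

    FramerateBitrateAdjuster adjuster = new FramerateBitrateAdjuster();
    adjuster.setTargets(/* targetBitrateBps= */ 1_000_000, /* targetFramerateFps= */ 15.0);
    int bps = adjuster.getAdjustedBitrateBps();       // 2_000_000: 1 Mbps * 30 / 15.
    double fps = adjuster.getAdjustedFramerateFps();  // 30.0, regardless of the request.
    // Each frame still carries roughly 8.3 kB whether the source runs at 15 or 30 fps.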
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java
new file mode 100644
index 0000000000..34144e2f75
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.FloatBuffer;
+
+/**
+ * Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input
+ * sources (OES, RGB, or YUV) using a generic fragment shader as input. The generic fragment shader
+ * should sample pixel values from the function "sample" that will be provided by this class and
+ * provides an abstraction for the input source type (OES, RGB, or YUV). The texture coordinate
+ * variable name will be "tc" and the texture matrix in the vertex shader will be "tex_mat". The
+ * simplest possible generic shader that just draws pixel from the frame unmodified looks like:
+ * void main() {
+ * gl_FragColor = sample(tc);
+ * }
+ * This class covers the cases for most simple shaders and generates the necessary boilerplate.
+ * Advanced shaders can always implement RendererCommon.GlDrawer directly.
+ */
+class GlGenericDrawer implements RendererCommon.GlDrawer {
+ /**
+ * The different shader types representing different input sources. YUV here represents three
+ * separate Y, U, V textures.
+ */
+ public enum ShaderType { OES, RGB, YUV }
+
+ /**
+ * The shader callbacks are used to customize the behavior of a GlDrawer. They provide a hook to set
+ * uniform variables in the shader before a frame is drawn.
+ */
+ public interface ShaderCallbacks {
+ /**
+ * This callback is called when a new shader has been compiled and created. It will be called
+ * for the first frame as well as when the shader type is changed. This callback can be used to
+ * do custom initialization of the shader that only needs to happen once.
+ */
+ void onNewShader(GlShader shader);
+
+ /**
+ * This callback is called before rendering a frame. It can be used to do custom preparation of
+ * the shader that needs to happen every frame.
+ */
+ void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight);
+ }
+
+ private static final String INPUT_VERTEX_COORDINATE_NAME = "in_pos";
+ private static final String INPUT_TEXTURE_COORDINATE_NAME = "in_tc";
+ private static final String TEXTURE_MATRIX_NAME = "tex_mat";
+ private static final String DEFAULT_VERTEX_SHADER_STRING = "varying vec2 tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "uniform mat4 tex_mat;\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " tc = (tex_mat * in_tc).xy;\n"
+ + "}\n";
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1)
+ // is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) {
+ final StringBuilder stringBuilder = new StringBuilder();
+ if (shaderType == ShaderType.OES) {
+ stringBuilder.append("#extension GL_OES_EGL_image_external : require\n");
+ }
+ stringBuilder.append("precision mediump float;\n");
+ stringBuilder.append("varying vec2 tc;\n");
+
+ if (shaderType == ShaderType.YUV) {
+ stringBuilder.append("uniform sampler2D y_tex;\n");
+ stringBuilder.append("uniform sampler2D u_tex;\n");
+ stringBuilder.append("uniform sampler2D v_tex;\n");
+
+ // Add separate function for sampling texture.
+ // yuv_to_rgb_mat is inverse of the matrix defined in YuvConverter.
+ stringBuilder.append("vec4 sample(vec2 p) {\n");
+ stringBuilder.append(" float y = texture2D(y_tex, p).r * 1.16438;\n");
+ stringBuilder.append(" float u = texture2D(u_tex, p).r;\n");
+ stringBuilder.append(" float v = texture2D(v_tex, p).r;\n");
+ stringBuilder.append(" return vec4(y + 1.59603 * v - 0.874202,\n");
+ stringBuilder.append(" y - 0.391762 * u - 0.812968 * v + 0.531668,\n");
+ stringBuilder.append(" y + 2.01723 * u - 1.08563, 1);\n");
+ stringBuilder.append("}\n");
+ stringBuilder.append(genericFragmentSource);
+ } else {
+ final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D";
+ stringBuilder.append("uniform ").append(samplerName).append(" tex;\n");
+
+ // Update the sampling function in-place.
+ stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, "));
+ }
+
+ return stringBuilder.toString();
+ }
+
+ private final String genericFragmentSource;
+ private final String vertexShader;
+ private final ShaderCallbacks shaderCallbacks;
+ @Nullable private ShaderType currentShaderType;
+ @Nullable private GlShader currentShader;
+ private int inPosLocation;
+ private int inTcLocation;
+ private int texMatrixLocation;
+
+ public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks);
+ }
+
+ public GlGenericDrawer(
+ String vertexShader, String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this.vertexShader = vertexShader;
+ this.genericFragmentSource = genericFragmentSource;
+ this.shaderCallbacks = shaderCallbacks;
+ }
+
+ // Visible for testing.
+ GlShader createShader(ShaderType shaderType) {
+ return new GlShader(
+ vertexShader, createFragmentShaderString(genericFragmentSource, shaderType));
+ }
+
+ /**
+ * Draw an OES texture frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ /**
+ * Draw an RGB(A) texture frame with specified texture transformation matrix. Required resources
+ * are allocated at the first call to this function.
+ */
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ /**
+ * Draw a YUV frame with specified texture transformation matrix. Required resources are allocated
+ * at the first call to this function.
+ */
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ }
+ // Draw the textures.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the textures as a precaution.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+ }
+
+ private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth,
+ int frameHeight, int viewportWidth, int viewportHeight) {
+ final GlShader shader;
+ if (shaderType.equals(currentShaderType)) {
+ // Same shader type as before; reuse the existing shader.
+ shader = currentShader;
+ } else {
+ // Allocate new shader.
+ currentShaderType = null;
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ }
+
+ shader = createShader(shaderType);
+ currentShaderType = shaderType;
+ currentShader = shader;
+
+ shader.useProgram();
+ // Set input texture units.
+ if (shaderType == ShaderType.YUV) {
+ GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+ } else {
+ GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
+ }
+
+ GlUtil.checkNoGLES2Error("Create shader");
+ shaderCallbacks.onNewShader(shader);
+ texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME);
+ inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME);
+ inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME);
+ }
+
+ shader.useProgram();
+
+ // Upload the vertex coordinates.
+ GLES20.glEnableVertexAttribArray(inPosLocation);
+ GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_BUFFER);
+
+ // Upload the texture coordinates.
+ GLES20.glEnableVertexAttribArray(inTcLocation);
+ GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_TEXTURE_BUFFER);
+
+ // Upload the texture transformation matrix.
+ GLES20.glUniformMatrix4fv(
+ texMatrixLocation, 1 /* count= */, false /* transpose= */, texMatrix, 0 /* offset= */);
+
+ // Do custom per-frame shader preparation.
+ shaderCallbacks.onPrepareShader(
+ shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ GlUtil.checkNoGLES2Error("Prepare shader");
+ }
+
+ /**
+ * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+ */
+ @Override
+ public void release() {
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ currentShaderType = null;
+ }
+ }
+}
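As the class comment says, a custom drawer only needs a fragment shader body written against sample(tc); the class generates the OES/RGB/YUV variants. A minimal grayscale drawer sketch (from within org.webrtc, as the class is package-private; both callbacks are intentionally no-ops):

    static final String GRAYSCALE_FRAGMENT =
        "void main() {\n"
        + "  vec4 c = sample(tc);\n"
        + "  float y = dot(c.rgb, vec3(0.299, 0.587, 0.114));\n"  // BT.601 luma weights.
        + "  gl_FragColor = vec4(y, y, y, c.a);\n"
        + "}\n";

    RendererCommon.GlDrawer grayscale = new GlGenericDrawer(
        GRAYSCALE_FRAGMENT, new GlGenericDrawer.ShaderCallbacks() {
          @Override
          public void onNewShader(GlShader shader) {}

          @Override
          public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth,
              int frameHeight, int viewportWidth, int viewportHeight) {}
        });
    // Pass `grayscale` anywhere a RendererCommon.GlDrawer is accepted, e.g. to EglRenderer.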
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java
new file mode 100644
index 0000000000..abb79c6582
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/** Container for static helper functions related to dealing with H264 codecs. */
+class H264Utils {
+ public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
+ public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
+ public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
+
+ public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0";
+ public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
+ public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
+ public static final String H264_CONSTRAINED_HIGH_3_1 =
+ H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
+ public static final String H264_CONSTRAINED_BASELINE_3_1 =
+ H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
+
+ public static Map<String, String> getDefaultH264Params(boolean isHighProfile) {
+ final Map<String, String> params = new HashMap<>();
+ params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
+ params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
+ params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
+ isHighProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1
+ : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
+ return params;
+ }
+
+ public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC =
+ new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false));
+ public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC =
+ new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true));
+
+ public static boolean isSameH264Profile(
+ Map<String, String> params1, Map<String, String> params2) {
+ return nativeIsSameH264Profile(params1, params2);
+ }
+
+ private static native boolean nativeIsSameH264Profile(
+ Map<String, String> params1, Map<String, String> params2);
+}
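The string constants above concatenate into SDP profile-level-id values, and getDefaultH264Params() yields the fmtp map that WebRTC advertises. A small sketch of inspecting them (pure Java, avoiding the native isSameH264Profile() call):

    Map<String, String> high = H264Utils.getDefaultH264Params(/* isHighProfile= */ true);
    Map<String, String> baseline = H264Utils.getDefaultH264Params(/* isHighProfile= */ false);
    // high.get("profile-level-id")     -> "640c1f"  (Constrained High, level 3.1)
    // baseline.get("profile-level-id") -> "42e01f"  (Constrained Baseline, level 3.1)
    // As an SDP line: a=fmtp:96 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f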
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
new file mode 100644
index 0000000000..42a3ccfbfd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
@@ -0,0 +1,763 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Bundle;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.ThreadUtils.ThreadChecker;
+
+/**
+ * Android hardware video encoder.
+ */
+class HardwareVideoEncoder implements VideoEncoder {
+ private static final String TAG = "HardwareVideoEncoder";
+
+ // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
+ // in OMX_Video.h
+ private static final int VIDEO_ControlRateConstant = 2;
+ // Key associated with the bitrate control mode value (above). Not present as a MediaFormat
+ // constant until API level 21.
+ private static final String KEY_BITRATE_MODE = "bitrate-mode";
+
+ private static final int VIDEO_AVC_PROFILE_HIGH = 8;
+ private static final int VIDEO_AVC_LEVEL_3 = 0x100;
+
+ private static final int MAX_VIDEO_FRAMERATE = 30;
+
+ // See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc.
+ private static final int MAX_ENCODER_Q_SIZE = 2;
+
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+ private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
+
+  // The size of input frames should be a multiple of 16 for the H/W encoder.
+ private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16;
+
+ /**
+   * Keeps track of the number of output buffers that have been passed down the pipeline and not
+   * yet released. We need to wait for this count to reach zero before performing operations that
+   * invalidate the output buffers, i.e., stop() and getOutputBuffer().
+ */
+ private static class BusyCount {
+ private final Object countLock = new Object();
+ private int count;
+
+ public void increment() {
+ synchronized (countLock) {
+ count++;
+ }
+ }
+
+ // This method may be called on an arbitrary thread.
+ public void decrement() {
+ synchronized (countLock) {
+ count--;
+ if (count == 0) {
+ countLock.notifyAll();
+ }
+ }
+ }
+
+ // The increment and waitForZero methods are called on the same thread (deliverEncodedImage,
+ // running on the output thread). Hence, after waitForZero returns, the count will stay zero
+ // until the same thread calls increment.
+ public void waitForZero() {
+ boolean wasInterrupted = false;
+ synchronized (countLock) {
+ while (count > 0) {
+ try {
+ countLock.wait();
+ } catch (InterruptedException e) {
+ Logging.e(TAG, "Interrupted while waiting on busy count", e);
+ wasInterrupted = true;
+ }
+ }
+ }
+
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ }
+ }
+
+  // --- Initialized on construction.
+ private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
+ private final String codecName;
+ private final VideoCodecMimeType codecType;
+ private final Integer surfaceColorFormat;
+ private final Integer yuvColorFormat;
+ private final YuvFormat yuvFormat;
+ private final Map<String, String> params;
+ private final int keyFrameIntervalSec; // Base interval for generating key frames.
+ // Interval at which to force a key frame. Used to reduce color distortions caused by some
+ // Qualcomm video encoders.
+ private final long forcedKeyFrameNs;
+ private final BitrateAdjuster bitrateAdjuster;
+ // EGL context shared with the application. Used to access texture inputs.
+ private final EglBase14.Context sharedContext;
+
+ // Drawer used to draw input textures onto the codec's input surface.
+ private final GlRectDrawer textureDrawer = new GlRectDrawer();
+ private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
+ // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
+ // pre-populated with all the information that can't be sent through MediaCodec.
+ private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>();
+
+ private final ThreadChecker encodeThreadChecker = new ThreadChecker();
+ private final ThreadChecker outputThreadChecker = new ThreadChecker();
+ private final BusyCount outputBuffersBusyCount = new BusyCount();
+
+ // --- Set on initialize and immutable until release.
+ private Callback callback;
+ private boolean automaticResizeOn;
+
+ // --- Valid and immutable while an encoding session is running.
+ @Nullable private MediaCodecWrapper codec;
+ // Thread that delivers encoded frames to the user callback.
+ @Nullable private Thread outputThread;
+
+ // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
+ // input surface. Making this base current allows textures from the context to be drawn onto the
+ // surface.
+ @Nullable private EglBase14 textureEglBase;
+ // Input surface for the codec. The encoder will draw input textures onto this surface.
+ @Nullable private Surface textureInputSurface;
+
+ private int width;
+ private int height;
+  // Y-plane stride in the encoder's input.
+  private int stride;
+  // Y-plane slice height in the encoder's input.
+  private int sliceHeight;
+ private boolean useSurfaceMode;
+
+ // --- Only accessed from the encoding thread.
+ // Presentation timestamp of next frame to encode.
+ private long nextPresentationTimestampUs;
+ // Presentation timestamp of the last requested (or forced) key frame.
+ private long lastKeyFrameNs;
+
+ // --- Only accessed on the output thread.
+ // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
+ @Nullable private ByteBuffer configBuffer;
+ private int adjustedBitrate;
+
+ // Whether the encoder is running. Volatile so that the output thread can watch this value and
+ // exit when the encoder stops.
+ private volatile boolean running;
+ // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
+ // value to send exceptions thrown during release back to the encoder thread.
+ @Nullable private volatile Exception shutdownException;
+
+ /**
+ * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
+ * intervals, and bitrateAdjuster.
+ *
+ * @param codecName the hardware codec implementation to use
+   * @param codecType the type of the given video codec (e.g. VP8, VP9, H264 or AV1)
+ * @param surfaceColorFormat color format for surface mode or null if not available
+ * @param yuvColorFormat color format for bytebuffer mode
+ * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
+ * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
+ * used to reduce distortion caused by some codec implementations
+ * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
+ * desired bitrates
+ * @throws IllegalArgumentException if colorFormat is unsupported
+ */
+ public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
+ Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
+ BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
+ this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
+ this.codecName = codecName;
+ this.codecType = codecType;
+ this.surfaceColorFormat = surfaceColorFormat;
+ this.yuvColorFormat = yuvColorFormat;
+ this.yuvFormat = YuvFormat.valueOf(yuvColorFormat);
+ this.params = params;
+ this.keyFrameIntervalSec = keyFrameIntervalSec;
+ this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
+ this.bitrateAdjuster = bitrateAdjuster;
+ this.sharedContext = sharedContext;
+
+ // Allow construction on a different thread.
+ encodeThreadChecker.detachThread();
+ }
+
+ @Override
+ public VideoCodecStatus initEncode(Settings settings, Callback callback) {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ this.callback = callback;
+ automaticResizeOn = settings.automaticResizeOn;
+
+ if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0
+ || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) {
+ Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned.");
+ return VideoCodecStatus.ERR_SIZE;
+ }
+ this.width = settings.width;
+ this.height = settings.height;
+ useSurfaceMode = canUseSurface();
+
+ if (settings.startBitrate != 0 && settings.maxFramerate != 0) {
+ bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate);
+ }
+ adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
+
+    Logging.d(TAG,
+        "initEncode: " + width + " x " + height + " @ " + settings.startBitrate
+            + " kbps. Fps: " + settings.maxFramerate + ". Use surface mode: " + useSurfaceMode);
+ return initEncodeInternal();
+ }
+
+ private VideoCodecStatus initEncodeInternal() {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ nextPresentationTimestampUs = 0;
+ lastKeyFrameNs = -1;
+
+ try {
+ codec = mediaCodecWrapperFactory.createByCodecName(codecName);
+ } catch (IOException | IllegalArgumentException e) {
+ Logging.e(TAG, "Cannot create media encoder " + codecName);
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat;
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
+ format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ format.setFloat(
+ MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps());
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
+ if (codecType == VideoCodecMimeType.H264) {
+ String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
+ if (profileLevelId == null) {
+ profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1;
+ }
+ switch (profileLevelId) {
+ case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1:
+ format.setInteger("profile", VIDEO_AVC_PROFILE_HIGH);
+ format.setInteger("level", VIDEO_AVC_LEVEL_3);
+ break;
+ case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1:
+ break;
+ default:
+ Logging.w(TAG, "Unknown profile level id: " + profileLevelId);
+ }
+ }
+ Logging.d(TAG, "Format: " + format);
+ codec.configure(
+ format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (useSurfaceMode) {
+ textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE);
+ textureInputSurface = codec.createInputSurface();
+ textureEglBase.createSurface(textureInputSurface);
+ textureEglBase.makeCurrent();
+ }
+
+ MediaFormat inputFormat = codec.getInputFormat();
+ stride = getStride(inputFormat, width);
+ sliceHeight = getSliceHeight(inputFormat, height);
+
+ codec.start();
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "initEncodeInternal failed", e);
+ release();
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ running = true;
+ outputThreadChecker.detachThread();
+ outputThread = createOutputThread();
+ outputThread.start();
+
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ final VideoCodecStatus returnValue;
+ if (outputThread == null) {
+ returnValue = VideoCodecStatus.OK;
+ } else {
+ // The outputThread actually stops and releases the codec once running is false.
+ running = false;
+ if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media encoder release timeout");
+ returnValue = VideoCodecStatus.TIMEOUT;
+ } else if (shutdownException != null) {
+ // Log the exception and turn it into an error.
+ Logging.e(TAG, "Media encoder release exception", shutdownException);
+ returnValue = VideoCodecStatus.ERROR;
+ } else {
+ returnValue = VideoCodecStatus.OK;
+ }
+ }
+
+ textureDrawer.release();
+ videoFrameDrawer.release();
+ if (textureEglBase != null) {
+ textureEglBase.release();
+ textureEglBase = null;
+ }
+ if (textureInputSurface != null) {
+ textureInputSurface.release();
+ textureInputSurface = null;
+ }
+ outputBuilders.clear();
+
+ codec = null;
+ outputThread = null;
+
+ // Allow changing thread after release.
+ encodeThreadChecker.detachThread();
+
+ return returnValue;
+ }
+
+ @Override
+ public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (codec == null) {
+ return VideoCodecStatus.UNINITIALIZED;
+ }
+
+ final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
+ final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer;
+
+ // If input resolution changed, restart the codec with the new resolution.
+ final int frameWidth = videoFrame.getBuffer().getWidth();
+ final int frameHeight = videoFrame.getBuffer().getHeight();
+ final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer;
+ if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) {
+ VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ }
+
+ if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
+ // Too many frames in the encoder. Drop this frame.
+ Logging.e(TAG, "Dropped frame, encoder queue full");
+ return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
+ }
+
+ boolean requestedKeyFrame = false;
+ for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
+ if (frameType == EncodedImage.FrameType.VideoFrameKey) {
+ requestedKeyFrame = true;
+ }
+ }
+
+ if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
+ requestKeyFrame(videoFrame.getTimestampNs());
+ }
+
+ // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
+ // subsampled at one byte per four pixels.
+ int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
+ EncodedImage.Builder builder = EncodedImage.builder()
+ .setCaptureTimeNs(videoFrame.getTimestampNs())
+ .setEncodedWidth(videoFrame.getBuffer().getWidth())
+ .setEncodedHeight(videoFrame.getBuffer().getHeight())
+ .setRotation(videoFrame.getRotation());
+ outputBuilders.offer(builder);
+
+ long presentationTimestampUs = nextPresentationTimestampUs;
+ // Round frame duration down to avoid bitrate overshoot.
+ long frameDurationUs =
+ (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps());
+ nextPresentationTimestampUs += frameDurationUs;
+
+ final VideoCodecStatus returnValue;
+ if (useSurfaceMode) {
+ returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs);
+ } else {
+ returnValue =
+ encodeByteBuffer(videoFrame, presentationTimestampUs, videoFrameBuffer, bufferSize);
+ }
+
+ // Check if the queue was successful.
+ if (returnValue != VideoCodecStatus.OK) {
+ // Keep the output builders in sync with buffers in the codec.
+ outputBuilders.pollLast();
+ }
+
+ return returnValue;
+ }
+
+ private VideoCodecStatus encodeTextureBuffer(
+ VideoFrame videoFrame, long presentationTimestampUs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ try {
+ // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+ // but it's a workaround for bug webrtc:5147.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // It is not necessary to release this frame because it doesn't own the buffer.
+ VideoFrame derotatedFrame =
+ new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
+ videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
+ textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "encodeTexture failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs,
+ VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
+ encodeThreadChecker.checkIsOnValidThread();
+ // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
+ int index;
+ try {
+ index = codec.dequeueInputBuffer(0 /* timeout */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueInputBuffer failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+
+ if (index == -1) {
+ // Encoder is falling behind. No input buffers available. Drop the frame.
+ Logging.d(TAG, "Dropped frame, no input buffers available");
+ return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
+ }
+
+ ByteBuffer buffer;
+ try {
+ buffer = codec.getInputBuffer(index);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ fillInputBuffer(buffer, videoFrameBuffer);
+
+ try {
+ codec.queueInputBuffer(
+ index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "queueInputBuffer failed", e);
+ // IllegalStateException thrown when the codec is in the wrong state.
+ return VideoCodecStatus.ERROR;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (framerate > MAX_VIDEO_FRAMERATE) {
+ framerate = MAX_VIDEO_FRAMERATE;
+ }
+ bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRates(RateControlParameters rcParameters) {
+ encodeThreadChecker.checkIsOnValidThread();
+ bitrateAdjuster.setTargets(rcParameters.bitrate.getSum(), rcParameters.framerateFps);
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public ScalingSettings getScalingSettings() {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (automaticResizeOn) {
+ if (codecType == VideoCodecMimeType.VP8) {
+ final int kLowVp8QpThreshold = 29;
+ final int kHighVp8QpThreshold = 95;
+ return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold);
+ } else if (codecType == VideoCodecMimeType.H264) {
+ final int kLowH264QpThreshold = 24;
+ final int kHighH264QpThreshold = 37;
+ return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
+ }
+ }
+ return ScalingSettings.OFF;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return codecName;
+ }
+
+ @Override
+ public EncoderInfo getEncoderInfo() {
+    // Since our MediaCodec only accepts 16-pixel-aligned frames, we set the alignment value to
+    // 16. Additionally, this encoder produces a single stream, so alignment does not need to be
+    // applied to all simulcast layers.
+ return new EncoderInfo(
+ /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT,
+ /* applyAlignmentToAllSimulcastLayers= */ false);
+ }
+
+ private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) {
+ encodeThreadChecker.checkIsOnValidThread();
+ VideoCodecStatus status = release();
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+
+ if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0
+ || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) {
+ Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned.");
+ return VideoCodecStatus.ERR_SIZE;
+ }
+ width = newWidth;
+ height = newHeight;
+ useSurfaceMode = newUseSurfaceMode;
+ return initEncodeInternal();
+ }
+
+ private boolean shouldForceKeyFrame(long presentationTimestampNs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
+ }
+
+ private void requestKeyFrame(long presentationTimestampNs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
+ // indicate this in queueInputBuffer() below and guarantee _this_ frame
+ // be encoded as a key frame, but sadly that flag is ignored. Instead,
+ // we request a key frame "soon".
+ try {
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ codec.setParameters(b);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "requestKeyFrame failed", e);
+ return;
+ }
+ lastKeyFrameNs = presentationTimestampNs;
+ }
+
+ private Thread createOutputThread() {
+ return new Thread() {
+ @Override
+ public void run() {
+ while (running) {
+ deliverEncodedImage();
+ }
+ releaseCodecOnOutputThread();
+ }
+ };
+ }
+
+ // Visible for testing.
+ protected void deliverEncodedImage() {
+ outputThreadChecker.checkIsOnValidThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
+ if (index < 0) {
+ if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ outputBuffersBusyCount.waitForZero();
+ }
+ return;
+ }
+
+ ByteBuffer codecOutputBuffer = codec.getOutputBuffer(index);
+ codecOutputBuffer.position(info.offset);
+ codecOutputBuffer.limit(info.offset + info.size);
+
+ if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
+ configBuffer = ByteBuffer.allocateDirect(info.size);
+ configBuffer.put(codecOutputBuffer);
+ } else {
+ bitrateAdjuster.reportEncodedFrame(info.size);
+ if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
+ updateBitrate();
+ }
+
+ final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+ if (isKeyFrame) {
+ Logging.d(TAG, "Sync frame generated");
+ }
+
+ final ByteBuffer frameBuffer;
+ if (isKeyFrame && codecType == VideoCodecMimeType.H264) {
+ Logging.d(TAG,
+ "Prepending config frame of size " + configBuffer.capacity()
+ + " to output buffer with offset " + info.offset + ", size " + info.size);
+ // For H.264 key frame prepend SPS and PPS NALs at the start.
+ frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
+ configBuffer.rewind();
+ frameBuffer.put(configBuffer);
+ frameBuffer.put(codecOutputBuffer);
+ frameBuffer.rewind();
+ } else {
+ frameBuffer = codecOutputBuffer.slice();
+ }
+
+ final EncodedImage.FrameType frameType = isKeyFrame
+ ? EncodedImage.FrameType.VideoFrameKey
+ : EncodedImage.FrameType.VideoFrameDelta;
+
+ outputBuffersBusyCount.increment();
+ EncodedImage.Builder builder = outputBuilders.poll();
+ EncodedImage encodedImage = builder
+ .setBuffer(frameBuffer,
+ () -> {
+ // This callback should not throw any exceptions since
+ // it may be called on an arbitrary thread.
+ // Check bug webrtc:11230 for more details.
+ try {
+ codec.releaseOutputBuffer(index, false);
+ } catch (Exception e) {
+ Logging.e(TAG, "releaseOutputBuffer failed", e);
+ }
+ outputBuffersBusyCount.decrement();
+ })
+ .setFrameType(frameType)
+ .createEncodedImage();
+ // TODO(mellem): Set codec-specific info.
+ callback.onEncodedFrame(encodedImage, new CodecSpecificInfo());
+ // Note that the callback may have retained the image.
+ encodedImage.release();
+ }
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "deliverOutput failed", e);
+ }
+ }
+
+ private void releaseCodecOnOutputThread() {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Releasing MediaCodec on output thread");
+ outputBuffersBusyCount.waitForZero();
+ try {
+ codec.stop();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder stop failed", e);
+ }
+ try {
+ codec.release();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder release failed", e);
+ // Propagate exceptions caught during release back to the main thread.
+ shutdownException = e;
+ }
+ configBuffer = null;
+ Logging.d(TAG, "Release on output thread done");
+ }
+
+ private VideoCodecStatus updateBitrate() {
+ outputThreadChecker.checkIsOnValidThread();
+ adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
+ try {
+ Bundle params = new Bundle();
+ params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
+ codec.setParameters(params);
+ return VideoCodecStatus.OK;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "updateBitrate failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ }
+
+ private boolean canUseSurface() {
+ return sharedContext != null && surfaceColorFormat != null;
+ }
+
+ private static int getStride(MediaFormat inputFormat, int width) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null
+ && inputFormat.containsKey(MediaFormat.KEY_STRIDE)) {
+ return inputFormat.getInteger(MediaFormat.KEY_STRIDE);
+ }
+ return width;
+ }
+
+ private static int getSliceHeight(MediaFormat inputFormat, int height) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null
+ && inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
+ return inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
+ }
+ return height;
+ }
+
+ // Visible for testing.
+ protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer videoFrameBuffer) {
+ yuvFormat.fillBuffer(buffer, videoFrameBuffer, stride, sliceHeight);
+ }
+
+ /**
+ * Enumeration of supported YUV color formats used for MediaCodec's input.
+ */
+ private enum YuvFormat {
+ I420 {
+ @Override
+ void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) {
+ /*
+         * According to the Android MediaCodec docs, the strides of the U and V planes can be
+         * calculated from the color format, but in practice they are undefined and depend on the
+         * device and release.
+         * <p/> Assuming that the width, height, dstStrideY and dstSliceHeightY are all even, it
+         * is safe to define the stride and slice height of the dst U/V planes as half of those
+         * of the dst Y plane.
+ */
+ int dstStrideU = dstStrideY / 2;
+ int dstSliceHeight = dstSliceHeightY / 2;
+ VideoFrame.I420Buffer i420 = srcBuffer.toI420();
+ YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+ i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(),
+ dstStrideY, dstSliceHeightY, dstStrideU, dstSliceHeight);
+ i420.release();
+ }
+ },
+ NV12 {
+ @Override
+ void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) {
+ VideoFrame.I420Buffer i420 = srcBuffer.toI420();
+ YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+ i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(),
+ dstStrideY, dstSliceHeightY);
+ i420.release();
+ }
+ };
+
+ abstract void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY);
+
+ static YuvFormat valueOf(int colorFormat) {
+ switch (colorFormat) {
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
+ return I420;
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
+ case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+ return NV12;
+ default:
+ throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
+ }
+ }
+ }
+}
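The presentation-timestamp pacing in encode() is easy to miss: timestamps are synthesized from the adjusted framerate rather than taken from the capture clock. A self-contained sketch of the same arithmetic, where the fixed framerate stands in for bitrateAdjuster.getAdjustedFramerateFps():

    import java.util.concurrent.TimeUnit;

    public class TimestampPacing {
      public static void main(String[] args) {
        double adjustedFramerateFps = 30.0; // Stand-in for the bitrate adjuster's value.
        long nextPresentationTimestampUs = 0;
        for (int frame = 0; frame < 3; frame++) {
          long presentationTimestampUs = nextPresentationTimestampUs;
          // Round the frame duration down, as encode() does, to avoid bitrate overshoot.
          long frameDurationUs =
              (long) (TimeUnit.SECONDS.toMicros(1) / adjustedFramerateFps);
          nextPresentationTimestampUs += frameDurationUs;
          // At 30 fps this prints pts 0, 33333, 66666 with a duration of 33333 us.
          System.out.println("frame " + frame + " pts=" + presentationTimestampUs
              + "us (duration " + frameDurationUs + "us)");
        }
      }
    }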
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java
new file mode 100644
index 0000000000..c1d2d61a71
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Class for holding the native pointer of a histogram. Since there is no way to destroy a
+ * histogram, please don't create unnecessary instances of this object. This class is thread safe.
+ *
+ * Usage example:
+ * private static final Histogram someMetricHistogram =
+ * Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50);
+ * someMetricHistogram.addSample(someVariable);
+ */
+class Histogram {
+ private final long handle;
+
+ private Histogram(long handle) {
+ this.handle = handle;
+ }
+
+  public static Histogram createCounts(String name, int min, int max, int bucketCount) {
+ return new Histogram(0);
+ }
+
+  public static Histogram createEnumeration(String name, int max) {
+ return new Histogram(0);
+ }
+
+ public void addSample(int sample) {
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java
new file mode 100644
index 0000000000..f391db61a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CalledByNative;
+import org.webrtc.Loggable;
+import org.webrtc.Logging.Severity;
+
+class JNILogging {
+ private final Loggable loggable;
+
+ public JNILogging(Loggable loggable) {
+ this.loggable = loggable;
+ }
+
+ @CalledByNative
+ public void logToInjectable(String message, Integer severity, String tag) {
+ loggable.onLogMessage(message, Severity.values()[severity], tag);
+ }
+}
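A sketch of the Loggable side of this bridge (a hypothetical class; in practice the loggable is usually installed through PeerConnectionFactory initialization options rather than by constructing JNILogging directly):

    import org.webrtc.Loggable;
    import org.webrtc.Logging.Severity;

    /** Example Loggable that forwards injected WebRTC log messages to logcat. */
    class LogcatLoggable implements Loggable {
      @Override
      public void onLogMessage(String message, Severity severity, String tag) {
        if (severity == Severity.LS_ERROR) {
          android.util.Log.e(tag, message);
        } else {
          android.util.Log.d(tag, message);
        }
      }
    }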
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java
new file mode 100644
index 0000000000..e1b2e513d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Class with static JNI helper functions that are used in many places. */
+public class JniCommon {
+ /** Functions to increment/decrement an rtc::RefCountInterface pointer. */
+ public static native void nativeAddRef(long refCountedPointer);
+ public static native void nativeReleaseRef(long refCountedPointer);
+
+ public static native ByteBuffer nativeAllocateByteBuffer(int size);
+ public static native void nativeFreeByteBuffer(ByteBuffer buffer);
+}
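A usage sketch (assumes the WebRTC native library is loaded; a buffer returned by nativeAllocateByteBuffer() is not garbage collected, so each allocation must be paired with a free):

    ByteBuffer buffer = JniCommon.nativeAllocateByteBuffer(1024);
    try {
      buffer.putInt(42); // Behaves like any direct ByteBuffer.
    } finally {
      JniCommon.nativeFreeByteBuffer(buffer);
    }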
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
new file mode 100644
index 0000000000..d5ccae9688
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Container class for static constants and helpers used with MediaCodec. */
+// We are forced to use the old API because we want to support API level < 21.
+@SuppressWarnings("deprecation")
+class MediaCodecUtils {
+ private static final String TAG = "MediaCodecUtils";
+
+ // Prefixes for supported hardware encoder/decoder component names.
+ static final String EXYNOS_PREFIX = "OMX.Exynos.";
+ static final String INTEL_PREFIX = "OMX.Intel.";
+ static final String NVIDIA_PREFIX = "OMX.Nvidia.";
+ static final String QCOM_PREFIX = "OMX.qcom.";
+ static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = {
+ "OMX.google.", "OMX.SEC.", "c2.android"};
+
+  // NV12 color formats supported by QCOM codecs, but not declared in MediaCodec -
+  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
+ static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+
+ // Color formats supported by hardware decoder - in order of preference.
+ static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+
+ // Color formats supported by hardware encoder - in order of preference.
+ static final int[] ENCODER_COLOR_FORMATS = {
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+
+ // Color formats supported by texture mode encoding - in order of preference.
+ static final int[] TEXTURE_COLOR_FORMATS =
+ new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
+
+ static @Nullable Integer selectColorFormat(
+ int[] supportedColorFormats, CodecCapabilities capabilities) {
+ for (int supportedColorFormat : supportedColorFormats) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ return codecColorFormat;
+ }
+ }
+ }
+ return null;
+ }
+
+ static boolean codecSupportsType(MediaCodecInfo info, VideoCodecMimeType type) {
+ for (String mimeType : info.getSupportedTypes()) {
+ if (type.mimeType().equals(mimeType)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static Map<String, String> getCodecProperties(VideoCodecMimeType type, boolean highProfile) {
+ switch (type) {
+ case VP8:
+ case VP9:
+ case AV1:
+ return new HashMap<String, String>();
+ case H264:
+ return H264Utils.getDefaultH264Params(highProfile);
+ default:
+ throw new IllegalArgumentException("Unsupported codec: " + type);
+ }
+ }
+
+ static boolean isHardwareAccelerated(MediaCodecInfo info) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isHardwareAcceleratedQOrHigher(info);
+ }
+ return !isSoftwareOnly(info);
+ }
+
+ @TargetApi(29)
+ private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isHardwareAccelerated();
+ }
+
+ static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isSoftwareOnlyQOrHigher(codecInfo);
+ }
+ String name = codecInfo.getName();
+ for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) {
+ if (name.startsWith(prefix)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @TargetApi(29)
+ private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isSoftwareOnly();
+ }
+
+ private MediaCodecUtils() {
+ // This class should not be instantiated.
+ }
+}
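These helpers combine roughly as follows when probing for a hardware encoder. This is a hypothetical sketch; since MediaCodecUtils and VideoCodecMimeType are package-private, it would have to live in the org.webrtc package:

    for (MediaCodecInfo info : new MediaCodecList(MediaCodecList.ALL_CODECS).getCodecInfos()) {
      if (!info.isEncoder() || !MediaCodecUtils.isHardwareAccelerated(info)
          || !MediaCodecUtils.codecSupportsType(info, VideoCodecMimeType.H264)) {
        continue;
      }
      // Pick the most preferred color format that the codec also supports.
      Integer colorFormat = MediaCodecUtils.selectColorFormat(
          MediaCodecUtils.ENCODER_COLOR_FORMATS,
          info.getCapabilitiesForType(VideoCodecMimeType.H264.mimeType()));
      Logging.d("Example", info.getName() + " -> color format " + colorFormat);
      break;
    }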
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
new file mode 100644
index 0000000000..bf591dda26
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
+import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Factory for decoders backed by Android MediaCodec API. */
+@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods.
+class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
+ private static final String TAG = "MediaCodecVideoDecoderFactory";
+
+ private final @Nullable EglBase.Context sharedContext;
+ private final @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate;
+
+ /**
+   * MediaCodecVideoDecoderFactory with support for codec filtering.
+   *
+   * @param sharedContext The textures generated will be accessible from this context. May be
+   *     null; this disables texture support.
+   * @param codecAllowedPredicate optional predicate to test whether a codec is allowed. All
+   *     codecs are allowed when no predicate is provided.
+ */
+ public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
+ @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) {
+ this.sharedContext = sharedContext;
+ this.codecAllowedPredicate = codecAllowedPredicate;
+ }
+
+ @Nullable
+ @Override
+ public VideoDecoder createDecoder(VideoCodecInfo codecType) {
+ VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName());
+ MediaCodecInfo info = findCodecForType(type);
+
+ if (info == null) {
+ return null;
+ }
+
+ CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
+ return new AndroidVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type,
+ MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
+ sharedContext);
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
+    // Generate a list of supported codecs in order of preference:
+    // VP8, VP9, H264 (high profile), H264 (baseline profile) and AV1.
+ for (VideoCodecMimeType type : new VideoCodecMimeType[] {VideoCodecMimeType.VP8,
+ VideoCodecMimeType.VP9, VideoCodecMimeType.H264, VideoCodecMimeType.AV1}) {
+ MediaCodecInfo codec = findCodecForType(type);
+ if (codec != null) {
+ String name = type.name();
+ if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
+ }
+
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
+ }
+ }
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+
+ private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = null;
+ try {
+ info = MediaCodecList.getCodecInfoAt(i);
+ } catch (IllegalArgumentException e) {
+ Logging.e(TAG, "Cannot retrieve decoder codec info", e);
+ }
+
+ if (info == null || info.isEncoder()) {
+ continue;
+ }
+
+ if (isSupportedCodec(info, type)) {
+ return info;
+ }
+ }
+
+ return null; // No support for this type.
+ }
+
+  // Returns true if the given MediaCodecInfo indicates a supported decoder for the given type.
+ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) {
+ if (!MediaCodecUtils.codecSupportsType(info, type)) {
+ return false;
+ }
+ // Check for a supported color format.
+ if (MediaCodecUtils.selectColorFormat(
+ MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
+ == null) {
+ return false;
+ }
+ return isCodecAllowed(info);
+ }
+
+ private boolean isCodecAllowed(MediaCodecInfo info) {
+ if (codecAllowedPredicate == null) {
+ return true;
+ }
+ return codecAllowedPredicate.test(info);
+ }
+
+ private boolean isH264HighProfileSupported(MediaCodecInfo info) {
+ String name = info.getName();
+ // Support H.264 HP decoding on QCOM chips.
+ if (name.startsWith(QCOM_PREFIX)) {
+ return true;
+ }
+ // Support H.264 HP decoding on Exynos chips for Android M and above.
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && name.startsWith(EXYNOS_PREFIX)) {
+ return true;
+ }
+ return false;
+ }
+}
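A construction sketch showing the codec-filtering hook (hypothetical caller code; org.webrtc.Predicate is a single-method interface, so a lambda works, and since the factory class is package-private this too belongs in org.webrtc):

    EglBase eglBase = EglBase.create();
    VideoDecoderFactory factory = new MediaCodecVideoDecoderFactory(
        eglBase.getEglBaseContext(),
        /* codecAllowedPredicate= */ info -> MediaCodecUtils.isHardwareAccelerated(info));
    for (VideoCodecInfo codec : factory.getSupportedCodecs()) {
      Logging.d("Example", "Decoder available: " + codec.getName());
    }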
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java
new file mode 100644
index 0000000000..60c853df35
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import java.nio.ByteBuffer;
+
+/**
+ * Subset of methods defined in {@link android.media.MediaCodec} needed by
+ * {@link HardwareVideoEncoder} and {@link AndroidVideoDecoder}. This interface
+ * exists to allow mocking and using a fake implementation in tests.
+ */
+interface MediaCodecWrapper {
+ void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags);
+
+ void start();
+
+ void flush();
+
+ void stop();
+
+ void release();
+
+ int dequeueInputBuffer(long timeoutUs);
+
+ void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags);
+
+ int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs);
+
+ void releaseOutputBuffer(int index, boolean render);
+
+ MediaFormat getInputFormat();
+
+ MediaFormat getOutputFormat();
+
+ ByteBuffer getInputBuffer(int index);
+
+ ByteBuffer getOutputBuffer(int index);
+
+ Surface createInputSurface();
+
+ void setParameters(Bundle params);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java
new file mode 100644
index 0000000000..2962cb62a7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.io.IOException;
+
+interface MediaCodecWrapperFactory {
+ /**
+ * Creates a new {@link MediaCodecWrapper} by codec name.
+ *
+ * <p>For additional information see {@link android.media.MediaCodec#createByCodecName}.
+ */
+ MediaCodecWrapper createByCodecName(String name) throws IOException;
+}
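Because the factory is a single-method interface, tests can swap in a fake with a lambda. A sketch (assumes Mockito on the test classpath; a hand-written MediaCodecWrapper fake would work the same way):

    MediaCodecWrapper fakeCodec = org.mockito.Mockito.mock(MediaCodecWrapper.class);
    MediaCodecWrapperFactory fakeFactory = name -> fakeCodec;
    // An encoder or decoder built with fakeFactory never touches a real MediaCodec.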
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
new file mode 100644
index 0000000000..2ba62ac7d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Implementation of MediaCodecWrapperFactory that returns {@link MediaCodecWrapper} instances
+ * wrapping {@link android.media.MediaCodec} objects.
+ */
+class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
+ private static class MediaCodecWrapperImpl implements MediaCodecWrapper {
+ private final MediaCodec mediaCodec;
+
+ public MediaCodecWrapperImpl(MediaCodec mediaCodec) {
+ this.mediaCodec = mediaCodec;
+ }
+
+ @Override
+ public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
+ mediaCodec.configure(format, surface, crypto, flags);
+ }
+
+ @Override
+ public void start() {
+ mediaCodec.start();
+ }
+
+ @Override
+ public void flush() {
+ mediaCodec.flush();
+ }
+
+ @Override
+ public void stop() {
+ mediaCodec.stop();
+ }
+
+ @Override
+ public void release() {
+ mediaCodec.release();
+ }
+
+ @Override
+ public int dequeueInputBuffer(long timeoutUs) {
+ return mediaCodec.dequeueInputBuffer(timeoutUs);
+ }
+
+ @Override
+ public void queueInputBuffer(
+ int index, int offset, int size, long presentationTimeUs, int flags) {
+ mediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
+ }
+
+ @Override
+ public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) {
+ return mediaCodec.dequeueOutputBuffer(info, timeoutUs);
+ }
+
+ @Override
+ public void releaseOutputBuffer(int index, boolean render) {
+ mediaCodec.releaseOutputBuffer(index, render);
+ }
+
+ @Override
+ public MediaFormat getInputFormat() {
+ return mediaCodec.getInputFormat();
+ }
+
+ @Override
+ public MediaFormat getOutputFormat() {
+ return mediaCodec.getOutputFormat();
+ }
+
+ @Override
+ public ByteBuffer getInputBuffer(int index) {
+ return mediaCodec.getInputBuffer(index);
+ }
+
+ @Override
+ public ByteBuffer getOutputBuffer(int index) {
+ return mediaCodec.getOutputBuffer(index);
+ }
+
+ @Override
+ public Surface createInputSurface() {
+ return mediaCodec.createInputSurface();
+ }
+
+ @Override
+ public void setParameters(Bundle params) {
+ mediaCodec.setParameters(params);
+ }
+ }
+
+ @Override
+ public MediaCodecWrapper createByCodecName(String name) throws IOException {
+ return new MediaCodecWrapperImpl(MediaCodec.createByCodecName(name));
+ }
+}
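Production-path usage is a one-liner; the codec name below is only a placeholder, since real names come from MediaCodecList:

    try {
      MediaCodecWrapper codec =
          new MediaCodecWrapperFactoryImpl().createByCodecName("OMX.google.h264.encoder");
      // configure()/start() the wrapper exactly as one would a raw MediaCodec.
    } catch (IOException e) {
      // The underlying MediaCodec.createByCodecName() call failed.
    }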
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java
new file mode 100644
index 0000000000..fe0221d826
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+public class NV12Buffer implements VideoFrame.Buffer {
+ private final int width;
+ private final int height;
+ private final int stride;
+ private final int sliceHeight;
+ private final ByteBuffer buffer;
+ private final RefCountDelegate refCountDelegate;
+
+ public NV12Buffer(int width, int height, int stride, int sliceHeight, ByteBuffer buffer,
+ @Nullable Runnable releaseCallback) {
+ this.width = width;
+ this.height = height;
+ this.stride = stride;
+ this.sliceHeight = sliceHeight;
+ this.buffer = buffer;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+    // cropAndScale() without cropping or scaling returns an I420 copy of the whole frame.
+    return (VideoFrame.I420Buffer) cropAndScale(0, 0, width, height, width, height);
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+ nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, buffer, width,
+ height, stride, sliceHeight, newBuffer.getDataY(), newBuffer.getStrideY(),
+ newBuffer.getDataU(), newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
+ return newBuffer;
+ }
+
+ private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+ int scaleWidth, int scaleHeight, ByteBuffer src, int srcWidth, int srcHeight, int srcStride,
+ int srcSliceHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, int dstStrideU,
+ ByteBuffer dstV, int dstStrideV);
+}
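A decode-loop sketch of how this buffer is meant to be used: the release callback returns the MediaCodec output buffer only once every consumer has released the frame. The names codec, index, the geometry values and timestampNs are assumed to come from a loop like the one in AndroidVideoDecoder:

    ByteBuffer payload = codec.getOutputBuffer(index);
    VideoFrame.Buffer nv12 = new NV12Buffer(width, height, stride, sliceHeight, payload,
        /* releaseCallback= */ () -> codec.releaseOutputBuffer(index, /* render= */ false));
    VideoFrame frame = new VideoFrame(nv12, /* rotation= */ 0, timestampNs);
    // Deliver the frame; any consumer that retain()s it keeps the codec buffer alive.
    frame.release();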
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java
new file mode 100644
index 0000000000..0fb1afe74b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+public class NV21Buffer implements VideoFrame.Buffer {
+ private final byte[] data;
+ private final int width;
+ private final int height;
+ private final RefCountDelegate refCountDelegate;
+
+ public NV21Buffer(byte[] data, int width, int height, @Nullable Runnable releaseCallback) {
+ this.data = data;
+ this.width = width;
+ this.height = height;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ // Cropping converts the frame to I420. Just crop and scale to the whole image.
+ return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
+ height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+ nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
+ height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
+ newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
+ return newBuffer;
+ }
+
+ private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+ int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
+ int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
+}
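NV21 is the default android.hardware.Camera (Camera1) preview format, so a preview callback array can be wrapped without copying. A sketch, with data, width, height and camera assumed from the onPreviewFrame() callback:

    VideoFrame.Buffer nv21 = new NV21Buffer(data, width, height,
        /* releaseCallback= */ () -> camera.addCallbackBuffer(data));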
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java
new file mode 100644
index 0000000000..d4fba481e8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoProcessor;
+
+/**
+ * This class is meant to be a simple layer that only handles the JNI wrapping of a C++
+ * AndroidVideoTrackSource, so that it can easily be mocked out in Java unit tests. Refrain from
+ * adding any unnecessary logic to this class.
+ * This class is thread safe and methods can be called from any thread, but if frames A, B, ...
+ * are sent to adaptFrame(), the adapted frames adaptedA, adaptedB, ... need to be passed in the
+ * same order to onFrameCaptured().
+ */
+class NativeAndroidVideoTrackSource {
+ // Pointer to webrtc::jni::AndroidVideoTrackSource.
+ private final long nativeAndroidVideoTrackSource;
+
+ public NativeAndroidVideoTrackSource(long nativeAndroidVideoTrackSource) {
+ this.nativeAndroidVideoTrackSource = nativeAndroidVideoTrackSource;
+ }
+
+ /**
+ * Set the state for the native MediaSourceInterface. Maps boolean to either
+ * SourceState::kLive or SourceState::kEnded.
+ */
+ public void setState(boolean isLive) {
+ nativeSetState(nativeAndroidVideoTrackSource, isLive);
+ }
+
+ /**
+ * This function should be called before delivering any frame to determine if the frame should be
+ * dropped or what the cropping and scaling parameters should be. If the return value is null, the
+ * frame should be dropped, otherwise the frame should be adapted in accordance to the frame
+ * adaptation parameters before calling onFrameCaptured().
+ */
+ @Nullable
+ public VideoProcessor.FrameAdaptationParameters adaptFrame(VideoFrame frame) {
+ return nativeAdaptFrame(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(),
+ frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs());
+ }
+
+ /**
+ * Pass an adapted frame to the native AndroidVideoTrackSource. Note that adaptFrame() is
+ * expected to be called first and that the passed frame conforms to those parameters.
+ */
+ public void onFrameCaptured(VideoFrame frame) {
+ nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getRotation(),
+ frame.getTimestampNs(), frame.getBuffer());
+ }
+
+ /**
+ * Calling this function will cause frames to be scaled down to the requested resolution. Also,
+ * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
+ * the requested fps.
+ */
+ public void adaptOutputFormat(VideoSource.AspectRatio targetLandscapeAspectRatio,
+ @Nullable Integer maxLandscapePixelCount, VideoSource.AspectRatio targetPortraitAspectRatio,
+ @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+ nativeAdaptOutputFormat(nativeAndroidVideoTrackSource, targetLandscapeAspectRatio.width,
+ targetLandscapeAspectRatio.height, maxLandscapePixelCount, targetPortraitAspectRatio.width,
+ targetPortraitAspectRatio.height, maxPortraitPixelCount, maxFps);
+ }
+
+ public void setIsScreencast(boolean isScreencast) {
+ nativeSetIsScreencast(nativeAndroidVideoTrackSource, isScreencast);
+ }
+
+ @CalledByNative
+ static VideoProcessor.FrameAdaptationParameters createFrameAdaptationParameters(int cropX,
+ int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight, long timestampNs,
+ boolean drop) {
+ return new VideoProcessor.FrameAdaptationParameters(
+ cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, timestampNs, drop);
+ }
+
+ private static native void nativeSetIsScreencast(
+ long nativeAndroidVideoTrackSource, boolean isScreencast);
+ private static native void nativeSetState(long nativeAndroidVideoTrackSource, boolean isLive);
+ private static native void nativeAdaptOutputFormat(long nativeAndroidVideoTrackSource,
+ int landscapeWidth, int landscapeHeight, @Nullable Integer maxLandscapePixelCount,
+ int portraitWidth, int portraitHeight, @Nullable Integer maxPortraitPixelCount,
+ @Nullable Integer maxFps);
+ @Nullable
+ private static native VideoProcessor.FrameAdaptationParameters nativeAdaptFrame(
+ long nativeAndroidVideoTrackSource, int width, int height, int rotation, long timestampNs);
+ private static native void nativeOnFrameCaptured(
+ long nativeAndroidVideoTrackSource, int rotation, long timestampNs, VideoFrame.Buffer buffer);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java
new file mode 100644
index 0000000000..c195fb3a4c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoFrame;
+
+/**
+ * Used from the native API and implements a simple CapturerObserver that feeds frames to
+ * a webrtc::jni::AndroidVideoTrackSource.
+ */
+class NativeCapturerObserver implements CapturerObserver {
+ private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
+
+ @CalledByNative
+ public NativeCapturerObserver(long nativeSource) {
+ this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
+ }
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeAndroidVideoTrackSource.setState(success);
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
+ }
+
+ @Override
+ public void onFrameCaptured(VideoFrame frame) {
+ final VideoProcessor.FrameAdaptationParameters parameters =
+ nativeAndroidVideoTrackSource.adaptFrame(frame);
+ if (parameters == null) {
+ // Drop frame.
+ return;
+ }
+
+ final VideoFrame.Buffer adaptedBuffer =
+ frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
+ parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
+ nativeAndroidVideoTrackSource.onFrameCaptured(
+ new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs));
+ adaptedBuffer.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java
new file mode 100644
index 0000000000..531c216302
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+class NativeLibrary {
+ private static final String TAG = "NativeLibrary";
+
+ static class DefaultLoader implements NativeLibraryLoader {
+ @Override
+ public boolean load(String name) {
+ Logging.d(TAG, "Loading library: " + name);
+ System.loadLibrary(name);
+
+ // System.loadLibrary() either succeeds or throws, so the return value is
+ // kept only for NativeLibraryLoader API compatibility.
+ return true;
+ }
+ }
+
+ private static Object lock = new Object();
+ private static boolean libraryLoaded;
+
+ /**
+ * Loads the native library. Clients should call PeerConnectionFactory.initialize. It will call
+ * this method for them.
+ */
+ static void initialize(NativeLibraryLoader loader, String libraryName) {
+ synchronized (lock) {
+ if (libraryLoaded) {
+ Logging.d(TAG, "Native library has already been loaded.");
+ return;
+ }
+ Logging.d(TAG, "Loading native library: " + libraryName);
+ libraryLoaded = loader.load(libraryName);
+ }
+ }
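+
+ // A minimal sketch of plugging in a custom loader. Clients do not call this
+ // class directly; the loader is assumed to be handed to
+ // PeerConnectionFactory.InitializationOptions when initializing the factory:
+ //
+ //   NativeLibraryLoader loggingLoader = name -> {
+ //     Logging.d("App", "Loading " + name);
+ //     System.loadLibrary(name);
+ //     return true;
+ //   };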
+
+ /** Returns true if the library has been loaded successfully. */
+ static boolean isLoaded() {
+ synchronized (lock) {
+ return libraryLoaded;
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java
new file mode 100644
index 0000000000..b9210d26a4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Implementation of RefCounted that executes a Runnable once the ref count reaches zero.
+ */
+class RefCountDelegate implements RefCounted {
+ private final AtomicInteger refCount = new AtomicInteger(1);
+ private final @Nullable Runnable releaseCallback;
+
+ /**
+ * @param releaseCallback Callback that will be executed once the ref count reaches zero.
+ */
+ public RefCountDelegate(@Nullable Runnable releaseCallback) {
+ this.releaseCallback = releaseCallback;
+ }
+
+ @Override
+ public void retain() {
+ int updated_count = refCount.incrementAndGet();
+ if (updated_count < 2) {
+ throw new IllegalStateException("retain() called on an object with refcount < 1");
+ }
+ }
+
+ @Override
+ public void release() {
+ int updated_count = refCount.decrementAndGet();
+ if (updated_count < 0) {
+ throw new IllegalStateException("release() called on an object with refcount < 1");
+ }
+ if (updated_count == 0 && releaseCallback != null) {
+ releaseCallback.run();
+ }
+ }
+
+ /**
+ * Tries to retain the object. Can be used in scenarios where it is unknown if the object has
+ * already been released. Returns true if successful or false if the object was already released.
+ */
+ boolean safeRetain() {
+ int currentRefCount = refCount.get();
+ while (currentRefCount != 0) {
+ if (refCount.weakCompareAndSet(currentRefCount, currentRefCount + 1)) {
+ return true;
+ }
+ currentRefCount = refCount.get();
+ }
+ return false;
+ }
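+
+ // A minimal usage sketch: tie a native resource to the ref count. The names
+ // `nativeHandle` and `nativeFree` are hypothetical, for illustration only:
+ //
+ //   RefCountDelegate refs = new RefCountDelegate(() -> nativeFree(nativeHandle));
+ //   refs.retain();  // ref count 1 -> 2
+ //   refs.release(); // ref count 2 -> 1
+ //   refs.release(); // ref count 1 -> 0: nativeFree runs exactly once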
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
new file mode 100644
index 0000000000..26a030919d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Enumeration of supported video codec types. */
+enum VideoCodecMimeType {
+ VP8("video/x-vnd.on2.vp8"),
+ VP9("video/x-vnd.on2.vp9"),
+ H264("video/avc"),
+ AV1("video/av01");
+
+ private final String mimeType;
+
+ private VideoCodecMimeType(String mimeType) {
+ this.mimeType = mimeType;
+ }
+
+ String mimeType() {
+ return mimeType;
+ }
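+
+ // Illustrative only: these are the MIME strings handed to MediaCodec-based
+ // codecs, e.g. (assuming the device has a codec for the given type):
+ //
+ //   MediaFormat format = MediaFormat.createVideoFormat(
+ //       VideoCodecMimeType.VP8.mimeType(), /* width= */ 640, /* height= */ 480);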
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java
new file mode 100644
index 0000000000..2aae041640
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoDecoder;
+
+/**
+ * This class contains the Java glue code for JNI generation of VideoDecoder.
+ */
+class VideoDecoderWrapper {
+ @CalledByNative
+ static VideoDecoder.Callback createDecoderCallback(final long nativeDecoder) {
+ return (VideoFrame frame, Integer decodeTimeMs,
+ Integer qp) -> nativeOnDecodedFrame(nativeDecoder, frame, decodeTimeMs, qp);
+ }
+
+ private static native void nativeOnDecodedFrame(
+ long nativeVideoDecoderWrapper, VideoFrame frame, Integer decodeTimeMs, Integer qp);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
new file mode 100644
index 0000000000..b5485d4edb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+// Explicit imports necessary for JNI generation.
+import androidx.annotation.Nullable;
+import org.webrtc.VideoEncoder;
+
+/**
+ * This class contains the Java glue code for JNI generation of VideoEncoder.
+ */
+class VideoEncoderWrapper {
+ @CalledByNative
+ static boolean getScalingSettingsOn(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.on;
+ }
+
+ @Nullable
+ @CalledByNative
+ static Integer getScalingSettingsLow(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.low;
+ }
+
+ @Nullable
+ @CalledByNative
+ static Integer getScalingSettingsHigh(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.high;
+ }
+
+ @CalledByNative
+ static VideoEncoder.Callback createEncoderCallback(final long nativeEncoder) {
+ return (EncodedImage frame,
+ VideoEncoder.CodecSpecificInfo info) -> nativeOnEncodedFrame(nativeEncoder, frame);
+ }
+
+ private static native void nativeOnEncodedFrame(
+ long nativeVideoEncoderWrapper, EncodedImage frame);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java
new file mode 100644
index 0000000000..023e92cfb1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * This class provides a ClassLoader that is capable of loading WebRTC Java classes regardless of
+ * what thread it's called from. Such a ClassLoader is needed for the few cases where the JNI
+ * mechanism is unable to automatically determine the appropriate ClassLoader instance.
+ */
+class WebRtcClassLoader {
+ @CalledByNative
+ static Object getClassLoader() {
+ Object loader = WebRtcClassLoader.class.getClassLoader();
+ if (loader == null) {
+ throw new RuntimeException("Failed to get WebRTC class loader.");
+ }
+ return loader;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java
new file mode 100644
index 0000000000..0461660fcf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/**
+ * This class wraps a webrtc::I420BufferInterface into a VideoFrame.I420Buffer.
+ */
+class WrappedNativeI420Buffer implements VideoFrame.I420Buffer {
+ private final int width;
+ private final int height;
+ private final ByteBuffer dataY;
+ private final int strideY;
+ private final ByteBuffer dataU;
+ private final int strideU;
+ private final ByteBuffer dataV;
+ private final int strideV;
+ private final long nativeBuffer;
+
+ @CalledByNative
+ WrappedNativeI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
+ int strideU, ByteBuffer dataV, int strideV, long nativeBuffer) {
+ this.width = width;
+ this.height = height;
+ this.dataY = dataY;
+ this.strideY = strideY;
+ this.dataU = dataU;
+ this.strideU = strideU;
+ this.dataV = dataV;
+ this.strideV = strideV;
+ this.nativeBuffer = nativeBuffer;
+
+ retain();
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public ByteBuffer getDataY() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataY.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataU() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataU.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataV() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataV.slice();
+ }
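+
+ // Note on the slice() calls above: ByteBuffer's relative get() methods advance
+ // the buffer's position, so handing out the shared instance would let one
+ // caller's reads disturb another's view. Each slice() shares the same memory
+ // but has an independent position, limit, and mark.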
+
+ @Override
+ public int getStrideY() {
+ return strideY;
+ }
+
+ @Override
+ public int getStrideU() {
+ return strideU;
+ }
+
+ @Override
+ public int getStrideV() {
+ return strideV;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ retain();
+ return this;
+ }
+
+ @Override
+ public void retain() {
+ JniCommon.nativeAddRef(nativeBuffer);
+ }
+
+ @Override
+ public void release() {
+ JniCommon.nativeReleaseRef(nativeBuffer);
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ return JavaI420Buffer.cropAndScaleI420(
+ this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java
new file mode 100644
index 0000000000..70c625ab4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.AudioTrack;
+import android.os.Build;
+import org.webrtc.Logging;
+
+// Lowers the buffer size if no underruns are detected for 100 ms. Once an
+// underrun is detected, the buffer size is increased by 10 ms and it will not
+// be lowered further. The buffer size will never be increased more than
+// 5 times, to avoid the possibility of the buffer size increasing without
+// bounds.
+class LowLatencyAudioBufferManager {
+ private static final String TAG = "LowLatencyAudioBufferManager";
+ // The underrun count that was valid during the previous call to maybeAdjustBufferSize(). Used to
+ // detect increases in the value.
+ private int prevUnderrunCount;
+ // The number of ticks to wait without an underrun before decreasing the buffer size.
+ private int ticksUntilNextDecrease;
+ // Indicate if we should continue to decrease the buffer size.
+ private boolean keepLoweringBufferSize;
+ // The number of times the buffer size has been increased.
+ private int bufferIncreaseCounter;
+
+ public LowLatencyAudioBufferManager() {
+ this.prevUnderrunCount = 0;
+ this.ticksUntilNextDecrease = 10;
+ this.keepLoweringBufferSize = true;
+ this.bufferIncreaseCounter = 0;
+ }
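+
+ // A minimal usage sketch: the AudioTrack owner calls maybeAdjustBufferSize()
+ // once per 10 ms playout tick (so ticksUntilNextDecrease = 10 corresponds to
+ // the 100 ms window mentioned above):
+ //
+ //   LowLatencyAudioBufferManager bufferManager = new LowLatencyAudioBufferManager();
+ //   // In the 10 ms playout loop:
+ //   bufferManager.maybeAdjustBufferSize(audioTrack);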
+
+ public void maybeAdjustBufferSize(AudioTrack audioTrack) {
+ if (Build.VERSION.SDK_INT >= 26) {
+ final int underrunCount = audioTrack.getUnderrunCount();
+ if (underrunCount > prevUnderrunCount) {
+ // Don't increase buffer more than 5 times. Continuing to increase the buffer size
+ // could be harmful on low-power devices that regularly experience underruns under
+ // normal conditions.
+ if (bufferIncreaseCounter < 5) {
+ // Underrun detected, increase buffer size by 10ms.
+ final int currentBufferSize = audioTrack.getBufferSizeInFrames();
+ final int newBufferSize = currentBufferSize + audioTrack.getPlaybackRate() / 100;
+ Logging.d(TAG,
+ "Underrun detected! Increasing AudioTrack buffer size from " + currentBufferSize
+ + " to " + newBufferSize);
+ audioTrack.setBufferSizeInFrames(newBufferSize);
+ bufferIncreaseCounter++;
+ }
+ // Stop trying to lower the buffer size.
+ keepLoweringBufferSize = false;
+ prevUnderrunCount = underrunCount;
+ ticksUntilNextDecrease = 10;
+ } else if (keepLoweringBufferSize) {
+ ticksUntilNextDecrease--;
+ if (ticksUntilNextDecrease <= 0) {
+ // No underrun seen for 100 ms, try to lower the buffer size by 10ms.
+ final int bufferSize10ms = audioTrack.getPlaybackRate() / 100;
+ // Never go below a buffer size of 10ms.
+ final int currentBufferSize = audioTrack.getBufferSizeInFrames();
+ final int newBufferSize = Math.max(bufferSize10ms, currentBufferSize - bufferSize10ms);
+ if (newBufferSize != currentBufferSize) {
+ Logging.d(TAG,
+ "Lowering AudioTrack buffer size from " + currentBufferSize + " to "
+ + newBufferSize);
+ audioTrack.setBufferSizeInFrames(newBufferSize);
+ }
+ ticksUntilNextDecrease = 10;
+ }
+ }
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java
new file mode 100644
index 0000000000..06d5cd3a8e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.AudioManager;
+import androidx.annotation.Nullable;
+import java.util.Timer;
+import java.util.TimerTask;
+import org.webrtc.Logging;
+
+// TODO(magjed): Do we really need to spawn a new thread just to log volume? Can we re-use the
+// AudioTrackThread instead?
+/**
+ * Private utility class that periodically checks and logs the volume level of the audio stream
+ * currently controlled by the volume controls. A timer triggers a log entry once every 30 seconds;
+ * the timer's associated thread is named "WebRtcVolumeLevelLoggerThread".
+ */
+class VolumeLogger {
+ private static final String TAG = "VolumeLogger";
+ private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread";
+ private static final int TIMER_PERIOD_IN_SECONDS = 30;
+
+ private final AudioManager audioManager;
+ private @Nullable Timer timer;
+
+ public VolumeLogger(AudioManager audioManager) {
+ this.audioManager = audioManager;
+ }
+
+ public void start() {
+ Logging.d(TAG, "start" + WebRtcAudioUtils.getThreadInfo());
+ if (timer != null) {
+ return;
+ }
+ Logging.d(TAG, "audio mode is: " + WebRtcAudioUtils.modeToString(audioManager.getMode()));
+
+ timer = new Timer(THREAD_NAME);
+ timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
+ audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
+ 0, TIMER_PERIOD_IN_SECONDS * 1000);
+ }
+
+ private class LogVolumeTask extends TimerTask {
+ private final int maxRingVolume;
+ private final int maxVoiceCallVolume;
+
+ LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) {
+ this.maxRingVolume = maxRingVolume;
+ this.maxVoiceCallVolume = maxVoiceCallVolume;
+ }
+
+ @Override
+ public void run() {
+ final int mode = audioManager.getMode();
+ if (mode == AudioManager.MODE_RINGTONE) {
+ Logging.d(TAG,
+ "STREAM_RING stream volume: " + audioManager.getStreamVolume(AudioManager.STREAM_RING)
+ + " (max=" + maxRingVolume + ")");
+ } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
+ Logging.d(TAG,
+ "VOICE_CALL stream volume: "
+ + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
+ + " (max=" + maxVoiceCallVolume + ")");
+ }
+ }
+ }
+
+ public void stop() {
+ Logging.d(TAG, "stop" + WebRtcAudioUtils.getThreadInfo());
+ if (timer != null) {
+ timer.cancel();
+ timer = null;
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
new file mode 100644
index 0000000000..a9ff1011b6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -0,0 +1,227 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.audiofx.AcousticEchoCanceler;
+import android.media.audiofx.AudioEffect;
+import android.media.audiofx.AudioEffect.Descriptor;
+import android.media.audiofx.NoiseSuppressor;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.UUID;
+import org.webrtc.Logging;
+
+// This class wraps control of two platform audio effects:
+// AcousticEchoCanceler (AEC) and NoiseSuppressor (NS).
+// Calling enable() will activate each effect that is supported by the
+// device and whose corresponding `shouldEnableXXX` member is set.
+class WebRtcAudioEffects {
+ private static final boolean DEBUG = false;
+
+ private static final String TAG = "WebRtcAudioEffectsExternal";
+
+ // UUIDs for Software Audio Effects that we want to avoid using.
+ // The implementor field will be set to "The Android Open Source Project".
+ private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER =
+ UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");
+ private static final UUID AOSP_NOISE_SUPPRESSOR =
+ UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b");
+
+ // Contains the available effect descriptors returned from the
+ // AudioEffect.getEffects() call. This result is cached to avoid doing the
+ // slow OS call multiple times.
+ private static @Nullable Descriptor[] cachedEffects;
+
+ // Contains the audio effect objects. Created in enable() and destroyed
+ // in release().
+ private @Nullable AcousticEchoCanceler aec;
+ private @Nullable NoiseSuppressor ns;
+
+ // Affects the final state given to the setEnabled() method on each effect.
+ // The default state is set to "disabled" but each effect can also be enabled
+ // by calling setAEC() and setNS().
+ private boolean shouldEnableAec;
+ private boolean shouldEnableNs;
+
+ // Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
+ // fulfilled.
+ public static boolean isAcousticEchoCancelerSupported() {
+ return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
+ }
+
+ // Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
+ public static boolean isNoiseSuppressorSupported() {
+ return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
+ }
+
+ public WebRtcAudioEffects() {
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ // Call this method to enable or disable the platform AEC. It modifies
+ // `shouldEnableAec` which is used in enable() where the actual state
+ // of the AEC effect is modified. Returns true if HW AEC is supported and
+ // false otherwise.
+ public boolean setAEC(boolean enable) {
+ Logging.d(TAG, "setAEC(" + enable + ")");
+ if (!isAcousticEchoCancelerSupported()) {
+ Logging.w(TAG, "Platform AEC is not supported");
+ shouldEnableAec = false;
+ return false;
+ }
+ if (aec != null && (enable != shouldEnableAec)) {
+ Logging.e(TAG, "Platform AEC state can't be modified while recording");
+ return false;
+ }
+ shouldEnableAec = enable;
+ return true;
+ }
+
+ // Call this method to enable or disable the platform NS. It modifies
+ // `shouldEnableNs` which is used in enable() where the actual state
+ // of the NS effect is modified. Returns true if HW NS is supported and
+ // false otherwise.
+ public boolean setNS(boolean enable) {
+ Logging.d(TAG, "setNS(" + enable + ")");
+ if (!isNoiseSuppressorSupported()) {
+ Logging.w(TAG, "Platform NS is not supported");
+ shouldEnableNs = false;
+ return false;
+ }
+ if (ns != null && (enable != shouldEnableNs)) {
+ Logging.e(TAG, "Platform NS state can't be modified while recording");
+ return false;
+ }
+ shouldEnableNs = enable;
+ return true;
+ }
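+
+ // A minimal sketch of the expected call order, mirroring how WebRtcAudioRecord
+ // drives this class: configure the effects, enable them on a live audio
+ // session, and release them when recording stops.
+ //
+ //   WebRtcAudioEffects effects = new WebRtcAudioEffects();
+ //   effects.setAEC(true); // Returns false if the platform AEC is unsupported.
+ //   effects.setNS(true);
+ //   effects.enable(audioRecord.getAudioSessionId());
+ //   ...
+ //   effects.release();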
+
+ public void enable(int audioSession) {
+ Logging.d(TAG, "enable(audioSession=" + audioSession + ")");
+ assertTrue(aec == null);
+ assertTrue(ns == null);
+
+ if (DEBUG) {
+ // Log the supported effects, restricted to the "VoIP effects", i.e.,
+ // AEC and NS. Avoid calling AudioEffect.queryEffects() unless the
+ // DEBUG flag is set since we have seen crashes in this API.
+ for (Descriptor d : AudioEffect.queryEffects()) {
+ if (effectTypeIsVoIP(d.type)) {
+ Logging.d(TAG,
+ "name: " + d.name + ", "
+ + "mode: " + d.connectMode + ", "
+ + "implementor: " + d.implementor + ", "
+ + "UUID: " + d.uuid);
+ }
+ }
+ }
+
+ if (isAcousticEchoCancelerSupported()) {
+ // Create an AcousticEchoCanceler and attach it to the AudioRecord on
+ // the specified audio session.
+ aec = AcousticEchoCanceler.create(audioSession);
+ if (aec != null) {
+ boolean enabled = aec.getEnabled();
+ boolean enable = shouldEnableAec && isAcousticEchoCancelerSupported();
+ if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
+ Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
+ }
+ Logging.d(TAG,
+ "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+ + enable + ", is now: " + (aec.getEnabled() ? "enabled" : "disabled"));
+ } else {
+ Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance");
+ }
+ }
+
+ if (isNoiseSuppressorSupported()) {
+ // Create a NoiseSuppressor and attach it to the AudioRecord on the
+ // specified audio session.
+ ns = NoiseSuppressor.create(audioSession);
+ if (ns != null) {
+ boolean enabled = ns.getEnabled();
+ boolean enable = shouldEnableNs && isNoiseSuppressorSupported();
+ if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
+ Logging.e(TAG, "Failed to set the NoiseSuppressor state");
+ }
+ Logging.d(TAG,
+ "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable
+ + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
+ } else {
+ Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
+ }
+ }
+ }
+
+ // Releases all native audio effect resources. It is a good practice to
+ // release the effect engine when not in use as control can be returned
+ // to other applications or the native resources released.
+ public void release() {
+ Logging.d(TAG, "release");
+ if (aec != null) {
+ aec.release();
+ aec = null;
+ }
+ if (ns != null) {
+ ns.release();
+ ns = null;
+ }
+ }
+
+ // Returns true for effect types in `type` that are of the "VoIP" kinds:
+ // Acoustic Echo Canceler (AEC) or Noise Suppressor (NS). Note that an
+ // extra check for support is needed in each comparison since some devices
+ // include effects in the AudioEffect.Descriptor array that are actually not
+ // available on the device. As an example: the Samsung Galaxy S6 includes an
+ // AGC in the descriptor but AutomaticGainControl.isAvailable() returns false.
+ private boolean effectTypeIsVoIP(UUID type) {
+ return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
+ || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ // Returns the cached copy of the audio effects array, if available, or
+ // queries the operating system for the list of effects.
+ private static @Nullable Descriptor[] getAvailableEffects() {
+ if (cachedEffects != null) {
+ return cachedEffects;
+ }
+ // The caching is best effort only - if this method is called from several
+ // threads in parallel, they may end up doing the underlying OS call
+ // multiple times. It's normally only called on one thread so there's no
+ // real need to optimize for the multiple threads case.
+ cachedEffects = AudioEffect.queryEffects();
+ return cachedEffects;
+ }
+
+ // Returns true if an effect of the specified type is available. Functionally
+ // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
+ // faster as it avoids the expensive OS call to enumerate effects.
+ private static boolean isEffectTypeAvailable(UUID effectType, UUID blockListedUuid) {
+ Descriptor[] effects = getAvailableEffects();
+ if (effects == null) {
+ return false;
+ }
+ for (Descriptor d : effects) {
+ if (d.type.equals(effectType)) {
+ return !d.uuid.equals(blockListedUuid);
+ }
+ }
+ return false;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
new file mode 100644
index 0000000000..f398602a28
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.os.Build;
+import org.webrtc.Logging;
+import org.webrtc.CalledByNative;
+
+/**
+ * This class contains static functions to query sample rate and input/output audio buffer sizes.
+ */
+class WebRtcAudioManager {
+ private static final String TAG = "WebRtcAudioManagerExternal";
+
+ private static final int DEFAULT_SAMPLE_RATE_HZ = 16000;
+
+ // Default audio data format is PCM 16 bits per sample.
+ // Guaranteed to be supported by all devices.
+ private static final int BITS_PER_SAMPLE = 16;
+
+ private static final int DEFAULT_FRAME_PER_BUFFER = 256;
+
+ @CalledByNative
+ static AudioManager getAudioManager(Context context) {
+ return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+ }
+
+ @CalledByNative
+ static int getOutputBufferSize(
+ Context context, AudioManager audioManager, int sampleRate, int numberOfOutputChannels) {
+ return isLowLatencyOutputSupported(context)
+ ? getLowLatencyFramesPerBuffer(audioManager)
+ : getMinOutputFrameSize(sampleRate, numberOfOutputChannels);
+ }
+
+ @CalledByNative
+ static int getInputBufferSize(
+ Context context, AudioManager audioManager, int sampleRate, int numberOfInputChannels) {
+ return isLowLatencyInputSupported(context)
+ ? getLowLatencyFramesPerBuffer(audioManager)
+ : getMinInputFrameSize(sampleRate, numberOfInputChannels);
+ }
+
+ private static boolean isLowLatencyOutputSupported(Context context) {
+ return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
+ }
+
+ private static boolean isLowLatencyInputSupported(Context context) {
+ // TODO(henrika): investigate if some sort of device list is needed here
+ // as well. The NDK doc states that: "As of API level 21, lower latency
+ // audio input is supported on select devices. To take advantage of this
+ // feature, first confirm that lower latency output is available".
+ return isLowLatencyOutputSupported(context);
+ }
+
+ /**
+ * Returns the native input/output sample rate for this device's output stream.
+ */
+ @CalledByNative
+ static int getSampleRate(AudioManager audioManager) {
+ // Override this if we're running on an old emulator image which only
+ // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE.
+ if (WebRtcAudioUtils.runningOnEmulator()) {
+ Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz.");
+ return 8000;
+ }
+ // Deliver best possible estimate based on default Android AudioManager APIs.
+ final int sampleRateHz = getSampleRateForApiLevel(audioManager);
+ Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
+ return sampleRateHz;
+ }
+
+ private static int getSampleRateForApiLevel(AudioManager audioManager) {
+ String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+ return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
+ }
+
+ // Returns the native output buffer size for low-latency output streams.
+ private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
+ String framesPerBuffer =
+ audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+ return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
+ }
+
+ // Returns the minimum output buffer size for Java based audio (AudioTrack).
+ // This size can also be used for OpenSL ES implementations on devices that
+ // lack support for low-latency output.
+ private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
+ final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+ final int channelConfig =
+ (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+ return AudioTrack.getMinBufferSize(
+ sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+ / bytesPerFrame;
+ }
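+
+ // Worked example with illustrative numbers: for 48000 Hz stereo PCM-16,
+ // bytesPerFrame = 2 * (16 / 8) = 4, so a hypothetical minimum buffer of
+ // 15360 bytes would translate to 15360 / 4 = 3840 frames (80 ms at 48 kHz).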
+
+ // Returns the minimum input buffer size for Java based audio (AudioRecord).
+ // This size can also be used for OpenSL ES implementations on devices that
+ // lack support for low-latency input.
+ private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
+ final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+ final int channelConfig =
+ (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+ return AudioRecord.getMinBufferSize(
+ sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+ / bytesPerFrame;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
new file mode 100644
index 0000000000..6647e5fcbb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
@@ -0,0 +1,743 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioRecordingConfiguration;
+import android.media.AudioTimestamp;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import android.os.Process;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import java.lang.System;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+import org.webrtc.CalledByNative;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
+
+class WebRtcAudioRecord {
+ private static final String TAG = "WebRtcAudioRecordExternal";
+
+ // Requested size of each recorded buffer provided to the client.
+ private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+ // Average number of callbacks per second.
+ private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+ // We ask for a native buffer size of BUFFER_SIZE_FACTOR * (minimum required
+ // buffer size). The extra space is allocated to guard against glitches under
+ // high load.
+ private static final int BUFFER_SIZE_FACTOR = 2;
+
+ // The AudioRecordJavaThread is allowed to wait for a successful call to join()
+ // but the wait times out after this amount of time.
+ private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+ public static final int DEFAULT_AUDIO_SOURCE = AudioSource.VOICE_COMMUNICATION;
+
+ // Default audio data format is PCM 16 bits per sample.
+ // Guaranteed to be supported by all devices.
+ public static final int DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+
+ // Indicates AudioRecord has started recording audio.
+ private static final int AUDIO_RECORD_START = 0;
+
+ // Indicates AudioRecord has stopped recording audio.
+ private static final int AUDIO_RECORD_STOP = 1;
+
+ // Time to wait before checking recording status after start has been called. Tests have
+ // shown that the result can sometimes be invalid (our own status might be missing) if we check
+ // directly after start.
+ private static final int CHECK_REC_STATUS_DELAY_MS = 100;
+
+ private final Context context;
+ private final AudioManager audioManager;
+ private final int audioSource;
+ private final int audioFormat;
+
+ private long nativeAudioRecord;
+
+ private final WebRtcAudioEffects effects = new WebRtcAudioEffects();
+
+ private @Nullable ByteBuffer byteBuffer;
+
+ private @Nullable AudioRecord audioRecord;
+ private @Nullable AudioRecordThread audioThread;
+ private @Nullable AudioDeviceInfo preferredDevice;
+
+ private final ScheduledExecutorService executor;
+ private @Nullable ScheduledFuture<String> future;
+
+ private volatile boolean microphoneMute;
+ private final AtomicReference<Boolean> audioSourceMatchesRecordingSessionRef =
+ new AtomicReference<>();
+ private byte[] emptyBytes;
+
+ private final @Nullable AudioRecordErrorCallback errorCallback;
+ private final @Nullable AudioRecordStateCallback stateCallback;
+ private final @Nullable SamplesReadyCallback audioSamplesReadyCallback;
+ private final boolean isAcousticEchoCancelerSupported;
+ private final boolean isNoiseSuppressorSupported;
+
+ /**
+ * Audio thread which keeps calling AudioRecord.read() waiting for audio
+ * to be recorded. Feeds recorded data to the native counterpart as a
+ * periodic sequence of callbacks using DataIsRecorded().
+ * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+ */
+ private class AudioRecordThread extends Thread {
+ private volatile boolean keepAlive = true;
+
+ public AudioRecordThread(String name) {
+ super(name);
+ }
+
+ @Override
+ public void run() {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+ Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
+ assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);
+
+ // Audio recording has started and the client is informed about it.
+ doAudioRecordStateCallback(AUDIO_RECORD_START);
+
+ long lastTime = System.nanoTime();
+ AudioTimestamp audioTimestamp = null;
+ if (Build.VERSION.SDK_INT >= 24) {
+ audioTimestamp = new AudioTimestamp();
+ }
+ while (keepAlive) {
+ int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity());
+ if (bytesRead == byteBuffer.capacity()) {
+ if (microphoneMute) {
+ byteBuffer.clear();
+ byteBuffer.put(emptyBytes);
+ }
+ // It's possible we've been shut down during the read, and stopRecording() tried and
+ // failed to join this thread. To be a bit safer, try to avoid calling any native methods
+ // in case they've been unregistered after stopRecording() returned.
+ if (keepAlive) {
+ long captureTimeNs = 0;
+ if (Build.VERSION.SDK_INT >= 24) {
+ if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC)
+ == AudioRecord.SUCCESS) {
+ captureTimeNs = audioTimestamp.nanoTime;
+ }
+ }
+ nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs);
+ }
+ if (audioSamplesReadyCallback != null) {
+ // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily
+ // at index 0.
+ byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(),
+ byteBuffer.capacity() + byteBuffer.arrayOffset());
+ audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady(
+ new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(),
+ audioRecord.getChannelCount(), audioRecord.getSampleRate(), data));
+ }
+ } else {
+ String errorMessage = "AudioRecord.read failed: " + bytesRead;
+ Logging.e(TAG, errorMessage);
+ if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
+ keepAlive = false;
+ reportWebRtcAudioRecordError(errorMessage);
+ }
+ }
+ }
+
+ try {
+ if (audioRecord != null) {
+ audioRecord.stop();
+ doAudioRecordStateCallback(AUDIO_RECORD_STOP);
+ }
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
+ }
+ }
+
+ // Stops the inner thread loop and also calls AudioRecord.stop().
+ // Does not block the calling thread.
+ public void stopThread() {
+ Logging.d(TAG, "stopThread");
+ keepAlive = false;
+ }
+ }
+
+ @CalledByNative
+ WebRtcAudioRecord(Context context, AudioManager audioManager) {
+ this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE,
+ DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */,
+ null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
+ WebRtcAudioEffects.isNoiseSuppressorSupported());
+ }
+
+ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler,
+ AudioManager audioManager, int audioSource, int audioFormat,
+ @Nullable AudioRecordErrorCallback errorCallback,
+ @Nullable AudioRecordStateCallback stateCallback,
+ @Nullable SamplesReadyCallback audioSamplesReadyCallback,
+ boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) {
+ if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) {
+ throw new IllegalArgumentException("HW AEC not supported");
+ }
+ if (isNoiseSuppressorSupported && !WebRtcAudioEffects.isNoiseSuppressorSupported()) {
+ throw new IllegalArgumentException("HW NS not supported");
+ }
+ this.context = context;
+ this.executor = scheduler;
+ this.audioManager = audioManager;
+ this.audioSource = audioSource;
+ this.audioFormat = audioFormat;
+ this.errorCallback = errorCallback;
+ this.stateCallback = stateCallback;
+ this.audioSamplesReadyCallback = audioSamplesReadyCallback;
+ this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported;
+ this.isNoiseSuppressorSupported = isNoiseSuppressorSupported;
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ @CalledByNative
+ public void setNativeAudioRecord(long nativeAudioRecord) {
+ this.nativeAudioRecord = nativeAudioRecord;
+ }
+
+ @CalledByNative
+ boolean isAcousticEchoCancelerSupported() {
+ return isAcousticEchoCancelerSupported;
+ }
+
+ @CalledByNative
+ boolean isNoiseSuppressorSupported() {
+ return isNoiseSuppressorSupported;
+ }
+
+ // Returns true if a valid call to verifyAudioConfig() has been made. It should always be
+ // checked before using the returned value of isAudioSourceMatchingRecordingSession().
+ @CalledByNative
+ boolean isAudioConfigVerified() {
+ return audioSourceMatchesRecordingSessionRef.get() != null;
+ }
+
+ // Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay once
+ // startRecording() has been called. Hence, it should preferably be called in combination with
+ // stopRecording() to ensure that it has been set properly. Check `isAudioConfigVerified` first
+ // to ensure that the returned value is valid.
+ @CalledByNative
+ boolean isAudioSourceMatchingRecordingSession() {
+ Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get();
+ if (audioSourceMatchesRecordingSession == null) {
+ Logging.w(TAG, "Audio configuration has not yet been verified");
+ return false;
+ }
+ return audioSourceMatchesRecordingSession;
+ }
+
+ @CalledByNative
+ private boolean enableBuiltInAEC(boolean enable) {
+ Logging.d(TAG, "enableBuiltInAEC(" + enable + ")");
+ return effects.setAEC(enable);
+ }
+
+ @CalledByNative
+ private boolean enableBuiltInNS(boolean enable) {
+ Logging.d(TAG, "enableBuiltInNS(" + enable + ")");
+ return effects.setNS(enable);
+ }
+
+ @CalledByNative
+ private int initRecording(int sampleRate, int channels) {
+ Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+ if (audioRecord != null) {
+ reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording.");
+ return -1;
+ }
+ final int bytesPerFrame = channels * getBytesPerSample(audioFormat);
+ final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND;
+ byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);
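+ // For example, at 48000 Hz stereo PCM-16 this is 480 frames * 2 channels
+ // * 2 bytes = 1920 bytes, i.e. one 10 ms chunk per callback.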
+ if (!(byteBuffer.hasArray())) {
+ reportWebRtcAudioRecordInitError("ByteBuffer does not have backing array.");
+ return -1;
+ }
+ Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+ emptyBytes = new byte[byteBuffer.capacity()];
+ // Rather than passing the ByteBuffer with every callback (requiring
+ // the potentially expensive GetDirectBufferAddress) we simply have the
+ // native class cache the address of the memory once.
+ nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer);
+
+ // Get the minimum buffer size required for the successful creation of
+ // an AudioRecord object, in byte units.
+ // Note that this size doesn't guarantee a smooth recording under load.
+ final int channelConfig = channelCountToConfiguration(channels);
+ int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
+ if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
+ reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize);
+ return -1;
+ }
+ Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize);
+
+ // Use a larger buffer size than the minimum required when creating the
+ // AudioRecord instance to ensure smooth recording under load. It has been
+ // verified that it does not increase the actual recording latency.
+ int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
+ Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
+ try {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ // Use the AudioRecord.Builder class on Android M (23) and above.
+ // Throws IllegalArgumentException.
+ audioRecord = createAudioRecordOnMOrHigher(
+ audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ audioSourceMatchesRecordingSessionRef.set(null);
+ if (preferredDevice != null) {
+ setPreferredDevice(preferredDevice);
+ }
+ } else {
+ // Use the old AudioRecord constructor for API levels below 23.
+ // Throws UnsupportedOperationException.
+ audioRecord = createAudioRecordOnLowerThanM(
+ audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ audioSourceMatchesRecordingSessionRef.set(null);
+ }
+ } catch (IllegalArgumentException | UnsupportedOperationException e) {
+ // Report of exception message is sufficient. Example: "Cannot create AudioRecord".
+ reportWebRtcAudioRecordInitError(e.getMessage());
+ releaseAudioResources();
+ return -1;
+ }
+ if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+ reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed.");
+ releaseAudioResources();
+ return -1;
+ }
+ effects.enable(audioRecord.getAudioSessionId());
+ logMainParameters();
+ logMainParametersExtended();
+ // Check number of active recording sessions. Should be zero but we have seen conflict cases
+ // and adding a log for it can help us figure out details about conflicting sessions.
+ final int numActiveRecordingSessions =
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (numActiveRecordingSessions != 0) {
+ // Log the conflict as a warning since initialization did in fact succeed. Most likely, the
+ // upcoming call to startRecording() will fail under these conditions.
+ Logging.w(
+ TAG, "Potential microphone conflict. Active sessions: " + numActiveRecordingSessions);
+ }
+ return framesPerBuffer;
+ }
+
+ /**
+ * Prefer a specific {@link AudioDeviceInfo} device for recording. Calling after recording starts
+ * is valid but may cause a temporary interruption if the audio routing changes.
+ */
+ @RequiresApi(Build.VERSION_CODES.M)
+ @TargetApi(Build.VERSION_CODES.M)
+ void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) {
+ Logging.d(
+ TAG, "setPreferredDevice " + (preferredDevice != null ? preferredDevice.getId() : null));
+ this.preferredDevice = preferredDevice;
+ if (audioRecord != null) {
+ if (!audioRecord.setPreferredDevice(preferredDevice)) {
+ Logging.e(TAG, "setPreferredDevice failed");
+ }
+ }
+ }
+
+ @CalledByNative
+ private boolean startRecording() {
+ Logging.d(TAG, "startRecording");
+ assertTrue(audioRecord != null);
+ assertTrue(audioThread == null);
+ try {
+ audioRecord.startRecording();
+ } catch (IllegalStateException e) {
+ reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
+ "AudioRecord.startRecording failed: " + e.getMessage());
+ return false;
+ }
+ if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+ reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
+ "AudioRecord.startRecording failed - incorrect state: "
+ + audioRecord.getRecordingState());
+ return false;
+ }
+ audioThread = new AudioRecordThread("AudioRecordJavaThread");
+ audioThread.start();
+ scheduleLogRecordingConfigurationsTask(audioRecord);
+ return true;
+ }
+
+ @CalledByNative
+ private boolean stopRecording() {
+ Logging.d(TAG, "stopRecording");
+ assertTrue(audioThread != null);
+ if (future != null) {
+ if (!future.isDone()) {
+ // Might be needed if the client calls startRecording(), stopRecording() back-to-back.
+ future.cancel(true /* mayInterruptIfRunning */);
+ }
+ future = null;
+ }
+ audioThread.stopThread();
+ if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
+ Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ }
+ audioThread = null;
+ effects.release();
+ releaseAudioResources();
+ return true;
+ }
+
+ @TargetApi(Build.VERSION_CODES.M)
+ private static AudioRecord createAudioRecordOnMOrHigher(
+ int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
+ Logging.d(TAG, "createAudioRecordOnMOrHigher");
+ return new AudioRecord.Builder()
+ .setAudioSource(audioSource)
+ .setAudioFormat(new AudioFormat.Builder()
+ .setEncoding(audioFormat)
+ .setSampleRate(sampleRate)
+ .setChannelMask(channelConfig)
+ .build())
+ .setBufferSizeInBytes(bufferSizeInBytes)
+ .build();
+ }
+
+ private static AudioRecord createAudioRecordOnLowerThanM(
+ int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
+ Logging.d(TAG, "createAudioRecordOnLowerThanM");
+ return new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ }
+
+ private void logMainParameters() {
+ Logging.d(TAG,
+ "AudioRecord: "
+ + "session ID: " + audioRecord.getAudioSessionId() + ", "
+ + "channels: " + audioRecord.getChannelCount() + ", "
+ + "sample rate: " + audioRecord.getSampleRate());
+ }
+
+ @TargetApi(Build.VERSION_CODES.M)
+ private void logMainParametersExtended() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Logging.d(TAG,
+ "AudioRecord: "
+ // The frame count of the native AudioRecord buffer.
+ + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ // Checks the number of active recording sessions and logs the states of all active sessions.
+ // Returns the number of active sessions. Note that this may be called on an arbitrary thread.
+ private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
+ Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher");
+ return 0;
+ }
+ if (audioRecord == null) {
+ return 0;
+ }
+
+ // Get a list of the currently active audio recording configurations of the device (can be more
+ // than one). An empty list indicates there is no recording active when queried.
+ List<AudioRecordingConfiguration> configs = audioManager.getActiveRecordingConfigurations();
+ final int numActiveRecordingSessions = configs.size();
+ Logging.d(TAG, "Number of active recording sessions: " + numActiveRecordingSessions);
+ if (numActiveRecordingSessions > 0) {
+ logActiveRecordingConfigs(audioRecord.getAudioSessionId(), configs);
+ if (verifyAudioConfig) {
+ // Run an extra check to verify that the existing audio source doing the recording (tied
+ // to the AudioRecord instance) is matching what the audio recording configuration lists
+ // as its client parameters. If these do not match, recording might work but under invalid
+ // conditions.
+ audioSourceMatchesRecordingSessionRef.set(
+ verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(),
+ audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs));
+ }
+ }
+ return numActiveRecordingSessions;
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ private int channelCountToConfiguration(int channels) {
+ return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+ }
+
+ private native void nativeCacheDirectBufferAddress(
+ long nativeAudioRecordJni, ByteBuffer byteBuffer);
+ private native void nativeDataIsRecorded(
+ long nativeAudioRecordJni, int bytes, long captureTimestampNs);
+
+ // Sets all recorded samples to zero if `mute` is true, i.e., ensures that
+ // the microphone is muted.
+ public void setMicrophoneMute(boolean mute) {
+ Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
+ microphoneMute = mute;
+ }
+
+ // Releases the native AudioRecord resources.
+ private void releaseAudioResources() {
+ Logging.d(TAG, "releaseAudioResources");
+ if (audioRecord != null) {
+ audioRecord.release();
+ audioRecord = null;
+ }
+ audioSourceMatchesRecordingSessionRef.set(null);
+ }
+
+ private void reportWebRtcAudioRecordInitError(String errorMessage) {
+ Logging.e(TAG, "Init recording error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordInitError(errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioRecordStartError(
+ AudioRecordStartErrorCode errorCode, String errorMessage) {
+ Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioRecordError(String errorMessage) {
+ Logging.e(TAG, "Run-time recording error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordError(errorMessage);
+ }
+ }
+
+ private void doAudioRecordStateCallback(int audioState) {
+ Logging.d(TAG, "doAudioRecordStateCallback: " + audioStateToString(audioState));
+ if (stateCallback != null) {
+ if (audioState == WebRtcAudioRecord.AUDIO_RECORD_START) {
+ stateCallback.onWebRtcAudioRecordStart();
+ } else if (audioState == WebRtcAudioRecord.AUDIO_RECORD_STOP) {
+ stateCallback.onWebRtcAudioRecordStop();
+ } else {
+ Logging.e(TAG, "Invalid audio state");
+ }
+ }
+ }
+
+  // Based on Android's AudioFormat.getBytesPerSample (bits per sample / 8).
+  // The default audio data format is PCM 16 bits per sample, which is
+  // guaranteed to be supported by all devices.
+ private static int getBytesPerSample(int audioFormat) {
+ switch (audioFormat) {
+ case AudioFormat.ENCODING_PCM_8BIT:
+ return 1;
+ case AudioFormat.ENCODING_PCM_16BIT:
+ case AudioFormat.ENCODING_IEC61937:
+ case AudioFormat.ENCODING_DEFAULT:
+ return 2;
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ return 4;
+ case AudioFormat.ENCODING_INVALID:
+ default:
+ throw new IllegalArgumentException("Bad audio format " + audioFormat);
+ }
+ }
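+  // Worked example: ENCODING_PCM_16BIT gives 2 bytes per sample, so 10 ms of
+  // mono audio at 48000 Hz occupies 480 * 2 = 960 bytes.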
+
+  // Uses an ExecutorService to schedule a task after a given delay; the task
+  // checks (by logging) the current status of active recording sessions.
+ private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) {
+ Logging.d(TAG, "scheduleLogRecordingConfigurationsTask");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
+ return;
+ }
+
+ Callable<String> callable = () -> {
+ if (this.audioRecord == audioRecord) {
+ logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */);
+ } else {
+ Logging.d(TAG, "audio record has changed");
+ }
+ return "Scheduled task is done";
+ };
+
+ if (future != null && !future.isDone()) {
+ future.cancel(true /* mayInterruptIfRunning */);
+ }
+ // Schedule call to logRecordingConfigurations() from executor thread after fixed delay.
+ future = executor.schedule(callable, CHECK_REC_STATUS_DELAY_MS, TimeUnit.MILLISECONDS);
+  }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ private static boolean logActiveRecordingConfigs(
+ int session, List<AudioRecordingConfiguration> configs) {
+ assertTrue(!configs.isEmpty());
+ final Iterator<AudioRecordingConfiguration> it = configs.iterator();
+ Logging.d(TAG, "AudioRecordingConfigurations: ");
+ while (it.hasNext()) {
+ final AudioRecordingConfiguration config = it.next();
+ StringBuilder conf = new StringBuilder();
+ // The audio source selected by the client.
+ final int audioSource = config.getClientAudioSource();
+ conf.append(" client audio source=")
+ .append(WebRtcAudioUtils.audioSourceToString(audioSource))
+ .append(", client session id=")
+ .append(config.getClientAudioSessionId())
+ // Compare with our own id (based on AudioRecord#getAudioSessionId()).
+ .append(" (")
+ .append(session)
+ .append(")")
+ .append("\n");
+ // Audio format at which audio is recorded on this Android device. Note that it may differ
+ // from the client application recording format (see getClientFormat()).
+ AudioFormat format = config.getFormat();
+ conf.append(" Device AudioFormat: ")
+ .append("channel count=")
+ .append(format.getChannelCount())
+ .append(", channel index mask=")
+ .append(format.getChannelIndexMask())
+ // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices.
+ .append(", channel mask=")
+ .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask()))
+ .append(", encoding=")
+ .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding()))
+ .append(", sample rate=")
+ .append(format.getSampleRate())
+ .append("\n");
+ // Audio format at which the client application is recording audio.
+ format = config.getClientFormat();
+ conf.append(" Client AudioFormat: ")
+ .append("channel count=")
+ .append(format.getChannelCount())
+ .append(", channel index mask=")
+ .append(format.getChannelIndexMask())
+ // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices.
+ .append(", channel mask=")
+ .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask()))
+ .append(", encoding=")
+ .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding()))
+ .append(", sample rate=")
+ .append(format.getSampleRate())
+ .append("\n");
+ // Audio input device used for this recording session.
+ final AudioDeviceInfo device = config.getAudioDevice();
+ if (device != null) {
+ assertTrue(device.isSource());
+ conf.append(" AudioDevice: ")
+ .append("type=")
+ .append(WebRtcAudioUtils.deviceTypeToString(device.getType()))
+ .append(", id=")
+ .append(device.getId());
+ }
+ Logging.d(TAG, conf.toString());
+ }
+ return true;
+ }
+
+ // Verify that the client audio configuration (device and format) matches the requested
+ // configuration (same as AudioRecord's).
+ @TargetApi(Build.VERSION_CODES.N)
+ private static boolean verifyAudioConfig(int source, int session, AudioFormat format,
+ AudioDeviceInfo device, List<AudioRecordingConfiguration> configs) {
+ assertTrue(!configs.isEmpty());
+ final Iterator<AudioRecordingConfiguration> it = configs.iterator();
+ while (it.hasNext()) {
+ final AudioRecordingConfiguration config = it.next();
+ final AudioDeviceInfo configDevice = config.getAudioDevice();
+ if (configDevice == null) {
+ continue;
+ }
+ if ((config.getClientAudioSource() == source)
+ && (config.getClientAudioSessionId() == session)
+ // Check the client format (should match the format of the AudioRecord instance).
+ && (config.getClientFormat().getEncoding() == format.getEncoding())
+ && (config.getClientFormat().getSampleRate() == format.getSampleRate())
+ && (config.getClientFormat().getChannelMask() == format.getChannelMask())
+ && (config.getClientFormat().getChannelIndexMask() == format.getChannelIndexMask())
+ // Ensure that the device format is properly configured.
+ && (config.getFormat().getEncoding() != AudioFormat.ENCODING_INVALID)
+ && (config.getFormat().getSampleRate() > 0)
+ // For the channel mask, either the position or index-based value must be valid.
+ && ((config.getFormat().getChannelMask() != AudioFormat.CHANNEL_INVALID)
+ || (config.getFormat().getChannelIndexMask() != AudioFormat.CHANNEL_INVALID))
+ && checkDeviceMatch(configDevice, device)) {
+ Logging.d(TAG, "verifyAudioConfig: PASS");
+ return true;
+ }
+ }
+ Logging.e(TAG, "verifyAudioConfig: FAILED");
+ return false;
+ }
+
+  @TargetApi(Build.VERSION_CODES.N)
+  // Returns true if the parameters of device A match those of device B.
+  // TODO(henrika): can be improved by adding AudioDeviceInfo#getAddress() but it requires API 29.
+  private static boolean checkDeviceMatch(AudioDeviceInfo devA, AudioDeviceInfo devB) {
+    return devA.getId() == devB.getId() && devA.getType() == devB.getType();
+  }
+
+ private static String audioStateToString(int state) {
+ switch (state) {
+ case WebRtcAudioRecord.AUDIO_RECORD_START:
+ return "START";
+ case WebRtcAudioRecord.AUDIO_RECORD_STOP:
+ return "STOP";
+ default:
+ return "INVALID";
+ }
+ }
+
+ private static final AtomicInteger nextSchedulerId = new AtomicInteger(0);
+
+ static ScheduledExecutorService newDefaultScheduler() {
+ AtomicInteger nextThreadId = new AtomicInteger(0);
+ return Executors.newScheduledThreadPool(0, new ThreadFactory() {
+      /**
+       * Constructs a new {@code Thread} named with the scheduler and thread ids.
+       */
+ @Override
+ public Thread newThread(Runnable r) {
+ Thread thread = Executors.defaultThreadFactory().newThread(r);
+ thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s",
+ nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement()));
+ return thread;
+ }
+ });
+ }
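+
+  // Threads created by the factory above get names like
+  // "WebRtcAudioRecordScheduler-0-0" (scheduler id, then thread id).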
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
new file mode 100644
index 0000000000..2b34e34013
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -0,0 +1,585 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
+import android.os.Process;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.CalledByNative;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
+import org.webrtc.audio.LowLatencyAudioBufferManager;
+
+class WebRtcAudioTrack {
+ private static final String TAG = "WebRtcAudioTrackExternal";
+
+ // Default audio data format is PCM 16 bit per sample.
+ // Guaranteed to be supported by all devices.
+ private static final int BITS_PER_SAMPLE = 16;
+
+  // Requested size, in milliseconds, of each buffer of playout audio requested from the native client.
+ private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+ // Average number of callbacks per second.
+ private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+  // The AudioTrackThread is allowed to wait for a successful call to join(),
+  // but the wait times out after this amount of time.
+ private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+ // By default, WebRTC creates audio tracks with a usage attribute
+ // corresponding to voice communications, such as telephony or VoIP.
+ private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;
+
+ // Indicates the AudioTrack has started playing audio.
+ private static final int AUDIO_TRACK_START = 0;
+
+ // Indicates the AudioTrack has stopped playing audio.
+ private static final int AUDIO_TRACK_STOP = 1;
+
+ private long nativeAudioTrack;
+ private final Context context;
+ private final AudioManager audioManager;
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+ private ByteBuffer byteBuffer;
+
+ private @Nullable final AudioAttributes audioAttributes;
+ private @Nullable AudioTrack audioTrack;
+ private @Nullable AudioTrackThread audioThread;
+ private final VolumeLogger volumeLogger;
+
+ // Samples to be played are replaced by zeros if `speakerMute` is set to true.
+ // Can be used to ensure that the speaker is fully muted.
+ private volatile boolean speakerMute;
+ private byte[] emptyBytes;
+ private boolean useLowLatency;
+ private int initialBufferSizeInFrames;
+
+ private final @Nullable AudioTrackErrorCallback errorCallback;
+ private final @Nullable AudioTrackStateCallback stateCallback;
+
+ /**
+ * Audio thread which keeps calling AudioTrack.write() to stream audio.
+ * Data is periodically acquired from the native WebRTC layer using the
+ * nativeGetPlayoutData callback function.
+ * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+ */
+ private class AudioTrackThread extends Thread {
+ private volatile boolean keepAlive = true;
+ private LowLatencyAudioBufferManager bufferManager;
+
+ public AudioTrackThread(String name) {
+ super(name);
+ bufferManager = new LowLatencyAudioBufferManager();
+ }
+
+ @Override
+ public void run() {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+ Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
+ assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
+
+ // Audio playout has started and the client is informed about it.
+ doAudioTrackStateCallback(AUDIO_TRACK_START);
+
+ // Fixed size in bytes of each 10ms block of audio data that we ask for
+ // using callbacks to the native WebRTC client.
+ final int sizeInBytes = byteBuffer.capacity();
+
+ while (keepAlive) {
+ // Get 10ms of PCM data from the native WebRTC client. Audio data is
+ // written into the common ByteBuffer using the address that was
+ // cached at construction.
+ nativeGetPlayoutData(nativeAudioTrack, sizeInBytes);
+ // Write data until all data has been written to the audio sink.
+ // Upon return, the buffer position will have been advanced to reflect
+ // the amount of data that was successfully written to the AudioTrack.
+ assertTrue(sizeInBytes <= byteBuffer.remaining());
+ if (speakerMute) {
+ byteBuffer.clear();
+ byteBuffer.put(emptyBytes);
+ byteBuffer.position(0);
+ }
+ int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+ if (bytesWritten != sizeInBytes) {
+ Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
+ // If a write() returns a negative value, an error has occurred.
+ // Stop playing and report an error in this case.
+ if (bytesWritten < 0) {
+ keepAlive = false;
+ reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten);
+ }
+ }
+ if (useLowLatency) {
+ bufferManager.maybeAdjustBufferSize(audioTrack);
+ }
+        // The byte buffer must be rewound since byteBuffer.position() is
+        // increased at each call to AudioTrack.write(). If we don't do this,
+        // the next call to AudioTrack.write() will fail.
+ byteBuffer.rewind();
+
+ // TODO(henrika): it is possible to create a delay estimate here by
+ // counting number of written frames and subtracting the result from
+ // audioTrack.getPlaybackHeadPosition().
+ }
+ }
+
+ // Stops the inner thread loop which results in calling AudioTrack.stop().
+ // Does not block the calling thread.
+ public void stopThread() {
+ Logging.d(TAG, "stopThread");
+ keepAlive = false;
+ }
+ }
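+
+  // Note: the loop in run() has no sleep; pacing comes from the blocking
+  // AudioTrack.write(..., WRITE_BLOCKING) call, which returns once the 10 ms
+  // chunk has been queued to the sink.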
+
+ @CalledByNative
+ WebRtcAudioTrack(Context context, AudioManager audioManager) {
+ this(context, audioManager, null /* audioAttributes */, null /* errorCallback */,
+ null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */);
+ }
+
+ WebRtcAudioTrack(Context context, AudioManager audioManager,
+ @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback,
+ @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency,
+ boolean enableVolumeLogger) {
+ threadChecker.detachThread();
+ this.context = context;
+ this.audioManager = audioManager;
+ this.audioAttributes = audioAttributes;
+ this.errorCallback = errorCallback;
+ this.stateCallback = stateCallback;
+ this.volumeLogger = enableVolumeLogger ? new VolumeLogger(audioManager) : null;
+ this.useLowLatency = useLowLatency;
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ @CalledByNative
+ public void setNativeAudioTrack(long nativeAudioTrack) {
+ this.nativeAudioTrack = nativeAudioTrack;
+ }
+
+ @CalledByNative
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG,
+ "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
+ + ", bufferSizeFactor=" + bufferSizeFactor + ")");
+ final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
+ byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
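+    // Example: at 48000 Hz stereo, bytesPerFrame = 2 * 2 = 4 and the 10 ms
+    // buffer holds 4 * 480 = 1920 bytes.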
+ Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+ emptyBytes = new byte[byteBuffer.capacity()];
+    // Rather than passing the ByteBuffer with every callback (requiring
+    // the potentially expensive GetDirectBufferAddress) we simply have the
+    // native class cache the address to the memory once.
+ nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer);
+
+    // Get the minimum buffer size required for the successful creation of an
+    // AudioTrack object in MODE_STREAM mode.
+    // Note that this size doesn't guarantee smooth playback under load.
+ final int channelConfig = channelCountToConfiguration(channels);
+ final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig,
+ AudioFormat.ENCODING_PCM_16BIT)
+ * bufferSizeFactor);
+ Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes);
+ // For the streaming mode, data must be written to the audio sink in
+ // chunks of size (given by byteBuffer.capacity()) less than or equal
+ // to the total buffer size `minBufferSizeInBytes`. But, we have seen
+ // reports of "getMinBufferSize(): error querying hardware". Hence, it
+ // can happen that `minBufferSizeInBytes` contains an invalid value.
+ if (minBufferSizeInBytes < byteBuffer.capacity()) {
+ reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
+ return -1;
+ }
+
+ // Don't use low-latency mode when a bufferSizeFactor > 1 is used. When bufferSizeFactor > 1
+ // we want to use a larger buffer to prevent underruns. However, low-latency mode would
+ // decrease the buffer size, which makes the bufferSizeFactor have no effect.
+ if (bufferSizeFactor > 1.0) {
+ useLowLatency = false;
+ }
+
+    // Ensure that the previous audio session was stopped correctly before
+    // trying to create a new AudioTrack.
+ if (audioTrack != null) {
+ reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
+ return -1;
+ }
+ try {
+ // Create an AudioTrack object and initialize its associated audio buffer.
+ // The size of this buffer determines how long an AudioTrack can play
+ // before running out of data.
+ if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ // On API level 26 or higher, we can use a low latency mode.
+ audioTrack = createAudioTrackOnOreoOrHigher(
+ sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+ } else {
+ // As we are on API level 21 or higher, it is possible to use a special AudioTrack
+ // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
+ // supersede the notion of stream types for defining the behavior of audio playback,
+ // and to allow certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
+ audioTrack = createAudioTrackBeforeOreo(
+ sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+ }
+ } catch (IllegalArgumentException e) {
+ reportWebRtcAudioTrackInitError(e.getMessage());
+ releaseAudioResources();
+ return -1;
+ }
+
+ // It can happen that an AudioTrack is created but it was not successfully
+ // initialized upon creation. Seems to be the case e.g. when the maximum
+ // number of globally available audio tracks is exceeded.
+ if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
+ reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
+ releaseAudioResources();
+ return -1;
+ }
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ initialBufferSizeInFrames = audioTrack.getBufferSizeInFrames();
+ } else {
+ initialBufferSizeInFrames = -1;
+ }
+ logMainParameters();
+ logMainParametersExtended();
+ return minBufferSizeInBytes;
+ }
+
+ @CalledByNative
+ private boolean startPlayout() {
+ threadChecker.checkIsOnValidThread();
+ if (volumeLogger != null) {
+ volumeLogger.start();
+ }
+ Logging.d(TAG, "startPlayout");
+ assertTrue(audioTrack != null);
+ assertTrue(audioThread == null);
+
+ // Starts playing an audio track.
+ try {
+ audioTrack.play();
+ } catch (IllegalStateException e) {
+ reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
+ "AudioTrack.play failed: " + e.getMessage());
+ releaseAudioResources();
+ return false;
+ }
+ if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+ reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
+ "AudioTrack.play failed - incorrect state :" + audioTrack.getPlayState());
+ releaseAudioResources();
+ return false;
+ }
+
+ // Create and start new high-priority thread which calls AudioTrack.write()
+ // and where we also call the native nativeGetPlayoutData() callback to
+ // request decoded audio from WebRTC.
+ audioThread = new AudioTrackThread("AudioTrackJavaThread");
+ audioThread.start();
+ return true;
+ }
+
+ @CalledByNative
+ private boolean stopPlayout() {
+ threadChecker.checkIsOnValidThread();
+ if (volumeLogger != null) {
+ volumeLogger.stop();
+ }
+ Logging.d(TAG, "stopPlayout");
+ assertTrue(audioThread != null);
+ logUnderrunCount();
+ audioThread.stopThread();
+
+ Logging.d(TAG, "Stopping the AudioTrackThread...");
+ audioThread.interrupt();
+ if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
+ Logging.e(TAG, "Join of AudioTrackThread timed out.");
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ }
+ Logging.d(TAG, "AudioTrackThread has now been stopped.");
+ audioThread = null;
+ if (audioTrack != null) {
+ Logging.d(TAG, "Calling AudioTrack.stop...");
+ try {
+ audioTrack.stop();
+ Logging.d(TAG, "AudioTrack.stop is done.");
+ doAudioTrackStateCallback(AUDIO_TRACK_STOP);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
+ }
+ }
+ releaseAudioResources();
+ return true;
+ }
+
+ // Get max possible volume index for a phone call audio stream.
+ @CalledByNative
+ private int getStreamMaxVolume() {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "getStreamMaxVolume");
+ return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ // Set current volume level for a phone call audio stream.
+ @CalledByNative
+ private boolean setStreamVolume(int volume) {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "setStreamVolume(" + volume + ")");
+ if (audioManager.isVolumeFixed()) {
+ Logging.e(TAG, "The device implements a fixed volume policy.");
+ return false;
+ }
+ audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
+ return true;
+ }
+
+ /** Get current volume level for a phone call audio stream. */
+ @CalledByNative
+ private int getStreamVolume() {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "getStreamVolume");
+ return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ @CalledByNative
+ private int GetPlayoutUnderrunCount() {
+ if (Build.VERSION.SDK_INT >= 24) {
+ if (audioTrack != null) {
+ return audioTrack.getUnderrunCount();
+ } else {
+ return -1;
+ }
+ } else {
+ return -2;
+ }
+ }
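+  // In GetPlayoutUnderrunCount() above, 24 is Build.VERSION_CODES.N; -1 means
+  // there is no AudioTrack and -2 means the API level lacks getUnderrunCount().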
+
+ private void logMainParameters() {
+ Logging.d(TAG,
+ "AudioTrack: "
+ + "session ID: " + audioTrack.getAudioSessionId() + ", "
+ + "channels: " + audioTrack.getChannelCount() + ", "
+ + "sample rate: " + audioTrack.getSampleRate()
+ + ", "
+ // Gain (>=1.0) expressed as linear multiplier on sample values.
+ + "max gain: " + AudioTrack.getMaxVolume());
+ }
+
+ private static void logNativeOutputSampleRate(int requestedSampleRateInHz) {
+ final int nativeOutputSampleRate =
+ AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
+ Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
+ if (requestedSampleRateInHz != nativeOutputSampleRate) {
+ Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
+ }
+ }
+
+ private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes overrideAttributes) {
+ AudioAttributes.Builder attributesBuilder =
+ new AudioAttributes.Builder()
+ .setUsage(DEFAULT_USAGE)
+ .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
+
+ if (overrideAttributes != null) {
+ if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) {
+ attributesBuilder.setUsage(overrideAttributes.getUsage());
+ }
+ if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) {
+ attributesBuilder.setContentType(overrideAttributes.getContentType());
+ }
+
+ attributesBuilder.setFlags(overrideAttributes.getFlags());
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ attributesBuilder = applyAttributesOnQOrHigher(attributesBuilder, overrideAttributes);
+ }
+ }
+ return attributesBuilder.build();
+ }
+
+  // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+ // It allows certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
+ private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig,
+ int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+ Logging.d(TAG, "createAudioTrackBeforeOreo");
+ logNativeOutputSampleRate(sampleRateInHz);
+
+ // Create an audio track where the audio usage is for VoIP and the content type is speech.
+ return new AudioTrack(getAudioAttributes(overrideAttributes),
+ new AudioFormat.Builder()
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .setSampleRate(sampleRateInHz)
+ .setChannelMask(channelConfig)
+ .build(),
+ bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
+ }
+
+  // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+ // Use the low-latency mode to improve audio latency. Note that the low-latency mode may
+ // prevent effects (such as AEC) from working. Assuming AEC is working, the delay changes
+ // that happen in low-latency mode during the call will cause the AEC to perform worse.
+ // The behavior of the low-latency mode may be device dependent, use at your own risk.
+ @TargetApi(Build.VERSION_CODES.O)
+ private static AudioTrack createAudioTrackOnOreoOrHigher(int sampleRateInHz, int channelConfig,
+ int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+ Logging.d(TAG, "createAudioTrackOnOreoOrHigher");
+ logNativeOutputSampleRate(sampleRateInHz);
+
+ // Create an audio track where the audio usage is for VoIP and the content type is speech.
+ return new AudioTrack.Builder()
+ .setAudioAttributes(getAudioAttributes(overrideAttributes))
+ .setAudioFormat(new AudioFormat.Builder()
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .setSampleRate(sampleRateInHz)
+ .setChannelMask(channelConfig)
+ .build())
+ .setBufferSizeInBytes(bufferSizeInBytes)
+ .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
+ .setTransferMode(AudioTrack.MODE_STREAM)
+ .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE)
+ .build();
+ }
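+
+  // Note: AudioTrack.Builder#setPerformanceMode() was added in API level 26 (O),
+  // which is why this path is only taken on O or higher.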
+
+ @TargetApi(Build.VERSION_CODES.Q)
+ private static AudioAttributes.Builder applyAttributesOnQOrHigher(
+ AudioAttributes.Builder builder, AudioAttributes overrideAttributes) {
+ return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy());
+ }
+
+ private void logBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Logging.d(TAG,
+ "AudioTrack: "
+ // The effective size of the AudioTrack buffer that the app writes to.
+ + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
+ }
+ }
+
+ @CalledByNative
+ private int getBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ return audioTrack.getBufferSizeInFrames();
+ }
+ return -1;
+ }
+
+ @CalledByNative
+ private int getInitialBufferSizeInFrames() {
+ return initialBufferSizeInFrames;
+ }
+
+ private void logBufferCapacityInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ Logging.d(TAG,
+ "AudioTrack: "
+ // Maximum size of the AudioTrack buffer in frames.
+ + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames());
+ }
+ }
+
+ private void logMainParametersExtended() {
+ logBufferSizeInFrames();
+ logBufferCapacityInFrames();
+ }
+
+ // Prints the number of underrun occurrences in the application-level write
+ // buffer since the AudioTrack was created. An underrun occurs if the app does
+ // not write audio data quickly enough, causing the buffer to underflow and a
+ // potential audio glitch.
+ // TODO(henrika): keep track of this value in the field and possibly add new
+ // UMA stat if needed.
+ private void logUnderrunCount() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
+ }
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ private int channelCountToConfiguration(int channels) {
+ return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+ }
+
+ private static native void nativeCacheDirectBufferAddress(
+ long nativeAudioTrackJni, ByteBuffer byteBuffer);
+ private static native void nativeGetPlayoutData(long nativeAudioTrackJni, int bytes);
+
+ // Sets all samples to be played out to zero if `mute` is true, i.e.,
+ // ensures that the speaker is muted.
+ public void setSpeakerMute(boolean mute) {
+ Logging.w(TAG, "setSpeakerMute(" + mute + ")");
+ speakerMute = mute;
+ }
+
+ // Releases the native AudioTrack resources.
+ private void releaseAudioResources() {
+ Logging.d(TAG, "releaseAudioResources");
+ if (audioTrack != null) {
+ audioTrack.release();
+ audioTrack = null;
+ }
+ }
+
+ private void reportWebRtcAudioTrackInitError(String errorMessage) {
+ Logging.e(TAG, "Init playout error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackInitError(errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioTrackStartError(
+ AudioTrackStartErrorCode errorCode, String errorMessage) {
+ Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioTrackError(String errorMessage) {
+ Logging.e(TAG, "Run-time playback error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackError(errorMessage);
+ }
+ }
+
+ private void doAudioTrackStateCallback(int audioState) {
+ Logging.d(TAG, "doAudioTrackStateCallback: " + audioState);
+ if (stateCallback != null) {
+ if (audioState == WebRtcAudioTrack.AUDIO_TRACK_START) {
+ stateCallback.onWebRtcAudioTrackStart();
+ } else if (audioState == WebRtcAudioTrack.AUDIO_TRACK_STOP) {
+ stateCallback.onWebRtcAudioTrackStop();
+ } else {
+ Logging.e(TAG, "Invalid audio state");
+ }
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
new file mode 100644
index 0000000000..7b4b809ab1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
@@ -0,0 +1,308 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import static android.media.AudioManager.MODE_IN_CALL;
+import static android.media.AudioManager.MODE_IN_COMMUNICATION;
+import static android.media.AudioManager.MODE_NORMAL;
+import static android.media.AudioManager.MODE_RINGTONE;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import java.lang.Thread;
+import java.util.Arrays;
+import org.webrtc.Logging;
+
+final class WebRtcAudioUtils {
+ private static final String TAG = "WebRtcAudioUtilsExternal";
+
+ // Helper method for building a string of thread information.
+ public static String getThreadInfo() {
+ return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+ + "]";
+ }
+
+  // Returns true if we're running on an emulator.
+ public static boolean runningOnEmulator() {
+ return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
+ }
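+
+  // Note: this matches the classic "goldfish" emulator; newer emulator images
+  // (e.g. "ranchu") may not be detected.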
+
+ // Information about the current build, taken from system properties.
+ static void logDeviceInfo(String tag) {
+ Logging.d(tag,
+ "Android SDK: " + Build.VERSION.SDK_INT + ", "
+ + "Release: " + Build.VERSION.RELEASE + ", "
+ + "Brand: " + Build.BRAND + ", "
+ + "Device: " + Build.DEVICE + ", "
+ + "Id: " + Build.ID + ", "
+ + "Hardware: " + Build.HARDWARE + ", "
+ + "Manufacturer: " + Build.MANUFACTURER + ", "
+ + "Model: " + Build.MODEL + ", "
+ + "Product: " + Build.PRODUCT);
+ }
+
+ // Logs information about the current audio state. The idea is to call this
+ // method when errors are detected to log under what conditions the error
+ // occurred. Hopefully it will provide clues to what might be the root cause.
+ static void logAudioState(String tag, Context context, AudioManager audioManager) {
+ logDeviceInfo(tag);
+ logAudioStateBasic(tag, context, audioManager);
+ logAudioStateVolume(tag, audioManager);
+ logAudioDeviceInfo(tag, audioManager);
+ }
+
+ // Converts AudioDeviceInfo types to local string representation.
+ static String deviceTypeToString(int type) {
+ switch (type) {
+ case AudioDeviceInfo.TYPE_UNKNOWN:
+ return "TYPE_UNKNOWN";
+ case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
+ return "TYPE_BUILTIN_EARPIECE";
+ case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+ return "TYPE_BUILTIN_SPEAKER";
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ return "TYPE_WIRED_HEADSET";
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ return "TYPE_WIRED_HEADPHONES";
+ case AudioDeviceInfo.TYPE_LINE_ANALOG:
+ return "TYPE_LINE_ANALOG";
+ case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+ return "TYPE_LINE_DIGITAL";
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ return "TYPE_BLUETOOTH_SCO";
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ return "TYPE_BLUETOOTH_A2DP";
+ case AudioDeviceInfo.TYPE_HDMI:
+ return "TYPE_HDMI";
+ case AudioDeviceInfo.TYPE_HDMI_ARC:
+ return "TYPE_HDMI_ARC";
+ case AudioDeviceInfo.TYPE_USB_DEVICE:
+ return "TYPE_USB_DEVICE";
+ case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+ return "TYPE_USB_ACCESSORY";
+ case AudioDeviceInfo.TYPE_DOCK:
+ return "TYPE_DOCK";
+ case AudioDeviceInfo.TYPE_FM:
+ return "TYPE_FM";
+ case AudioDeviceInfo.TYPE_BUILTIN_MIC:
+ return "TYPE_BUILTIN_MIC";
+ case AudioDeviceInfo.TYPE_FM_TUNER:
+ return "TYPE_FM_TUNER";
+ case AudioDeviceInfo.TYPE_TV_TUNER:
+ return "TYPE_TV_TUNER";
+ case AudioDeviceInfo.TYPE_TELEPHONY:
+ return "TYPE_TELEPHONY";
+ case AudioDeviceInfo.TYPE_AUX_LINE:
+ return "TYPE_AUX_LINE";
+ case AudioDeviceInfo.TYPE_IP:
+ return "TYPE_IP";
+ case AudioDeviceInfo.TYPE_BUS:
+ return "TYPE_BUS";
+ case AudioDeviceInfo.TYPE_USB_HEADSET:
+ return "TYPE_USB_HEADSET";
+ default:
+ return "TYPE_UNKNOWN";
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ public static String audioSourceToString(int source) {
+    // AudioSource.VOICE_PERFORMANCE requires API level 29. Use a local define instead.
+ final int VOICE_PERFORMANCE = 10;
+ switch (source) {
+ case AudioSource.DEFAULT:
+ return "DEFAULT";
+ case AudioSource.MIC:
+ return "MIC";
+ case AudioSource.VOICE_UPLINK:
+ return "VOICE_UPLINK";
+ case AudioSource.VOICE_DOWNLINK:
+ return "VOICE_DOWNLINK";
+ case AudioSource.VOICE_CALL:
+ return "VOICE_CALL";
+ case AudioSource.CAMCORDER:
+ return "CAMCORDER";
+ case AudioSource.VOICE_RECOGNITION:
+ return "VOICE_RECOGNITION";
+ case AudioSource.VOICE_COMMUNICATION:
+ return "VOICE_COMMUNICATION";
+ case AudioSource.UNPROCESSED:
+ return "UNPROCESSED";
+ case VOICE_PERFORMANCE:
+ return "VOICE_PERFORMANCE";
+ default:
+ return "INVALID";
+ }
+ }
+
+ public static String channelMaskToString(int mask) {
+ // For input or AudioRecord, the mask should be AudioFormat#CHANNEL_IN_MONO or
+ // AudioFormat#CHANNEL_IN_STEREO. AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all
+ // devices.
+ switch (mask) {
+ case AudioFormat.CHANNEL_IN_STEREO:
+ return "IN_STEREO";
+ case AudioFormat.CHANNEL_IN_MONO:
+ return "IN_MONO";
+ default:
+ return "INVALID";
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ public static String audioEncodingToString(int enc) {
+ switch (enc) {
+ case AudioFormat.ENCODING_INVALID:
+ return "INVALID";
+ case AudioFormat.ENCODING_PCM_16BIT:
+ return "PCM_16BIT";
+ case AudioFormat.ENCODING_PCM_8BIT:
+ return "PCM_8BIT";
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ return "PCM_FLOAT";
+ case AudioFormat.ENCODING_AC3:
+ return "AC3";
+      case AudioFormat.ENCODING_E_AC3:
+        return "E_AC3";
+ case AudioFormat.ENCODING_DTS:
+ return "DTS";
+ case AudioFormat.ENCODING_DTS_HD:
+ return "DTS_HD";
+ case AudioFormat.ENCODING_MP3:
+ return "MP3";
+ default:
+ return "Invalid encoding: " + enc;
+ }
+ }
+
+ // Reports basic audio statistics.
+ private static void logAudioStateBasic(String tag, Context context, AudioManager audioManager) {
+ Logging.d(tag,
+ "Audio State: "
+ + "audio mode: " + modeToString(audioManager.getMode()) + ", "
+ + "has mic: " + hasMicrophone(context) + ", "
+ + "mic muted: " + audioManager.isMicrophoneMute() + ", "
+ + "music active: " + audioManager.isMusicActive() + ", "
+ + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", "
+ + "BT SCO: " + audioManager.isBluetoothScoOn());
+ }
+
+ // Adds volume information for all possible stream types.
+ private static void logAudioStateVolume(String tag, AudioManager audioManager) {
+ final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
+ AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
+ AudioManager.STREAM_SYSTEM};
+ Logging.d(tag, "Audio State: ");
+ // Some devices may not have volume controls and might use a fixed volume.
+ boolean fixedVolume = audioManager.isVolumeFixed();
+ Logging.d(tag, " fixed volume=" + fixedVolume);
+ if (!fixedVolume) {
+ for (int stream : streams) {
+ StringBuilder info = new StringBuilder();
+ info.append(" " + streamTypeToString(stream) + ": ");
+ info.append("volume=").append(audioManager.getStreamVolume(stream));
+ info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
+ logIsStreamMute(tag, audioManager, stream, info);
+ Logging.d(tag, info.toString());
+ }
+ }
+ }
+
+ private static void logIsStreamMute(
+ String tag, AudioManager audioManager, int stream, StringBuilder info) {
+ if (Build.VERSION.SDK_INT >= 23) {
+ info.append(", muted=").append(audioManager.isStreamMute(stream));
+ }
+ }
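+
+  // AudioManager#isStreamMute() requires API level 23, hence the guard above.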
+
+ // Moz linting complains even though AudioManager.GET_DEVICES_ALL is
+ // listed in the docs here:
+ // https://developer.android.com/reference/android/media/AudioManager#GET_DEVICES_ALL
+ @SuppressLint("WrongConstant")
+ private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
+ if (Build.VERSION.SDK_INT < 23) {
+ return;
+ }
+ final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ if (devices.length == 0) {
+ return;
+ }
+ Logging.d(tag, "Audio Devices: ");
+ for (AudioDeviceInfo device : devices) {
+ StringBuilder info = new StringBuilder();
+ info.append(" ").append(deviceTypeToString(device.getType()));
+ info.append(device.isSource() ? "(in): " : "(out): ");
+ // An empty array indicates that the device supports arbitrary channel counts.
+ if (device.getChannelCounts().length > 0) {
+ info.append("channels=").append(Arrays.toString(device.getChannelCounts()));
+ info.append(", ");
+ }
+ if (device.getEncodings().length > 0) {
+ // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4.
+ info.append("encodings=").append(Arrays.toString(device.getEncodings()));
+ info.append(", ");
+ }
+ if (device.getSampleRates().length > 0) {
+ info.append("sample rates=").append(Arrays.toString(device.getSampleRates()));
+ info.append(", ");
+ }
+ info.append("id=").append(device.getId());
+ Logging.d(tag, info.toString());
+ }
+ }
+
+ // Converts media.AudioManager modes into local string representation.
+ static String modeToString(int mode) {
+ switch (mode) {
+ case MODE_IN_CALL:
+ return "MODE_IN_CALL";
+ case MODE_IN_COMMUNICATION:
+ return "MODE_IN_COMMUNICATION";
+ case MODE_NORMAL:
+ return "MODE_NORMAL";
+ case MODE_RINGTONE:
+ return "MODE_RINGTONE";
+ default:
+ return "MODE_INVALID";
+ }
+ }
+
+ private static String streamTypeToString(int stream) {
+ switch (stream) {
+ case AudioManager.STREAM_VOICE_CALL:
+ return "STREAM_VOICE_CALL";
+ case AudioManager.STREAM_MUSIC:
+ return "STREAM_MUSIC";
+ case AudioManager.STREAM_RING:
+ return "STREAM_RING";
+ case AudioManager.STREAM_ALARM:
+ return "STREAM_ALARM";
+ case AudioManager.STREAM_NOTIFICATION:
+ return "STREAM_NOTIFICATION";
+ case AudioManager.STREAM_SYSTEM:
+ return "STREAM_SYSTEM";
+ default:
+ return "STREAM_INVALID";
+ }
+ }
+
+ // Returns true if the device can record audio via a microphone.
+ private static boolean hasMicrophone(Context context) {
+ return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/jni/DEPS b/third_party/libwebrtc/sdk/android/src/jni/DEPS
new file mode 100644
index 0000000000..ae33fa6830
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/DEPS
@@ -0,0 +1,15 @@
+include_rules = [
+ "+third_party/libyuv",
+ "+call/callfactoryinterface.h",
+ "+common_video",
+ "+logging/rtc_event_log/rtc_event_log_factory.h",
+ "+media/base",
+ "+media/engine",
+ "+modules/audio_device/include/audio_device.h",
+ "+modules/audio_processing/include/audio_processing.h",
+ "+modules/include",
+ "+modules/utility/include/jvm_android.h",
+ "+modules/video_coding",
+ "+pc",
+ "+system_wrappers/include",
+]
diff --git a/third_party/libwebrtc/sdk/android/src/jni/OWNERS b/third_party/libwebrtc/sdk/android/src/jni/OWNERS
new file mode 100644
index 0000000000..557373424b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/OWNERS
@@ -0,0 +1,4 @@
+per-file androidhistogram.cc=xalep@webrtc.org
+per-file androidmetrics.cc=xalep@webrtc.org
+per-file androidvideotracksource.*=xalep@webrtc.org
+per-file androidvideotracksource.cc=xalep@webrtc.org
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc b/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc
new file mode 100644
index 0000000000..498f143743
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <map>
+#include <memory>
+
+#include "sdk/android/generated_base_jni/Histogram_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/metrics.h"
+
+// Enables collection of native histograms and creating them.
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_Histogram_CreateCounts(JNIEnv* jni,
+ const JavaParamRef<jstring>& j_name,
+ jint min,
+ jint max,
+ jint buckets) {
+ std::string name = JavaToStdString(jni, j_name);
+ return jlongFromPointer(
+ metrics::HistogramFactoryGetCounts(name, min, max, buckets));
+}
+
+static jlong JNI_Histogram_CreateEnumeration(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_name,
+ jint max) {
+ std::string name = JavaToStdString(jni, j_name);
+ return jlongFromPointer(metrics::HistogramFactoryGetEnumeration(name, max));
+}
+
+static void JNI_Histogram_AddSample(JNIEnv* jni,
+ jlong histogram,
+ jint sample) {
+ if (histogram) {
+ HistogramAdd(reinterpret_cast<metrics::Histogram*>(histogram), sample);
+ }
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc b/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc
new file mode 100644
index 0000000000..01398cc77f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <map>
+#include <memory>
+
+#include "rtc_base/string_utils.h"
+#include "sdk/android/generated_metrics_jni/Metrics_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/metrics.h"
+
+// Enables collection of native histograms and creating them.
+namespace webrtc {
+namespace jni {
+
+static void JNI_Metrics_Enable(JNIEnv* jni) {
+ metrics::Enable();
+}
+
+// Gets and clears native histograms.
+static ScopedJavaLocalRef<jobject> JNI_Metrics_GetAndReset(JNIEnv* jni) {
+ ScopedJavaLocalRef<jobject> j_metrics = Java_Metrics_Constructor(jni);
+
+ std::map<std::string, std::unique_ptr<metrics::SampleInfo>,
+ rtc::AbslStringViewCmp>
+ histograms;
+ metrics::GetAndReset(&histograms);
+ for (const auto& kv : histograms) {
+ // Create and add samples to `HistogramInfo`.
+ ScopedJavaLocalRef<jobject> j_info = Java_HistogramInfo_Constructor(
+ jni, kv.second->min, kv.second->max,
+ static_cast<int>(kv.second->bucket_count));
+ for (const auto& sample : kv.second->samples) {
+ Java_HistogramInfo_addSample(jni, j_info, sample.first, sample.second);
+ }
+ // Add `HistogramInfo` to `Metrics`.
+ ScopedJavaLocalRef<jstring> j_name = NativeToJavaString(jni, kv.first);
+ Java_Metrics_add(jni, j_metrics, j_name, j_info);
+ }
+ CHECK_EXCEPTION(jni);
+ return j_metrics;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc
new file mode 100644
index 0000000000..539d41487e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc
@@ -0,0 +1,686 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/android_network_monitor.h"
+
+#include <dlfcn.h>
+
+#include "absl/strings/string_view.h"
+#ifndef RTLD_NOLOAD
+// RTLD_NOLOAD was added to Android's dlfcn.h in Lollipop.
+#define RTLD_NOLOAD 4
+#endif
+
+#include "api/sequence_checker.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "sdk/android/generated_base_jni/NetworkChangeDetector_jni.h"
+#include "sdk/android/generated_base_jni/NetworkMonitor_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+const char* NetworkTypeToString(NetworkType type) {
+ switch (type) {
+ case NETWORK_UNKNOWN:
+ return "UNKNOWN";
+ case NETWORK_ETHERNET:
+ return "ETHERNET";
+ case NETWORK_WIFI:
+ return "WIFI";
+ case NETWORK_5G:
+ return "5G";
+ case NETWORK_4G:
+ return "4G";
+ case NETWORK_3G:
+ return "3G";
+ case NETWORK_2G:
+ return "2G";
+ case NETWORK_UNKNOWN_CELLULAR:
+ return "UNKNOWN_CELLULAR";
+ case NETWORK_BLUETOOTH:
+ return "BLUETOOTH";
+ case NETWORK_VPN:
+ return "VPN";
+ case NETWORK_NONE:
+ return "NONE";
+ }
+}
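+
+// Note: the switch above intentionally has no default case, presumably so the
+// compiler can warn if a new NetworkType value is added without a mapping.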
+
+} // namespace
+
+enum AndroidSdkVersion {
+ SDK_VERSION_LOLLIPOP = 21,
+ SDK_VERSION_MARSHMALLOW = 23
+};
+
+static NetworkType GetNetworkTypeFromJava(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_type) {
+ std::string enum_name = GetJavaEnumName(jni, j_network_type);
+ if (enum_name == "CONNECTION_UNKNOWN") {
+ return NetworkType::NETWORK_UNKNOWN;
+ }
+ if (enum_name == "CONNECTION_ETHERNET") {
+ return NetworkType::NETWORK_ETHERNET;
+ }
+ if (enum_name == "CONNECTION_WIFI") {
+ return NetworkType::NETWORK_WIFI;
+ }
+ if (enum_name == "CONNECTION_5G") {
+ return NetworkType::NETWORK_5G;
+ }
+ if (enum_name == "CONNECTION_4G") {
+ return NetworkType::NETWORK_4G;
+ }
+ if (enum_name == "CONNECTION_3G") {
+ return NetworkType::NETWORK_3G;
+ }
+ if (enum_name == "CONNECTION_2G") {
+ return NetworkType::NETWORK_2G;
+ }
+ if (enum_name == "CONNECTION_UNKNOWN_CELLULAR") {
+ return NetworkType::NETWORK_UNKNOWN_CELLULAR;
+ }
+ if (enum_name == "CONNECTION_BLUETOOTH") {
+ return NetworkType::NETWORK_BLUETOOTH;
+ }
+ if (enum_name == "CONNECTION_VPN") {
+ return NetworkType::NETWORK_VPN;
+ }
+ if (enum_name == "CONNECTION_NONE") {
+ return NetworkType::NETWORK_NONE;
+ }
+ RTC_DCHECK_NOTREACHED();
+ return NetworkType::NETWORK_UNKNOWN;
+}
+
+static rtc::AdapterType AdapterTypeFromNetworkType(
+ NetworkType network_type,
+ bool surface_cellular_types) {
+ switch (network_type) {
+ case NETWORK_UNKNOWN:
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ case NETWORK_ETHERNET:
+ return rtc::ADAPTER_TYPE_ETHERNET;
+ case NETWORK_WIFI:
+ return rtc::ADAPTER_TYPE_WIFI;
+ case NETWORK_5G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_5G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_4G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_4G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_3G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_3G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_2G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_2G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_UNKNOWN_CELLULAR:
+ return rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_VPN:
+ return rtc::ADAPTER_TYPE_VPN;
+ case NETWORK_BLUETOOTH:
+ // There is no corresponding mapping for bluetooth networks.
+ // Map it to UNKNOWN for now.
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ case NETWORK_NONE:
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ }
+
+ RTC_DCHECK_NOTREACHED() << "Invalid network type " << network_type;
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+}
+
+static rtc::IPAddress JavaToNativeIpAddress(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ip_address) {
+ std::vector<int8_t> address =
+ JavaToNativeByteArray(jni, Java_IPAddress_getAddress(jni, j_ip_address));
+ size_t address_length = address.size();
+ if (address_length == 4) {
+ // IP4
+ struct in_addr ip4_addr;
+ memcpy(&ip4_addr.s_addr, address.data(), 4);
+ return rtc::IPAddress(ip4_addr);
+ }
+ // IP6
+ RTC_CHECK(address_length == 16);
+ struct in6_addr ip6_addr;
+ memcpy(ip6_addr.s6_addr, address.data(), address_length);
+ return rtc::IPAddress(ip6_addr);
+}
+
+static NetworkInformation GetNetworkInformationFromJava(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_info) {
+ NetworkInformation network_info;
+ network_info.interface_name = JavaToStdString(
+ jni, Java_NetworkInformation_getName(jni, j_network_info));
+ network_info.handle = static_cast<NetworkHandle>(
+ Java_NetworkInformation_getHandle(jni, j_network_info));
+ network_info.type = GetNetworkTypeFromJava(
+ jni, Java_NetworkInformation_getConnectionType(jni, j_network_info));
+ network_info.underlying_type_for_vpn = GetNetworkTypeFromJava(
+ jni, Java_NetworkInformation_getUnderlyingConnectionTypeForVpn(
+ jni, j_network_info));
+ ScopedJavaLocalRef<jobjectArray> j_ip_addresses =
+ Java_NetworkInformation_getIpAddresses(jni, j_network_info);
+ network_info.ip_addresses = JavaToNativeVector<rtc::IPAddress>(
+ jni, j_ip_addresses, &JavaToNativeIpAddress);
+ return network_info;
+}
+
+static bool AddressMatch(const rtc::IPAddress& ip1, const rtc::IPAddress& ip2) {
+ if (ip1.family() != ip2.family()) {
+ return false;
+ }
+ if (ip1.family() == AF_INET) {
+ return ip1.ipv4_address().s_addr == ip2.ipv4_address().s_addr;
+ }
+ if (ip1.family() == AF_INET6) {
+    // The last 64 bits of an IPv6 address can be a temporary address that
+    // changes over time, so we only compare the first 64 bits.
+ return memcmp(ip1.ipv6_address().s6_addr, ip2.ipv6_address().s6_addr,
+ sizeof(in6_addr) / 2) == 0;
+ }
+ return false;
+}
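+
+// Example: fe80::1:2:3:4 and fe80::a:b:c:d compare equal above, since only the
+// first 64 bits are compared.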
+
+NetworkInformation::NetworkInformation() = default;
+
+NetworkInformation::NetworkInformation(const NetworkInformation&) = default;
+
+NetworkInformation::NetworkInformation(NetworkInformation&&) = default;
+
+NetworkInformation::~NetworkInformation() = default;
+
+NetworkInformation& NetworkInformation::operator=(const NetworkInformation&) =
+ default;
+
+NetworkInformation& NetworkInformation::operator=(NetworkInformation&&) =
+ default;
+
+std::string NetworkInformation::ToString() const {
+ rtc::StringBuilder ss;
+ ss << "NetInfo[name " << interface_name << "; handle " << handle << "; type "
+ << type;
+ if (type == NETWORK_VPN) {
+ ss << "; underlying_type_for_vpn " << underlying_type_for_vpn;
+ }
+ ss << "]";
+ return ss.Release();
+}
+
+AndroidNetworkMonitor::AndroidNetworkMonitor(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_application_context,
+ const FieldTrialsView& field_trials)
+ : android_sdk_int_(Java_NetworkMonitor_androidSdkInt(env)),
+ j_application_context_(env, j_application_context),
+ j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)),
+ network_thread_(rtc::Thread::Current()),
+ field_trials_(field_trials) {}
+
+AndroidNetworkMonitor::~AndroidNetworkMonitor() {
+ RTC_DCHECK(!started_);
+}
+
+void AndroidNetworkMonitor::Start() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (started_) {
+ return;
+ }
+ reset();
+ started_ = true;
+ surface_cellular_types_ =
+ field_trials_.IsEnabled("WebRTC-SurfaceCellularTypes");
+ find_network_handle_without_ipv6_temporary_part_ = field_trials_.IsEnabled(
+ "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart");
+ bind_using_ifname_ =
+ !field_trials_.IsDisabled("WebRTC-BindUsingInterfaceName");
+ disable_is_adapter_available_ = field_trials_.IsDisabled(
+ "WebRTC-AndroidNetworkMonitor-IsAdapterAvailable");
+
+ // This pointer is also accessed by the methods called from java threads.
+ // Assigning it here is safe, because the java monitor is in a stopped state,
+ // and will not make any callbacks.
+ safety_flag_ = PendingTaskSafetyFlag::Create();
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_NetworkMonitor_startMonitoring(
+ env, j_network_monitor_, j_application_context_, jlongFromPointer(this),
+ NativeToJavaString(
+ env, field_trials_.Lookup("WebRTC-NetworkMonitorAutoDetect")));
+}
+
+void AndroidNetworkMonitor::reset() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ network_handle_by_address_.clear();
+ network_handle_by_if_name_.clear();
+ network_info_by_handle_.clear();
+ network_preference_by_adapter_type_.clear();
+}
+
+void AndroidNetworkMonitor::Stop() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (!started_) {
+ return;
+ }
+ started_ = false;
+ find_network_handle_without_ipv6_temporary_part_ = false;
+
+ // Cancel any pending tasks. We should not call
+ // `InvokeNetworksChangedCallback()` when the monitor is stopped.
+ safety_flag_->SetNotAlive();
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_NetworkMonitor_stopMonitoring(env, j_network_monitor_,
+ jlongFromPointer(this));
+
+ reset();
+}
+
+// The implementation is largely taken from UDPSocketPosix::BindToNetwork in
+// https://cs.chromium.org/chromium/src/net/udp/udp_socket_posix.cc
+rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork(
+ int socket_fd,
+ const rtc::IPAddress& address,
+ absl::string_view if_name) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ // Android prior to Lollipop didn't have support for binding sockets to
+ // networks. This may also occur if there is no connectivity manager
+ // service.
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ const bool network_binding_supported =
+ Java_NetworkMonitor_networkBindingSupported(env, j_network_monitor_);
+ if (!network_binding_supported) {
+ RTC_LOG(LS_WARNING)
+ << "BindSocketToNetwork is not supported on this platform "
+ "(Android SDK: "
+ << android_sdk_int_ << ")";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+
+ absl::optional<NetworkHandle> network_handle =
+ FindNetworkHandleFromAddressOrName(address, if_name);
+ if (!network_handle) {
+ RTC_LOG(LS_WARNING)
+ << "BindSocketToNetwork unable to find network handle for"
+ << " addr: " << address.ToSensitiveString() << " ifname: " << if_name;
+ return rtc::NetworkBindingResult::ADDRESS_NOT_FOUND;
+ }
+
+ if (*network_handle == 0 /* NETWORK_UNSPECIFIED */) {
+ RTC_LOG(LS_WARNING) << "BindSocketToNetwork 0 network handle for"
+ << " addr: " << address.ToSensitiveString()
+ << " ifname: " << if_name;
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+
+ int rv = 0;
+ if (android_sdk_int_ >= SDK_VERSION_MARSHMALLOW) {
+ // See declaration of android_setsocknetwork() here:
+ // http://androidxref.com/6.0.0_r1/xref/development/ndk/platforms/android-M/include/android/multinetwork.h#65
+ // Function cannot be called directly as it will cause app to fail to load
+ // on pre-marshmallow devices.
+ typedef int (*MarshmallowSetNetworkForSocket)(NetworkHandle net,
+ int socket);
+ static MarshmallowSetNetworkForSocket marshmallowSetNetworkForSocket;
+ // This is not thread-safe, but we are running this only on the worker
+ // thread.
+ if (!marshmallowSetNetworkForSocket) {
+ const std::string android_native_lib_path = "libandroid.so";
+ void* lib = dlopen(android_native_lib_path.c_str(), RTLD_NOW);
+ if (lib == nullptr) {
+ RTC_LOG(LS_ERROR) << "Library " << android_native_lib_path
+ << " not found!";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+ marshmallowSetNetworkForSocket =
+ reinterpret_cast<MarshmallowSetNetworkForSocket>(
+ dlsym(lib, "android_setsocknetwork"));
+ }
+ if (!marshmallowSetNetworkForSocket) {
+ RTC_LOG(LS_ERROR) << "Symbol marshmallowSetNetworkForSocket is not found";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+ rv = marshmallowSetNetworkForSocket(*network_handle, socket_fd);
+ } else {
+ // NOTE: This relies on Android implementation details, but it won't
+ // change because Lollipop is already released.
+ typedef int (*LollipopSetNetworkForSocket)(unsigned net, int socket);
+ static LollipopSetNetworkForSocket lollipopSetNetworkForSocket;
+    // This is not thread-safe, but we are running this only on the worker
+ // thread.
+ if (!lollipopSetNetworkForSocket) {
+ // Android's netd client library should always be loaded in our address
+ // space as it shims libc functions like connect().
+ const std::string net_library_path = "libnetd_client.so";
+ // Use RTLD_NOW to match Android's prior loading of the library:
+ // http://androidxref.com/6.0.0_r5/xref/bionic/libc/bionic/NetdClient.cpp#37
+ // Use RTLD_NOLOAD to assert that the library is already loaded and
+ // avoid doing any disk IO.
+ void* lib = dlopen(net_library_path.c_str(), RTLD_NOW | RTLD_NOLOAD);
+ if (lib == nullptr) {
+ RTC_LOG(LS_ERROR) << "Library " << net_library_path << " not found!";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+ lollipopSetNetworkForSocket =
+ reinterpret_cast<LollipopSetNetworkForSocket>(
+ dlsym(lib, "setNetworkForSocket"));
+ }
+ if (!lollipopSetNetworkForSocket) {
+ RTC_LOG(LS_ERROR) << "Symbol lollipopSetNetworkForSocket is not found ";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+ rv = lollipopSetNetworkForSocket(*network_handle, socket_fd);
+ }
+
+ // If `network` has since disconnected, `rv` will be ENONET. Surface this as
+ // ERR_NETWORK_CHANGED, rather than MapSystemError(ENONET) which gives back
+ // the less descriptive ERR_FAILED.
+ if (rv == 0) {
+ RTC_LOG(LS_VERBOSE) << "BindSocketToNetwork bound network handle for"
+ << " addr: " << address.ToSensitiveString()
+ << " ifname: " << if_name;
+ return rtc::NetworkBindingResult::SUCCESS;
+ }
+
+ RTC_LOG(LS_WARNING) << "BindSocketToNetwork got error: " << rv
+ << " addr: " << address.ToSensitiveString()
+ << " ifname: " << if_name;
+ if (rv == ENONET) {
+ return rtc::NetworkBindingResult::NETWORK_CHANGED;
+ }
+
+ return rtc::NetworkBindingResult::FAILURE;
+}
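+
+// Hypothetical caller-side sketch (not part of this file): bind a UDP socket
+// to the network that owns its local address, so traffic stays on that
+// interface even if the default network changes:
+//
+//   int fd = socket(AF_INET6, SOCK_DGRAM, 0);
+//   rtc::NetworkBindingResult result =
+//       monitor->BindSocketToNetwork(fd, local_ip, "wlan0");
+//   // NETWORK_CHANGED means the network has since disconnected; the caller
+//   // is expected to rebind using fresh network information.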
+
+void AndroidNetworkMonitor::OnNetworkConnected_n(
+ const NetworkInformation& network_info) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_INFO) << "Network connected: " << network_info.ToString();
+ network_info_by_handle_[network_info.handle] = network_info;
+ for (const rtc::IPAddress& address : network_info.ip_addresses) {
+ network_handle_by_address_[address] = network_info.handle;
+ }
+ network_handle_by_if_name_[network_info.interface_name] = network_info.handle;
+ RTC_CHECK(network_info_by_handle_.size() >=
+ network_handle_by_if_name_.size());
+ InvokeNetworksChangedCallback();
+}
+
+absl::optional<NetworkHandle>
+AndroidNetworkMonitor::FindNetworkHandleFromAddressOrName(
+ const rtc::IPAddress& ip_address,
+ absl::string_view if_name) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_INFO) << "Find network handle.";
+ if (find_network_handle_without_ipv6_temporary_part_) {
+ for (auto const& iter : network_info_by_handle_) {
+ const std::vector<rtc::IPAddress>& addresses = iter.second.ip_addresses;
+ auto address_it = std::find_if(addresses.begin(), addresses.end(),
+ [ip_address](rtc::IPAddress address) {
+ return AddressMatch(ip_address, address);
+ });
+ if (address_it != addresses.end()) {
+ return absl::make_optional(iter.first);
+ }
+ }
+ } else {
+ auto iter = network_handle_by_address_.find(ip_address);
+ if (iter != network_handle_by_address_.end()) {
+ return absl::make_optional(iter->second);
+ }
+ }
+
+ return FindNetworkHandleFromIfname(if_name);
+}
+
+absl::optional<NetworkHandle>
+AndroidNetworkMonitor::FindNetworkHandleFromIfname(
+ absl::string_view if_name) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ auto iter = network_handle_by_if_name_.find(if_name);
+ if (iter != network_handle_by_if_name_.end()) {
+ return iter->second;
+ }
+
+ if (bind_using_ifname_) {
+ for (auto const& iter : network_handle_by_if_name_) {
+      // Use substring match so that e.g. if_name="v4-wlan0" is matched
+      // against iter="wlan0".
+ if (if_name.find(iter.first) != absl::string_view::npos) {
+ return absl::make_optional(iter.second);
+ }
+ }
+ }
+
+ return absl::nullopt;
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_INFO) << "Network disconnected for handle " << handle;
+ auto iter = network_info_by_handle_.find(handle);
+ if (iter == network_info_by_handle_.end()) {
+ return;
+ }
+
+ for (const rtc::IPAddress& address : iter->second.ip_addresses) {
+ network_handle_by_address_.erase(address);
+ }
+
+  // We've discovered that if_name is not always unique,
+  // i.e. several networks can be connected with the same if_name.
+  //
+  // This is handled in the following way:
+  // 1) OnNetworkConnected_n overwrites any previous "owner" of an interface
+  // name ("owner" == entry in network_handle_by_if_name_).
+  // 2) In OnNetworkDisconnected_n, we scan for any remaining connected
+  // network with the same interface name and set it as the new owner.
+  //
+  // This means that network_info_by_handle_ can have more entries than
+  // network_handle_by_if_name_.
+
+ // Check if we are registered as "owner" of if_name.
+ const auto& if_name = iter->second.interface_name;
+ auto iter2 = network_handle_by_if_name_.find(if_name);
+ RTC_DCHECK(iter2 != network_handle_by_if_name_.end());
+ if (iter2 != network_handle_by_if_name_.end() && iter2->second == handle) {
+    // We are the owner.
+    // Check if there is another connected network we can promote to owner.
+ bool found = false;
+ for (const auto& info : network_info_by_handle_) {
+ if (info.first == handle) {
+ continue;
+ }
+ if (info.second.interface_name == if_name) {
+ found = true;
+ network_handle_by_if_name_[if_name] = info.first;
+ break;
+ }
+ }
+ if (!found) {
+      // No new owner; remove the stale entry.
+ network_handle_by_if_name_.erase(iter2);
+ }
+ } else {
+    // We are not the owner; don't do anything.
+#if RTC_DCHECK_IS_ON
+ auto owner_handle = FindNetworkHandleFromIfname(if_name);
+ RTC_DCHECK(owner_handle && *owner_handle != handle);
+#endif
+ }
+
+ network_info_by_handle_.erase(iter);
+}
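+
+// Illustrative example of the owner hand-over above: if handles 101 and 102
+// are both connected with interface_name "wlan0" and 101 is the registered
+// owner, disconnecting 101 promotes 102 to owner of "wlan0". Disconnecting
+// 102 instead leaves 101 as owner and only erases 102 from
+// network_info_by_handle_.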
+
+void AndroidNetworkMonitor::OnNetworkPreference_n(
+ NetworkType type,
+ rtc::NetworkPreference preference) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_INFO) << "Android network monitor preference for "
+ << NetworkTypeToString(type) << " changed to "
+ << rtc::NetworkPreferenceToString(preference);
+ auto adapter_type = AdapterTypeFromNetworkType(type, surface_cellular_types_);
+ network_preference_by_adapter_type_[adapter_type] = preference;
+ InvokeNetworksChangedCallback();
+}
+
+void AndroidNetworkMonitor::SetNetworkInfos(
+ const std::vector<NetworkInformation>& network_infos) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ // We expect this method to be called once directly after startMonitoring.
+ // All the caches should be empty.
+ RTC_DCHECK(network_handle_by_if_name_.empty());
+ RTC_DCHECK(network_handle_by_address_.empty());
+ RTC_DCHECK(network_info_by_handle_.empty());
+ RTC_DCHECK(network_preference_by_adapter_type_.empty());
+
+ // ...but reset just in case.
+ reset();
+ RTC_LOG(LS_INFO) << "Android network monitor found " << network_infos.size()
+ << " networks";
+ for (const NetworkInformation& network : network_infos) {
+ OnNetworkConnected_n(network);
+ }
+}
+
+rtc::NetworkMonitorInterface::InterfaceInfo
+AndroidNetworkMonitor::GetInterfaceInfo(absl::string_view if_name) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto handle = FindNetworkHandleFromIfname(if_name);
+ if (!handle) {
+ return {
+ .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN,
+        .available = disable_is_adapter_available_,
+ };
+ }
+ auto iter = network_info_by_handle_.find(*handle);
+ RTC_DCHECK(iter != network_info_by_handle_.end());
+ if (iter == network_info_by_handle_.end()) {
+ return {
+ .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN,
+        .available = disable_is_adapter_available_,
+ };
+ }
+
+ auto type =
+ AdapterTypeFromNetworkType(iter->second.type, surface_cellular_types_);
+ auto vpn_type =
+ (type == rtc::ADAPTER_TYPE_VPN)
+ ? AdapterTypeFromNetworkType(iter->second.underlying_type_for_vpn,
+ surface_cellular_types_)
+ : rtc::ADAPTER_TYPE_UNKNOWN;
+ return {
+ .adapter_type = type,
+ .underlying_type_for_vpn = vpn_type,
+ .network_preference = GetNetworkPreference(type),
+ .available = true,
+ };
+}
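+
+// Illustrative result, assuming AdapterTypeFromNetworkType maps cellular
+// NetworkTypes to ADAPTER_TYPE_CELLULAR: for a VPN running over LTE,
+// GetInterfaceInfo returns adapter_type == ADAPTER_TYPE_VPN with
+// underlying_type_for_vpn == ADAPTER_TYPE_CELLULAR.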
+
+rtc::NetworkPreference AndroidNetworkMonitor::GetNetworkPreference(
+ rtc::AdapterType adapter_type) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto preference_iter = network_preference_by_adapter_type_.find(adapter_type);
+ if (preference_iter == network_preference_by_adapter_type_.end()) {
+ return rtc::NetworkPreference::NEUTRAL;
+ }
+
+ return preference_iter->second;
+}
+
+AndroidNetworkMonitorFactory::AndroidNetworkMonitorFactory()
+ : j_application_context_(nullptr) {}
+
+AndroidNetworkMonitorFactory::AndroidNetworkMonitorFactory(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_application_context)
+ : j_application_context_(env, j_application_context) {}
+
+AndroidNetworkMonitorFactory::~AndroidNetworkMonitorFactory() = default;
+
+rtc::NetworkMonitorInterface*
+AndroidNetworkMonitorFactory::CreateNetworkMonitor(
+ const FieldTrialsView& field_trials) {
+ return new AndroidNetworkMonitor(AttachCurrentThreadIfNeeded(),
+ j_application_context_, field_trials);
+}
+
+void AndroidNetworkMonitor::NotifyConnectionTypeChanged(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller) {
+ network_thread_->PostTask(SafeTask(safety_flag_, [this] {
+ RTC_LOG(LS_INFO)
+ << "Android network monitor detected connection type change.";
+ InvokeNetworksChangedCallback();
+ }));
+}
+
+void AndroidNetworkMonitor::NotifyOfActiveNetworkList(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobjectArray>& j_network_infos) {
+ std::vector<NetworkInformation> network_infos =
+ JavaToNativeVector<NetworkInformation>(env, j_network_infos,
+ &GetNetworkInformationFromJava);
+ SetNetworkInfos(network_infos);
+}
+
+void AndroidNetworkMonitor::NotifyOfNetworkConnect(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobject>& j_network_info) {
+ NetworkInformation network_info =
+ GetNetworkInformationFromJava(env, j_network_info);
+ network_thread_->PostTask(
+ SafeTask(safety_flag_, [this, network_info = std::move(network_info)] {
+ OnNetworkConnected_n(network_info);
+ }));
+}
+
+void AndroidNetworkMonitor::NotifyOfNetworkDisconnect(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ jlong network_handle) {
+ network_thread_->PostTask(SafeTask(safety_flag_, [this, network_handle] {
+ OnNetworkDisconnected_n(static_cast<NetworkHandle>(network_handle));
+ }));
+}
+
+void AndroidNetworkMonitor::NotifyOfNetworkPreference(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobject>& j_connection_type,
+ jint jpreference) {
+ NetworkType type = GetNetworkTypeFromJava(env, j_connection_type);
+ rtc::NetworkPreference preference =
+ static_cast<rtc::NetworkPreference>(jpreference);
+
+ network_thread_->PostTask(SafeTask(safety_flag_, [this, type, preference] {
+ OnNetworkPreference_n(type, preference);
+ }));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h
new file mode 100644
index 0000000000..d0aad5ea76
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_
+#define SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/field_trials_view.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "rtc_base/network_monitor.h"
+#include "rtc_base/network_monitor_factory.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace test {
+class AndroidNetworkMonitorTest;
+} // namespace test
+
+namespace jni {
+
+typedef int64_t NetworkHandle;
+
+// C++ equivalent of the Java NetworkChangeDetector.ConnectionType.
+enum NetworkType {
+ NETWORK_UNKNOWN,
+ NETWORK_ETHERNET,
+ NETWORK_WIFI,
+ NETWORK_5G,
+ NETWORK_4G,
+ NETWORK_3G,
+ NETWORK_2G,
+ NETWORK_UNKNOWN_CELLULAR,
+ NETWORK_BLUETOOTH,
+ NETWORK_VPN,
+ NETWORK_NONE
+};
+
+// The information is collected from Android OS so that the native code can get
+// the network type and handle (Android network ID) for each interface.
+struct NetworkInformation {
+ std::string interface_name;
+ NetworkHandle handle;
+ NetworkType type;
+ NetworkType underlying_type_for_vpn;
+ std::vector<rtc::IPAddress> ip_addresses;
+
+ NetworkInformation();
+ NetworkInformation(const NetworkInformation&);
+ NetworkInformation(NetworkInformation&&);
+ ~NetworkInformation();
+ NetworkInformation& operator=(const NetworkInformation&);
+ NetworkInformation& operator=(NetworkInformation&&);
+
+ std::string ToString() const;
+};
+
+class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface {
+ public:
+ AndroidNetworkMonitor(JNIEnv* env,
+ const JavaRef<jobject>& j_application_context,
+ const FieldTrialsView& field_trials);
+ ~AndroidNetworkMonitor() override;
+
+  // TODO(sakal): Remove once downstream dependencies have been updated.
+ static void SetAndroidContext(JNIEnv* jni, jobject context) {}
+
+ void Start() override;
+ void Stop() override;
+
+ // Does `this` NetworkMonitorInterface implement BindSocketToNetwork?
+ // Only Android returns true.
+  bool SupportsBindSocketToNetwork() const override { return true; }
+
+ rtc::NetworkBindingResult BindSocketToNetwork(
+ int socket_fd,
+ const rtc::IPAddress& address,
+ absl::string_view if_name) override;
+
+ InterfaceInfo GetInterfaceInfo(absl::string_view if_name) override;
+
+ // Always expected to be called on the network thread.
+ void SetNetworkInfos(const std::vector<NetworkInformation>& network_infos);
+
+ void NotifyConnectionTypeChanged(JNIEnv* env,
+ const JavaRef<jobject>& j_caller);
+ void NotifyOfNetworkConnect(JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobject>& j_network_info);
+ void NotifyOfNetworkDisconnect(JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ jlong network_handle);
+ void NotifyOfActiveNetworkList(JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobjectArray>& j_network_infos);
+ void NotifyOfNetworkPreference(JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobject>& j_connection_type,
+ jint preference);
+
+ // Visible for testing.
+ absl::optional<NetworkHandle> FindNetworkHandleFromAddressOrName(
+ const rtc::IPAddress& address,
+ absl::string_view ifname) const;
+
+ private:
+ void reset();
+ void OnNetworkConnected_n(const NetworkInformation& network_info);
+ void OnNetworkDisconnected_n(NetworkHandle network_handle);
+ void OnNetworkPreference_n(NetworkType type,
+ rtc::NetworkPreference preference);
+
+ rtc::NetworkPreference GetNetworkPreference(rtc::AdapterType) const;
+ absl::optional<NetworkHandle> FindNetworkHandleFromIfname(
+ absl::string_view ifname) const;
+
+ const int android_sdk_int_;
+ ScopedJavaGlobalRef<jobject> j_application_context_;
+ ScopedJavaGlobalRef<jobject> j_network_monitor_;
+ rtc::Thread* const network_thread_;
+ bool started_ RTC_GUARDED_BY(network_thread_) = false;
+ std::map<std::string, NetworkHandle, rtc::AbslStringViewCmp>
+ network_handle_by_if_name_ RTC_GUARDED_BY(network_thread_);
+ std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_
+ RTC_GUARDED_BY(network_thread_);
+ std::map<NetworkHandle, NetworkInformation> network_info_by_handle_
+ RTC_GUARDED_BY(network_thread_);
+ std::map<rtc::AdapterType, rtc::NetworkPreference>
+ network_preference_by_adapter_type_ RTC_GUARDED_BY(network_thread_);
+ bool find_network_handle_without_ipv6_temporary_part_
+ RTC_GUARDED_BY(network_thread_) = false;
+ bool surface_cellular_types_ RTC_GUARDED_BY(network_thread_) = false;
+
+  // NOTE: if bind_using_ifname_ is true, then the adapter name is used with
+  // substring matching as follows: an adapter name reported by Android as
+  // 'wlan0' will be matched with 'v4-wlan0' ("v4-wlan0".find("wlan0") !=
+  // npos). This applies to adapter_type_by_name_,
+  // vpn_underlying_adapter_type_by_name_ and FindNetworkHandleFromIfname.
+ bool bind_using_ifname_ RTC_GUARDED_BY(network_thread_) = true;
+
+  // NOTE: disable_is_adapter_available_ is a kill switch for the
+  // implementation of IsAdapterAvailable().
+ bool disable_is_adapter_available_ RTC_GUARDED_BY(network_thread_) = false;
+
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag_
+ RTC_PT_GUARDED_BY(network_thread_) = nullptr;
+
+ const FieldTrialsView& field_trials_;
+
+ friend class webrtc::test::AndroidNetworkMonitorTest;
+};
+
+class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+  // Deprecated. Pass the application context to this class instead.
+ AndroidNetworkMonitorFactory();
+
+ AndroidNetworkMonitorFactory(JNIEnv* env,
+ const JavaRef<jobject>& j_application_context);
+
+ ~AndroidNetworkMonitorFactory() override;
+
+ rtc::NetworkMonitorInterface* CreateNetworkMonitor(
+ const FieldTrialsView& field_trials) override;
+
+ private:
+ ScopedJavaGlobalRef<jobject> j_application_context_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+// TODO(magjed): Remove once external clients are updated.
+namespace webrtc_jni {
+
+using webrtc::jni::AndroidNetworkMonitor;
+using webrtc::jni::AndroidNetworkMonitorFactory;
+
+} // namespace webrtc_jni
+
+#endif // SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc
new file mode 100644
index 0000000000..4f3152dc6f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/android_video_track_source.h"
+
+#include <utility>
+
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_video_jni/NativeAndroidVideoTrackSource_jni.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+// MediaCodec wants resolution to be divisible by 2.
+const int kRequiredResolutionAlignment = 2;
+
+VideoRotation jintToVideoRotation(jint rotation) {
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
+ return static_cast<VideoRotation>(rotation);
+}
+
+absl::optional<std::pair<int, int>> OptionalAspectRatio(jint j_width,
+ jint j_height) {
+ if (j_width > 0 && j_height > 0)
+ return std::pair<int, int>(j_width, j_height);
+ return absl::nullopt;
+}
+
+} // namespace
+
+AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
+ JNIEnv* jni,
+ bool is_screencast,
+ bool align_timestamps)
+ : AdaptedVideoTrackSource(kRequiredResolutionAlignment),
+ signaling_thread_(signaling_thread),
+ is_screencast_(is_screencast),
+ align_timestamps_(align_timestamps) {
+ RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
+}
+
+AndroidVideoTrackSource::~AndroidVideoTrackSource() = default;
+
+bool AndroidVideoTrackSource::is_screencast() const {
+ return is_screencast_.load();
+}
+
+absl::optional<bool> AndroidVideoTrackSource::needs_denoising() const {
+ return false;
+}
+
+void AndroidVideoTrackSource::SetState(JNIEnv* env,
+ jboolean j_is_live) {
+ const SourceState state = j_is_live ? kLive : kEnded;
+ if (state_.exchange(state) != state) {
+ if (rtc::Thread::Current() == signaling_thread_) {
+ FireOnChanged();
+ } else {
+ // TODO(sakal): Is this even necessary, does FireOnChanged have to be
+ // called from signaling thread?
+ signaling_thread_->PostTask([this] { FireOnChanged(); });
+ }
+ }
+}
+
+AndroidVideoTrackSource::SourceState AndroidVideoTrackSource::state() const {
+ return state_.load();
+}
+
+bool AndroidVideoTrackSource::remote() const {
+ return false;
+}
+
+void AndroidVideoTrackSource::SetIsScreencast(JNIEnv* env,
+ jboolean j_is_screencast) {
+ is_screencast_.store(j_is_screencast);
+}
+
+ScopedJavaLocalRef<jobject> AndroidVideoTrackSource::AdaptFrame(
+ JNIEnv* env,
+ jint j_width,
+ jint j_height,
+ jint j_rotation,
+ jlong j_timestamp_ns) {
+ const VideoRotation rotation = jintToVideoRotation(j_rotation);
+
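+  // When timestamp alignment is enabled, translate the capture time from the
+  // camera clock to the rtc::TimeMicros() clock to compensate for offset and
+  // drift between the two clocks.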
+ const int64_t camera_time_us = j_timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+ const int64_t aligned_timestamp_ns =
+ align_timestamps_ ? rtc::kNumNanosecsPerMicrosec *
+ timestamp_aligner_.TranslateTimestamp(
+ camera_time_us, rtc::TimeMicros())
+ : j_timestamp_ns;
+
+ int adapted_width = 0;
+ int adapted_height = 0;
+ int crop_width = 0;
+ int crop_height = 0;
+ int crop_x = 0;
+ int crop_y = 0;
+ bool drop;
+
+ // TODO(magjed): Move this logic to users of NativeAndroidVideoTrackSource
+ // instead, in order to keep this native wrapping layer as thin as possible.
+ if (rotation % 180 == 0) {
+ drop = !rtc::AdaptedVideoTrackSource::AdaptFrame(
+ j_width, j_height, camera_time_us, &adapted_width, &adapted_height,
+ &crop_width, &crop_height, &crop_x, &crop_y);
+ } else {
+ // Swap all width/height and x/y.
+ drop = !rtc::AdaptedVideoTrackSource::AdaptFrame(
+ j_height, j_width, camera_time_us, &adapted_height, &adapted_width,
+ &crop_height, &crop_width, &crop_y, &crop_x);
+ }
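+  // Illustrative example of the swap above: a 1280x720 frame captured with
+  // 90 degree rotation is fed to the adapter as 720x1280, so the adaptation
+  // is computed in display orientation; the swapped output pointers then map
+  // the results back to buffer coordinates.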
+
+ return Java_NativeAndroidVideoTrackSource_createFrameAdaptationParameters(
+ env, crop_x, crop_y, crop_width, crop_height, adapted_width,
+ adapted_height, aligned_timestamp_ns, drop);
+}
+
+void AndroidVideoTrackSource::OnFrameCaptured(
+ JNIEnv* env,
+ jint j_rotation,
+ jlong j_timestamp_ns,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ JavaToNativeFrameBuffer(env, j_video_frame_buffer);
+ const VideoRotation rotation = jintToVideoRotation(j_rotation);
+
+ // AdaptedVideoTrackSource handles applying rotation for I420 frames.
+ if (apply_rotation() && rotation != kVideoRotation_0)
+ buffer = buffer->ToI420();
+
+ OnFrame(VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_rotation(rotation)
+ .set_timestamp_us(j_timestamp_ns / rtc::kNumNanosecsPerMicrosec)
+ .build());
+}
+
+void AndroidVideoTrackSource::AdaptOutputFormat(
+ JNIEnv* env,
+ jint j_landscape_width,
+ jint j_landscape_height,
+ const JavaRef<jobject>& j_max_landscape_pixel_count,
+ jint j_portrait_width,
+ jint j_portrait_height,
+ const JavaRef<jobject>& j_max_portrait_pixel_count,
+ const JavaRef<jobject>& j_max_fps) {
+ video_adapter()->OnOutputFormatRequest(
+ OptionalAspectRatio(j_landscape_width, j_landscape_height),
+ JavaToNativeOptionalInt(env, j_max_landscape_pixel_count),
+ OptionalAspectRatio(j_portrait_width, j_portrait_height),
+ JavaToNativeOptionalInt(env, j_max_portrait_pixel_count),
+ JavaToNativeOptionalInt(env, j_max_fps));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h
new file mode 100644
index 0000000000..625633b90b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
+#define API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
+
+#include <jni.h>
+
+#include <atomic>
+
+#include "absl/types/optional.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "media/base/adapted_video_track_source.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/timestamp_aligner.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// This class needs to be used in conjunction with the corresponding Java
+// class NativeAndroidVideoTrackSource. This class is thread safe and methods
+// can be called from any thread, but if frames A, B, ..., are sent to
+// adaptFrame(), the adapted frames adaptedA, adaptedB, ..., need to be passed
+// in the same order to onFrameCaptured().
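+//
+// Sketch of the expected call pattern (the Java-side method names here are
+// hypothetical):
+//   params = adaptFrame(width, height, rotation, timestampNs);  // may drop
+//   if (params != null)
+//     onFrameCaptured(rotation, params.timestampNs, croppedAndScaledBuffer);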
+class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
+ public:
+ AndroidVideoTrackSource(rtc::Thread* signaling_thread,
+ JNIEnv* jni,
+ bool is_screencast,
+ bool align_timestamps);
+ ~AndroidVideoTrackSource() override;
+
+ bool is_screencast() const override;
+
+ // Indicates that the encoder should denoise video before encoding it.
+ // If it is not set, the default configuration is used which is different
+ // depending on video codec.
+ absl::optional<bool> needs_denoising() const override;
+
+ void SetState(SourceState state);
+
+ SourceState state() const override;
+
+ bool remote() const override;
+
+ // This function should be called before delivering any frame to determine if
+ // the frame should be dropped or what the cropping and scaling parameters
+ // should be. This function is thread safe and can be called from any thread.
+ // This function returns
+ // NativeAndroidVideoTrackSource.FrameAdaptationParameters, or null if the
+ // frame should be dropped.
+ ScopedJavaLocalRef<jobject> AdaptFrame(JNIEnv* env,
+ jint j_width,
+ jint j_height,
+ jint j_rotation,
+ jlong j_timestamp_ns);
+
+ // This function converts and passes the frame on to the rest of the C++
+  // WebRTC layer. Note that AdaptFrame() is expected to be called first and
+  // that the delivered frame conforms to those parameters.
+ // This function is thread safe and can be called from any thread.
+ void OnFrameCaptured(JNIEnv* env,
+ jint j_rotation,
+ jlong j_timestamp_ns,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ void SetState(JNIEnv* env,
+ jboolean j_is_live);
+
+ void AdaptOutputFormat(JNIEnv* env,
+ jint j_landscape_width,
+ jint j_landscape_height,
+ const JavaRef<jobject>& j_max_landscape_pixel_count,
+ jint j_portrait_width,
+ jint j_portrait_height,
+ const JavaRef<jobject>& j_max_portrait_pixel_count,
+ const JavaRef<jobject>& j_max_fps);
+
+ void SetIsScreencast(JNIEnv* env, jboolean j_is_screencast);
+
+ private:
+ rtc::Thread* signaling_thread_;
+ std::atomic<SourceState> state_;
+ std::atomic<bool> is_screencast_;
+ rtc::TimestampAligner timestamp_aligner_;
+ const bool align_timestamps_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS b/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS
new file mode 100644
index 0000000000..9a3adee687
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+base/android/jni_android.h",
+ "+modules/audio_device",
+]
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS b/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS
new file mode 100644
index 0000000000..95662c195c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS
@@ -0,0 +1 @@
+henrika@webrtc.org
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc
new file mode 100644
index 0000000000..ae8fcb9613
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc
@@ -0,0 +1,247 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/aaudio_player.h"
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+namespace jni {
+
+enum AudioDeviceMessageType : uint32_t {
+ kMessageOutputStreamDisconnected,
+};
+
+AAudioPlayer::AAudioPlayer(const AudioParameters& audio_parameters)
+ : main_thread_(rtc::Thread::Current()),
+ aaudio_(audio_parameters, AAUDIO_DIRECTION_OUTPUT, this) {
+ RTC_LOG(LS_INFO) << "ctor";
+ thread_checker_aaudio_.Detach();
+}
+
+AAudioPlayer::~AAudioPlayer() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ Terminate();
+ RTC_LOG(LS_INFO) << "#detected underruns: " << underrun_count_;
+}
+
+int AAudioPlayer::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ if (aaudio_.audio_parameters().channels() == 2) {
+ RTC_DLOG(LS_WARNING) << "Stereo mode is enabled";
+ }
+ return 0;
+}
+
+int AAudioPlayer::Terminate() {
+ RTC_LOG(LS_INFO) << "Terminate";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ StopPlayout();
+ return 0;
+}
+
+int AAudioPlayer::InitPlayout() {
+ RTC_LOG(LS_INFO) << "InitPlayout";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ RTC_DCHECK(!initialized_);
+ RTC_DCHECK(!playing_);
+ if (!aaudio_.Init()) {
+ return -1;
+ }
+ initialized_ = true;
+ return 0;
+}
+
+bool AAudioPlayer::PlayoutIsInitialized() const {
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ return initialized_;
+}
+
+int AAudioPlayer::StartPlayout() {
+ RTC_LOG(LS_INFO) << "StartPlayout";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ RTC_DCHECK(!playing_);
+ if (!initialized_) {
+ RTC_DLOG(LS_WARNING)
+ << "Playout can not start since InitPlayout must succeed first";
+ return 0;
+ }
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetPlayout();
+ }
+ if (!aaudio_.Start()) {
+ return -1;
+ }
+ underrun_count_ = aaudio_.xrun_count();
+ first_data_callback_ = true;
+ playing_ = true;
+ return 0;
+}
+
+int AAudioPlayer::StopPlayout() {
+ RTC_LOG(LS_INFO) << "StopPlayout";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ if (!initialized_ || !playing_) {
+ return 0;
+ }
+ if (!aaudio_.Stop()) {
+ RTC_LOG(LS_ERROR) << "StopPlayout failed";
+ return -1;
+ }
+ thread_checker_aaudio_.Detach();
+ initialized_ = false;
+ playing_ = false;
+ return 0;
+}
+
+bool AAudioPlayer::Playing() const {
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ return playing_;
+}
+
+void AAudioPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ RTC_DLOG(LS_INFO) << "AttachAudioBuffer";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  audio_device_buffer_ = audioBuffer;
+  RTC_CHECK(audio_device_buffer_);
+  const AudioParameters audio_parameters = aaudio_.audio_parameters();
+  audio_device_buffer_->SetPlayoutSampleRate(audio_parameters.sample_rate());
+  audio_device_buffer_->SetPlayoutChannels(audio_parameters.channels());
+ // Create a modified audio buffer class which allows us to ask for any number
+  // of samples (and not only multiples of 10ms) to match the optimal buffer
+ // size per callback used by AAudio.
+ fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+}
+
+bool AAudioPlayer::SpeakerVolumeIsAvailable() {
+ return false;
+}
+
+int AAudioPlayer::SetSpeakerVolume(uint32_t volume) {
+ return -1;
+}
+
+absl::optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
+ return absl::nullopt;
+}
+
+absl::optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
+ return absl::nullopt;
+}
+
+absl::optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
+ return absl::nullopt;
+}
+
+void AAudioPlayer::OnErrorCallback(aaudio_result_t error) {
+ RTC_LOG(LS_ERROR) << "OnErrorCallback: " << AAudio_convertResultToText(error);
+  // TODO(henrika): investigate if we can use a thread checker here. Initial
+  // tests show that this callback can sometimes be called on a unique thread
+ // but according to the documentation it should be on the same thread as the
+ // data callback.
+ // RTC_DCHECK_RUN_ON(&thread_checker_aaudio_);
+ if (aaudio_.stream_state() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ // The stream is disconnected and any attempt to use it will return
+ // AAUDIO_ERROR_DISCONNECTED.
+ RTC_LOG(LS_WARNING) << "Output stream disconnected";
+ // AAudio documentation states: "You should not close or reopen the stream
+ // from the callback, use another thread instead". A message is therefore
+ // sent to the main thread to do the restart operation.
+ RTC_DCHECK(main_thread_);
+ main_thread_->Post(RTC_FROM_HERE, this, kMessageOutputStreamDisconnected);
+ }
+}
+
+aaudio_data_callback_result_t AAudioPlayer::OnDataCallback(void* audio_data,
+ int32_t num_frames) {
+ RTC_DCHECK_RUN_ON(&thread_checker_aaudio_);
+ // Log device id in first data callback to ensure that a valid device is
+ // utilized.
+ if (first_data_callback_) {
+ RTC_LOG(LS_INFO) << "--- First output data callback: "
+ "device id="
+ << aaudio_.device_id();
+ first_data_callback_ = false;
+ }
+
+ // Check if the underrun count has increased. If it has, increase the buffer
+ // size by adding the size of a burst. It will reduce the risk of underruns
+ // at the expense of an increased latency.
+ // TODO(henrika): enable possibility to disable and/or tune the algorithm.
+ const int32_t underrun_count = aaudio_.xrun_count();
+ if (underrun_count > underrun_count_) {
+ RTC_LOG(LS_ERROR) << "Underrun detected: " << underrun_count;
+ underrun_count_ = underrun_count;
+ aaudio_.IncreaseOutputBufferSize();
+ }
+
+ // Estimate latency between writing an audio frame to the output stream and
+ // the time that same frame is played out on the output audio device.
+ latency_millis_ = aaudio_.EstimateLatencyMillis();
+ // TODO(henrika): use for development only.
+ if (aaudio_.frames_written() % (1000 * aaudio_.frames_per_burst()) == 0) {
+ RTC_DLOG(LS_INFO) << "output latency: " << latency_millis_
+ << ", num_frames: " << num_frames;
+ }
+
+ // Read audio data from the WebRTC source using the FineAudioBuffer object
+ // and write that data into `audio_data` to be played out by AAudio.
+ // Prime output with zeros during a short initial phase to avoid distortion.
+  // TODO(henrika): do more work to figure out if the initial forced silence
+  // period is really needed.
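+  // As a rough guide, assuming a typical burst of 192 frames at 48 kHz, 50
+  // bursts correspond to about 200 ms of priming silence (9600 / 48000 s).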
+ if (aaudio_.frames_written() < 50 * aaudio_.frames_per_burst()) {
+ const size_t num_bytes =
+ sizeof(int16_t) * aaudio_.samples_per_frame() * num_frames;
+ memset(audio_data, 0, num_bytes);
+ } else {
+ fine_audio_buffer_->GetPlayoutData(
+ rtc::MakeArrayView(static_cast<int16_t*>(audio_data),
+ aaudio_.samples_per_frame() * num_frames),
+ static_cast<int>(latency_millis_ + 0.5));
+ }
+
+ // TODO(henrika): possibly add trace here to be included in systrace.
+ // See https://developer.android.com/studio/profile/systrace-commandline.html.
+ return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+void AAudioPlayer::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ switch (msg->message_id) {
+ case kMessageOutputStreamDisconnected:
+ HandleStreamDisconnected();
+ break;
+ }
+}
+
+void AAudioPlayer::HandleStreamDisconnected() {
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ RTC_DLOG(LS_INFO) << "HandleStreamDisconnected";
+ if (!initialized_ || !playing_) {
+ return;
+ }
+ // Perform a restart by first closing the disconnected stream and then start
+ // a new stream; this time using the new (preferred) audio output device.
+ StopPlayout();
+ InitPlayout();
+ StartPlayout();
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h
new file mode 100644
index 0000000000..9e775ecfa3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
+
+#include <aaudio/AAudio.h>
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+class AudioDeviceBuffer;
+class FineAudioBuffer;
+
+namespace jni {
+
+// Implements low-latency 16-bit mono PCM audio output support for Android
+// using the C-based AAudio API.
+//
+// An instance must be created and destroyed on one and the same thread.
+// All public methods must also be called on the same thread. A thread checker
+// will DCHECK if any method is called on an invalid thread. Audio buffers
+// are requested on a dedicated high-priority thread owned by AAudio.
+//
+// The existing design forces the user to call InitPlayout() after StopPlayout()
+// to be able to call StartPlayout() again. This is in line with how the Java-
+// based implementation works.
+//
+// An audio stream can be disconnected, e.g. when an audio device is removed.
+// This implementation will restart the audio stream using the new preferred
+// device if such an event happens.
+//
+// Also supports automatic buffer-size adjustment based on underrun detection,
+// where the internal AAudio buffer can be increased when needed. This reduces
+// the risk of underruns (~glitches) at the expense of increased latency.
+class AAudioPlayer final : public AudioOutput,
+ public AAudioObserverInterface,
+ public rtc::MessageHandler {
+ public:
+ explicit AAudioPlayer(const AudioParameters& audio_parameters);
+ ~AAudioPlayer() override;
+
+ int Init() override;
+ int Terminate() override;
+
+ int InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+
+ int StartPlayout() override;
+ int StopPlayout() override;
+ bool Playing() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ // Not implemented in AAudio.
+ bool SpeakerVolumeIsAvailable() override;
+ int SetSpeakerVolume(uint32_t volume) override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
+
+ protected:
+ // AAudioObserverInterface implementation.
+
+ // For an output stream, this function should render and write `num_frames`
+  // of data in the stream's current data format to the `audio_data` buffer.
+ // Called on a real-time thread owned by AAudio.
+ aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+ int32_t num_frames) override;
+  // AAudio calls this function if any error occurs on a callback thread.
+ // Called on a real-time thread owned by AAudio.
+ void OnErrorCallback(aaudio_result_t error) override;
+
+ // rtc::MessageHandler used for restart messages from the error-callback
+ // thread to the main (creating) thread.
+ void OnMessage(rtc::Message* msg) override;
+
+ private:
+ // Closes the existing stream and starts a new stream.
+ void HandleStreamDisconnected();
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker main_thread_checker_;
+
+ // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a
+ // real-time thread owned by AAudio. Detached during construction of this
+ // object.
+ SequenceChecker thread_checker_aaudio_;
+
+  // The thread that this object was created on.
+ rtc::Thread* main_thread_;
+
+ // Wraps all AAudio resources. Contains an output stream using the default
+ // output audio device. Can be accessed on both the main thread and the
+ // real-time thread owned by AAudio. See separate AAudio documentation about
+ // thread safety.
+ AAudioWrapper aaudio_;
+
+ // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data
+  // in chunks of 10ms. It then allows this data to be pulled at a finer or
+  // coarser granularity. That is, by interacting with this class instead of
+  // directly with the AudioDeviceBuffer, one can ask for any number of audio
+  // data samples.
+ // Example: native buffer size can be 192 audio frames at 48kHz sample rate.
+ // WebRTC will provide 480 audio frames per 10ms but AAudio asks for 192
+ // in each callback (once every 4th ms). This class can then ask for 192 and
+ // the FineAudioBuffer will ask WebRTC for new data approximately only every
+ // second callback and also cache non-utilized audio.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Counts number of detected underrun events reported by AAudio.
+ int32_t underrun_count_ = 0;
+
+ // True only for the first data callback in each audio session.
+ bool first_data_callback_ = true;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl class and set by AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_ RTC_GUARDED_BY(main_thread_checker_) =
+ nullptr;
+
+ bool initialized_ RTC_GUARDED_BY(main_thread_checker_) = false;
+ bool playing_ RTC_GUARDED_BY(main_thread_checker_) = false;
+
+ // Estimated latency between writing an audio frame to the output stream and
+ // the time that same frame is played out on the output audio device.
+ double latency_millis_ RTC_GUARDED_BY(thread_checker_aaudio_) = 0;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc
new file mode 100644
index 0000000000..d66c1d0235
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc
@@ -0,0 +1,234 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/aaudio_recorder.h"
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+
+namespace jni {
+
+enum AudioDeviceMessageType : uint32_t {
+ kMessageInputStreamDisconnected,
+};
+
+AAudioRecorder::AAudioRecorder(const AudioParameters& audio_parameters)
+ : main_thread_(rtc::Thread::Current()),
+ aaudio_(audio_parameters, AAUDIO_DIRECTION_INPUT, this) {
+ RTC_LOG(LS_INFO) << "ctor";
+ thread_checker_aaudio_.Detach();
+}
+
+AAudioRecorder::~AAudioRecorder() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+ RTC_LOG(LS_INFO) << "detected owerflows: " << overflow_count_;
+}
+
+int AAudioRecorder::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (aaudio_.audio_parameters().channels() == 2) {
+ RTC_DLOG(LS_WARNING) << "Stereo mode is enabled";
+ }
+ return 0;
+}
+
+int AAudioRecorder::Terminate() {
+ RTC_LOG(LS_INFO) << "Terminate";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopRecording();
+ return 0;
+}
+
+int AAudioRecorder::InitRecording() {
+ RTC_LOG(LS_INFO) << "InitRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!initialized_);
+ RTC_DCHECK(!recording_);
+ if (!aaudio_.Init()) {
+ return -1;
+ }
+ initialized_ = true;
+ return 0;
+}
+
+bool AAudioRecorder::RecordingIsInitialized() const {
+ return initialized_;
+}
+
+int AAudioRecorder::StartRecording() {
+ RTC_LOG(LS_INFO) << "StartRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!recording_);
+ if (fine_audio_buffer_) {
+    fine_audio_buffer_->ResetRecord();
+ }
+ if (!aaudio_.Start()) {
+ return -1;
+ }
+ overflow_count_ = aaudio_.xrun_count();
+ first_data_callback_ = true;
+ recording_ = true;
+ return 0;
+}
+
+int AAudioRecorder::StopRecording() {
+ RTC_LOG(LS_INFO) << "StopRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !recording_) {
+ return 0;
+ }
+ if (!aaudio_.Stop()) {
+ return -1;
+ }
+ thread_checker_aaudio_.Detach();
+ initialized_ = false;
+ recording_ = false;
+ return 0;
+}
+
+bool AAudioRecorder::Recording() const {
+ return recording_;
+}
+
+void AAudioRecorder::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ RTC_LOG(LS_INFO) << "AttachAudioBuffer";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+  audio_device_buffer_ = audioBuffer;
+  RTC_CHECK(audio_device_buffer_);
+  const AudioParameters audio_parameters = aaudio_.audio_parameters();
+  audio_device_buffer_->SetRecordingSampleRate(audio_parameters.sample_rate());
+  audio_device_buffer_->SetRecordingChannels(audio_parameters.channels());
+ // Create a modified audio buffer class which allows us to deliver any number
+ // of samples (and not only multiples of 10ms which WebRTC uses) to match the
+ // native AAudio buffer size.
+ fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+}
+
+bool AAudioRecorder::IsAcousticEchoCancelerSupported() const {
+ return false;
+}
+
+bool AAudioRecorder::IsNoiseSuppressorSupported() const {
+ return false;
+}
+
+int AAudioRecorder::EnableBuiltInAEC(bool enable) {
+ RTC_LOG(LS_INFO) << "EnableBuiltInAEC: " << enable;
+ RTC_LOG(LS_ERROR) << "Not implemented";
+ return -1;
+}
+
+int AAudioRecorder::EnableBuiltInNS(bool enable) {
+ RTC_LOG(LS_INFO) << "EnableBuiltInNS: " << enable;
+ RTC_LOG(LS_ERROR) << "Not implemented";
+ return -1;
+}
+
+void AAudioRecorder::OnErrorCallback(aaudio_result_t error) {
+ RTC_LOG(LS_ERROR) << "OnErrorCallback: " << AAudio_convertResultToText(error);
+ // RTC_DCHECK(thread_checker_aaudio_.IsCurrent());
+ if (aaudio_.stream_state() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ // The stream is disconnected and any attempt to use it will return
+    // AAUDIO_ERROR_DISCONNECTED.
+ RTC_LOG(LS_WARNING) << "Input stream disconnected => restart is required";
+ // AAudio documentation states: "You should not close or reopen the stream
+ // from the callback, use another thread instead". A message is therefore
+ // sent to the main thread to do the restart operation.
+ RTC_DCHECK(main_thread_);
+ main_thread_->Post(RTC_FROM_HERE, this, kMessageInputStreamDisconnected);
+ }
+}
+
+// Read and process `num_frames` of data from the `audio_data` buffer.
+// TODO(henrika): possibly add trace here to be included in systrace.
+// See https://developer.android.com/studio/profile/systrace-commandline.html.
+aaudio_data_callback_result_t AAudioRecorder::OnDataCallback(
+ void* audio_data,
+ int32_t num_frames) {
+ // TODO(henrika): figure out why we sometimes hit this one.
+ // RTC_DCHECK(thread_checker_aaudio_.IsCurrent());
+ // RTC_LOG(LS_INFO) << "OnDataCallback: " << num_frames;
+ // Drain the input buffer at first callback to ensure that it does not
+  // contain any old data. This also ensures that the lowest possible latency
+  // is obtained.
+ if (first_data_callback_) {
+ RTC_LOG(LS_INFO) << "--- First input data callback: "
+ "device id="
+ << aaudio_.device_id();
+ aaudio_.ClearInputStream(audio_data, num_frames);
+ first_data_callback_ = false;
+ }
+  // Check if the overflow counter has increased and, if so, log a warning.
+  // TODO(henrika): possibly add a UMA stat or capacity extension.
+ const int32_t overflow_count = aaudio_.xrun_count();
+ if (overflow_count > overflow_count_) {
+ RTC_LOG(LS_ERROR) << "Overflow detected: " << overflow_count;
+ overflow_count_ = overflow_count;
+ }
+  // Estimate the time between when an audio frame was recorded by the input
+  // device and when it can be read on the input stream.
+ latency_millis_ = aaudio_.EstimateLatencyMillis();
+ // TODO(henrika): use for development only.
+ if (aaudio_.frames_read() % (1000 * aaudio_.frames_per_burst()) == 0) {
+ RTC_DLOG(LS_INFO) << "input latency: " << latency_millis_
+ << ", num_frames: " << num_frames;
+ }
+ // Copy recorded audio in `audio_data` to the WebRTC sink using the
+ // FineAudioBuffer object.
+ fine_audio_buffer_->DeliverRecordedData(
+ rtc::MakeArrayView(static_cast<const int16_t*>(audio_data),
+ aaudio_.samples_per_frame() * num_frames),
+ static_cast<int>(latency_millis_ + 0.5));
+
+ return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+void AAudioRecorder::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ switch (msg->message_id) {
+ case kMessageInputStreamDisconnected:
+ HandleStreamDisconnected();
+ break;
+ default:
+ RTC_LOG(LS_ERROR) << "Invalid message id: " << msg->message_id;
+ break;
+ }
+}
+
+void AAudioRecorder::HandleStreamDisconnected() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_LOG(LS_INFO) << "HandleStreamDisconnected";
+ if (!initialized_ || !recording_) {
+ return;
+ }
+ // Perform a restart by first closing the disconnected stream and then start
+ // a new stream; this time using the new (preferred) audio input device.
+  // TODO(henrika): resolve issue where one restart attempt leads to a long
+ // sequence of new calls to OnErrorCallback().
+ // See b/73148976 for details.
+ StopRecording();
+ InitRecording();
+ StartRecording();
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h
new file mode 100644
index 0000000000..a911577bfe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_
+
+#include <aaudio/AAudio.h>
+
+#include <memory>
+
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+class FineAudioBuffer;
+class AudioDeviceBuffer;
+
+namespace jni {
+
+// Implements low-latency 16-bit mono PCM audio input support for Android
+// using the C-based AAudio API.
+//
+// An instance must be created and destroyed on one and the same thread.
+// All public methods must also be called on the same thread. A thread checker
+// will RTC_DCHECK if any method is called on an invalid thread. Audio buffers
+// are delivered on a dedicated high-priority thread owned by AAudio.
+//
+// The existing design forces the user to call InitRecording() after
+// StopRecording() to be able to call StartRecording() again. This is in line
+// with how the Java-based implementation works.
+//
+// TODO(henrika): add comments about device changes and adaptive buffer
+// management.
+class AAudioRecorder : public AudioInput,
+ public AAudioObserverInterface,
+ public rtc::MessageHandler {
+ public:
+ explicit AAudioRecorder(const AudioParameters& audio_parameters);
+ ~AAudioRecorder() override;
+
+ int Init() override;
+ int Terminate() override;
+
+ int InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int StartRecording() override;
+ int StopRecording() override;
+ bool Recording() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ // TODO(henrika): add support using AAudio APIs when available.
+ bool IsAcousticEchoCancelerSupported() const override;
+ bool IsNoiseSuppressorSupported() const override;
+ int EnableBuiltInAEC(bool enable) override;
+ int EnableBuiltInNS(bool enable) override;
+
+ protected:
+ // AAudioObserverInterface implementation.
+
+ // For an input stream, this function should read `num_frames` of recorded
+ // data, in the stream's current data format, from the `audio_data` buffer.
+ // Called on a real-time thread owned by AAudio.
+ aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+ int32_t num_frames) override;
+
+ // AAudio calls this function if any error occurs on a callback thread.
+ // Called on a real-time thread owned by AAudio.
+ void OnErrorCallback(aaudio_result_t error) override;
+
+ // rtc::MessageHandler used for restart messages.
+ void OnMessage(rtc::Message* msg) override;
+
+ private:
+ // Closes the existing stream and starts a new stream.
+ void HandleStreamDisconnected();
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker thread_checker_;
+
+  // Stores the thread ID of the first call to AAudioRecorder::OnDataCallback
+  // from a real-time thread owned by AAudio. Detached during construction of
+  // this object.
+ SequenceChecker thread_checker_aaudio_;
+
+  // The thread on which this object was created.
+ rtc::Thread* main_thread_;
+
+ // Wraps all AAudio resources. Contains an input stream using the default
+ // input audio device.
+ AAudioWrapper aaudio_;
+
+  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+  // AudioDeviceModuleImpl class and created by AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_ = nullptr;
+
+ bool initialized_ = false;
+ bool recording_ = false;
+
+ // Consumes audio of native buffer size and feeds the WebRTC layer with 10ms
+ // chunks of audio.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Counts number of detected overflow events reported by AAudio.
+ int32_t overflow_count_ = 0;
+
+  // Estimated time between when an audio frame is recorded by the input
+  // device and when it can be read on the input stream.
+ double latency_millis_ = 0;
+
+ // True only for the first data callback in each audio session.
+ bool first_data_callback_ = true;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc
new file mode 100644
index 0000000000..6c20703108
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc
@@ -0,0 +1,501 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/time_utils.h"
+
+#define LOG_ON_ERROR(op) \
+ do { \
+ aaudio_result_t result = (op); \
+ if (result != AAUDIO_OK) { \
+ RTC_LOG(LS_ERROR) << #op << ": " << AAudio_convertResultToText(result); \
+ } \
+ } while (0)
+
+#define RETURN_ON_ERROR(op, ...) \
+ do { \
+ aaudio_result_t result = (op); \
+ if (result != AAUDIO_OK) { \
+ RTC_LOG(LS_ERROR) << #op << ": " << AAudio_convertResultToText(result); \
+ return __VA_ARGS__; \
+ } \
+ } while (0)
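+
+// Usage sketch for the macros above (illustrative, not upstream code):
+// LOG_ON_ERROR() only logs a failing AAudio call, while RETURN_ON_ERROR()
+// additionally returns from the enclosing function with the supplied value:
+//
+//   bool Example(AAudioStream* stream) {
+//     LOG_ON_ERROR(AAudioStream_requestFlush(stream));            // log only
+//     RETURN_ON_ERROR(AAudioStream_requestStart(stream), false);  // bail out
+//     return true;
+//   }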
+
+namespace webrtc {
+
+namespace jni {
+
+namespace {
+
+const char* DirectionToString(aaudio_direction_t direction) {
+ switch (direction) {
+ case AAUDIO_DIRECTION_OUTPUT:
+ return "OUTPUT";
+ case AAUDIO_DIRECTION_INPUT:
+ return "INPUT";
+ default:
+ return "UNKNOWN";
+ }
+}
+
+const char* SharingModeToString(aaudio_sharing_mode_t mode) {
+ switch (mode) {
+ case AAUDIO_SHARING_MODE_EXCLUSIVE:
+ return "EXCLUSIVE";
+ case AAUDIO_SHARING_MODE_SHARED:
+ return "SHARED";
+ default:
+ return "UNKNOWN";
+ }
+}
+
+const char* PerformanceModeToString(aaudio_performance_mode_t mode) {
+ switch (mode) {
+ case AAUDIO_PERFORMANCE_MODE_NONE:
+ return "NONE";
+ case AAUDIO_PERFORMANCE_MODE_POWER_SAVING:
+ return "POWER_SAVING";
+ case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY:
+ return "LOW_LATENCY";
+ default:
+ return "UNKNOWN";
+ }
+}
+
+const char* FormatToString(int32_t id) {
+ switch (id) {
+ case AAUDIO_FORMAT_INVALID:
+ return "INVALID";
+ case AAUDIO_FORMAT_UNSPECIFIED:
+ return "UNSPECIFIED";
+ case AAUDIO_FORMAT_PCM_I16:
+ return "PCM_I16";
+ case AAUDIO_FORMAT_PCM_FLOAT:
+ return "FLOAT";
+ default:
+ return "UNKNOWN";
+ }
+}
+
+void ErrorCallback(AAudioStream* stream,
+ void* user_data,
+ aaudio_result_t error) {
+ RTC_DCHECK(user_data);
+ AAudioWrapper* aaudio_wrapper = reinterpret_cast<AAudioWrapper*>(user_data);
+ RTC_LOG(LS_WARNING) << "ErrorCallback: "
+ << DirectionToString(aaudio_wrapper->direction());
+ RTC_DCHECK(aaudio_wrapper->observer());
+ aaudio_wrapper->observer()->OnErrorCallback(error);
+}
+
+aaudio_data_callback_result_t DataCallback(AAudioStream* stream,
+ void* user_data,
+ void* audio_data,
+ int32_t num_frames) {
+ RTC_DCHECK(user_data);
+ RTC_DCHECK(audio_data);
+ AAudioWrapper* aaudio_wrapper = reinterpret_cast<AAudioWrapper*>(user_data);
+ RTC_DCHECK(aaudio_wrapper->observer());
+ return aaudio_wrapper->observer()->OnDataCallback(audio_data, num_frames);
+}
+
+// Wraps the stream builder object to ensure that it is released properly when
+// the stream builder goes out of scope.
+class ScopedStreamBuilder {
+ public:
+ ScopedStreamBuilder() {
+ LOG_ON_ERROR(AAudio_createStreamBuilder(&builder_));
+ RTC_DCHECK(builder_);
+ }
+ ~ScopedStreamBuilder() {
+ if (builder_) {
+ LOG_ON_ERROR(AAudioStreamBuilder_delete(builder_));
+ }
+ }
+
+ AAudioStreamBuilder* get() const { return builder_; }
+
+ private:
+ AAudioStreamBuilder* builder_ = nullptr;
+};
+
+} // namespace
+
+AAudioWrapper::AAudioWrapper(const AudioParameters& audio_parameters,
+ aaudio_direction_t direction,
+ AAudioObserverInterface* observer)
+ : audio_parameters_(audio_parameters),
+ direction_(direction),
+ observer_(observer) {
+ RTC_LOG(LS_INFO) << "ctor";
+ RTC_DCHECK(observer_);
+ aaudio_thread_checker_.Detach();
+ RTC_LOG(LS_INFO) << audio_parameters_.ToString();
+}
+
+AAudioWrapper::~AAudioWrapper() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!stream_);
+}
+
+bool AAudioWrapper::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // Creates a stream builder which can be used to open an audio stream.
+ ScopedStreamBuilder builder;
+ // Configures the stream builder using audio parameters given at construction.
+ SetStreamConfiguration(builder.get());
+ // Opens a stream based on options in the stream builder.
+ if (!OpenStream(builder.get())) {
+ return false;
+ }
+ // Ensures that the opened stream could activate the requested settings.
+ if (!VerifyStreamConfiguration()) {
+ return false;
+ }
+ // Optimizes the buffer scheme for lowest possible latency and creates
+ // additional buffer logic to match the 10ms buffer size used in WebRTC.
+ if (!OptimizeBuffers()) {
+ return false;
+ }
+ LogStreamState();
+ return true;
+}
+
+bool AAudioWrapper::Start() {
+ RTC_LOG(LS_INFO) << "Start";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // TODO(henrika): this state check might not be needed.
+ aaudio_stream_state_t current_state = AAudioStream_getState(stream_);
+ if (current_state != AAUDIO_STREAM_STATE_OPEN) {
+ RTC_LOG(LS_ERROR) << "Invalid state: "
+ << AAudio_convertStreamStateToText(current_state);
+ return false;
+ }
+ // Asynchronous request for the stream to start.
+ RETURN_ON_ERROR(AAudioStream_requestStart(stream_), false);
+ LogStreamState();
+ return true;
+}
+
+bool AAudioWrapper::Stop() {
+ RTC_LOG(LS_INFO) << "Stop: " << DirectionToString(direction());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // Asynchronous request for the stream to stop.
+ RETURN_ON_ERROR(AAudioStream_requestStop(stream_), false);
+ CloseStream();
+ aaudio_thread_checker_.Detach();
+ return true;
+}
+
+double AAudioWrapper::EstimateLatencyMillis() const {
+ RTC_DCHECK(stream_);
+ double latency_millis = 0.0;
+ if (direction() == AAUDIO_DIRECTION_INPUT) {
+    // For input streams, the best guess we can make is to use the current
+    // burst size as the delay estimate.
+ latency_millis = static_cast<double>(frames_per_burst()) / sample_rate() *
+ rtc::kNumMillisecsPerSec;
+ } else {
+ int64_t existing_frame_index;
+ int64_t existing_frame_presentation_time;
+ // Get the time at which a particular frame was presented to audio hardware.
+ aaudio_result_t result = AAudioStream_getTimestamp(
+ stream_, CLOCK_MONOTONIC, &existing_frame_index,
+ &existing_frame_presentation_time);
+ // Results are only valid when the stream is in AAUDIO_STREAM_STATE_STARTED.
+ if (result == AAUDIO_OK) {
+ // Get write index for next audio frame.
+ int64_t next_frame_index = frames_written();
+ // Number of frames between next frame and the existing frame.
+ int64_t frame_index_delta = next_frame_index - existing_frame_index;
+ // Assume the next frame will be written now.
+ int64_t next_frame_write_time = rtc::TimeNanos();
+ // Calculate time when next frame will be presented to the hardware taking
+ // sample rate into account.
+ int64_t frame_time_delta =
+ (frame_index_delta * rtc::kNumNanosecsPerSec) / sample_rate();
+ int64_t next_frame_presentation_time =
+ existing_frame_presentation_time + frame_time_delta;
+ // Derive a latency estimate given results above.
+ latency_millis = static_cast<double>(next_frame_presentation_time -
+ next_frame_write_time) /
+ rtc::kNumNanosecsPerMillisec;
+ }
+ }
+ return latency_millis;
+}
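+
+// Worked example for the output-latency estimate above (illustrative numbers):
+// at sample_rate() == 48000, a frame_index_delta of 960 frames gives
+// frame_time_delta == 960 * 10^9 / 48000 ns == 20 ms. If the reference frame
+// was presented 5 ms ago, next_frame_presentation_time lands 15 ms after
+// "now", i.e. latency_millis is roughly 15.0.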
+
+// Returns true if the buffer size was increased; returns false if the buffer
+// size could not be increased (e.g. because the maximum buffer capacity was
+// reached).
+bool AAudioWrapper::IncreaseOutputBufferSize() {
+ RTC_LOG(LS_INFO) << "IncreaseBufferSize";
+ RTC_DCHECK(stream_);
+ RTC_DCHECK(aaudio_thread_checker_.IsCurrent());
+ RTC_DCHECK_EQ(direction(), AAUDIO_DIRECTION_OUTPUT);
+ aaudio_result_t buffer_size = AAudioStream_getBufferSizeInFrames(stream_);
+  // Try to increase the buffer size by one burst to reduce the risk of
+  // underruns.
+ buffer_size += frames_per_burst();
+ // Verify that the new buffer size is not larger than max capacity.
+ // TODO(henrika): keep track of case when we reach the capacity limit.
+ const int32_t max_buffer_size = buffer_capacity_in_frames();
+ if (buffer_size > max_buffer_size) {
+ RTC_LOG(LS_ERROR) << "Required buffer size (" << buffer_size
+ << ") is higher than max: " << max_buffer_size;
+ return false;
+ }
+ RTC_LOG(LS_INFO) << "Updating buffer size to: " << buffer_size
+ << " (max=" << max_buffer_size << ")";
+ buffer_size = AAudioStream_setBufferSizeInFrames(stream_, buffer_size);
+ if (buffer_size < 0) {
+ RTC_LOG(LS_ERROR) << "Failed to change buffer size: "
+ << AAudio_convertResultToText(buffer_size);
+ return false;
+ }
+ RTC_LOG(LS_INFO) << "Buffer size changed to: " << buffer_size;
+ return true;
+}
+
+void AAudioWrapper::ClearInputStream(void* audio_data, int32_t num_frames) {
+ RTC_LOG(LS_INFO) << "ClearInputStream";
+ RTC_DCHECK(stream_);
+ RTC_DCHECK(aaudio_thread_checker_.IsCurrent());
+ RTC_DCHECK_EQ(direction(), AAUDIO_DIRECTION_INPUT);
+ aaudio_result_t cleared_frames = 0;
+ do {
+ cleared_frames = AAudioStream_read(stream_, audio_data, num_frames, 0);
+ } while (cleared_frames > 0);
+}
+
+AAudioObserverInterface* AAudioWrapper::observer() const {
+ return observer_;
+}
+
+AudioParameters AAudioWrapper::audio_parameters() const {
+ return audio_parameters_;
+}
+
+int32_t AAudioWrapper::samples_per_frame() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getSamplesPerFrame(stream_);
+}
+
+int32_t AAudioWrapper::buffer_size_in_frames() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getBufferSizeInFrames(stream_);
+}
+
+int32_t AAudioWrapper::buffer_capacity_in_frames() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getBufferCapacityInFrames(stream_);
+}
+
+int32_t AAudioWrapper::device_id() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getDeviceId(stream_);
+}
+
+int32_t AAudioWrapper::xrun_count() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getXRunCount(stream_);
+}
+
+int32_t AAudioWrapper::format() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getFormat(stream_);
+}
+
+int32_t AAudioWrapper::sample_rate() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getSampleRate(stream_);
+}
+
+int32_t AAudioWrapper::channel_count() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getChannelCount(stream_);
+}
+
+int32_t AAudioWrapper::frames_per_callback() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getFramesPerDataCallback(stream_);
+}
+
+aaudio_sharing_mode_t AAudioWrapper::sharing_mode() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getSharingMode(stream_);
+}
+
+aaudio_performance_mode_t AAudioWrapper::performance_mode() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getPerformanceMode(stream_);
+}
+
+aaudio_stream_state_t AAudioWrapper::stream_state() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getState(stream_);
+}
+
+int64_t AAudioWrapper::frames_written() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getFramesWritten(stream_);
+}
+
+int64_t AAudioWrapper::frames_read() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getFramesRead(stream_);
+}
+
+void AAudioWrapper::SetStreamConfiguration(AAudioStreamBuilder* builder) {
+ RTC_LOG(LS_INFO) << "SetStreamConfiguration";
+ RTC_DCHECK(builder);
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // Request usage of default primary output/input device.
+ // TODO(henrika): verify that default device follows Java APIs.
+ // https://developer.android.com/reference/android/media/AudioDeviceInfo.html.
+ AAudioStreamBuilder_setDeviceId(builder, AAUDIO_UNSPECIFIED);
+ // Use preferred sample rate given by the audio parameters.
+ AAudioStreamBuilder_setSampleRate(builder, audio_parameters().sample_rate());
+ // Use preferred channel configuration given by the audio parameters.
+ AAudioStreamBuilder_setChannelCount(builder, audio_parameters().channels());
+ // Always use 16-bit PCM audio sample format.
+ AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_I16);
+  // TODO(henrika): investigate the effect of using AAUDIO_SHARING_MODE_EXCLUSIVE,
+  // which would give the lowest possible latency. For now, shared mode is
+  // requested. Had exclusive mode been requested and not been available,
+  // AAudio would have fallen back to shared mode automatically.
+ AAudioStreamBuilder_setSharingMode(builder, AAUDIO_SHARING_MODE_SHARED);
+ // Use the direction that was given at construction.
+ AAudioStreamBuilder_setDirection(builder, direction_);
+ // TODO(henrika): investigate performance using different performance modes.
+ AAudioStreamBuilder_setPerformanceMode(builder,
+ AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+ // Given that WebRTC applications require low latency, our audio stream uses
+ // an asynchronous callback function to transfer data to and from the
+ // application. AAudio executes the callback in a higher-priority thread that
+ // has better performance.
+ AAudioStreamBuilder_setDataCallback(builder, DataCallback, this);
+  // Request that AAudio calls this function if any error occurs on a callback
+  // thread.
+ AAudioStreamBuilder_setErrorCallback(builder, ErrorCallback, this);
+}
+
+bool AAudioWrapper::OpenStream(AAudioStreamBuilder* builder) {
+ RTC_LOG(LS_INFO) << "OpenStream";
+ RTC_DCHECK(builder);
+ AAudioStream* stream = nullptr;
+ RETURN_ON_ERROR(AAudioStreamBuilder_openStream(builder, &stream), false);
+ stream_ = stream;
+ LogStreamConfiguration();
+ return true;
+}
+
+void AAudioWrapper::CloseStream() {
+ RTC_LOG(LS_INFO) << "CloseStream";
+ RTC_DCHECK(stream_);
+ LOG_ON_ERROR(AAudioStream_close(stream_));
+ stream_ = nullptr;
+}
+
+void AAudioWrapper::LogStreamConfiguration() {
+ RTC_DCHECK(stream_);
+ char ss_buf[1024];
+ rtc::SimpleStringBuilder ss(ss_buf);
+ ss << "Stream Configuration: ";
+ ss << "sample rate=" << sample_rate() << ", channels=" << channel_count();
+ ss << ", samples per frame=" << samples_per_frame();
+ ss << ", format=" << FormatToString(format());
+ ss << ", sharing mode=" << SharingModeToString(sharing_mode());
+ ss << ", performance mode=" << PerformanceModeToString(performance_mode());
+ ss << ", direction=" << DirectionToString(direction());
+ ss << ", device id=" << AAudioStream_getDeviceId(stream_);
+ ss << ", frames per callback=" << frames_per_callback();
+ RTC_LOG(LS_INFO) << ss.str();
+}
+
+void AAudioWrapper::LogStreamState() {
+ RTC_LOG(LS_INFO) << "AAudio stream state: "
+ << AAudio_convertStreamStateToText(stream_state());
+}
+
+bool AAudioWrapper::VerifyStreamConfiguration() {
+ RTC_LOG(LS_INFO) << "VerifyStreamConfiguration";
+ RTC_DCHECK(stream_);
+ // TODO(henrika): should we verify device ID as well?
+ if (AAudioStream_getSampleRate(stream_) != audio_parameters().sample_rate()) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested sample rate";
+ return false;
+ }
+ if (AAudioStream_getChannelCount(stream_) !=
+ static_cast<int32_t>(audio_parameters().channels())) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested channel count";
+ return false;
+ }
+ if (AAudioStream_getFormat(stream_) != AAUDIO_FORMAT_PCM_I16) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested format";
+ return false;
+ }
+ if (AAudioStream_getSharingMode(stream_) != AAUDIO_SHARING_MODE_SHARED) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested sharing mode";
+ return false;
+ }
+ if (AAudioStream_getPerformanceMode(stream_) !=
+ AAUDIO_PERFORMANCE_MODE_LOW_LATENCY) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested performance mode";
+ return false;
+ }
+ if (AAudioStream_getDirection(stream_) != direction()) {
+ RTC_LOG(LS_ERROR) << "Stream direction could not be set";
+ return false;
+ }
+ if (AAudioStream_getSamplesPerFrame(stream_) !=
+ static_cast<int32_t>(audio_parameters().channels())) {
+ RTC_LOG(LS_ERROR) << "Invalid number of samples per frame";
+ return false;
+ }
+ return true;
+}
+
+bool AAudioWrapper::OptimizeBuffers() {
+ RTC_LOG(LS_INFO) << "OptimizeBuffers";
+ RTC_DCHECK(stream_);
+ // Maximum number of frames that can be filled without blocking.
+ RTC_LOG(LS_INFO) << "max buffer capacity in frames: "
+ << buffer_capacity_in_frames();
+ // Query the number of frames that the application should read or write at
+ // one time for optimal performance.
+ int32_t frames_per_burst = AAudioStream_getFramesPerBurst(stream_);
+ RTC_LOG(LS_INFO) << "frames per burst for optimal performance: "
+ << frames_per_burst;
+ frames_per_burst_ = frames_per_burst;
+ if (direction() == AAUDIO_DIRECTION_INPUT) {
+ // There is no point in calling setBufferSizeInFrames() for input streams
+ // since it has no effect on the performance (latency in this case).
+ return true;
+ }
+  // Set the buffer size equal to the burst size to guarantee the lowest
+  // possible latency. This size might change for output streams if underruns
+  // are detected and automatic buffer adjustment is enabled.
+ AAudioStream_setBufferSizeInFrames(stream_, frames_per_burst);
+ int32_t buffer_size = AAudioStream_getBufferSizeInFrames(stream_);
+ if (buffer_size != frames_per_burst) {
+ RTC_LOG(LS_ERROR) << "Failed to use optimal buffer burst size";
+ return false;
+ }
+  // The current buffer size equals the burst size after the adjustment above.
+ RTC_LOG(LS_INFO) << "buffer burst size in frames: " << buffer_size;
+ return true;
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h
new file mode 100644
index 0000000000..cbc78a0a25
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_
+
+#include <aaudio/AAudio.h>
+
+#include "api/sequence_checker.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// AAudio callback interface for audio transport to/from the AAudio stream.
+// The interface also contains an error callback method for notifications of
+// e.g. device changes.
+class AAudioObserverInterface {
+ public:
+  // Audio data will be passed in or out of this function depending on the
+  // direction of the audio stream. This callback function will be called on a
+ // real-time thread owned by AAudio.
+ virtual aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+ int32_t num_frames) = 0;
+  // AAudio will call this function if any error occurs on a callback thread.
+ // In response, this function could signal or launch another thread to reopen
+ // a stream on another device. Do not reopen the stream in this callback.
+ virtual void OnErrorCallback(aaudio_result_t error) = 0;
+
+ protected:
+ virtual ~AAudioObserverInterface() {}
+};
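+
+// Minimal observer sketch (for illustration only; the real implementations in
+// this directory are AAudioRecorder and AAudioPlayer). It shows the expected
+// contract: keep OnDataCallback() non-blocking and defer any stream restart
+// out of OnErrorCallback():
+//
+//   class SilentObserver : public AAudioObserverInterface {
+//    public:
+//     aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+//                                                  int32_t num_frames) override {
+//       // Input direction: consume (here: ignore) `num_frames` frames.
+//       return AAUDIO_CALLBACK_RESULT_CONTINUE;
+//     }
+//     void OnErrorCallback(aaudio_result_t error) override {
+//       // Signal another thread to handle the error; never reopen here.
+//     }
+//   };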
+
+// Utility class which wraps the C-based AAudio API into a more handy C++ class
+// where the underlying resources (AAudioStreamBuilder and AAudioStream) are
+// encapsulated. User must set the direction (in or out) at construction since
+// it defines the stream type and the direction of the data flow in the
+// AAudioObserverInterface.
+//
+// AAudio is an Android C API introduced in the Android O (26) release.
+// It is designed for high-performance audio applications that require low
+// latency. Applications communicate with AAudio by reading and writing data
+// to streams.
+//
+// Each stream is attached to a single audio device, where each audio device
+// has a unique ID. The ID can be used to bind an audio stream to a specific
+// audio device but this implementation lets AAudio choose the default primary
+// device instead (device selection takes place in Java). A stream can only
+// move data in one direction. When a stream is opened, Android checks to
+// ensure that the audio device and stream direction agree.
+class AAudioWrapper {
+ public:
+ AAudioWrapper(const AudioParameters& audio_parameters,
+ aaudio_direction_t direction,
+ AAudioObserverInterface* observer);
+ ~AAudioWrapper();
+
+ bool Init();
+ bool Start();
+ bool Stop();
+
+ // For output streams: estimates latency between writing an audio frame to
+ // the output stream and the time that same frame is played out on the output
+ // audio device.
+ // For input streams: estimates latency between reading an audio frame from
+ // the input stream and the time that same frame was recorded on the input
+ // audio device.
+ double EstimateLatencyMillis() const;
+
+ // Increases the internal buffer size for output streams by one burst size to
+ // reduce the risk of underruns. Can be used while a stream is active.
+ bool IncreaseOutputBufferSize();
+
+ // Drains the recording stream of any existing data by reading from it until
+ // it's empty. Can be used to clear out old data before starting a new audio
+ // session.
+ void ClearInputStream(void* audio_data, int32_t num_frames);
+
+ AAudioObserverInterface* observer() const;
+ AudioParameters audio_parameters() const;
+ int32_t samples_per_frame() const;
+ int32_t buffer_size_in_frames() const;
+ int32_t buffer_capacity_in_frames() const;
+ int32_t device_id() const;
+ int32_t xrun_count() const;
+ int32_t format() const;
+ int32_t sample_rate() const;
+ int32_t channel_count() const;
+ int32_t frames_per_callback() const;
+ aaudio_sharing_mode_t sharing_mode() const;
+ aaudio_performance_mode_t performance_mode() const;
+ aaudio_stream_state_t stream_state() const;
+ int64_t frames_written() const;
+ int64_t frames_read() const;
+ aaudio_direction_t direction() const { return direction_; }
+ AAudioStream* stream() const { return stream_; }
+ int32_t frames_per_burst() const { return frames_per_burst_; }
+
+ private:
+ void SetStreamConfiguration(AAudioStreamBuilder* builder);
+ bool OpenStream(AAudioStreamBuilder* builder);
+ void CloseStream();
+ void LogStreamConfiguration();
+ void LogStreamState();
+ bool VerifyStreamConfiguration();
+ bool OptimizeBuffers();
+
+ SequenceChecker thread_checker_;
+ SequenceChecker aaudio_thread_checker_;
+ const AudioParameters audio_parameters_;
+ const aaudio_direction_t direction_;
+ AAudioObserverInterface* observer_ = nullptr;
+ AAudioStream* stream_ = nullptr;
+ int32_t frames_per_burst_ = 0;
+};
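+
+// Typical lifecycle sketch for the wrapper (illustrative; error handling
+// elided). The observer must outlive the wrapper since it is invoked from the
+// AAudio real-time thread:
+//
+//   AAudioWrapper aaudio(audio_parameters, AAUDIO_DIRECTION_INPUT, observer);
+//   if (aaudio.Init() && aaudio.Start()) {
+//     // OnDataCallback() now fires on the AAudio-owned thread.
+//     aaudio.Stop();  // Also closes the stream.
+//   }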
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h
new file mode 100644
index 0000000000..fdecf384c9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_
+
+namespace webrtc {
+
+namespace jni {
+
+const int kDefaultSampleRate = 44100;
+// Delay estimates for the two different supported modes. These values are based
+// on real-time round-trip delay estimates on a large set of devices and they
+// are lower bounds since the filter length is 128 ms, so the AEC works for
+// delays in the range [50, ~170] ms and [150, ~270] ms. Note that, in most
+// cases, the lowest delay estimate will not be utilized since devices that
+// support low-latency output audio often support HW AEC as well.
+const int kLowLatencyModeDelayEstimateInMilliseconds = 50;
+const int kHighLatencyModeDelayEstimateInMilliseconds = 150;
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc
new file mode 100644
index 0000000000..7c59d3e432
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc
@@ -0,0 +1,650 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+#include <memory>
+#include <utility>
+
+#include "api/make_ref_counted.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_audio_device_module_base_jni/WebRtcAudioManager_jni.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+// This class combines a generic instance of an AudioInput and a generic
+// instance of an AudioOutput to create an AudioDeviceModule. This is mostly
+// done by delegating to the audio input/output with some glue code. This class
+// also directly implements some of the AudioDeviceModule methods with dummy
+// implementations.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread.
+// A thread checker will RTC_DCHECK if any method is called on an invalid
+// thread.
+// TODO(henrika): it might be useful to also support a scenario where the ADM
+// is constructed on thread T1, used on thread T2 and destructed on T2 or T3.
+// If so, care must be taken to ensure that only T2 is a COM thread.
+class AndroidAudioDeviceModule : public AudioDeviceModule {
+ public:
+ // For use with UMA logging. Must be kept in sync with histograms.xml in
+ // Chrome, located at
+ // https://cs.chromium.org/chromium/src/tools/metrics/histograms/histograms.xml
+ enum class InitStatus {
+ OK = 0,
+ PLAYOUT_ERROR = 1,
+ RECORDING_ERROR = 2,
+ OTHER_ERROR = 3,
+ NUM_STATUSES = 4
+ };
+
+ AndroidAudioDeviceModule(AudioDeviceModule::AudioLayer audio_layer,
+ bool is_stereo_playout_supported,
+ bool is_stereo_record_supported,
+ uint16_t playout_delay_ms,
+ std::unique_ptr<AudioInput> audio_input,
+ std::unique_ptr<AudioOutput> audio_output)
+ : audio_layer_(audio_layer),
+ is_stereo_playout_supported_(is_stereo_playout_supported),
+ is_stereo_record_supported_(is_stereo_record_supported),
+ playout_delay_ms_(playout_delay_ms),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ input_(std::move(audio_input)),
+ output_(std::move(audio_output)),
+ initialized_(false) {
+ RTC_CHECK(input_);
+ RTC_CHECK(output_);
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ thread_checker_.Detach();
+ }
+
+ ~AndroidAudioDeviceModule() override { RTC_DLOG(LS_INFO) << __FUNCTION__; }
+
+ int32_t ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer* audioLayer) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *audioLayer = audio_layer_;
+ return 0;
+ }
+
+ int32_t RegisterAudioCallback(AudioTransport* audioCallback) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return audio_device_buffer_->RegisterAudioCallback(audioCallback);
+ }
+
+ int32_t Init() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ =
+ std::make_unique<AudioDeviceBuffer>(task_queue_factory_.get());
+ AttachAudioBuffer();
+ if (initialized_) {
+ return 0;
+ }
+ InitStatus status;
+ if (output_->Init() != 0) {
+ status = InitStatus::PLAYOUT_ERROR;
+ } else if (input_->Init() != 0) {
+ output_->Terminate();
+ status = InitStatus::RECORDING_ERROR;
+ } else {
+ initialized_ = true;
+ status = InitStatus::OK;
+ }
+ RTC_HISTOGRAM_ENUMERATION("WebRTC.Audio.InitializationResult",
+ static_cast<int>(status),
+ static_cast<int>(InitStatus::NUM_STATUSES));
+ if (status != InitStatus::OK) {
+ RTC_LOG(LS_ERROR) << "Audio device initialization failed.";
+ return -1;
+ }
+ return 0;
+ }
+
+ int32_t Terminate() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return 0;
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ int32_t err = input_->Terminate();
+ err |= output_->Terminate();
+ initialized_ = false;
+ thread_checker_.Detach();
+ audio_device_buffer_.reset(nullptr);
+ RTC_DCHECK_EQ(err, 0);
+ return err;
+ }
+
+ bool Initialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << ":" << initialized_;
+ return initialized_;
+ }
+
+ int16_t PlayoutDevices() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_LOG(LS_INFO) << "output: " << 1;
+ return 1;
+ }
+
+ int16_t RecordingDevices() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_DLOG(LS_INFO) << "output: " << 1;
+ return 1;
+ }
+
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override {
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override {
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SetPlayoutDevice(uint16_t index) override {
+    // OK to use but it has no effect currently since device selection is
+    // done using Android APIs instead.
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")";
+ return 0;
+ }
+
+ int32_t SetPlayoutDevice(
+ AudioDeviceModule::WindowsDeviceType device) override {
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SetRecordingDevice(uint16_t index) override {
+    // OK to use but it has no effect currently since device selection is
+    // done using Android APIs instead.
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")";
+ return 0;
+ }
+
+ int32_t SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device) override {
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t PlayoutIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = true;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t InitPlayout() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (PlayoutIsInitialized()) {
+ return 0;
+ }
+ int32_t result = output_->InitPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool PlayoutIsInitialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return output_->PlayoutIsInitialized();
+ }
+
+ int32_t RecordingIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = true;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t InitRecording() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (RecordingIsInitialized()) {
+ return 0;
+ }
+ int32_t result = input_->InitRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool RecordingIsInitialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return input_->RecordingIsInitialized();
+ }
+
+ int32_t StartPlayout() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (Playing()) {
+ return 0;
+ }
+ int32_t result = output_->StartPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess",
+ static_cast<int>(result == 0));
+ if (result == 0) {
+ // Only start playing the audio device buffer if starting the audio
+ // output succeeded.
+ audio_device_buffer_->StartPlayout();
+ }
+ return result;
+ }
+
+ int32_t StopPlayout() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (!Playing())
+ return 0;
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+ audio_device_buffer_->StopPlayout();
+ int32_t result = output_->StopPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool Playing() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return output_->Playing();
+ }
+
+ int32_t StartRecording() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (Recording()) {
+ return 0;
+ }
+ int32_t result = input_->StartRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess",
+ static_cast<int>(result == 0));
+ if (result == 0) {
+ // Only start recording the audio device buffer if starting the audio
+ // input succeeded.
+ audio_device_buffer_->StartRecording();
+ }
+ return result;
+ }
+
+ int32_t StopRecording() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (!Recording())
+ return 0;
+ audio_device_buffer_->StopRecording();
+ int32_t result = input_->StopRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool Recording() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return input_->Recording();
+ }
+
+ int32_t InitSpeaker() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return initialized_ ? 0 : -1;
+ }
+
+ bool SpeakerIsInitialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return initialized_;
+ }
+
+ int32_t InitMicrophone() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return initialized_ ? 0 : -1;
+ }
+
+ bool MicrophoneIsInitialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return initialized_;
+ }
+
+ int32_t SpeakerVolumeIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ *available = output_->SpeakerVolumeIsAvailable();
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t SetSpeakerVolume(uint32_t volume) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ return output_->SetSpeakerVolume(volume);
+ }
+
+ int32_t SpeakerVolume(uint32_t* output_volume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ absl::optional<uint32_t> volume = output_->SpeakerVolume();
+ if (!volume)
+ return -1;
+ *output_volume = *volume;
+ RTC_DLOG(LS_INFO) << "output: " << *volume;
+ return 0;
+ }
+
+ int32_t MaxSpeakerVolume(uint32_t* output_max_volume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ absl::optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
+ if (!max_volume)
+ return -1;
+ *output_max_volume = *max_volume;
+ return 0;
+ }
+
+ int32_t MinSpeakerVolume(uint32_t* output_min_volume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ absl::optional<uint32_t> min_volume = output_->MinSpeakerVolume();
+ if (!min_volume)
+ return -1;
+ *output_min_volume = *min_volume;
+ return 0;
+ }
+
+ int32_t MicrophoneVolumeIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = false;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return -1;
+ }
+
+ int32_t SetMicrophoneVolume(uint32_t volume) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")";
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MicrophoneVolume(uint32_t* volume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MinMicrophoneVolume(uint32_t* minVolume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SpeakerMuteIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SetSpeakerMute(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SpeakerMute(bool* enabled) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MicrophoneMuteIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SetMicrophoneMute(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MicrophoneMute(bool* enabled) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t StereoPlayoutIsAvailable(bool* available) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = is_stereo_playout_supported_;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t SetStereoPlayout(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+    // Android does not support switching between mono and stereo on the fly.
+    // The use of stereo or mono is determined by the audio layer. Calling
+    // this method is allowed as long as it does not try to modify that state.
+ bool available = is_stereo_playout_supported_;
+ if (enable != available) {
+ RTC_LOG(LS_WARNING) << "changing stereo playout not supported";
+ return -1;
+ }
+ return 0;
+ }
+
+ int32_t StereoPlayout(bool* enabled) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *enabled = is_stereo_playout_supported_;
+ RTC_DLOG(LS_INFO) << "output: " << *enabled;
+ return 0;
+ }
+
+ int32_t StereoRecordingIsAvailable(bool* available) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = is_stereo_record_supported_;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t SetStereoRecording(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+    // Android does not support switching between mono and stereo on the fly.
+    // The use of stereo or mono is determined by the audio layer. Calling
+    // this method is allowed as long as it does not try to modify that state.
+ bool available = is_stereo_record_supported_;
+ if (enable != available) {
+ RTC_LOG(LS_WARNING) << "changing stereo recording not supported";
+ return -1;
+ }
+ return 0;
+ }
+
+ int32_t StereoRecording(bool* enabled) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *enabled = is_stereo_record_supported_;
+ RTC_DLOG(LS_INFO) << "output: " << *enabled;
+ return 0;
+ }
+
+ int32_t PlayoutDelay(uint16_t* delay_ms) const override {
+    // The best guess we can make is to use half of the estimated total delay.
+ *delay_ms = playout_delay_ms_ / 2;
+ RTC_DCHECK_GT(*delay_ms, 0);
+ return 0;
+ }
+
+  // Returns true if the device supports built-in AEC and the device is not
+  // blocklisted.
+ // Currently, if OpenSL ES is used in both directions, this method will still
+ // report the correct value and it has the correct effect. As an example:
+ // a device supports built in AEC and this method returns true. Libjingle
+ // will then disable the WebRTC based AEC and that will work for all devices
+ // (mainly Nexus) even when OpenSL ES is used for input since our current
+ // implementation will enable built-in AEC by default also for OpenSL ES.
+  // The only "bad" thing that happens today is that when Libjingle calls
+  // OpenSLESRecorder::EnableBuiltInAEC() it will not have any real effect and
+  // a "Not Implemented" log will be emitted. This imperfect state will remain
+  // until full support for audio effects based on OpenSL ES APIs is added.
+ bool BuiltInAECIsAvailable() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return false;
+ bool isAvailable = input_->IsAcousticEchoCancelerSupported();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ // Not implemented for any input device on Android.
+ bool BuiltInAGCIsAvailable() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_DLOG(LS_INFO) << "output: " << false;
+ return false;
+ }
+
+  // Returns true if the device supports built-in NS and the device is not
+  // blocklisted.
+ // TODO(henrika): add implementation for OpenSL ES based audio as well.
+ // In addition, see comments for BuiltInAECIsAvailable().
+ bool BuiltInNSIsAvailable() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return false;
+ bool isAvailable = input_->IsNoiseSuppressorSupported();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ // TODO(henrika): add implementation for OpenSL ES based audio as well.
+ int32_t EnableBuiltInAEC(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ if (!initialized_)
+ return -1;
+ RTC_CHECK(BuiltInAECIsAvailable()) << "HW AEC is not available";
+ int32_t result = input_->EnableBuiltInAEC(enable);
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ return result;
+ }
+
+ int32_t EnableBuiltInAGC(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ RTC_CHECK_NOTREACHED();
+ }
+
+ // TODO(henrika): add implementation for OpenSL ES based audio as well.
+ int32_t EnableBuiltInNS(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ if (!initialized_)
+ return -1;
+ RTC_CHECK(BuiltInNSIsAvailable()) << "HW NS is not available";
+ int32_t result = input_->EnableBuiltInNS(enable);
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ return result;
+ }
+
+ int32_t GetPlayoutUnderrunCount() const override {
+ if (!initialized_)
+ return -1;
+ return output_->GetPlayoutUnderrunCount();
+ }
+
+ int32_t AttachAudioBuffer() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ output_->AttachAudioBuffer(audio_device_buffer_.get());
+ input_->AttachAudioBuffer(audio_device_buffer_.get());
+ return 0;
+ }
+
+ private:
+ SequenceChecker thread_checker_;
+
+ const AudioDeviceModule::AudioLayer audio_layer_;
+ const bool is_stereo_playout_supported_;
+ const bool is_stereo_record_supported_;
+ const uint16_t playout_delay_ms_;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ const std::unique_ptr<AudioInput> input_;
+ const std::unique_ptr<AudioOutput> output_;
+ std::unique_ptr<AudioDeviceBuffer> audio_device_buffer_;
+
+ bool initialized_;
+};
+
+} // namespace
+
+ScopedJavaLocalRef<jobject> GetAudioManager(JNIEnv* env,
+ const JavaRef<jobject>& j_context) {
+ return Java_WebRtcAudioManager_getAudioManager(env, j_context);
+}
+
+int GetDefaultSampleRate(JNIEnv* env, const JavaRef<jobject>& j_audio_manager) {
+ return Java_WebRtcAudioManager_getSampleRate(env, j_audio_manager);
+}
+
+void GetAudioParameters(JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager,
+ int input_sample_rate,
+ int output_sample_rate,
+ bool use_stereo_input,
+ bool use_stereo_output,
+ AudioParameters* input_parameters,
+ AudioParameters* output_parameters) {
+ const int output_channels = use_stereo_output ? 2 : 1;
+ const int input_channels = use_stereo_input ? 2 : 1;
+ const size_t output_buffer_size = Java_WebRtcAudioManager_getOutputBufferSize(
+ env, j_context, j_audio_manager, output_sample_rate, output_channels);
+ const size_t input_buffer_size = Java_WebRtcAudioManager_getInputBufferSize(
+ env, j_context, j_audio_manager, input_sample_rate, input_channels);
+ output_parameters->reset(output_sample_rate,
+ static_cast<size_t>(output_channels),
+ static_cast<size_t>(output_buffer_size));
+ input_parameters->reset(input_sample_rate,
+ static_cast<size_t>(input_channels),
+ static_cast<size_t>(input_buffer_size));
+ RTC_CHECK(input_parameters->is_valid());
+ RTC_CHECK(output_parameters->is_valid());
+}
+
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::AudioLayer audio_layer,
+ bool is_stereo_playout_supported,
+ bool is_stereo_record_supported,
+ uint16_t playout_delay_ms,
+ std::unique_ptr<AudioInput> audio_input,
+ std::unique_ptr<AudioOutput> audio_output) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return rtc::make_ref_counted<AndroidAudioDeviceModule>(
+ audio_layer, is_stereo_playout_supported, is_stereo_record_supported,
+ playout_delay_ms, std::move(audio_input), std::move(audio_output));
+}
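+
+// Wiring sketch (illustrative only; the real factory functions live elsewhere
+// in this directory): an AAudio-based ADM could be created by pairing the
+// recorder with a corresponding player, assuming the upstream AAudioPlayer
+// class and the kAndroidAAudioAudio audio layer:
+//
+//   auto adm = CreateAudioDeviceModuleFromInputAndOutput(
+//       AudioDeviceModule::kAndroidAAudioAudio,
+//       false /* is_stereo_playout_supported */,
+//       false /* is_stereo_record_supported */,
+//       playout_delay_ms,
+//       std::make_unique<AAudioRecorder>(input_parameters),
+//       std::make_unique<AAudioPlayer>(output_parameters));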
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h
new file mode 100644
index 0000000000..1918336c5a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+
+class AudioDeviceBuffer;
+
+namespace jni {
+
+class AudioInput {
+ public:
+ virtual ~AudioInput() {}
+
+ virtual int32_t Init() = 0;
+ virtual int32_t Terminate() = 0;
+
+ virtual int32_t InitRecording() = 0;
+ virtual bool RecordingIsInitialized() const = 0;
+
+ virtual int32_t StartRecording() = 0;
+ virtual int32_t StopRecording() = 0;
+ virtual bool Recording() const = 0;
+
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
+
+ // Returns true if the audio input supports built-in audio effects for AEC and
+ // NS.
+ virtual bool IsAcousticEchoCancelerSupported() const = 0;
+ virtual bool IsNoiseSuppressorSupported() const = 0;
+
+ virtual int32_t EnableBuiltInAEC(bool enable) = 0;
+ virtual int32_t EnableBuiltInNS(bool enable) = 0;
+};
+
+class AudioOutput {
+ public:
+ virtual ~AudioOutput() {}
+
+ virtual int32_t Init() = 0;
+ virtual int32_t Terminate() = 0;
+ virtual int32_t InitPlayout() = 0;
+ virtual bool PlayoutIsInitialized() const = 0;
+ virtual int32_t StartPlayout() = 0;
+ virtual int32_t StopPlayout() = 0;
+ virtual bool Playing() const = 0;
+ virtual bool SpeakerVolumeIsAvailable() = 0;
+ virtual int SetSpeakerVolume(uint32_t volume) = 0;
+ virtual absl::optional<uint32_t> SpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> MaxSpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> MinSpeakerVolume() const = 0;
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
+ virtual int GetPlayoutUnderrunCount() = 0;
+};
+
+// Extract an android.media.AudioManager from an android.content.Context.
+ScopedJavaLocalRef<jobject> GetAudioManager(JNIEnv* env,
+ const JavaRef<jobject>& j_context);
+
+// Get default audio sample rate by querying an android.media.AudioManager.
+int GetDefaultSampleRate(JNIEnv* env, const JavaRef<jobject>& j_audio_manager);
+
+// Get audio input and output parameters based on a number of settings.
+void GetAudioParameters(JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager,
+ int input_sample_rate,
+ int output_sample_rate,
+ bool use_stereo_input,
+ bool use_stereo_output,
+ AudioParameters* input_parameters,
+ AudioParameters* output_parameters);
+
+// Glue together an audio input and audio output to get an AudioDeviceModule.
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::AudioLayer audio_layer,
+ bool is_stereo_playout_supported,
+ bool is_stereo_record_supported,
+ uint16_t playout_delay_ms,
+ std::unique_ptr<AudioInput> audio_input,
+ std::unique_ptr<AudioOutput> audio_output);
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc
new file mode 100644
index 0000000000..d206297001
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc
@@ -0,0 +1,267 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
+
+#include <string>
+#include <utility>
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioRecord_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+namespace jni {
+
+namespace {
+// Scoped class which logs its time of life as a UMA statistic. It generates
+// a histogram which measures the time it takes for a method/scope to execute.
+class ScopedHistogramTimer {
+ public:
+ explicit ScopedHistogramTimer(const std::string& name)
+ : histogram_name_(name), start_time_ms_(rtc::TimeMillis()) {}
+ ~ScopedHistogramTimer() {
+ const int64_t life_time_ms = rtc::TimeSince(start_time_ms_);
+ RTC_HISTOGRAM_COUNTS_1000(histogram_name_, life_time_ms);
+ RTC_LOG(LS_INFO) << histogram_name_ << ": " << life_time_ms;
+ }
+
+ private:
+ const std::string histogram_name_;
+ int64_t start_time_ms_;
+};
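+
+// Usage sketch (mirrors InitRecording() below); the elapsed time is reported
+// when the timer leaves scope:
+//
+//   {
+//     ScopedHistogramTimer timer("WebRTC.Audio.ExampleDurationMs");
+//     DoExpensiveWork();  // Hypothetical workload.
+//   }  // Elapsed ms is recorded to the named UMA histogram here.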
+
+} // namespace
+
+ScopedJavaLocalRef<jobject> AudioRecordJni::CreateJavaWebRtcAudioRecord(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager) {
+ return Java_WebRtcAudioRecord_Constructor(env, j_context, j_audio_manager);
+}
+
+AudioRecordJni::AudioRecordJni(JNIEnv* env,
+ const AudioParameters& audio_parameters,
+ int total_delay_ms,
+ const JavaRef<jobject>& j_audio_record)
+ : j_audio_record_(env, j_audio_record),
+ audio_parameters_(audio_parameters),
+ total_delay_ms_(total_delay_ms),
+ direct_buffer_address_(nullptr),
+ direct_buffer_capacity_in_bytes_(0),
+ frames_per_buffer_(0),
+ initialized_(false),
+ recording_(false),
+ audio_device_buffer_(nullptr) {
+ RTC_LOG(LS_INFO) << "ctor";
+ RTC_DCHECK(audio_parameters_.is_valid());
+ Java_WebRtcAudioRecord_setNativeAudioRecord(env, j_audio_record_,
+ jni::jlongFromPointer(this));
+ // Detach from this thread since construction is allowed to happen on a
+ // different thread.
+ thread_checker_.Detach();
+ thread_checker_java_.Detach();
+}
+
+AudioRecordJni::~AudioRecordJni() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+}
+
+int32_t AudioRecordJni::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ env_ = AttachCurrentThreadIfNeeded();
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return 0;
+}
+
+int32_t AudioRecordJni::Terminate() {
+ RTC_LOG(LS_INFO) << "Terminate";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopRecording();
+ thread_checker_.Detach();
+ return 0;
+}
+
+int32_t AudioRecordJni::InitRecording() {
+ RTC_LOG(LS_INFO) << "InitRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (initialized_) {
+ // Already initialized.
+ return 0;
+ }
+ RTC_DCHECK(!recording_);
+ ScopedHistogramTimer timer("WebRTC.Audio.InitRecordingDurationMs");
+
+ int frames_per_buffer = Java_WebRtcAudioRecord_initRecording(
+ env_, j_audio_record_, audio_parameters_.sample_rate(),
+ static_cast<int>(audio_parameters_.channels()));
+ if (frames_per_buffer < 0) {
+ direct_buffer_address_ = nullptr;
+ RTC_LOG(LS_ERROR) << "InitRecording failed";
+ return -1;
+ }
+ frames_per_buffer_ = static_cast<size_t>(frames_per_buffer);
+ RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_;
+ const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
+ RTC_CHECK_EQ(direct_buffer_capacity_in_bytes_,
+ frames_per_buffer_ * bytes_per_frame);
+ RTC_CHECK_EQ(frames_per_buffer_, audio_parameters_.frames_per_10ms_buffer());
+ initialized_ = true;
+ return 0;
+}
+
+bool AudioRecordJni::RecordingIsInitialized() const {
+ return initialized_;
+}
+
+int32_t AudioRecordJni::StartRecording() {
+ RTC_LOG(LS_INFO) << "StartRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (recording_) {
+ // Already recording.
+ return 0;
+ }
+ if (!initialized_) {
+ RTC_DLOG(LS_WARNING)
+        << "Recording cannot start since InitRecording must succeed first";
+ return 0;
+ }
+ ScopedHistogramTimer timer("WebRTC.Audio.StartRecordingDurationMs");
+ if (!Java_WebRtcAudioRecord_startRecording(env_, j_audio_record_)) {
+ RTC_LOG(LS_ERROR) << "StartRecording failed";
+ return -1;
+ }
+ recording_ = true;
+ return 0;
+}
+
+int32_t AudioRecordJni::StopRecording() {
+ RTC_LOG(LS_INFO) << "StopRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !recording_) {
+ return 0;
+ }
+  // Check if the audio source matched the activated recording session, but
+  // only if a valid result exists, to avoid invalid statistics.
+ if (Java_WebRtcAudioRecord_isAudioConfigVerified(env_, j_audio_record_)) {
+ const bool session_was_ok =
+ Java_WebRtcAudioRecord_isAudioSourceMatchingRecordingSession(
+ env_, j_audio_record_);
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.SourceMatchesRecordingSession",
+ session_was_ok);
+ RTC_LOG(LS_INFO)
+ << "HISTOGRAM(WebRTC.Audio.SourceMatchesRecordingSession): "
+ << session_was_ok;
+ }
+ if (!Java_WebRtcAudioRecord_stopRecording(env_, j_audio_record_)) {
+ RTC_LOG(LS_ERROR) << "StopRecording failed";
+ return -1;
+ }
+ // If we don't detach here, we will hit an RTC_DCHECK in DataIsRecorded()
+ // next time StartRecording() is called since it will create a new Java
+ // thread.
+ thread_checker_java_.Detach();
+ initialized_ = false;
+ recording_ = false;
+ direct_buffer_address_ = nullptr;
+ return 0;
+}
+
+bool AudioRecordJni::Recording() const {
+ return recording_;
+}
+
+void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ RTC_LOG(LS_INFO) << "AttachAudioBuffer";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ RTC_LOG(LS_INFO) << "SetRecordingSampleRate(" << sample_rate_hz << ")";
+ audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz);
+ const size_t channels = audio_parameters_.channels();
+ RTC_LOG(LS_INFO) << "SetRecordingChannels(" << channels << ")";
+ audio_device_buffer_->SetRecordingChannels(channels);
+}
+
+bool AudioRecordJni::IsAcousticEchoCancelerSupported() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioRecord_isAcousticEchoCancelerSupported(
+ env_, j_audio_record_);
+}
+
+bool AudioRecordJni::IsNoiseSuppressorSupported() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioRecord_isNoiseSuppressorSupported(env_,
+ j_audio_record_);
+}
+
+int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) {
+ RTC_LOG(LS_INFO) << "EnableBuiltInAEC(" << enable << ")";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioRecord_enableBuiltInAEC(env_, j_audio_record_, enable)
+ ? 0
+ : -1;
+}
+
+int32_t AudioRecordJni::EnableBuiltInNS(bool enable) {
+ RTC_LOG(LS_INFO) << "EnableBuiltInNS(" << enable << ")";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioRecord_enableBuiltInNS(env_, j_audio_record_, enable)
+ ? 0
+ : -1;
+}
+
+void AudioRecordJni::CacheDirectBufferAddress(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_caller,
+ const JavaParamRef<jobject>& byte_buffer) {
+ RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!direct_buffer_address_);
+ direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
+ jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
+ RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity;
+ direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
+}
+
+// This method is called on a high-priority thread from Java. The name of
+// the thread is 'AudioRecordThread'.
+void AudioRecordJni::DataIsRecorded(JNIEnv* env,
+ const JavaParamRef<jobject>& j_caller,
+ int length,
+ int64_t capture_timestamp_ns) {
+ RTC_DCHECK(thread_checker_java_.IsCurrent());
+ if (!audio_device_buffer_) {
+ RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
+ return;
+ }
+ audio_device_buffer_->SetRecordedBuffer(
+ direct_buffer_address_, frames_per_buffer_, capture_timestamp_ns);
+ // We provide one (combined) fixed delay estimate for the APM and use the
+ // `playDelayMs` parameter only. Components like the AEC only see the sum
+ // of `playDelayMs` and `recDelayMs`, hence the distribution does not matter.
+ audio_device_buffer_->SetVQEData(total_delay_ms_, 0);
+ if (audio_device_buffer_->DeliverRecordedData() == -1) {
+ RTC_LOG(LS_INFO) << "AudioDeviceBuffer::DeliverRecordedData failed";
+ }
+}
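+
+// Sketch of the delay reporting above (illustrative; the 150 ms figure is an
+// assumption, see the kHighLatencyModeDelayEstimateInMilliseconds usage in
+// java_audio_device_module.cc):
+//
+//   // The APM only consumes the sum of the two delays, so reporting the
+//   // whole estimate via `play_delay_ms` is equivalent:
+//   audio_device_buffer_->SetVQEData(/*play_delay_ms=*/150,
+//                                    /*rec_delay_ms=*/0);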
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h
new file mode 100644
index 0000000000..49c905daaf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_
+
+#include <jni.h>
+
+#include <memory>
+
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio input support for Android using the Java
+// AudioRecord interface. Most of the work is done by its Java counterpart in
+// WebRtcAudioRecord.java. This class is created and lives on a thread in
+// C++-land, but recorded audio buffers are delivered on a high-priority
+// thread managed by the Java class.
+//
+// The Java class makes use of AudioEffect features (mainly AEC) which are
+// first available in Jelly Bean. If the class is instantiated on an earlier
+// SDK, the AEC provided by the APM in WebRTC must be used and enabled
+// separately instead.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread. A
+// thread checker will RTC_DCHECK if any method is called on an invalid thread.
+//
+// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed.
+// Additional thread checking guarantees that no other (possibly non attached)
+// thread is used.
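+//
+// Illustrative construction sketch (not part of the API surface; mirrors the
+// usage in java_audio_device_module.cc and assumes valid Java references and
+// parameters):
+//
+//   ScopedJavaLocalRef<jobject> j_record =
+//       AudioRecordJni::CreateJavaWebRtcAudioRecord(env, j_context,
+//                                                   j_audio_manager);
+//   auto audio_input = std::make_unique<AudioRecordJni>(
+//       env, input_parameters, total_delay_ms, j_record);
+//   audio_input->Init();
+//   audio_input->InitRecording();
+//   audio_input->StartRecording();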
+class AudioRecordJni : public AudioInput {
+ public:
+ static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioRecord(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager);
+
+ AudioRecordJni(JNIEnv* env,
+ const AudioParameters& audio_parameters,
+ int total_delay_ms,
+ const JavaRef<jobject>& j_webrtc_audio_record);
+ ~AudioRecordJni() override;
+
+ int32_t Init() override;
+ int32_t Terminate() override;
+
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int32_t StartRecording() override;
+ int32_t StopRecording() override;
+ bool Recording() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ bool IsAcousticEchoCancelerSupported() const override;
+ bool IsNoiseSuppressorSupported() const override;
+
+ int32_t EnableBuiltInAEC(bool enable) override;
+ int32_t EnableBuiltInNS(bool enable) override;
+
+ // Called from the Java side so we can cache the address of the Java-managed
+ // `byte_buffer` in `direct_buffer_address_`. The size of the buffer
+ // is also stored in `direct_buffer_capacity_in_bytes_`.
+ // This method will be called by the WebRtcAudioRecord constructor, i.e.,
+ // on the same thread that this object is created on.
+ void CacheDirectBufferAddress(JNIEnv* env,
+ const JavaParamRef<jobject>& j_caller,
+ const JavaParamRef<jobject>& byte_buffer);
+
+ // Called periodically by the Java based WebRtcAudioRecord object when
+ // recording has started. Each call indicates that there are `length` new
+ // bytes recorded in the memory area `direct_buffer_address_` and it is
+ // now time to send these to the consumer.
+ // This method is called on a high-priority thread from Java. The name of
+ // the thread is 'AudioRecordThread'.
+ void DataIsRecorded(JNIEnv* env,
+ const JavaParamRef<jobject>& j_caller,
+ int length,
+ int64_t capture_timestamp_ns);
+
+ private:
+ // Stores thread ID in constructor.
+ SequenceChecker thread_checker_;
+
+ // Stores thread ID in first call to DataIsRecorded() from the high-priority
+ // thread in Java. Detached during construction of this object.
+ SequenceChecker thread_checker_java_;
+
+ // Wraps the Java specific parts of the AudioRecordJni class.
+ JNIEnv* env_ = nullptr;
+ ScopedJavaGlobalRef<jobject> j_audio_record_;
+
+ const AudioParameters audio_parameters_;
+
+ // Delay estimate of the total round-trip delay (input + output).
+ // Fixed value set once in AttachAudioBuffer() and it can take one out of two
+ // possible values. See audio_common.h for details.
+ const int total_delay_ms_;
+
+ // Cached copy of address to direct audio buffer owned by `j_audio_record_`.
+ void* direct_buffer_address_;
+
+ // Number of bytes in the direct audio buffer owned by `j_audio_record_`.
+ size_t direct_buffer_capacity_in_bytes_;
+
+ // Number of audio frames per audio buffer. Each audio frame corresponds to
+ // one sample of PCM mono data at 16 bits per sample. Hence, each audio
+ // frame contains 2 bytes (given that the Java layer only supports mono).
+ // Example: 480 for 48000 Hz or 441 for 44100 Hz.
+ size_t frames_per_buffer_;
+
+ bool initialized_;
+
+ bool recording_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl instance created by AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc
new file mode 100644
index 0000000000..c1ff4c30e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -0,0 +1,271 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
+
+#include <utility>
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
+#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+namespace jni {
+
+ScopedJavaLocalRef<jobject> AudioTrackJni::CreateJavaWebRtcAudioTrack(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager) {
+ return Java_WebRtcAudioTrack_Constructor(env, j_context, j_audio_manager);
+}
+
+AudioTrackJni::AudioTrackJni(JNIEnv* env,
+ const AudioParameters& audio_parameters,
+ const JavaRef<jobject>& j_webrtc_audio_track)
+ : j_audio_track_(env, j_webrtc_audio_track),
+ audio_parameters_(audio_parameters),
+ direct_buffer_address_(nullptr),
+ direct_buffer_capacity_in_bytes_(0),
+ frames_per_buffer_(0),
+ initialized_(false),
+ playing_(false),
+ audio_device_buffer_(nullptr) {
+ RTC_LOG(LS_INFO) << "ctor";
+ RTC_DCHECK(audio_parameters_.is_valid());
+ Java_WebRtcAudioTrack_setNativeAudioTrack(env, j_audio_track_,
+ jni::jlongFromPointer(this));
+ // Detach from this thread since construction is allowed to happen on a
+ // different thread.
+ thread_checker_.Detach();
+ thread_checker_java_.Detach();
+}
+
+AudioTrackJni::~AudioTrackJni() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+}
+
+int32_t AudioTrackJni::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ env_ = AttachCurrentThreadIfNeeded();
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return 0;
+}
+
+int32_t AudioTrackJni::Terminate() {
+ RTC_LOG(LS_INFO) << "Terminate";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopPlayout();
+ thread_checker_.Detach();
+ return 0;
+}
+
+int32_t AudioTrackJni::InitPlayout() {
+ RTC_LOG(LS_INFO) << "InitPlayout";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (initialized_) {
+ // Already initialized.
+ return 0;
+ }
+ RTC_DCHECK(!playing_);
+ double buffer_size_factor =
+ strtod(webrtc::field_trial::FindFullName(
+ "WebRTC-AudioDevicePlayoutBufferSizeFactor")
+ .c_str(),
+ nullptr);
+ if (buffer_size_factor == 0)
+ buffer_size_factor = 1.0;
+ int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout(
+ env_, j_audio_track_, audio_parameters_.sample_rate(),
+ static_cast<int>(audio_parameters_.channels()), buffer_size_factor);
+ if (requested_buffer_size_bytes < 0) {
+ RTC_LOG(LS_ERROR) << "InitPlayout failed";
+ return -1;
+ }
+ // Update UMA histograms for both the requested and actual buffer size.
+ // To avoid division by zero, we assume the sample rate is 48k if an invalid
+ // value is found.
+ const int sample_rate = audio_parameters_.sample_rate() <= 0
+ ? 48000
+ : audio_parameters_.sample_rate();
+ // This calculation assumes that audio is mono.
+ const int requested_buffer_size_ms =
+ (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+ requested_buffer_size_ms, 0, 1000, 100);
+ int actual_buffer_size_frames =
+ Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
+ if (actual_buffer_size_frames >= 0) {
+ const int actual_buffer_size_ms =
+ actual_buffer_size_frames * 1000 / sample_rate;
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+ actual_buffer_size_ms, 0, 1000, 100);
+ }
+
+ initialized_ = true;
+ return 0;
+}
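+
+// Worked example of the histogram arithmetic above (illustrative; the byte
+// count is assumed): a requested buffer of 3840 bytes of 16-bit mono PCM at
+// 48 kHz corresponds to
+//
+//   (3840 * 1000) / (2 * 48000) == 40  // milliseconds of audio
+//
+// which is the value logged to the requested-buffer-size histogram.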
+
+bool AudioTrackJni::PlayoutIsInitialized() const {
+ return initialized_;
+}
+
+int32_t AudioTrackJni::StartPlayout() {
+ RTC_LOG(LS_INFO) << "StartPlayout";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (playing_) {
+ // Already playing.
+ return 0;
+ }
+ if (!initialized_) {
+ RTC_DLOG(LS_WARNING)
+ << "Playout can not start since InitPlayout must succeed first";
+ return 0;
+ }
+ if (!Java_WebRtcAudioTrack_startPlayout(env_, j_audio_track_)) {
+ RTC_LOG(LS_ERROR) << "StartPlayout failed";
+ return -1;
+ }
+ playing_ = true;
+ return 0;
+}
+
+int32_t AudioTrackJni::StopPlayout() {
+ RTC_LOG(LS_INFO) << "StopPlayout";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !playing_) {
+ return 0;
+ }
+ // Log the difference in initial and current buffer level.
+ const int current_buffer_size_frames =
+ Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
+ const int initial_buffer_size_frames =
+ Java_WebRtcAudioTrack_getInitialBufferSizeInFrames(env_, j_audio_track_);
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ RTC_HISTOGRAM_COUNTS(
+ "WebRTC.Audio.AndroidNativeAudioBufferSizeDifferenceFromInitialMs",
+ (current_buffer_size_frames - initial_buffer_size_frames) * 1000 /
+ sample_rate_hz,
+ -500, 100, 100);
+
+ if (!Java_WebRtcAudioTrack_stopPlayout(env_, j_audio_track_)) {
+ RTC_LOG(LS_ERROR) << "StopPlayout failed";
+ return -1;
+ }
+ // If we don't detach here, we will hit an RTC_DCHECK next time StartPlayout()
+ // is called since it will create a new Java thread.
+ thread_checker_java_.Detach();
+ initialized_ = false;
+ playing_ = false;
+ direct_buffer_address_ = nullptr;
+ return 0;
+}
+
+bool AudioTrackJni::Playing() const {
+ return playing_;
+}
+
+bool AudioTrackJni::SpeakerVolumeIsAvailable() {
+ return true;
+}
+
+int AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
+ RTC_LOG(LS_INFO) << "SetSpeakerVolume(" << volume << ")";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioTrack_setStreamVolume(env_, j_audio_track_,
+ static_cast<int>(volume))
+ ? 0
+ : -1;
+}
+
+absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_);
+}
+
+absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return 0;
+}
+
+absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ const uint32_t volume =
+ Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_);
+ RTC_LOG(LS_INFO) << "SpeakerVolume: " << volume;
+ return volume;
+}
+
+int AudioTrackJni::GetPlayoutUnderrunCount() {
+ return Java_WebRtcAudioTrack_GetPlayoutUnderrunCount(env_, j_audio_track_);
+}
+
+// TODO(henrika): possibly add stereo support.
+void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ RTC_LOG(LS_INFO) << "AttachAudioBuffer";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ RTC_LOG(LS_INFO) << "SetPlayoutSampleRate(" << sample_rate_hz << ")";
+ audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
+ const size_t channels = audio_parameters_.channels();
+ RTC_LOG(LS_INFO) << "SetPlayoutChannels(" << channels << ")";
+ audio_device_buffer_->SetPlayoutChannels(channels);
+}
+
+void AudioTrackJni::CacheDirectBufferAddress(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& byte_buffer) {
+ RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!direct_buffer_address_);
+ direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
+ jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
+ RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity;
+ direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
+ const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
+ frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / bytes_per_frame;
+ RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_;
+}
+
+// This method is called on a high-priority thread from Java. The name of
+// the thread is 'AudioTrackThread'.
+void AudioTrackJni::GetPlayoutData(JNIEnv* env,
+ size_t length) {
+ RTC_DCHECK(thread_checker_java_.IsCurrent());
+ const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
+ RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame);
+ if (!audio_device_buffer_) {
+ RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
+ return;
+ }
+ // Pull decoded data (in 16-bit PCM format) from jitter buffer.
+ int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_);
+ if (samples <= 0) {
+ RTC_LOG(LS_ERROR) << "AudioDeviceBuffer::RequestPlayoutData failed";
+ return;
+ }
+ RTC_DCHECK_EQ(samples, frames_per_buffer_);
+ // Copy decoded data into common byte buffer to ensure that it can be
+ // written to the Java based audio track.
+ samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_);
+ RTC_DCHECK_EQ(length, bytes_per_frame * samples);
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h
new file mode 100644
index 0000000000..5ca907c42f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
+
+#include <jni.h>
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio output support for Android using the Java
+// AudioTrack interface. Most of the work is done by its Java counterpart in
+// WebRtcAudioTrack.java. This class is created and lives on a thread in
+// C++-land, but decoded audio buffers are requested on a high-priority
+// thread managed by the Java class.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread. A
+// thread checker will RTC_DCHECK if any method is called on an invalid thread.
+//
+// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed.
+// Additional thread checking guarantees that no other (possibly non attached)
+// thread is used.
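+//
+// Illustrative construction sketch (not part of the API surface; mirrors the
+// usage in java_audio_device_module.cc):
+//
+//   ScopedJavaLocalRef<jobject> j_track =
+//       AudioTrackJni::CreateJavaWebRtcAudioTrack(env, j_context,
+//                                                 j_audio_manager);
+//   auto audio_output =
+//       std::make_unique<AudioTrackJni>(env, output_parameters, j_track);
+//   audio_output->Init();
+//   audio_output->InitPlayout();
+//   audio_output->StartPlayout();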
+class AudioTrackJni : public AudioOutput {
+ public:
+ static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioTrack(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager);
+
+ AudioTrackJni(JNIEnv* env,
+ const AudioParameters& audio_parameters,
+ const JavaRef<jobject>& j_webrtc_audio_track);
+ ~AudioTrackJni() override;
+
+ int32_t Init() override;
+ int32_t Terminate() override;
+
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+
+ int32_t StartPlayout() override;
+ int32_t StopPlayout() override;
+ bool Playing() const override;
+
+ bool SpeakerVolumeIsAvailable() override;
+ int SetSpeakerVolume(uint32_t volume) override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
+ int GetPlayoutUnderrunCount() override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ // Called from the Java side so we can cache the address of the Java-managed
+ // `byte_buffer` in `direct_buffer_address_`. The size of the buffer
+ // is also stored in `direct_buffer_capacity_in_bytes_`.
+ // Called on the same thread as the creating thread.
+ void CacheDirectBufferAddress(JNIEnv* env,
+ const JavaParamRef<jobject>& byte_buffer);
+ // Called periodically by the Java based WebRtcAudioTrack object when
+ // playout has started. Each call indicates that `length` new bytes should
+ // be written to the memory area `direct_buffer_address_` for playout.
+ // This method is called on a high-priority thread from Java. The name of
+ // the thread is 'AudioTrackThread'.
+ void GetPlayoutData(JNIEnv* env, size_t length);
+
+ private:
+ // Stores thread ID in constructor.
+ SequenceChecker thread_checker_;
+
+ // Stores thread ID in first call to GetPlayoutData() from the high-priority
+ // thread in Java. Detached during construction of this object.
+ SequenceChecker thread_checker_java_;
+
+ // Wraps the Java specific parts of the AudioTrackJni class.
+ JNIEnv* env_ = nullptr;
+ ScopedJavaGlobalRef<jobject> j_audio_track_;
+
+ // Contains audio parameters provided to this class at construction by the
+ // AudioManager.
+ const AudioParameters audio_parameters_;
+
+ // Cached copy of address to direct audio buffer owned by `j_audio_track_`.
+ void* direct_buffer_address_;
+
+ // Number of bytes in the direct audio buffer owned by `j_audio_track_`.
+ size_t direct_buffer_capacity_in_bytes_;
+
+ // Number of audio frames per audio buffer. Each audio frame corresponds to
+ // one sample of PCM mono data at 16 bits per sample. Hence, each audio
+ // frame contains 2 bytes (given that the Java layer only supports mono).
+ // Example: 480 for 48000 Hz or 441 for 44100 Hz.
+ size_t frames_per_buffer_;
+
+ bool initialized_;
+
+ bool playing_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl instance created by AudioDeviceModule::Create().
+ // The AudioDeviceBuffer is a member of the AudioDeviceModuleImpl instance
+ // and therefore outlives this object.
+ AudioDeviceBuffer* audio_device_buffer_;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc
new file mode 100644
index 0000000000..1c3cbe4bbe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "sdk/android/generated_java_audio_jni/JavaAudioDeviceModule_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_JavaAudioDeviceModule_CreateAudioDeviceModule(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_context,
+ const JavaParamRef<jobject>& j_audio_manager,
+ const JavaParamRef<jobject>& j_webrtc_audio_record,
+ const JavaParamRef<jobject>& j_webrtc_audio_track,
+ int input_sample_rate,
+ int output_sample_rate,
+ jboolean j_use_stereo_input,
+ jboolean j_use_stereo_output) {
+ AudioParameters input_parameters;
+ AudioParameters output_parameters;
+ GetAudioParameters(env, j_context, j_audio_manager, input_sample_rate,
+ output_sample_rate, j_use_stereo_input,
+ j_use_stereo_output, &input_parameters,
+ &output_parameters);
+ auto audio_input = std::make_unique<AudioRecordJni>(
+ env, input_parameters, kHighLatencyModeDelayEstimateInMilliseconds,
+ j_webrtc_audio_record);
+ auto audio_output = std::make_unique<AudioTrackJni>(env, output_parameters,
+ j_webrtc_audio_track);
+ return jlongFromPointer(CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::kAndroidJavaAudio,
+ j_use_stereo_input, j_use_stereo_output,
+ kHighLatencyModeDelayEstimateInMilliseconds,
+ std::move(audio_input), std::move(audio_output))
+ .release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc
new file mode 100644
index 0000000000..300019a161
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+
+#include <SLES/OpenSLES.h>
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// Returns a string representation given an integer SL_RESULT_XXX code.
+// The mapping can be found in <SLES/OpenSLES.h>.
+const char* GetSLErrorString(size_t code) {
+ static const char* sl_error_strings[] = {
+ "SL_RESULT_SUCCESS", // 0
+ "SL_RESULT_PRECONDITIONS_VIOLATED", // 1
+ "SL_RESULT_PARAMETER_INVALID", // 2
+ "SL_RESULT_MEMORY_FAILURE", // 3
+ "SL_RESULT_RESOURCE_ERROR", // 4
+ "SL_RESULT_RESOURCE_LOST", // 5
+ "SL_RESULT_IO_ERROR", // 6
+ "SL_RESULT_BUFFER_INSUFFICIENT", // 7
+ "SL_RESULT_CONTENT_CORRUPTED", // 8
+ "SL_RESULT_CONTENT_UNSUPPORTED", // 9
+ "SL_RESULT_CONTENT_NOT_FOUND", // 10
+ "SL_RESULT_PERMISSION_DENIED", // 11
+ "SL_RESULT_FEATURE_UNSUPPORTED", // 12
+ "SL_RESULT_INTERNAL_ERROR", // 13
+ "SL_RESULT_UNKNOWN_ERROR", // 14
+ "SL_RESULT_OPERATION_ABORTED", // 15
+ "SL_RESULT_CONTROL_LOST", // 16
+ };
+
+ if (code >= arraysize(sl_error_strings)) {
+ return "SL_RESULT_UNKNOWN_ERROR";
+ }
+ return sl_error_strings[code];
+}
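+
+// Typical call pattern (illustrative; matches the error handling used by
+// OpenSLEngineManager below):
+//
+//   SLresult result = slCreateEngine(engine.Receive(), 1, option, 0, NULL, NULL);
+//   if (result != SL_RESULT_SUCCESS) {
+//     RTC_LOG(LS_ERROR) << "slCreateEngine() failed: "
+//                       << GetSLErrorString(result);
+//   }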
+
+SLDataFormat_PCM CreatePCMConfiguration(size_t channels,
+ int sample_rate,
+ size_t bits_per_sample) {
+ RTC_CHECK_EQ(bits_per_sample, SL_PCMSAMPLEFORMAT_FIXED_16);
+ SLDataFormat_PCM format;
+ format.formatType = SL_DATAFORMAT_PCM;
+ format.numChannels = static_cast<SLuint32>(channels);
+ // Note that the unit of the sample rate is actually milliHertz, not Hertz.
+ switch (sample_rate) {
+ case 8000:
+ format.samplesPerSec = SL_SAMPLINGRATE_8;
+ break;
+ case 16000:
+ format.samplesPerSec = SL_SAMPLINGRATE_16;
+ break;
+ case 22050:
+ format.samplesPerSec = SL_SAMPLINGRATE_22_05;
+ break;
+ case 32000:
+ format.samplesPerSec = SL_SAMPLINGRATE_32;
+ break;
+ case 44100:
+ format.samplesPerSec = SL_SAMPLINGRATE_44_1;
+ break;
+ case 48000:
+ format.samplesPerSec = SL_SAMPLINGRATE_48;
+ break;
+ case 64000:
+ format.samplesPerSec = SL_SAMPLINGRATE_64;
+ break;
+ case 88200:
+ format.samplesPerSec = SL_SAMPLINGRATE_88_2;
+ break;
+ case 96000:
+ format.samplesPerSec = SL_SAMPLINGRATE_96;
+ break;
+ default:
+ RTC_CHECK(false) << "Unsupported sample rate: " << sample_rate;
+ break;
+ }
+ format.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+ format.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
+ format.endianness = SL_BYTEORDER_LITTLEENDIAN;
+ if (format.numChannels == 1) {
+ format.channelMask = SL_SPEAKER_FRONT_CENTER;
+ } else if (format.numChannels == 2) {
+ format.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
+ } else {
+ RTC_CHECK(false) << "Unsupported number of channels: "
+ << format.numChannels;
+ }
+ return format;
+}
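+
+// Example (illustrative): a 16-bit stereo configuration at 48 kHz. Note that
+// `samplesPerSec` is expressed in milliHertz, per the comment above:
+//
+//   SLDataFormat_PCM format = CreatePCMConfiguration(
+//       /*channels=*/2, /*sample_rate=*/48000,
+//       /*bits_per_sample=*/SL_PCMSAMPLEFORMAT_FIXED_16);
+//   // format.samplesPerSec == SL_SAMPLINGRATE_48, i.e. 48000 * 1000 mHz.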
+
+OpenSLEngineManager::OpenSLEngineManager() {
+ thread_checker_.Detach();
+}
+
+SLObjectItf OpenSLEngineManager::GetOpenSLEngine() {
+ RTC_LOG(LS_INFO) << "GetOpenSLEngine";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // OpenSL ES for Android only supports a single engine per application.
+ // If one has already been created, return the existing object instead of
+ // creating a new one.
+ if (engine_object_.Get() != nullptr) {
+ RTC_LOG(LS_WARNING)
+ << "The OpenSL ES engine object has already been created";
+ return engine_object_.Get();
+ }
+ // Create the engine object in thread safe mode.
+ const SLEngineOption option[] = {
+ {SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE)}};
+ SLresult result =
+ slCreateEngine(engine_object_.Receive(), 1, option, 0, NULL, NULL);
+ if (result != SL_RESULT_SUCCESS) {
+ RTC_LOG(LS_ERROR) << "slCreateEngine() failed: "
+ << GetSLErrorString(result);
+ engine_object_.Reset();
+ return nullptr;
+ }
+ // Realize the SL Engine in synchronous mode.
+ result = engine_object_->Realize(engine_object_.Get(), SL_BOOLEAN_FALSE);
+ if (result != SL_RESULT_SUCCESS) {
+ RTC_LOG(LS_ERROR) << "Realize() failed: " << GetSLErrorString(result);
+ engine_object_.Reset();
+ return nullptr;
+ }
+ // Finally return the SLObjectItf interface of the engine object.
+ return engine_object_.Get();
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h
new file mode 100644
index 0000000000..9dd1e0f7d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_
+
+#include <SLES/OpenSLES.h>
+#include <stddef.h>
+
+#include "api/ref_counted_base.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// Returns a string representation given an integer SL_RESULT_XXX code.
+// The mapping can be found in <SLES/OpenSLES.h>.
+const char* GetSLErrorString(size_t code);
+
+// Configures an SL_DATAFORMAT_PCM structure based on native audio parameters.
+SLDataFormat_PCM CreatePCMConfiguration(size_t channels,
+ int sample_rate,
+ size_t bits_per_sample);
+
+// Helper class for using SLObjectItf interfaces.
+template <typename SLType, typename SLDerefType>
+class ScopedSLObject {
+ public:
+ ScopedSLObject() : obj_(nullptr) {}
+
+ ~ScopedSLObject() { Reset(); }
+
+ SLType* Receive() {
+ RTC_DCHECK(!obj_);
+ return &obj_;
+ }
+
+ SLDerefType operator->() { return *obj_; }
+
+ SLType Get() const { return obj_; }
+
+ void Reset() {
+ if (obj_) {
+ (*obj_)->Destroy(obj_);
+ obj_ = nullptr;
+ }
+ }
+
+ private:
+ SLType obj_;
+};
+
+typedef ScopedSLObject<SLObjectItf, const SLObjectItf_*> ScopedSLObjectItf;
+
+// Creates and realizes the main (global) Open SL engine object and returns
+// a reference to it. The engine object is only created at the first call
+// since OpenSL ES for Android only supports a single engine per application.
+// Subsequent calls return the already created engine.
+// Note: This class must be used on a single thread, and this is enforced by a
+// thread checker.
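+//
+// Illustrative use (assumes rtc::make_ref_counted from
+// "api/make_ref_counted.h", which is not part of this patch):
+//
+//   auto engine_manager = rtc::make_ref_counted<OpenSLEngineManager>();
+//   SLObjectItf engine = engine_manager->GetOpenSLEngine();  // Creates it.
+//   SLObjectItf same = engine_manager->GetOpenSLEngine();    // Reuses it.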
+class OpenSLEngineManager
+ : public rtc::RefCountedNonVirtual<OpenSLEngineManager> {
+ public:
+ OpenSLEngineManager();
+ ~OpenSLEngineManager() = default;
+ SLObjectItf GetOpenSLEngine();
+
+ private:
+ SequenceChecker thread_checker_;
+ // This object is the global entry point of the OpenSL ES API.
+ // After creating the engine object, the application can obtain this object's
+ // SLEngineItf interface. This interface contains creation methods for all
+ // the other object types in the API. None of these interfaces are realized
+ // by this class. It only provides access to the global engine object.
+ ScopedSLObjectItf engine_object_;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc
new file mode 100644
index 0000000000..6300a3abe1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc
@@ -0,0 +1,446 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/opensles_player.h"
+
+#include <android/log.h>
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+
+#define TAG "OpenSLESPlayer"
+#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
+#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
+#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
+
+#define RETURN_ON_ERROR(op, ...) \
+ do { \
+ SLresult err = (op); \
+ if (err != SL_RESULT_SUCCESS) { \
+ ALOGE("%s failed: %s", #op, GetSLErrorString(err)); \
+ return __VA_ARGS__; \
+ } \
+ } while (0)
+
+namespace webrtc {
+
+namespace jni {
+
+OpenSLESPlayer::OpenSLESPlayer(
+ const AudioParameters& audio_parameters,
+ rtc::scoped_refptr<OpenSLEngineManager> engine_manager)
+ : audio_parameters_(audio_parameters),
+ audio_device_buffer_(nullptr),
+ initialized_(false),
+ playing_(false),
+ buffer_index_(0),
+ engine_manager_(std::move(engine_manager)),
+ engine_(nullptr),
+ player_(nullptr),
+ simple_buffer_queue_(nullptr),
+ volume_(nullptr),
+ last_play_time_(0) {
+ ALOGD("ctor[tid=%d]", rtc::CurrentThreadId());
+ // Use native audio output parameters provided by the audio manager and
+ // define the PCM format structure.
+ pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(),
+ audio_parameters_.sample_rate(),
+ audio_parameters_.bits_per_sample());
+ // Detach from this thread since we want to use the checker to verify calls
+ // from the internal audio thread.
+ thread_checker_opensles_.Detach();
+}
+
+OpenSLESPlayer::~OpenSLESPlayer() {
+ ALOGD("dtor[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+ DestroyAudioPlayer();
+ DestroyMix();
+ engine_ = nullptr;
+ RTC_DCHECK(!engine_);
+ RTC_DCHECK(!output_mix_.Get());
+ RTC_DCHECK(!player_);
+ RTC_DCHECK(!simple_buffer_queue_);
+ RTC_DCHECK(!volume_);
+}
+
+int OpenSLESPlayer::Init() {
+ ALOGD("Init[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (audio_parameters_.channels() == 2) {
+ ALOGW("Stereo mode is enabled");
+ }
+ return 0;
+}
+
+int OpenSLESPlayer::Terminate() {
+ ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopPlayout();
+ return 0;
+}
+
+int OpenSLESPlayer::InitPlayout() {
+ ALOGD("InitPlayout[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!initialized_);
+ RTC_DCHECK(!playing_);
+ if (!ObtainEngineInterface()) {
+ ALOGE("Failed to obtain SL Engine interface");
+ return -1;
+ }
+ CreateMix();
+ initialized_ = true;
+ buffer_index_ = 0;
+ return 0;
+}
+
+bool OpenSLESPlayer::PlayoutIsInitialized() const {
+ return initialized_;
+}
+
+int OpenSLESPlayer::StartPlayout() {
+ ALOGD("StartPlayout[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!playing_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetPlayout();
+ }
+ // The number of lower latency audio players is limited, hence we create the
+ // audio player in StartPlayout() and destroy it in StopPlayout().
+ CreateAudioPlayer();
+ // Fill up audio buffers to avoid initial glitch and to ensure that playback
+ // starts when mode is later changed to SL_PLAYSTATE_PLAYING.
+ // TODO(henrika): we can save some delay by only making one call to
+ // EnqueuePlayoutData. Most likely not worth the risk of adding a glitch.
+ last_play_time_ = rtc::Time();
+ for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
+ EnqueuePlayoutData(true);
+ }
+ // Start streaming data by setting the play state to SL_PLAYSTATE_PLAYING.
+ // For a player object, when the object is in the SL_PLAYSTATE_PLAYING
+ // state, adding buffers will implicitly start playback.
+ RETURN_ON_ERROR((*player_)->SetPlayState(player_, SL_PLAYSTATE_PLAYING), -1);
+ playing_ = (GetPlayState() == SL_PLAYSTATE_PLAYING);
+ RTC_DCHECK(playing_);
+ return 0;
+}
+
+int OpenSLESPlayer::StopPlayout() {
+ ALOGD("StopPlayout[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !playing_) {
+ return 0;
+ }
+ // Stop playing by setting the play state to SL_PLAYSTATE_STOPPED.
+ RETURN_ON_ERROR((*player_)->SetPlayState(player_, SL_PLAYSTATE_STOPPED), -1);
+ // Clear the buffer queue to flush out any remaining data.
+ RETURN_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_), -1);
+#if RTC_DCHECK_IS_ON
+ // Verify that the buffer queue is in fact cleared as it should.
+ SLAndroidSimpleBufferQueueState buffer_queue_state;
+ (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &buffer_queue_state);
+ RTC_DCHECK_EQ(0, buffer_queue_state.count);
+ RTC_DCHECK_EQ(0, buffer_queue_state.index);
+#endif
+ // The number of lower latency audio players is limited, hence we create the
+ // audio player in StartPlayout() and destroy it in StopPlayout().
+ DestroyAudioPlayer();
+ thread_checker_opensles_.Detach();
+ initialized_ = false;
+ playing_ = false;
+ return 0;
+}
+
+bool OpenSLESPlayer::Playing() const {
+ return playing_;
+}
+
+bool OpenSLESPlayer::SpeakerVolumeIsAvailable() {
+ return false;
+}
+
+int OpenSLESPlayer::SetSpeakerVolume(uint32_t volume) {
+ return -1;
+}
+
+absl::optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
+ return absl::nullopt;
+}
+
+absl::optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
+ return absl::nullopt;
+}
+
+absl::optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
+ return absl::nullopt;
+}
+
+void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ ALOGD("AttachAudioBuffer");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz);
+ audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
+ const size_t channels = audio_parameters_.channels();
+ ALOGD("SetPlayoutChannels(%zu)", channels);
+ audio_device_buffer_->SetPlayoutChannels(channels);
+ RTC_CHECK(audio_device_buffer_);
+ AllocateDataBuffers();
+}
+
+void OpenSLESPlayer::AllocateDataBuffers() {
+ ALOGD("AllocateDataBuffers");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!simple_buffer_queue_);
+ RTC_CHECK(audio_device_buffer_);
+ // Create a modified audio buffer class which allows us to ask for any number
+ // of samples (and not only multiple of 10ms) to match the native OpenSL ES
+ // buffer size. The native buffer size corresponds to the
+ // PROPERTY_OUTPUT_FRAMES_PER_BUFFER property which is the number of audio
+ // frames that the HAL (Hardware Abstraction Layer) buffer can hold. It is
+ // recommended to construct audio buffers so that they contain an exact
+ // multiple of this number. If so, callbacks will occur at regular intervals,
+ // which reduces jitter.
+ const size_t buffer_size_in_samples =
+ audio_parameters_.frames_per_buffer() * audio_parameters_.channels();
+ ALOGD("native buffer size: %zu", buffer_size_in_samples);
+ ALOGD("native buffer size in ms: %.2f",
+ audio_parameters_.GetBufferSizeInMilliseconds());
+ fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+ // Allocate memory for the audio buffers.
+ for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
+ audio_buffers_[i].reset(new SLint16[buffer_size_in_samples]);
+ }
+}
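+
+// Worked example of the sizes above (illustrative; assumes a native buffer of
+// 192 mono frames at 48 kHz, as in the FineAudioBuffer comment in the header):
+//
+//   buffer_size_in_samples = 192 * 1;  // 192 SLint16 samples per buffer
+//   // 192 * 1000 / 48000 == 4, i.e. each buffer holds 4 ms of audio, so the
+//   // OpenSL ES callback fires every 4 ms while WebRTC delivers 10 ms chunks.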
+
+bool OpenSLESPlayer::ObtainEngineInterface() {
+ ALOGD("ObtainEngineInterface");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (engine_)
+ return true;
+ // Get access to (or create if not already existing) the global OpenSL Engine
+ // object.
+ SLObjectItf engine_object = engine_manager_->GetOpenSLEngine();
+ if (engine_object == nullptr) {
+ ALOGE("Failed to access the global OpenSL engine");
+ return false;
+ }
+ // Get the SL Engine Interface which is implicit.
+ RETURN_ON_ERROR(
+ (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine_),
+ false);
+ return true;
+}
+
+bool OpenSLESPlayer::CreateMix() {
+ ALOGD("CreateMix");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(engine_);
+ if (output_mix_.Get())
+ return true;
+
+ // Create the output mix on the engine object. No interfaces will be used.
+ RETURN_ON_ERROR((*engine_)->CreateOutputMix(engine_, output_mix_.Receive(), 0,
+ nullptr, nullptr),
+ false);
+ RETURN_ON_ERROR(output_mix_->Realize(output_mix_.Get(), SL_BOOLEAN_FALSE),
+ false);
+ return true;
+}
+
+void OpenSLESPlayer::DestroyMix() {
+ ALOGD("DestroyMix");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!output_mix_.Get())
+ return;
+ output_mix_.Reset();
+}
+
+bool OpenSLESPlayer::CreateAudioPlayer() {
+ ALOGD("CreateAudioPlayer");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(output_mix_.Get());
+ if (player_object_.Get())
+ return true;
+ RTC_DCHECK(!player_);
+ RTC_DCHECK(!simple_buffer_queue_);
+ RTC_DCHECK(!volume_);
+
+ // Source: Android Simple Buffer Queue data locator for the audio source.
+ SLDataLocator_AndroidSimpleBufferQueue simple_buffer_queue = {
+ SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
+ static_cast<SLuint32>(kNumOfOpenSLESBuffers)};
+ SLDataSource audio_source = {&simple_buffer_queue, &pcm_format_};
+
+ // Sink: OutputMix-based data locator for the audio sink.
+ SLDataLocator_OutputMix locator_output_mix = {SL_DATALOCATOR_OUTPUTMIX,
+ output_mix_.Get()};
+ SLDataSink audio_sink = {&locator_output_mix, nullptr};
+
+ // Define the interfaces that we intend to use and realize.
+ const SLInterfaceID interface_ids[] = {SL_IID_ANDROIDCONFIGURATION,
+ SL_IID_BUFFERQUEUE, SL_IID_VOLUME};
+ const SLboolean interface_required[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE,
+ SL_BOOLEAN_TRUE};
+
+ // Create the audio player on the engine interface.
+ RETURN_ON_ERROR(
+ (*engine_)->CreateAudioPlayer(
+ engine_, player_object_.Receive(), &audio_source, &audio_sink,
+ arraysize(interface_ids), interface_ids, interface_required),
+ false);
+
+ // Use the Android configuration interface to set platform-specific
+ // parameters. Should be done before player is realized.
+ SLAndroidConfigurationItf player_config;
+ RETURN_ON_ERROR(
+ player_object_->GetInterface(player_object_.Get(),
+ SL_IID_ANDROIDCONFIGURATION, &player_config),
+ false);
+ // Set audio player configuration to SL_ANDROID_STREAM_VOICE which
+ // corresponds to android.media.AudioManager.STREAM_VOICE_CALL.
+ SLint32 stream_type = SL_ANDROID_STREAM_VOICE;
+ RETURN_ON_ERROR(
+ (*player_config)
+ ->SetConfiguration(player_config, SL_ANDROID_KEY_STREAM_TYPE,
+ &stream_type, sizeof(SLint32)),
+ false);
+
+ // Realize the audio player object after configuration has been set.
+ RETURN_ON_ERROR(
+ player_object_->Realize(player_object_.Get(), SL_BOOLEAN_FALSE), false);
+
+ // Get the SLPlayItf interface on the audio player.
+ RETURN_ON_ERROR(
+ player_object_->GetInterface(player_object_.Get(), SL_IID_PLAY, &player_),
+ false);
+
+ // Get the SLAndroidSimpleBufferQueueItf interface on the audio player.
+ RETURN_ON_ERROR(
+ player_object_->GetInterface(player_object_.Get(), SL_IID_BUFFERQUEUE,
+ &simple_buffer_queue_),
+ false);
+
+ // Register callback method for the Android Simple Buffer Queue interface.
+ // This method will be called when the native audio layer needs audio data.
+ RETURN_ON_ERROR((*simple_buffer_queue_)
+ ->RegisterCallback(simple_buffer_queue_,
+ SimpleBufferQueueCallback, this),
+ false);
+
+ // Get the SLVolumeItf interface on the audio player.
+ RETURN_ON_ERROR(player_object_->GetInterface(player_object_.Get(),
+ SL_IID_VOLUME, &volume_),
+ false);
+
+ // TODO(henrika): might not be required to set volume to max here since it
+ // seems to be default on most devices. Might be required for unit tests.
+ // RETURN_ON_ERROR((*volume_)->SetVolumeLevel(volume_, 0), false);
+
+ return true;
+}
+
+void OpenSLESPlayer::DestroyAudioPlayer() {
+ ALOGD("DestroyAudioPlayer");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!player_object_.Get())
+ return;
+ (*simple_buffer_queue_)
+ ->RegisterCallback(simple_buffer_queue_, nullptr, nullptr);
+ player_object_.Reset();
+ player_ = nullptr;
+ simple_buffer_queue_ = nullptr;
+ volume_ = nullptr;
+}
+
+// static
+void OpenSLESPlayer::SimpleBufferQueueCallback(
+ SLAndroidSimpleBufferQueueItf caller,
+ void* context) {
+ OpenSLESPlayer* stream = reinterpret_cast<OpenSLESPlayer*>(context);
+ stream->FillBufferQueue();
+}
+
+void OpenSLESPlayer::FillBufferQueue() {
+ RTC_DCHECK(thread_checker_opensles_.IsCurrent());
+ SLuint32 state = GetPlayState();
+ if (state != SL_PLAYSTATE_PLAYING) {
+ ALOGW("Buffer callback in non-playing state!");
+ return;
+ }
+ EnqueuePlayoutData(false);
+}
+
+void OpenSLESPlayer::EnqueuePlayoutData(bool silence) {
+ // Check delta time between two successive callbacks and provide a warning
+ // if it becomes very large.
+ // TODO(henrika): using 150ms as upper limit but this value is rather random.
+ const uint32_t current_time = rtc::Time();
+ const uint32_t diff = current_time - last_play_time_;
+ if (diff > 150) {
+ ALOGW("Bad OpenSL ES playout timing, dT=%u [ms]", diff);
+ }
+ last_play_time_ = current_time;
+ SLint8* audio_ptr8 =
+ reinterpret_cast<SLint8*>(audio_buffers_[buffer_index_].get());
+ if (silence) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // Avoid acquiring real audio data from WebRTC and fill the buffer with
+ // zeros instead. Used to prime the buffer with silence and to avoid asking
+ // for audio data from two different threads.
+ memset(audio_ptr8, 0, audio_parameters_.GetBytesPerBuffer());
+ } else {
+ RTC_DCHECK(thread_checker_opensles_.IsCurrent());
+ // Read audio data from the WebRTC source using the FineAudioBuffer object
+ // to adjust for differences in buffer size between WebRTC (10ms) and native
+ // OpenSL ES. Use hardcoded delay estimate since OpenSL ES does not support
+ // delay estimation.
+ fine_audio_buffer_->GetPlayoutData(
+ rtc::ArrayView<int16_t>(audio_buffers_[buffer_index_].get(),
+ audio_parameters_.frames_per_buffer() *
+ audio_parameters_.channels()),
+ 25);
+ }
+ // Enqueue the decoded audio buffer for playback.
+ SLresult err = (*simple_buffer_queue_)
+ ->Enqueue(simple_buffer_queue_, audio_ptr8,
+ audio_parameters_.GetBytesPerBuffer());
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("Enqueue failed: %d", err);
+ }
+ buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers;
+}
+
+SLuint32 OpenSLESPlayer::GetPlayState() const {
+ RTC_DCHECK(player_);
+ SLuint32 state;
+ SLresult err = (*player_)->GetPlayState(player_, &state);
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("GetPlayState failed: %d", err);
+ }
+ return state;
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h
new file mode 100644
index 0000000000..8a22432309
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h
@@ -0,0 +1,199 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <SLES/OpenSLES_AndroidConfiguration.h>
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+
+namespace webrtc {
+
+class FineAudioBuffer;
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio output support for Android using the
+// C based OpenSL ES API. No calls from C/C++ to Java using JNI is done.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread. A
+// thread checker will RTC_DCHECK if any method is called on an invalid thread.
+// Decoded audio buffers are requested on a dedicated internal thread managed by
+// the OpenSL ES layer.
+//
+// The existing design forces the user to call InitPlayout() after StopPlayout()
+// to be able to call StartPlayout() again. This is in line with how the Java-
+// based implementation works.
+//
+// OpenSL ES is a native C API which has no Dalvik-related overhead such as
+// garbage collection pauses and it supports reduced audio output latency.
+// If the device doesn't claim this feature but supports API level 9 (Android
+// platform version 2.3) or later, then we can still use the OpenSL ES APIs but
+// the output latency may be higher.
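+//
+// Illustrative construction sketch (not part of the API surface; the audio
+// parameters are assumed to come from the audio manager):
+//
+//   auto engine_manager = rtc::make_ref_counted<OpenSLEngineManager>();
+//   auto player = std::make_unique<OpenSLESPlayer>(playout_parameters,
+//                                                  engine_manager);
+//   player->Init();
+//   player->InitPlayout();
+//   player->StartPlayout();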
+class OpenSLESPlayer : public AudioOutput {
+ public:
+ // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
+ // required for lower latency. Beginning with API level 18 (Android 4.3), a
+ // buffer count of 1 is sufficient for lower latency. In addition, the buffer
+ // size and sample rate must be compatible with the device's native output
+ // configuration provided via the audio manager at construction.
+ // TODO(henrika): perhaps set this value dynamically based on OS version.
+ static const int kNumOfOpenSLESBuffers = 2;
+
+ OpenSLESPlayer(const AudioParameters& audio_parameters,
+ rtc::scoped_refptr<OpenSLEngineManager> engine_manager);
+ ~OpenSLESPlayer() override;
+
+ int Init() override;
+ int Terminate() override;
+
+ int InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+
+ int StartPlayout() override;
+ int StopPlayout() override;
+ bool Playing() const override;
+
+ bool SpeakerVolumeIsAvailable() override;
+ int SetSpeakerVolume(uint32_t volume) override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ int GetPlayoutUnderrunCount() override { return -1; }
+
+ private:
+ // These callback methods are called when data is required for playout.
+ // They are both called from an internal "OpenSL ES thread" which is not
+ // attached to the Dalvik VM.
+ static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller,
+ void* context);
+ void FillBufferQueue();
+ // Reads audio data in PCM format using the AudioDeviceBuffer.
+ // Can be called both on the main thread (during StartPlayout()) and from the
+ // internal audio thread while output streaming is active.
+ // If the `silence` flag is set, the audio is filled with zeros instead of
+ // asking the WebRTC layer for real audio data. This procedure is also known
+ // as audio priming.
+ void EnqueuePlayoutData(bool silence);
+
+ // Allocate memory for audio buffers which will be used to render audio
+ // via the SLAndroidSimpleBufferQueueItf interface.
+ void AllocateDataBuffers();
+
+ // Obtains the SL Engine Interface from the existing global Engine object.
+ // The interface exposes creation methods of all the OpenSL ES object types.
+ // This method defines the `engine_` member variable.
+ bool ObtainEngineInterface();
+
+ // Creates/destroys the output mix object.
+ bool CreateMix();
+ void DestroyMix();
+
+ // Creates/destroys the audio player and the simple-buffer object.
+ // Also creates the volume object.
+ bool CreateAudioPlayer();
+ void DestroyAudioPlayer();
+
+ SLuint32 GetPlayState() const;
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker thread_checker_;
+
+ // Stores thread ID in first call to SimpleBufferQueueCallback() from internal
+ // non-application thread which is not attached to the Dalvik JVM.
+ // Detached during construction of this object.
+ SequenceChecker thread_checker_opensles_;
+
+ const AudioParameters audio_parameters_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl instance created by AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_;
+
+ bool initialized_;
+ bool playing_;
+
+ // PCM-type format definition.
+ // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if
+ // 32-bit float representation is needed.
+ SLDataFormat_PCM pcm_format_;
+
+ // Queue of audio buffers to be used by the player object for rendering
+ // audio.
+ std::unique_ptr<SLint16[]> audio_buffers_[kNumOfOpenSLESBuffers];
+
+ // FineAudioBuffer takes an AudioDeviceBuffer, which delivers audio data
+ // in chunks of 10ms. It then allows for this data to be pulled at a finer
+ // or coarser granularity, i.e., by interacting with this class instead of
+ // directly with the AudioDeviceBuffer one can ask for any number of audio
+ // data samples.
+ // Example: the native buffer size can be 192 audio frames at a 48kHz sample
+ // rate. WebRTC will provide 480 audio frames per 10ms but OpenSL ES asks for
+ // 192 in each callback (one every 4th ms). This class can then ask for 192
+ // and the FineAudioBuffer will ask WebRTC for new data only approximately
+ // every second callback, caching any unused audio in between.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue.
+ // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ...
+ int buffer_index_;
+
+ const rtc::scoped_refptr<OpenSLEngineManager> engine_manager_;
+ // This interface exposes creation methods for all the OpenSL ES object types.
+ // It is the OpenSL ES API entry point.
+ SLEngineItf engine_;
+
+ // Output mix object to be used by the player object.
+ ScopedSLObjectItf output_mix_;
+
+ // The audio player media object plays out audio to the speakers. It also
+ // supports volume control.
+ ScopedSLObjectItf player_object_;
+
+ // This interface is supported on the audio player and it controls the state
+ // of the audio player.
+ SLPlayItf player_;
+
+ // The Android Simple Buffer Queue interface is supported on the audio player
+ // and it provides methods to send audio data from the source to the audio
+ // player for rendering.
+ SLAndroidSimpleBufferQueueItf simple_buffer_queue_;
+
+ // This interface exposes controls for manipulating the object's audio volume
+ // properties. This interface is supported on the Audio Player object.
+ SLVolumeItf volume_;
+
+ // Last time the OpenSL ES layer asked for audio data to play out.
+ uint32_t last_play_time_;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc
new file mode 100644
index 0000000000..c426a8d92b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc
@@ -0,0 +1,445 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/opensles_recorder.h"
+
+#include <android/log.h>
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+
+#define TAG "OpenSLESRecorder"
+#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
+#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
+#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
+
+#define LOG_ON_ERROR(op) \
+ [](SLresult err) { \
+ if (err != SL_RESULT_SUCCESS) { \
+ ALOGE("%s:%d %s failed: %s", __FILE__, __LINE__, #op, \
+ GetSLErrorString(err)); \
+ return true; \
+ } \
+ return false; \
+ }(op)
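+
+// Illustrative usage of the macro above (mirrors the calls later in this
+// file): the lambda logs the failing operation and yields true on error, so
+// callers can bail out in a single line.
+//
+//   if (LOG_ON_ERROR((*recorder_)->SetRecordState(recorder_,
+//                                                 SL_RECORDSTATE_STOPPED))) {
+//     return -1;  // The OpenSL ES call failed and has been logged.
+//   }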
+
+namespace webrtc {
+
+namespace jni {
+
+OpenSLESRecorder::OpenSLESRecorder(
+ const AudioParameters& audio_parameters,
+ rtc::scoped_refptr<OpenSLEngineManager> engine_manager)
+ : audio_parameters_(audio_parameters),
+ audio_device_buffer_(nullptr),
+ initialized_(false),
+ recording_(false),
+ engine_manager_(std::move(engine_manager)),
+ engine_(nullptr),
+ recorder_(nullptr),
+ simple_buffer_queue_(nullptr),
+ buffer_index_(0),
+ last_rec_time_(0) {
+ ALOGD("ctor[tid=%d]", rtc::CurrentThreadId());
+ // Detach from this thread since we want to use the checker to verify calls
+ // from the internal audio thread.
+ thread_checker_opensles_.Detach();
+ // Use native audio output parameters provided by the audio manager and
+ // define the PCM format structure.
+ pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(),
+ audio_parameters_.sample_rate(),
+ audio_parameters_.bits_per_sample());
+}
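+
+// For reference, a 48 kHz mono 16-bit configuration produced by
+// CreatePCMConfiguration() is expected to look like the sketch below
+// (field values follow the OpenSL ES spec; the helper lives in
+// opensles_common):
+//
+//   SLDataFormat_PCM format;
+//   format.formatType = SL_DATAFORMAT_PCM;
+//   format.numChannels = 1;
+//   format.samplesPerSec = SL_SAMPLINGRATE_48;  // Expressed in milliHertz.
+//   format.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+//   format.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
+//   format.channelMask = SL_SPEAKER_FRONT_CENTER;
+//   format.endianness = SL_BYTEORDER_LITTLEENDIAN;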
+
+OpenSLESRecorder::~OpenSLESRecorder() {
+ ALOGD("dtor[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+ DestroyAudioRecorder();
+ engine_ = nullptr;
+ RTC_DCHECK(!engine_);
+ RTC_DCHECK(!recorder_);
+ RTC_DCHECK(!simple_buffer_queue_);
+}
+
+int OpenSLESRecorder::Init() {
+ ALOGD("Init[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (audio_parameters_.channels() == 2) {
+ ALOGD("Stereo mode is enabled");
+ }
+ return 0;
+}
+
+int OpenSLESRecorder::Terminate() {
+ ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopRecording();
+ return 0;
+}
+
+int OpenSLESRecorder::InitRecording() {
+ ALOGD("InitRecording[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!initialized_);
+ RTC_DCHECK(!recording_);
+ if (!ObtainEngineInterface()) {
+ ALOGE("Failed to obtain SL Engine interface");
+ return -1;
+ }
+ CreateAudioRecorder();
+ initialized_ = true;
+ buffer_index_ = 0;
+ return 0;
+}
+
+bool OpenSLESRecorder::RecordingIsInitialized() const {
+ return initialized_;
+}
+
+int OpenSLESRecorder::StartRecording() {
+ ALOGD("StartRecording[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!recording_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetRecord();
+ }
+ // Add buffers to the queue before changing state to SL_RECORDSTATE_RECORDING
+ // to ensure that recording starts as soon as the state is modified. On some
+ // devices, SLAndroidSimpleBufferQueue::Clear() used in Stop() does not flush
+ // the buffers as intended and we therefore check the number of buffers
+ // already queued first. Enqueue() can return SL_RESULT_BUFFER_INSUFFICIENT
+ // otherwise.
+ int num_buffers_in_queue = GetBufferCount();
+ for (int i = 0; i < kNumOfOpenSLESBuffers - num_buffers_in_queue; ++i) {
+ if (!EnqueueAudioBuffer()) {
+ recording_ = false;
+ return -1;
+ }
+ }
+ num_buffers_in_queue = GetBufferCount();
+ RTC_DCHECK_EQ(num_buffers_in_queue, kNumOfOpenSLESBuffers);
+ LogBufferState();
+ // Start audio recording by changing the state to SL_RECORDSTATE_RECORDING.
+ // Given that buffers are already enqueued, recording should start at once.
+ // If the state change fails, LOG_ON_ERROR() logs the error and we return -1.
+ last_rec_time_ = rtc::Time();
+ if (LOG_ON_ERROR(
+ (*recorder_)->SetRecordState(recorder_, SL_RECORDSTATE_RECORDING))) {
+ return -1;
+ }
+ recording_ = (GetRecordState() == SL_RECORDSTATE_RECORDING);
+ RTC_DCHECK(recording_);
+ return 0;
+}
+
+int OpenSLESRecorder::StopRecording() {
+ ALOGD("StopRecording[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !recording_) {
+ return 0;
+ }
+ // Stop recording by setting the record state to SL_RECORDSTATE_STOPPED.
+ if (LOG_ON_ERROR(
+ (*recorder_)->SetRecordState(recorder_, SL_RECORDSTATE_STOPPED))) {
+ return -1;
+ }
+ // Clear the buffer queue to get rid of old data when resuming recording.
+ if (LOG_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_))) {
+ return -1;
+ }
+ thread_checker_opensles_.Detach();
+ initialized_ = false;
+ recording_ = false;
+ return 0;
+}
+
+bool OpenSLESRecorder::Recording() const {
+ return recording_;
+}
+
+void OpenSLESRecorder::AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) {
+ ALOGD("AttachAudioBuffer");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_CHECK(audio_buffer);
+ audio_device_buffer_ = audio_buffer;
+ // Ensure that the audio device buffer is informed about the native sample
+ // rate used on the recording side.
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ ALOGD("SetRecordingSampleRate(%d)", sample_rate_hz);
+ audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz);
+ // Ensure that the audio device buffer is informed about the number of
+ // channels preferred by the OS on the recording side.
+ const size_t channels = audio_parameters_.channels();
+ ALOGD("SetRecordingChannels(%zu)", channels);
+ audio_device_buffer_->SetRecordingChannels(channels);
+ // Allocate memory for internal data buffers given the audio parameters.
+ AllocateDataBuffers();
+}
+
+bool OpenSLESRecorder::IsAcousticEchoCancelerSupported() const {
+ return false;
+}
+
+bool OpenSLESRecorder::IsNoiseSuppressorSupported() const {
+ return false;
+}
+
+int OpenSLESRecorder::EnableBuiltInAEC(bool enable) {
+ ALOGD("EnableBuiltInAEC(%d)", enable);
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ ALOGE("Not implemented");
+ return 0;
+}
+
+int OpenSLESRecorder::EnableBuiltInNS(bool enable) {
+ ALOGD("EnableBuiltInNS(%d)", enable);
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ ALOGE("Not implemented");
+ return 0;
+}
+
+bool OpenSLESRecorder::ObtainEngineInterface() {
+ ALOGD("ObtainEngineInterface");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (engine_)
+ return true;
+ // Get access to (or create if not already existing) the global OpenSL Engine
+ // object.
+ SLObjectItf engine_object = engine_manager_->GetOpenSLEngine();
+ if (engine_object == nullptr) {
+ ALOGE("Failed to access the global OpenSL engine");
+ return false;
+ }
+ // Get the SL Engine Interface which is implicit.
+ if (LOG_ON_ERROR(
+ (*engine_object)
+ ->GetInterface(engine_object, SL_IID_ENGINE, &engine_))) {
+ return false;
+ }
+ return true;
+}
+
+bool OpenSLESRecorder::CreateAudioRecorder() {
+ ALOGD("CreateAudioRecorder");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (recorder_object_.Get())
+ return true;
+ RTC_DCHECK(!recorder_);
+ RTC_DCHECK(!simple_buffer_queue_);
+
+ // Audio source configuration.
+ SLDataLocator_IODevice mic_locator = {SL_DATALOCATOR_IODEVICE,
+ SL_IODEVICE_AUDIOINPUT,
+ SL_DEFAULTDEVICEID_AUDIOINPUT, NULL};
+ SLDataSource audio_source = {&mic_locator, NULL};
+
+ // Audio sink configuration.
+ SLDataLocator_AndroidSimpleBufferQueue buffer_queue = {
+ SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
+ static_cast<SLuint32>(kNumOfOpenSLESBuffers)};
+ SLDataSink audio_sink = {&buffer_queue, &pcm_format_};
+
+ // Create the audio recorder object (requires the RECORD_AUDIO permission).
+ // Do not realize the recorder yet. Set the configuration first.
+ const SLInterfaceID interface_id[] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+ SL_IID_ANDROIDCONFIGURATION};
+ const SLboolean interface_required[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
+ if (LOG_ON_ERROR((*engine_)->CreateAudioRecorder(
+ engine_, recorder_object_.Receive(), &audio_source, &audio_sink,
+ arraysize(interface_id), interface_id, interface_required))) {
+ return false;
+ }
+
+ // Configure the audio recorder (before it is realized).
+ SLAndroidConfigurationItf recorder_config;
+ if (LOG_ON_ERROR((recorder_object_->GetInterface(recorder_object_.Get(),
+ SL_IID_ANDROIDCONFIGURATION,
+ &recorder_config)))) {
+ return false;
+ }
+
+ // Uses the default microphone tuned for audio communication
+ // (SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION). Note that
+ // SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION leads to a fast track but
+ // also excludes usage of required effects like AEC, AGC and NS.
+ SLint32 stream_type = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
+ if (LOG_ON_ERROR(((*recorder_config)
+ ->SetConfiguration(recorder_config,
+ SL_ANDROID_KEY_RECORDING_PRESET,
+ &stream_type, sizeof(SLint32))))) {
+ return false;
+ }
+
+ // The audio recorder can now be realized (in synchronous mode).
+ if (LOG_ON_ERROR((recorder_object_->Realize(recorder_object_.Get(),
+ SL_BOOLEAN_FALSE)))) {
+ return false;
+ }
+
+ // Get the implicit recorder interface (SL_IID_RECORD).
+ if (LOG_ON_ERROR((recorder_object_->GetInterface(
+ recorder_object_.Get(), SL_IID_RECORD, &recorder_)))) {
+ return false;
+ }
+
+ // Get the simple buffer queue interface (SL_IID_ANDROIDSIMPLEBUFFERQUEUE).
+ // It was explicitly requested.
+ if (LOG_ON_ERROR((recorder_object_->GetInterface(
+ recorder_object_.Get(), SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+ &simple_buffer_queue_)))) {
+ return false;
+ }
+
+ // Register the input callback for the simple buffer queue.
+ // This callback will be called when receiving new data from the device.
+ if (LOG_ON_ERROR(((*simple_buffer_queue_)
+ ->RegisterCallback(simple_buffer_queue_,
+ SimpleBufferQueueCallback, this)))) {
+ return false;
+ }
+ return true;
+}
+
+void OpenSLESRecorder::DestroyAudioRecorder() {
+ ALOGD("DestroyAudioRecorder");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!recorder_object_.Get())
+ return;
+ (*simple_buffer_queue_)
+ ->RegisterCallback(simple_buffer_queue_, nullptr, nullptr);
+ recorder_object_.Reset();
+ recorder_ = nullptr;
+ simple_buffer_queue_ = nullptr;
+}
+
+void OpenSLESRecorder::SimpleBufferQueueCallback(
+ SLAndroidSimpleBufferQueueItf buffer_queue,
+ void* context) {
+ OpenSLESRecorder* stream = static_cast<OpenSLESRecorder*>(context);
+ stream->ReadBufferQueue();
+}
+
+void OpenSLESRecorder::AllocateDataBuffers() {
+ ALOGD("AllocateDataBuffers");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!simple_buffer_queue_);
+ RTC_CHECK(audio_device_buffer_);
+ // Create a modified audio buffer class which allows us to deliver any number
+ // of samples (and not only multiple of 10ms) to match the native audio unit
+ // buffer size.
+ ALOGD("frames per native buffer: %zu", audio_parameters_.frames_per_buffer());
+ ALOGD("frames per 10ms buffer: %zu",
+ audio_parameters_.frames_per_10ms_buffer());
+ ALOGD("bytes per native buffer: %zu", audio_parameters_.GetBytesPerBuffer());
+ ALOGD("native sample rate: %d", audio_parameters_.sample_rate());
+ RTC_DCHECK(audio_device_buffer_);
+ fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+ // Allocate queue of audio buffers that stores recorded audio samples.
+ const int buffer_size_samples =
+ audio_parameters_.frames_per_buffer() * audio_parameters_.channels();
+ audio_buffers_.reset(new std::unique_ptr<SLint16[]>[kNumOfOpenSLESBuffers]);
+ for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
+ audio_buffers_[i].reset(new SLint16[buffer_size_samples]);
+ }
+}
+
+void OpenSLESRecorder::ReadBufferQueue() {
+ RTC_DCHECK(thread_checker_opensles_.IsCurrent());
+ SLuint32 state = GetRecordState();
+ if (state != SL_RECORDSTATE_RECORDING) {
+ ALOGW("Buffer callback in non-recording state!");
+ return;
+ }
+ // Check delta time between two successive callbacks and provide a warning
+ // if it becomes very large.
+ // TODO(henrika): using 150ms as upper limit but this value is rather random.
+ const uint32_t current_time = rtc::Time();
+ const uint32_t diff = current_time - last_rec_time_;
+ if (diff > 150) {
+ ALOGW("Bad OpenSL ES record timing, dT=%u [ms]", diff);
+ }
+ last_rec_time_ = current_time;
+ // Send recorded audio data to the WebRTC sink.
+ // TODO(henrika): fix delay estimates. It is OK to use fixed values for now
+ // since there is no support to turn off built-in EC in combination with
+ // OpenSL ES anyhow. Hence, as is, the WebRTC based AEC (which would use
+ // these estimates) will never be active.
+ fine_audio_buffer_->DeliverRecordedData(
+ rtc::ArrayView<const int16_t>(
+ audio_buffers_[buffer_index_].get(),
+ audio_parameters_.frames_per_buffer() * audio_parameters_.channels()),
+ 25);
+ // Enqueue the utilized audio buffer and use it for recording again.
+ EnqueueAudioBuffer();
+}
+
+bool OpenSLESRecorder::EnqueueAudioBuffer() {
+ SLresult err =
+ (*simple_buffer_queue_)
+ ->Enqueue(
+ simple_buffer_queue_,
+ reinterpret_cast<SLint8*>(audio_buffers_[buffer_index_].get()),
+ audio_parameters_.GetBytesPerBuffer());
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("Enqueue failed: %s", GetSLErrorString(err));
+ return false;
+ }
+ buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers;
+ return true;
+}
+
+SLuint32 OpenSLESRecorder::GetRecordState() const {
+ RTC_DCHECK(recorder_);
+ SLuint32 state;
+ SLresult err = (*recorder_)->GetRecordState(recorder_, &state);
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("GetRecordState failed: %s", GetSLErrorString(err));
+ }
+ return state;
+}
+
+SLAndroidSimpleBufferQueueState OpenSLESRecorder::GetBufferQueueState() const {
+ RTC_DCHECK(simple_buffer_queue_);
+ // state.count: Number of buffers currently in the queue.
+ // state.index: Index of the currently filling buffer. This is a linear index
+ // that keeps a cumulative count of the number of buffers recorded.
+ SLAndroidSimpleBufferQueueState state;
+ SLresult err =
+ (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &state);
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("GetState failed: %s", GetSLErrorString(err));
+ }
+ return state;
+}
+
+void OpenSLESRecorder::LogBufferState() const {
+ SLAndroidSimpleBufferQueueState state = GetBufferQueueState();
+ ALOGD("state.count:%d state.index:%d", state.count, state.index);
+}
+
+SLuint32 OpenSLESRecorder::GetBufferCount() {
+ SLAndroidSimpleBufferQueueState state = GetBufferQueueState();
+ return state.count;
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h
new file mode 100644
index 0000000000..93c4e4eec9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h
@@ -0,0 +1,193 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <SLES/OpenSLES_AndroidConfiguration.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+
+namespace webrtc {
+
+class FineAudioBuffer;
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio input support for Android using the
+// C based OpenSL ES API. No calls from C/C++ to Java using JNI are made.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread. A
+// thread checker will RTC_DCHECK if any method is called on an invalid thread.
+// Recorded audio buffers are provided on a dedicated internal thread managed by
+// the OpenSL ES layer.
+//
+// The existing design forces the user to call InitRecording() after
+// StopRecording() to be able to call StartRecording() again. This is in line
+// with how the Java-based implementation works.
+//
+// As of API level 21, lower latency audio input is supported on select devices.
+// To take advantage of this feature, first confirm that lower latency output is
+// available. The capability for lower latency output is a prerequisite for the
+// lower latency input feature. Then, create an AudioRecorder with the same
+// sample rate and buffer size as would be used for output. OpenSL ES interfaces
+// for input effects preclude the lower latency path.
+// See https://developer.android.com/ndk/guides/audio/opensl-prog-notes.html
+// for more details.
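+//
+// Typical call sequence (illustrative sketch; `params`, `engine_manager` and
+// `adb` are assumed to be created elsewhere):
+//
+//   OpenSLESRecorder recorder(params, engine_manager);
+//   recorder.Init();
+//   recorder.AttachAudioBuffer(adb);
+//   recorder.InitRecording();
+//   recorder.StartRecording();
+//   ...
+//   recorder.StopRecording();  // A new InitRecording() is required before
+//   recorder.Terminate();      // StartRecording() can be called again.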
+class OpenSLESRecorder : public AudioInput {
+ public:
+ // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
+ // required for lower latency. Beginning with API level 18 (Android 4.3), a
+ // buffer count of 1 is sufficient for lower latency. In addition, the buffer
+ // size and sample rate must be compatible with the device's native input
+ // configuration provided via the audio manager at construction.
+ // TODO(henrika): perhaps set this value dynamically based on OS version.
+ static const int kNumOfOpenSLESBuffers = 2;
+
+ OpenSLESRecorder(const AudioParameters& audio_parameters,
+ rtc::scoped_refptr<OpenSLEngineManager> engine_manager);
+ ~OpenSLESRecorder() override;
+
+ int Init() override;
+ int Terminate() override;
+
+ int InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int StartRecording() override;
+ int StopRecording() override;
+ bool Recording() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) override;
+
+ // TODO(henrika): add support using OpenSL ES APIs when available.
+ bool IsAcousticEchoCancelerSupported() const override;
+ bool IsNoiseSuppressorSupported() const override;
+ int EnableBuiltInAEC(bool enable) override;
+ int EnableBuiltInNS(bool enable) override;
+
+ private:
+ // Obtains the SL Engine Interface from the existing global Engine object.
+ // The interface exposes creation methods of all the OpenSL ES object types.
+ // This method defines the `engine_` member variable.
+ bool ObtainEngineInterface();
+
+ // Creates/destroys the audio recorder and the simple-buffer queue object.
+ bool CreateAudioRecorder();
+ void DestroyAudioRecorder();
+
+ // Allocate memory for audio buffers which will be used to capture audio
+ // via the SLAndroidSimpleBufferQueueItf interface.
+ void AllocateDataBuffers();
+
+ // These callback methods are called when data has been written to the input
+ // buffer queue. They are both called from an internal "OpenSL ES thread"
+ // which is not attached to the Dalvik VM.
+ static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller,
+ void* context);
+ void ReadBufferQueue();
+
+ // Wraps calls to SLAndroidSimpleBufferQueueItf::Enqueue(). It can be called
+ // both on the main thread (but before recording has started) and from the
+ // internal audio thread while input streaming is active. It uses
+ // `simple_buffer_queue_` but no lock is needed since the initial calls from
+ // the main thread and the native callback thread are mutually exclusive.
+ bool EnqueueAudioBuffer();
+
+ // Returns the current recorder state.
+ SLuint32 GetRecordState() const;
+
+ // Returns the current buffer queue state.
+ SLAndroidSimpleBufferQueueState GetBufferQueueState() const;
+
+ // Number of buffers currently in the queue.
+ SLuint32 GetBufferCount();
+
+ // Prints a log message of the current queue state. Can be used for debugging
+ // purposes.
+ void LogBufferState() const;
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker thread_checker_;
+
+ // Stores the thread ID of the first call to SimpleBufferQueueCallback() from
+ // the internal non-application thread, which is not attached to the Dalvik
+ // JVM. Detached during construction of this object.
+ SequenceChecker thread_checker_opensles_;
+
+ const AudioParameters audio_parameters_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl class, which is created by AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_;
+
+ // PCM-type format definition.
+ // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if
+ // 32-bit float representation is needed.
+ SLDataFormat_PCM pcm_format_;
+
+ bool initialized_;
+ bool recording_;
+
+ const rtc::scoped_refptr<OpenSLEngineManager> engine_manager_;
+ // This interface exposes creation methods for all the OpenSL ES object types.
+ // It is the OpenSL ES API entry point.
+ SLEngineItf engine_;
+
+ // The audio recorder media object records audio to the destination specified
+ // by the data sink capturing it from the input specified by the data source.
+ ScopedSLObjectItf recorder_object_;
+
+ // This interface is supported on the audio recorder object and it controls
+ // the state of the audio recorder.
+ SLRecordItf recorder_;
+
+ // The Android Simple Buffer Queue interface is supported on the audio
+ // recorder. For recording, an app should enqueue empty buffers. When a
+ // registered callback sends notification that the system has finished writing
+ // data to the buffer, the app can read the buffer.
+ SLAndroidSimpleBufferQueueItf simple_buffer_queue_;
+
+ // Consumes audio of native buffer size and feeds the WebRTC layer with 10ms
+ // chunks of audio.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Queue of audio buffers to be used by the recorder object for capturing
+ // audio. They are used in a round-robin way and the size of each buffer
+ // is given by AudioParameters::frames_per_buffer(), i.e., it corresponds to
+ // the native OpenSL ES buffer size.
+ std::unique_ptr<std::unique_ptr<SLint16[]>[]> audio_buffers_;
+
+ // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue.
+ // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ...
+ int buffer_index_;
+
+ // Last time the OpenSL ES layer delivered recorded audio data.
+ uint32_t last_rec_time_;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc
new file mode 100644
index 0000000000..d445cc754e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/generated_builtin_audio_codecs_jni/BuiltinAudioDecoderFactoryFactory_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong
+JNI_BuiltinAudioDecoderFactoryFactory_CreateBuiltinAudioDecoderFactory(
+ JNIEnv* env) {
+ return NativeToJavaPointer(CreateBuiltinAudioDecoderFactory().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc
new file mode 100644
index 0000000000..e5a4b10eee
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/generated_builtin_audio_codecs_jni/BuiltinAudioEncoderFactoryFactory_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong
+JNI_BuiltinAudioEncoderFactoryFactory_CreateBuiltinAudioEncoderFactory(
+ JNIEnv* env) {
+ return NativeToJavaPointer(CreateBuiltinAudioEncoderFactory().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc
new file mode 100644
index 0000000000..1246d88c0b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/av1/dav1d_decoder.h"
+#include "sdk/android/generated_dav1d_jni/Dav1dDecoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_Dav1dDecoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(webrtc::CreateDav1dDecoder().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc b/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc
new file mode 100644
index 0000000000..1bbc7031a0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <EGL/egl.h>
+
+#include "sdk/android/generated_video_egl_jni/EglBase10Impl_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_EglBase10Impl_GetCurrentNativeEGLContext(JNIEnv* jni) {
+ return reinterpret_cast<jlong>(eglGetCurrentContext());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc
new file mode 100644
index 0000000000..9bd73a4a51
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/encoded_image.h"
+
+#include "api/video/encoded_image.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/EncodedImage_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/scoped_java_ref_counted.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+class JavaEncodedImageBuffer : public EncodedImageBufferInterface {
+ public:
+ JavaEncodedImageBuffer(JNIEnv* env,
+ const JavaRef<jobject>& j_encoded_image,
+ const uint8_t* payload,
+ size_t size)
+ : j_encoded_image_(ScopedJavaRefCounted::Retain(env, j_encoded_image)),
+ data_(const_cast<uint8_t*>(payload)),
+ size_(size) {}
+
+ const uint8_t* data() const override { return data_; }
+ uint8_t* data() override { return data_; }
+ size_t size() const override { return size_; }
+
+ private:
+ // The Java object owning the buffer.
+ const ScopedJavaRefCounted j_encoded_image_;
+
+ // TODO(bugs.webrtc.org/9378): Make const, and delete above const_cast.
+ uint8_t* const data_;
+ size_t const size_;
+};
+} // namespace
+
+ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env,
+ VideoFrameType frame_type) {
+ return Java_FrameType_fromNativeIndex(env, static_cast<int>(frame_type));
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage(
+ JNIEnv* jni,
+ const EncodedImage& image) {
+ ScopedJavaLocalRef<jobject> buffer = NewDirectByteBuffer(
+ jni, const_cast<uint8_t*>(image.data()), image.size());
+ ScopedJavaLocalRef<jobject> frame_type =
+ NativeToJavaFrameType(jni, image._frameType);
+ ScopedJavaLocalRef<jobject> qp;
+ if (image.qp_ != -1)
+ qp = NativeToJavaInteger(jni, image.qp_);
+ // TODO(bugs.webrtc.org/9378): Keep a reference to the C++ EncodedImage data,
+ // and use the releaseCallback to manage lifetime.
+ return Java_EncodedImage_Constructor(
+ jni, buffer,
+ /*releaseCallback=*/ScopedJavaGlobalRef<jobject>(nullptr),
+ static_cast<int>(image._encodedWidth),
+ static_cast<int>(image._encodedHeight),
+ image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec, frame_type,
+ static_cast<jint>(image.rotation_), qp);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaFrameTypeArray(
+ JNIEnv* env,
+ const std::vector<VideoFrameType>& frame_types) {
+ return NativeToJavaObjectArray(
+ env, frame_types, org_webrtc_EncodedImage_00024FrameType_clazz(env),
+ &NativeToJavaFrameType);
+}
+
+EncodedImage JavaToNativeEncodedImage(JNIEnv* env,
+ const JavaRef<jobject>& j_encoded_image) {
+ const JavaRef<jobject>& j_buffer =
+ Java_EncodedImage_getBuffer(env, j_encoded_image);
+ const uint8_t* buffer =
+ static_cast<uint8_t*>(env->GetDirectBufferAddress(j_buffer.obj()));
+ const size_t buffer_size = env->GetDirectBufferCapacity(j_buffer.obj());
+
+ EncodedImage frame;
+ frame.SetEncodedData(rtc::make_ref_counted<JavaEncodedImageBuffer>(
+ env, j_encoded_image, buffer, buffer_size));
+
+ frame._encodedWidth = Java_EncodedImage_getEncodedWidth(env, j_encoded_image);
+ frame._encodedHeight =
+ Java_EncodedImage_getEncodedHeight(env, j_encoded_image);
+ frame.rotation_ = static_cast<VideoRotation>(
+     Java_EncodedImage_getRotation(env, j_encoded_image));
+
+ frame.qp_ = JavaToNativeOptionalInt(
+ env, Java_EncodedImage_getQp(env, j_encoded_image))
+ .value_or(-1);
+
+ frame._frameType = static_cast<VideoFrameType>(
+     Java_EncodedImage_getFrameType(env, j_encoded_image));
+ return frame;
+}
+
+int64_t GetJavaEncodedImageCaptureTimeNs(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_encoded_image) {
+ return Java_EncodedImage_getCaptureTimeNs(env, j_encoded_image);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h
new file mode 100644
index 0000000000..fc6d06243c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_
+#define SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_
+
+#include <jni.h>
+#include <vector>
+
+#include "api/video/video_frame_type.h"
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+
+class EncodedImage;
+
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env,
+ VideoFrameType frame_type);
+ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage(JNIEnv* jni,
+ const EncodedImage& image);
+ScopedJavaLocalRef<jobjectArray> NativeToJavaFrameTypeArray(
+ JNIEnv* env,
+ const std::vector<VideoFrameType>& frame_types);
+
+EncodedImage JavaToNativeEncodedImage(JNIEnv* env,
+ const JavaRef<jobject>& j_encoded_image);
+
+int64_t GetJavaEncodedImageCaptureTimeNs(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoded_image);
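+
+// Illustrative round trip using the conversions above (sketch; `env` and
+// `image` are assumed to exist):
+//
+//   ScopedJavaLocalRef<jobject> j_image = NativeToJavaEncodedImage(env, image);
+//   EncodedImage native_image = JavaToNativeEncodedImage(env, j_image);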
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc b/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc
new file mode 100644
index 0000000000..882df95b82
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "sdk/android/generated_video_jni/H264Utils_jni.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+
+namespace webrtc {
+namespace jni {
+
+static jboolean JNI_H264Utils_IsSameH264Profile(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& params1,
+ const JavaParamRef<jobject>& params2) {
+ return H264IsSameProfile(JavaToNativeStringMap(env, params1),
+ JavaToNativeStringMap(env, params2));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc
new file mode 100644
index 0000000000..95dcd66bb5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/generated_video_jni/JavaI420Buffer_jni.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_JavaI420Buffer_CropAndScaleI420(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src_y,
+ jint src_stride_y,
+ const JavaParamRef<jobject>& j_src_u,
+ jint src_stride_u,
+ const JavaParamRef<jobject>& j_src_v,
+ jint src_stride_v,
+ jint crop_x,
+ jint crop_y,
+ jint crop_width,
+ jint crop_height,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v,
+ jint scale_width,
+ jint scale_height) {
+ uint8_t const* src_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj()));
+ uint8_t const* src_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj()));
+ uint8_t const* src_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ // Perform cropping using pointer arithmetic.
+ src_y += crop_x + crop_y * src_stride_y;
+ src_u += crop_x / 2 + crop_y / 2 * src_stride_u;
+ src_v += crop_x / 2 + crop_y / 2 * src_stride_v;
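+ // For example, with crop_x=64 and crop_y=32 the Y plane advances
+ // 64 + 32 * stride bytes, while the 2x2-subsampled U and V planes advance
+ // 32 + 16 * stride bytes each.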
+
+ int ret = libyuv::I420Scale(
+ src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width,
+ crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
+ dst_stride_v, scale_width, scale_height, libyuv::kFilterBox);
+ RTC_DCHECK_EQ(ret, 0) << "I420Scale failed";
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc
new file mode 100644
index 0000000000..3764f8deeb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/ref_count.h"
+#include "sdk/android/generated_base_jni/JniCommon_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_JniCommon_AddRef(JNIEnv* jni,
+ jlong j_native_ref_counted_pointer) {
+ reinterpret_cast<rtc::RefCountInterface*>(j_native_ref_counted_pointer)
+ ->AddRef();
+}
+
+static void JNI_JniCommon_ReleaseRef(JNIEnv* jni,
+ jlong j_native_ref_counted_pointer) {
+ reinterpret_cast<rtc::RefCountInterface*>(j_native_ref_counted_pointer)
+ ->Release();
+}
+
+static ScopedJavaLocalRef<jobject> JNI_JniCommon_AllocateByteBuffer(
+ JNIEnv* jni,
+ jint size) {
+ void* new_data = ::operator new(size);
+ return NewDirectByteBuffer(jni, new_data, size);
+}
+
+static void JNI_JniCommon_FreeByteBuffer(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& byte_buffer) {
+ void* data = jni->GetDirectBufferAddress(byte_buffer.obj());
+ ::operator delete(data);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc
new file mode 100644
index 0000000000..dc34849d1b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/jni_generator_helper.h"
+
+#include "sdk/android/native_api/jni/class_loader.h"
+
+namespace webrtc {
+
+// If `atomic_class_id` is set, it'll return immediately. Otherwise, it'll look
+// up the class and store it. If there's a race, we take care to only store one
+// global reference (and the duplicated effort will happen only once).
+jclass LazyGetClass(JNIEnv* env,
+ const char* class_name,
+ std::atomic<jclass>* atomic_class_id) {
+ const jclass value = std::atomic_load(atomic_class_id);
+ if (value)
+ return value;
+ webrtc::ScopedJavaGlobalRef<jclass> clazz(webrtc::GetClass(env, class_name));
+ RTC_CHECK(!clazz.is_null()) << class_name;
+ jclass cas_result = nullptr;
+ if (std::atomic_compare_exchange_strong(atomic_class_id, &cas_result,
+ clazz.obj())) {
+ // We successfully stored `clazz` in `atomic_class_id`, so we are
+ // intentionally leaking the global ref since it's now stored there.
+ return clazz.Release();
+ } else {
+ // Some other thread came before us and stored a global pointer in
+ // `atomic_class_id`. Release our global ref and return the ref from the
+ // other thread.
+ return cas_result;
+ }
+}
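+
+// Generated bindings are expected to call LazyGetClass() in a pattern similar
+// to this sketch (the class name is hypothetical):
+//
+//   std::atomic<jclass> g_Foo_clazz(nullptr);
+//   jclass foo_clazz =
+//       webrtc::LazyGetClass(env, "org/webrtc/Foo", &g_Foo_clazz);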
+
+// If `atomic_method_id` is set, it'll return immediately. Otherwise, it'll look
+// up the method id and store it. If there's a race, it's ok since the values
+// are the same (and the duplicated effort will happen only once).
+template <MethodID::Type type>
+jmethodID MethodID::LazyGet(JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id) {
+ const jmethodID value = std::atomic_load(atomic_method_id);
+ if (value)
+ return value;
+ auto get_method_ptr = type == MethodID::TYPE_STATIC
+ ? &JNIEnv::GetStaticMethodID
+ : &JNIEnv::GetMethodID;
+ jmethodID id = (env->*get_method_ptr)(clazz, method_name, jni_signature);
+ CHECK_EXCEPTION(env) << "error during GetMethodID: " << method_name << ", "
+ << jni_signature;
+ RTC_CHECK(id) << method_name << ", " << jni_signature;
+ std::atomic_store(atomic_method_id, id);
+ return id;
+}
+
+// Explicit instantiations for the two supported method types.
+template jmethodID MethodID::LazyGet<MethodID::TYPE_STATIC>(
+ JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id);
+
+template jmethodID MethodID::LazyGet<MethodID::TYPE_INSTANCE>(
+ JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id);
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h
new file mode 100644
index 0000000000..23695ca8c7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+// Do not include this file directly. It's intended to be used only by the JNI
+// generation script. We are exporting types in strange namespaces in order to
+// be compatible with the generated code targeted for Chromium.
+
+#ifndef SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_
+#define SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_
+
+#include <jni.h>
+#include <atomic>
+
+#include "rtc_base/checks.h"
+#include "sdk/android/native_api/jni/jni_int_wrapper.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+#define CHECK_CLAZZ(env, jcaller, clazz, ...) RTC_DCHECK(clazz);
+#define CHECK_NATIVE_PTR(env, jcaller, native_ptr, method_name, ...) \
+ RTC_DCHECK(native_ptr) << method_name;
+
+#define BASE_EXPORT
+#define JNI_REGISTRATION_EXPORT __attribute__((visibility("default")))
+
+#if defined(WEBRTC_ARCH_X86)
+// Dalvik JIT generated code doesn't guarantee 16-byte stack alignment on
+// x86 - use force_align_arg_pointer to realign the stack at the JNI
+// boundary. crbug.com/655248
+#define JNI_GENERATOR_EXPORT \
+ __attribute__((force_align_arg_pointer)) extern "C" JNIEXPORT JNICALL
+#else
+#define JNI_GENERATOR_EXPORT extern "C" JNIEXPORT JNICALL
+#endif
+
+#define CHECK_EXCEPTION(jni) \
+ RTC_CHECK(!jni->ExceptionCheck()) \
+ << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
+
+namespace webrtc {
+
+// This function will initialize `atomic_class_id` to contain a global ref to
+// the given class, and will return that ref on subsequent calls. The caller is
+// responsible to zero-initialize `atomic_class_id`. It's fine to
+// simultaneously call this on multiple threads referencing the same
+// `atomic_method_id`.
+jclass LazyGetClass(JNIEnv* env,
+ const char* class_name,
+ std::atomic<jclass>* atomic_class_id);
+
+// This class is a wrapper for JNIEnv Get(Static)MethodID.
+class MethodID {
+ public:
+ enum Type {
+ TYPE_STATIC,
+ TYPE_INSTANCE,
+ };
+
+ // This function will initialize `atomic_method_id` to contain a ref to
+ // the given method, and will return that ref on subsequent calls. The caller
+ // is responsible to zero-initialize `atomic_method_id`. It's fine to
+ // simultaneously call this on multiple threads referencing the same
+ // `atomic_method_id`.
+ template <Type type>
+ static jmethodID LazyGet(JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id);
+};
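+
+// Illustrative lookup via MethodID::LazyGet() (sketch; the method and
+// signature are hypothetical):
+//
+//   static std::atomic<jmethodID> g_to_string_id(nullptr);
+//   jmethodID id = webrtc::MethodID::LazyGet<webrtc::MethodID::TYPE_INSTANCE>(
+//       env, clazz, "toString", "()Ljava/lang/String;", &g_to_string_id);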
+
+} // namespace webrtc
+
+// Re-export relevant classes into the namespaces the script expects.
+namespace base {
+namespace android {
+
+using webrtc::JavaParamRef;
+using webrtc::JavaRef;
+using webrtc::ScopedJavaLocalRef;
+using webrtc::LazyGetClass;
+using webrtc::MethodID;
+
+} // namespace android
+} // namespace base
+
+namespace jni_generator {
+inline void CheckException(JNIEnv* env) {
+ CHECK_EXCEPTION(env);
+}
+
+// A 32-bit value could coincide with an address already on the stack, whereas
+// a random 64-bit marker is much less likely to appear there by accident.
+constexpr uint64_t kJniStackMarkerValue = 0xbdbdef1bebcade1b;
+
+// Context about the JNI call, stored on the stack; exceptions are unchecked.
+struct BASE_EXPORT JniJavaCallContextUnchecked {
+ inline JniJavaCallContextUnchecked() {
+// TODO(ssid): Implement for other architectures.
+#if defined(__arm__) || defined(__aarch64__)
+ // This assumes that this method does not increment the stack pointer.
+ asm volatile("mov %0, sp" : "=r"(sp));
+#else
+ sp = 0;
+#endif
+ }
+
+ // Force no inline to reduce code size.
+ template <base::android::MethodID::Type type>
+ void Init(JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id) {
+ env1 = env;
+
+ // Make sure compiler doesn't optimize out the assignment.
+ memcpy(&marker, &kJniStackMarkerValue, sizeof(kJniStackMarkerValue));
+ // Gets PC of the calling function.
+ pc = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
+
+ method_id = base::android::MethodID::LazyGet<type>(
+ env, clazz, method_name, jni_signature, atomic_method_id);
+ }
+
+ ~JniJavaCallContextUnchecked() {
+ // Reset so that spurious marker finds are avoided.
+ memset(&marker, 0, sizeof(marker));
+ }
+
+ uint64_t marker;
+ uintptr_t sp;
+ uintptr_t pc;
+
+ JNIEnv* env1;
+ jmethodID method_id;
+};
+
+// Context about the JNI call, stored on the stack; exceptions are checked
+// when the context is destroyed.
+struct BASE_EXPORT JniJavaCallContextChecked {
+ // Force no inline to reduce code size.
+ template <base::android::MethodID::Type type>
+ void Init(JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id) {
+ base.Init<type>(env, clazz, method_name, jni_signature, atomic_method_id);
+ // Reset `pc` to correct caller.
+ base.pc = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
+ }
+
+ ~JniJavaCallContextChecked() { jni_generator::CheckException(base.env1); }
+
+ JniJavaCallContextUnchecked base;
+};
+
+static_assert(sizeof(JniJavaCallContextChecked) ==
+ sizeof(JniJavaCallContextUnchecked),
+ "Stack unwinder cannot work with structs of different sizes.");
+} // namespace jni_generator
+
+#endif // SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc
new file mode 100644
index 0000000000..53399abab1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/src/jni/jni_helpers.h"
+
+#include <vector>
+
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NewDirectByteBuffer(JNIEnv* env,
+ void* address,
+ jlong capacity) {
+ ScopedJavaLocalRef<jobject> buffer(
+ env, env->NewDirectByteBuffer(address, capacity));
+ CHECK_EXCEPTION(env) << "error NewDirectByteBuffer";
+ return buffer;
+}
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o) {
+ jobject ret = jni->NewGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
+ RTC_CHECK(ret);
+ return ret;
+}
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o) {
+ jni->DeleteGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
+}
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+ RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
+}
+ScopedLocalRefFrame::~ScopedLocalRefFrame() {
+ jni_->PopLocalFrame(nullptr);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h
new file mode 100644
index 0000000000..7a2f27b99d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains convenience functions and classes for JNI.
+// Before using any of the methods, InitGlobalJniVariables must be called.
+
+#ifndef SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
+#define SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
+
+#include <jni.h>
+#include <string>
+
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/jvm.h"
+
+// Convenience macro defining JNI-accessible methods in the org.webrtc package.
+// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
+#if defined(WEBRTC_ARCH_X86)
+// Dalvik JIT generated code doesn't guarantee 16-byte stack alignment on
+// x86 - use force_align_arg_pointer to realign the stack at the JNI
+// boundary. crbug.com/655248
+#define JNI_FUNCTION_DECLARATION(rettype, name, ...) \
+ __attribute__((force_align_arg_pointer)) extern "C" JNIEXPORT rettype \
+ JNICALL Java_org_webrtc_##name(__VA_ARGS__)
+#else
+#define JNI_FUNCTION_DECLARATION(rettype, name, ...) \
+ extern "C" JNIEXPORT rettype JNICALL Java_org_webrtc_##name(__VA_ARGS__)
+#endif
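+
+// For example (sketch; the method name is hypothetical), the declaration
+//
+//   JNI_FUNCTION_DECLARATION(void, Foo_nativeBar, JNIEnv* jni, jclass clazz);
+//
+// expands to an exported
+//   extern "C" JNIEXPORT void JNICALL
+//   Java_org_webrtc_Foo_nativeBar(JNIEnv* jni, jclass clazz);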
+
+namespace webrtc {
+namespace jni {
+
+// TODO(sakal): Remove once clients have migrated.
+using ::webrtc::JavaToStdMapStrings;
+
+// Deprecated, use NativeToJavaPointer.
+inline long jlongFromPointer(void* ptr) {
+ return NativeToJavaPointer(ptr);
+}
+
+ScopedJavaLocalRef<jobject> NewDirectByteBuffer(JNIEnv* env,
+ void* address,
+ jlong capacity);
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o);
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o);
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+class ScopedLocalRefFrame {
+ public:
+ explicit ScopedLocalRefFrame(JNIEnv* jni);
+ ~ScopedLocalRefFrame();
+
+ private:
+ JNIEnv* jni_;
+};
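+
+// Example (sketch): scoping local references created in a C++-initiated
+// callback so they are released when the frame goes out of scope.
+//
+//   void OnFrame(JNIEnv* jni) {
+//     ScopedLocalRefFrame local_ref_frame(jni);
+//     // Local refs created here are freed when `local_ref_frame` dies.
+//   }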
+
+} // namespace jni
+} // namespace webrtc
+
+// TODO(magjed): Remove once external clients are updated.
+namespace webrtc_jni {
+
+using webrtc::AttachCurrentThreadIfNeeded;
+using webrtc::jni::InitGlobalJniVariables;
+
+} // namespace webrtc_jni
+
+#endif // SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc
new file mode 100644
index 0000000000..a1829ad0b1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "rtc_base/ssl_adapter.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) {
+ jint ret = InitGlobalJniVariables(jvm);
+ RTC_DCHECK_GE(ret, 0);
+ if (ret < 0)
+ return -1;
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ webrtc::InitClassLoader(GetEnv());
+
+ return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) {
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace jni
+} // namespace webrtc
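
Clients that provide their own JNI_OnLoad (and therefore do not link jni_onload.cc) must perform the same initialization themselves. A hedged sketch of that chaining, assuming the application links the WebRTC JNI statically:

    #include <jni.h>

    #include "rtc_base/checks.h"
    #include "rtc_base/ssl_adapter.h"
    #include "sdk/android/native_api/jni/class_loader.h"
    #include "sdk/android/src/jni/jni_helpers.h"

    extern "C" JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* jvm, void*) {
      jint ret = webrtc::jni::InitGlobalJniVariables(jvm);
      if (ret < 0)
        return -1;  // GetEnv failed; the VM is older than JNI 1.6.
      RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
      webrtc::InitClassLoader(webrtc::jni::GetEnv());
      // Application-specific registration would go here.
      return ret;  // JNI_VERSION_1_6.
    }
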
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jvm.cc b/third_party/libwebrtc/sdk/android/src/jni/jvm.cc
new file mode 100644
index 0000000000..4cf1aa5e8e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jvm.cc
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/jvm.h"
+
+#include <asm/unistd.h>
+#include <pthread.h>
+#include <sys/prctl.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+
+#include <string>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace jni {
+
+static JavaVM* g_jvm = nullptr;
+
+static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
+
+// Key for per-thread JNIEnv* data. Non-NULL in threads attached to `g_jvm` by
+// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
+// were attached by the JVM because of a Java->native call.
+static pthread_key_t g_jni_ptr;
+
+JavaVM* GetJVM() {
+ RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
+ return g_jvm;
+}
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv() {
+ void* env = nullptr;
+ jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
+ RTC_CHECK(((env != nullptr) && (status == JNI_OK)) ||
+ ((env == nullptr) && (status == JNI_EDETACHED)))
+ << "Unexpected GetEnv return: " << status << ":" << env;
+ return reinterpret_cast<JNIEnv*>(env);
+}
+
+static void ThreadDestructor(void* prev_jni_ptr) {
+  // This function only runs on threads where `g_jni_ptr` is non-NULL, meaning
+  // we were responsible for originally attaching the thread, so we are
+  // responsible for detaching it now. However, because some JVM
+  // implementations (notably Oracle's http://goo.gl/eHApYT) also use the
+  // pthread_key_create mechanism, the JVM's accounting info for this thread
+  // may already be wiped out by the time this is called. Thus it may appear
+  // we are already detached even though it was our responsibility to detach!
+  // Oh well.
+ if (!GetEnv())
+ return;
+
+ RTC_CHECK(GetEnv() == prev_jni_ptr)
+ << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
+ jint status = g_jvm->DetachCurrentThread();
+ RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
+ RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???";
+}
+
+static void CreateJNIPtrKey() {
+ RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
+ << "pthread_key_create";
+}
+
+jint InitGlobalJniVariables(JavaVM* jvm) {
+ RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!";
+ g_jvm = jvm;
+ RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?";
+
+ RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
+
+ JNIEnv* jni = nullptr;
+ if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
+ return -1;
+
+ return JNI_VERSION_1_6;
+}
+
+// Return thread ID as a string.
+static std::string GetThreadId() {
+ char buf[21]; // Big enough to hold a kuint64max plus terminating NULL.
+ RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld",
+ static_cast<long>(syscall(__NR_gettid))),
+ sizeof(buf))
+ << "Thread id is bigger than uint64??";
+ return std::string(buf);
+}
+
+// Return the current thread's name.
+static std::string GetThreadName() {
+ char name[17] = {0};
+ if (prctl(PR_GET_NAME, name) != 0)
+ return std::string("<noname>");
+ return std::string(name);
+}
+
+// Return a |JNIEnv*| usable on this thread. Attaches to `g_jvm` if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded() {
+ JNIEnv* jni = GetEnv();
+ if (jni)
+ return jni;
+ RTC_CHECK(!pthread_getspecific(g_jni_ptr))
+ << "TLS has a JNIEnv* but not attached?";
+
+ std::string name(GetThreadName() + " - " + GetThreadId());
+ JavaVMAttachArgs args;
+ args.version = JNI_VERSION_1_6;
+ args.name = &name[0];
+ args.group = nullptr;
+// Deal with difference in signatures between Oracle's jni.h and Android's.
+#ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec!
+ void* env = nullptr;
+#else
+ JNIEnv* env = nullptr;
+#endif
+ RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args))
+ << "Failed to attach thread";
+ RTC_CHECK(env) << "AttachCurrentThread handed back NULL!";
+ jni = reinterpret_cast<JNIEnv*>(env);
+ RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
+ return jni;
+}
+
+} // namespace jni
+} // namespace webrtc
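
The attach/detach lifecycle this implements, seen from a worker thread; the thread body is illustrative:

    #include <thread>

    void WorkerBody() {
      // The first call attaches this native thread to the JVM and caches the
      // JNIEnv* in thread-local storage; later calls on the same thread just
      // return the cached pointer.
      JNIEnv* jni = webrtc::jni::AttachCurrentThreadIfNeeded();
      // ... call into Java through `jni` ...
      // No explicit detach: ThreadDestructor() runs from the pthread TLS
      // destructor at thread exit and calls DetachCurrentThread().
    }

    void SpawnWorker() {
      std::thread(WorkerBody).detach();
    }
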
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jvm.h b/third_party/libwebrtc/sdk/android/src/jni/jvm.h
new file mode 100644
index 0000000000..296a7fee1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jvm.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_JVM_H_
+#define SDK_ANDROID_SRC_JNI_JVM_H_
+
+#include <jni.h>
+
+namespace webrtc {
+namespace jni {
+
+jint InitGlobalJniVariables(JavaVM* jvm);
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv();
+
+JavaVM* GetJVM();
+
+// Return a |JNIEnv*| usable on this thread. Attaches to `g_jvm` if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded();
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_JVM_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc
new file mode 100644
index 0000000000..143055f79b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h"
+#include "sdk/android/generated_libaom_av1_decoder_if_supported_jni/LibaomAv1Decoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibaomAv1Decoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(webrtc::CreateLibaomAv1Decoder().release());
+}
+
+static jboolean JNI_LibaomAv1Decoder_IsSupported(JNIEnv* jni) {
+ return webrtc::kIsLibaomAv1DecoderSupported;
+}
+
+} // namespace jni
+} // namespace webrtc
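
The jlong returned above is the standard pointer round-trip used throughout these bindings; a sketch of both directions (the helper names CreateHandle/DestroyHandle are assumptions, not SDK API):

    #include "api/video_codecs/video_decoder.h"
    #include "modules/video_coding/codecs/av1/libaom_av1_decoder.h"
    #include "sdk/android/src/jni/jni_helpers.h"

    // Native -> Java: release ownership into an opaque handle for the wrapper.
    jlong CreateHandle() {
      return webrtc::jni::jlongFromPointer(
          webrtc::CreateLibaomAv1Decoder().release());
    }

    // Java -> native: the wrapper hands the handle back exactly once to free it.
    void DestroyHandle(jlong handle) {
      delete reinterpret_cast<webrtc::VideoDecoder*>(handle);
    }
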
diff --git a/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc
new file mode 100644
index 0000000000..400c3124fe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
+#include "sdk/android/generated_libaom_av1_encoder_jni/LibaomAv1Encoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) {
+ return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc
new file mode 100644
index 0000000000..84394d8ee5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/src/jni/logging/log_sink.h"
+
+#include "absl/strings/string_view.h"
+#include "sdk/android/generated_logging_jni/JNILogging_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+JNILogSink::JNILogSink(JNIEnv* env, const JavaRef<jobject>& j_logging)
+ : j_logging_(env, j_logging) {}
+JNILogSink::~JNILogSink() = default;
+
+void JNILogSink::OnLogMessage(const std::string& msg) {
+ RTC_DCHECK_NOTREACHED();
+}
+
+void JNILogSink::OnLogMessage(const std::string& msg,
+ rtc::LoggingSeverity severity,
+ const char* tag) {
+ OnLogMessage(absl::string_view{msg}, severity, tag);
+}
+
+void JNILogSink::OnLogMessage(absl::string_view msg,
+ rtc::LoggingSeverity severity,
+ const char* tag) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_JNILogging_logToInjectable(
+ env, j_logging_, NativeToJavaString(env, std::string(msg)),
+ NativeToJavaInteger(env, severity), NativeToJavaString(env, tag));
+}
+
+} // namespace jni
+} // namespace webrtc
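
A sketch of wiring the sink into the rtc logging machinery; the Java JNILogging object comes from the application, and the function names here are assumptions:

    #include <memory>

    #include "rtc_base/logging.h"
    #include "sdk/android/src/jni/logging/log_sink.h"

    std::unique_ptr<webrtc::jni::JNILogSink> g_sink;

    void EnableInjectableLogging(JNIEnv* env,
                                 const webrtc::JavaRef<jobject>& j_logging) {
      g_sink = std::make_unique<webrtc::jni::JNILogSink>(env, j_logging);
      // Route all messages at LS_INFO and above through the Java callback.
      rtc::LogMessage::AddLogToStream(g_sink.get(), rtc::LS_INFO);
    }

    void DisableInjectableLogging() {
      rtc::LogMessage::RemoveLogToStream(g_sink.get());
      g_sink.reset();
    }
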
diff --git a/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h
new file mode 100644
index 0000000000..8e681ac3ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_
+#define SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class JNILogSink : public rtc::LogSink {
+ public:
+ JNILogSink(JNIEnv* env, const JavaRef<jobject>& j_logging);
+ ~JNILogSink() override;
+
+ void OnLogMessage(const std::string& msg) override;
+ void OnLogMessage(const std::string& msg,
+ rtc::LoggingSeverity severity,
+ const char* tag) override;
+ void OnLogMessage(absl::string_view msg,
+ rtc::LoggingSeverity severity,
+ const char* tag) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_logging_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc
new file mode 100644
index 0000000000..f8eb48422b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/native_capturer_observer.h"
+
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_video_jni/NativeCapturerObserver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/android_video_track_source.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
+ JNIEnv* env,
+ rtc::scoped_refptr<AndroidVideoTrackSource> native_source) {
+ return Java_NativeCapturerObserver_Constructor(
+ env, NativeToJavaPointer(native_source.release()));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h
new file mode 100644
index 0000000000..51acf41f03
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_
+
+#include <jni.h>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/android_video_track_source.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
+ JNIEnv* env,
+ rtc::scoped_refptr<AndroidVideoTrackSource> native_source);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc
new file mode 100644
index 0000000000..d0e7972446
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#include <vector>
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+#include "rtc_base/checks.h"
+#include "sdk/android/generated_video_jni/NV12Buffer_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_NV12Buffer_CropAndScale(JNIEnv* jni,
+ jint crop_x,
+ jint crop_y,
+ jint crop_width,
+ jint crop_height,
+ jint scale_width,
+ jint scale_height,
+ const JavaParamRef<jobject>& j_src,
+ jint src_width,
+ jint src_height,
+ jint src_stride,
+ jint src_slice_height,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v) {
+ const int src_stride_y = src_stride;
+ const int src_stride_uv = src_stride;
+ const int crop_chroma_x = crop_x / 2;
+ const int crop_chroma_y = crop_y / 2;
+ const int crop_chroma_width = (crop_width + 1) / 2;
+ const int crop_chroma_height = (crop_height + 1) / 2;
+ const int tmp_stride_u = crop_chroma_width;
+ const int tmp_stride_v = crop_chroma_width;
+ const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v);
+
+ uint8_t const* src_y =
+ static_cast<uint8_t const*>(jni->GetDirectBufferAddress(j_src.obj()));
+ uint8_t const* src_uv = src_y + src_slice_height * src_stride_y;
+
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ // Crop using pointer arithmetic.
+ src_y += crop_x + crop_y * src_stride_y;
+ src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv;
+
+ std::vector<uint8_t> tmp_buffer(tmp_size);
+ uint8_t* tmp_u = tmp_buffer.data();
+ uint8_t* tmp_v = tmp_u + crop_chroma_height * tmp_stride_u;
+
+ libyuv::SplitUVPlane(src_uv, src_stride_uv, tmp_u, tmp_stride_u, tmp_v,
+ tmp_stride_v, crop_chroma_width, crop_chroma_height);
+
+ libyuv::I420Scale(src_y, src_stride_y, tmp_u, tmp_stride_u, tmp_v,
+ tmp_stride_v, crop_width, crop_height, dst_y, dst_stride_y,
+ dst_u, dst_stride_u, dst_v, dst_stride_v, scale_width,
+ scale_height, libyuv::kFilterBox);
+}
+
+} // namespace jni
+} // namespace webrtc
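
The chroma arithmetic above rounds up for odd crop sizes; a worked instance with hypothetical numbers:

    // crop_width = crop_height = 5 (odd on purpose):
    //   crop_chroma_width  = (5 + 1) / 2 = 3
    //   crop_chroma_height = (5 + 1) / 2 = 3
    //   tmp_size = 3 * (3 + 3) = 18 bytes for the de-interleaved U and V.
    // The UV offset doubles x because NV12 stores interleaved u,v pairs:
    //   src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv;
    static_assert((5 + 1) / 2 == 3, "chroma dimensions round up");
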
diff --git a/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc
new file mode 100644
index 0000000000..10e3316f33
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#include <vector>
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "sdk/android/generated_video_jni/NV21Buffer_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_NV21Buffer_CropAndScale(JNIEnv* jni,
+ jint crop_x,
+ jint crop_y,
+ jint crop_width,
+ jint crop_height,
+ jint scale_width,
+ jint scale_height,
+ const JavaParamRef<jbyteArray>& j_src,
+ jint src_width,
+ jint src_height,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v) {
+ const int src_stride_y = src_width;
+ const int src_stride_uv = src_width;
+ const int crop_chroma_x = crop_x / 2;
+ const int crop_chroma_y = crop_y / 2;
+
+ jboolean was_copy;
+ jbyte* src_bytes = jni->GetByteArrayElements(j_src.obj(), &was_copy);
+ RTC_DCHECK(!was_copy);
+ uint8_t const* src_y = reinterpret_cast<uint8_t const*>(src_bytes);
+ uint8_t const* src_uv = src_y + src_height * src_stride_y;
+
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ // Crop using pointer arithmetic.
+ src_y += crop_x + crop_y * src_stride_y;
+ src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv;
+
+ NV12ToI420Scaler scaler;
+ // U- and V-planes are swapped because this is NV21 not NV12.
+ scaler.NV12ToI420Scale(src_y, src_stride_y, src_uv, src_stride_uv, crop_width,
+ crop_height, dst_y, dst_stride_y, dst_v, dst_stride_v,
+ dst_u, dst_stride_u, scale_width, scale_height);
+
+ jni->ReleaseByteArrayElements(j_src.obj(), src_bytes, JNI_ABORT);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc
new file mode 100644
index 0000000000..7f3dddbb28
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/add_ice_candidate_observer.h"
+
+#include <utility>
+
+#include "sdk/android/generated_peerconnection_jni/AddIceObserver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/media_constraints.h"
+
+namespace webrtc {
+namespace jni {
+
+AddIceCandidateObserverJni::AddIceCandidateObserverJni(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(env, j_observer) {}
+
+void AddIceCandidateObserverJni::OnComplete(webrtc::RTCError error) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ if (error.ok()) {
+ Java_AddIceObserver_onAddSuccess(env, j_observer_global_);
+ } else {
+ Java_AddIceObserver_onAddFailure(env, j_observer_global_,
+ NativeToJavaString(env, error.message()));
+ }
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h
new file mode 100644
index 0000000000..1128385389
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
+
+#include <memory>
+#include <string>
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class AddIceCandidateObserverJni final
+ : public rtc::RefCountedNonVirtual<AddIceCandidateObserverJni> {
+ public:
+ AddIceCandidateObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
+ ~AddIceCandidateObserverJni() = default;
+
+ void OnComplete(RTCError error);
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h b/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h
new file mode 100644
index 0000000000..609c1b056e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(sakal): Remove this file once clients have updated to the native API.
+#include "sdk/android/src/jni/android_network_monitor.h"
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc
new file mode 100644
index 0000000000..74c8b5547a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/audio.h"
+
+#include "modules/audio_processing/include/audio_processing.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing() {
+ return AudioProcessingBuilder().Create();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h
new file mode 100644
index 0000000000..7a79bed986
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
+#define SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
+
+#include "api/scoped_refptr.h"
+// Adding 'nogncheck' to disable the gn include-headers check;
+// we don't want this target to depend on audio-related targets.
+#include "modules/audio_processing/include/audio_processing.h" // nogncheck
+
+namespace webrtc {
+namespace jni {
+
+rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing();
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc
new file mode 100644
index 0000000000..b00287eaae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_peerconnection_jni/AudioTrack_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_AudioTrack_SetVolume(JNIEnv*,
+ jlong j_p,
+ jdouble volume) {
+ rtc::scoped_refptr<AudioSourceInterface> source(
+ reinterpret_cast<AudioTrackInterface*>(j_p)->GetSource());
+ source->SetVolume(volume);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
new file mode 100644
index 0000000000..b937a0d03a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/log_sinks.h"
+#include "sdk/android/generated_peerconnection_jni/CallSessionFileRotatingLogSink_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_CallSessionFileRotatingLogSink_AddSink(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_dirPath,
+ jint j_maxFileSize,
+ jint j_severity) {
+ std::string dir_path = JavaToStdString(jni, j_dirPath);
+ rtc::CallSessionFileRotatingLogSink* sink =
+ new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
+ if (!sink->Init()) {
+ RTC_LOG_V(rtc::LoggingSeverity::LS_WARNING)
+ << "Failed to init CallSessionFileRotatingLogSink for path "
+ << dir_path;
+ delete sink;
+ return 0;
+ }
+ rtc::LogMessage::AddLogToStream(
+ sink, static_cast<rtc::LoggingSeverity>(j_severity));
+ return jlongFromPointer(sink);
+}
+
+static void JNI_CallSessionFileRotatingLogSink_DeleteSink(
+ JNIEnv* jni,
+ jlong j_sink) {
+ rtc::CallSessionFileRotatingLogSink* sink =
+ reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
+ rtc::LogMessage::RemoveLogToStream(sink);
+ delete sink;
+}
+
+static ScopedJavaLocalRef<jbyteArray>
+JNI_CallSessionFileRotatingLogSink_GetLogData(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_dirPath) {
+ std::string dir_path = JavaToStdString(jni, j_dirPath);
+ rtc::CallSessionFileRotatingStreamReader file_reader(dir_path);
+ size_t log_size = file_reader.GetSize();
+ if (log_size == 0) {
+ RTC_LOG_V(rtc::LoggingSeverity::LS_WARNING)
+ << "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
+ return ScopedJavaLocalRef<jbyteArray>(jni, jni->NewByteArray(0));
+ }
+
+  // TODO(nisse, sakal): To avoid copying, change api to use ByteBuffer.
+  // Use the array form of std::unique_ptr; the single-object form would
+  // call `delete` on malloc'ed memory, which is undefined behavior.
+  std::unique_ptr<jbyte[]> buffer(new jbyte[log_size]);
+  size_t read = file_reader.ReadAll(buffer.get(), log_size);
+
+ ScopedJavaLocalRef<jbyteArray> result =
+ ScopedJavaLocalRef<jbyteArray>(jni, jni->NewByteArray(read));
+ jni->SetByteArrayRegion(result.obj(), 0, read, buffer.get());
+
+ return result;
+}
+
+} // namespace jni
+} // namespace webrtc
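
For reference, the native read path used by GetLogData, as a standalone sketch (the function name and error handling are assumptions):

    #include <string>

    #include "rtc_base/log_sinks.h"

    std::string ReadCallSessionLog(const std::string& dir_path) {
      rtc::CallSessionFileRotatingStreamReader reader(dir_path);
      const size_t size = reader.GetSize();
      std::string data(size, '\0');
      const size_t read = size ? reader.ReadAll(&data[0], size) : 0;
      data.resize(read);  // ReadAll may return fewer bytes than GetSize().
      return data;
    }
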
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc
new file mode 100644
index 0000000000..af5f195d98
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/crypto_options.h"
+
+#include "sdk/android/generated_peerconnection_jni/CryptoOptions_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+absl::optional<CryptoOptions> JavaToNativeOptionalCryptoOptions(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_crypto_options) {
+ if (j_crypto_options.is_null()) {
+ return absl::nullopt;
+ }
+
+ ScopedJavaLocalRef<jobject> j_srtp =
+ Java_CryptoOptions_getSrtp(jni, j_crypto_options);
+ ScopedJavaLocalRef<jobject> j_sframe =
+ Java_CryptoOptions_getSFrame(jni, j_crypto_options);
+
+ CryptoOptions native_crypto_options;
+ native_crypto_options.srtp.enable_gcm_crypto_suites =
+ Java_Srtp_getEnableGcmCryptoSuites(jni, j_srtp);
+ native_crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher =
+ Java_Srtp_getEnableAes128Sha1_32CryptoCipher(jni, j_srtp);
+ native_crypto_options.srtp.enable_encrypted_rtp_header_extensions =
+ Java_Srtp_getEnableEncryptedRtpHeaderExtensions(jni, j_srtp);
+ native_crypto_options.sframe.require_frame_encryption =
+ Java_SFrame_getRequireFrameEncryption(jni, j_sframe);
+ return absl::optional<CryptoOptions>(native_crypto_options);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h
new file mode 100644
index 0000000000..a9c8f2609a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_
+#define SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_
+
+#include <jni.h>
+
+#include "absl/types/optional.h"
+#include "api/crypto/crypto_options.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+absl::optional<CryptoOptions> JavaToNativeOptionalCryptoOptions(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_crypto_options);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc
new file mode 100644
index 0000000000..3552974443
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits>
+#include <memory>
+
+#include "api/data_channel_interface.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_peerconnection_jni/DataChannel_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/data_channel.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+// Adapter presenting a Java DataChannel$Observer as a C++ DataChannelObserver,
+// dispatching each callback from C++ back into Java.
+class DataChannelObserverJni : public DataChannelObserver {
+ public:
+ DataChannelObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer);
+ ~DataChannelObserverJni() override {}
+
+ void OnBufferedAmountChange(uint64_t previous_amount) override;
+ void OnStateChange() override;
+ void OnMessage(const DataBuffer& buffer) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+DataChannelObserverJni::DataChannelObserverJni(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(jni, j_observer) {}
+
+void DataChannelObserverJni::OnBufferedAmountChange(uint64_t previous_amount) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onBufferedAmountChange(env, j_observer_global_,
+ previous_amount);
+}
+
+void DataChannelObserverJni::OnStateChange() {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onStateChange(env, j_observer_global_);
+}
+
+void DataChannelObserverJni::OnMessage(const DataBuffer& buffer) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> byte_buffer = NewDirectByteBuffer(
+ env, const_cast<char*>(buffer.data.data<char>()), buffer.data.size());
+ ScopedJavaLocalRef<jobject> j_buffer =
+ Java_Buffer_Constructor(env, byte_buffer, buffer.binary);
+ Java_Observer_onMessage(env, j_observer_global_, j_buffer);
+}
+
+DataChannelInterface* ExtractNativeDC(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ return reinterpret_cast<DataChannelInterface*>(
+ Java_DataChannel_getNativeDataChannel(jni, j_dc));
+}
+
+} // namespace
+
+DataChannelInit JavaToNativeDataChannelInit(JNIEnv* env,
+ const JavaRef<jobject>& j_init) {
+ DataChannelInit init;
+ init.ordered = Java_Init_getOrdered(env, j_init);
+ init.maxRetransmitTime = Java_Init_getMaxRetransmitTimeMs(env, j_init);
+ init.maxRetransmits = Java_Init_getMaxRetransmits(env, j_init);
+ init.protocol = JavaToStdString(env, Java_Init_getProtocol(env, j_init));
+ init.negotiated = Java_Init_getNegotiated(env, j_init);
+ init.id = Java_Init_getId(env, j_init);
+ return init;
+}
+
+ScopedJavaLocalRef<jobject> WrapNativeDataChannel(
+ JNIEnv* env,
+ rtc::scoped_refptr<DataChannelInterface> channel) {
+ if (!channel)
+ return nullptr;
+  // The channel is now owned by the Java object and will be freed from there.
+ return Java_DataChannel_Constructor(env, jlongFromPointer(channel.release()));
+}
+
+static jlong JNI_DataChannel_RegisterObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc,
+ const JavaParamRef<jobject>& j_observer) {
+ auto observer = std::make_unique<DataChannelObserverJni>(jni, j_observer);
+ ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
+ return jlongFromPointer(observer.release());
+}
+
+static void JNI_DataChannel_UnregisterObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc,
+ jlong native_observer) {
+ ExtractNativeDC(jni, j_dc)->UnregisterObserver();
+ delete reinterpret_cast<DataChannelObserverJni*>(native_observer);
+}
+
+static ScopedJavaLocalRef<jstring> JNI_DataChannel_Label(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ return NativeToJavaString(jni, ExtractNativeDC(jni, j_dc)->label());
+}
+
+static jint JNI_DataChannel_Id(JNIEnv* jni, const JavaParamRef<jobject>& j_dc) {
+ int id = ExtractNativeDC(jni, j_dc)->id();
+ RTC_CHECK_LE(id, std::numeric_limits<int32_t>::max())
+ << "id overflowed jint!";
+ return static_cast<jint>(id);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_DataChannel_State(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ return Java_State_fromNativeIndex(jni, ExtractNativeDC(jni, j_dc)->state());
+}
+
+static jlong JNI_DataChannel_BufferedAmount(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
+ RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max())
+ << "buffered_amount overflowed jlong!";
+ return static_cast<jlong>(buffered_amount);
+}
+
+static void JNI_DataChannel_Close(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ ExtractNativeDC(jni, j_dc)->Close();
+}
+
+static jboolean JNI_DataChannel_Send(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc,
+ const JavaParamRef<jbyteArray>& data,
+ jboolean binary) {
+ std::vector<int8_t> buffer = JavaToNativeByteArray(jni, data);
+ bool ret = ExtractNativeDC(jni, j_dc)->Send(
+ DataBuffer(rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()), binary));
+ return ret;
+}
+
+} // namespace jni
+} // namespace webrtc
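
Register/UnregisterObserver hand raw ownership across the JNI boundary; a condensed view of that contract (the helper names are illustrative):

    #include <jni.h>

    #include <memory>

    #include "api/data_channel_interface.h"

    // The Java wrapper stores this jlong and must return it exactly once.
    jlong Register(webrtc::DataChannelInterface* dc,
                   std::unique_ptr<webrtc::DataChannelObserver> observer) {
      dc->RegisterObserver(observer.get());
      return reinterpret_cast<jlong>(observer.release());  // Java owns it now.
    }

    void Unregister(webrtc::DataChannelInterface* dc, jlong native_observer) {
      dc->UnregisterObserver();  // Stop callbacks before freeing the observer.
      delete reinterpret_cast<webrtc::DataChannelObserver*>(native_observer);
    }
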
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h
new file mode 100644
index 0000000000..9da1b67dae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
+#define SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
+
+#include <jni.h>
+
+#include "api/data_channel_interface.h"
+#include "api/scoped_refptr.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+DataChannelInit JavaToNativeDataChannelInit(JNIEnv* env,
+ const JavaRef<jobject>& j_init);
+
+ScopedJavaLocalRef<jobject> WrapNativeDataChannel(
+ JNIEnv* env,
+ rtc::scoped_refptr<DataChannelInterface> channel);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc
new file mode 100644
index 0000000000..13cb027f6d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/dtmf_sender_interface.h"
+#include "sdk/android/generated_peerconnection_jni/DtmfSender_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jboolean JNI_DtmfSender_CanInsertDtmf(JNIEnv* jni,
+ jlong j_dtmf_sender_pointer) {
+ return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+ ->CanInsertDtmf();
+}
+
+static jboolean JNI_DtmfSender_InsertDtmf(JNIEnv* jni,
+ jlong j_dtmf_sender_pointer,
+ const JavaParamRef<jstring>& tones,
+ jint duration,
+ jint inter_tone_gap) {
+ return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+ ->InsertDtmf(JavaToStdString(jni, tones), duration, inter_tone_gap);
+}
+
+static ScopedJavaLocalRef<jstring> JNI_DtmfSender_Tones(
+ JNIEnv* jni,
+ jlong j_dtmf_sender_pointer) {
+ return NativeToJavaString(
+ jni,
+ reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)->tones());
+}
+
+static jint JNI_DtmfSender_Duration(JNIEnv* jni,
+ jlong j_dtmf_sender_pointer) {
+ return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+ ->duration();
+}
+
+static jint JNI_DtmfSender_InterToneGap(JNIEnv* jni,
+ jlong j_dtmf_sender_pointer) {
+ return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+ ->inter_tone_gap();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc
new file mode 100644
index 0000000000..af92ff8e89
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+
+#include <string>
+
+#include "pc/webrtc_sdp.h"
+#include "sdk/android/generated_peerconnection_jni/IceCandidate_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/peer_connection.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> CreateJavaIceCandidate(JNIEnv* env,
+ const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& sdp,
+                                                   const std::string& server_url,
+ int adapterType) {
+ return Java_IceCandidate_Constructor(
+ env, NativeToJavaString(env, sdp_mid), sdp_mline_index,
+ NativeToJavaString(env, sdp), NativeToJavaString(env, server_url),
+ NativeToJavaAdapterType(env, adapterType));
+}
+
+} // namespace
+
+cricket::Candidate JavaToNativeCandidate(JNIEnv* jni,
+ const JavaRef<jobject>& j_candidate) {
+ std::string sdp_mid =
+ JavaToStdString(jni, Java_IceCandidate_getSdpMid(jni, j_candidate));
+ std::string sdp =
+ JavaToStdString(jni, Java_IceCandidate_getSdp(jni, j_candidate));
+ cricket::Candidate candidate;
+ if (!SdpDeserializeCandidate(sdp_mid, sdp, &candidate, NULL)) {
+    RTC_LOG(LS_ERROR) << "SdpDeserializeCandidate failed with sdp " << sdp;
+ }
+ return candidate;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaCandidate(
+ JNIEnv* env,
+ const cricket::Candidate& candidate) {
+ std::string sdp = SdpSerializeCandidate(candidate);
+ RTC_CHECK(!sdp.empty()) << "got an empty ICE candidate";
+  // sdp_mline_index is not used; pass the invalid value -1.
+ return CreateJavaIceCandidate(env, candidate.transport_name(),
+ -1 /* sdp_mline_index */, sdp,
+ "" /* server_url */, candidate.network_type());
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaIceCandidate(
+ JNIEnv* env,
+ const IceCandidateInterface& candidate) {
+ std::string sdp;
+ RTC_CHECK(candidate.ToString(&sdp)) << "got so far: " << sdp;
+ return CreateJavaIceCandidate(env, candidate.sdp_mid(),
+ candidate.sdp_mline_index(), sdp,
+ candidate.candidate().url(), 0);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaCandidateArray(
+ JNIEnv* jni,
+ const std::vector<cricket::Candidate>& candidates) {
+ return NativeToJavaObjectArray(jni, candidates,
+ org_webrtc_IceCandidate_clazz(jni),
+ &NativeToJavaCandidate);
+}
+
+PeerConnectionInterface::IceTransportsType JavaToNativeIceTransportsType(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_transports_type) {
+ std::string enum_name = GetJavaEnumName(jni, j_ice_transports_type);
+
+ if (enum_name == "ALL")
+ return PeerConnectionInterface::kAll;
+
+ if (enum_name == "RELAY")
+ return PeerConnectionInterface::kRelay;
+
+ if (enum_name == "NOHOST")
+ return PeerConnectionInterface::kNoHost;
+
+ if (enum_name == "NONE")
+ return PeerConnectionInterface::kNone;
+
+ RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
+ return PeerConnectionInterface::kAll;
+}
+
+PeerConnectionInterface::BundlePolicy JavaToNativeBundlePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_bundle_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_bundle_policy);
+
+ if (enum_name == "BALANCED")
+ return PeerConnectionInterface::kBundlePolicyBalanced;
+
+ if (enum_name == "MAXBUNDLE")
+ return PeerConnectionInterface::kBundlePolicyMaxBundle;
+
+ if (enum_name == "MAXCOMPAT")
+ return PeerConnectionInterface::kBundlePolicyMaxCompat;
+
+ RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kBundlePolicyBalanced;
+}
+
+PeerConnectionInterface::RtcpMuxPolicy JavaToNativeRtcpMuxPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtcp_mux_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_rtcp_mux_policy);
+
+ if (enum_name == "NEGOTIATE")
+ return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+
+ if (enum_name == "REQUIRE")
+ return PeerConnectionInterface::kRtcpMuxPolicyRequire;
+
+ RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+}
+
+PeerConnectionInterface::TcpCandidatePolicy JavaToNativeTcpCandidatePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_tcp_candidate_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_tcp_candidate_policy);
+
+ if (enum_name == "ENABLED")
+ return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+
+ if (enum_name == "DISABLED")
+ return PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+
+ RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+}
+
+PeerConnectionInterface::CandidateNetworkPolicy
+JavaToNativeCandidateNetworkPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_candidate_network_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_candidate_network_policy);
+
+ if (enum_name == "ALL")
+ return PeerConnectionInterface::kCandidateNetworkPolicyAll;
+
+ if (enum_name == "LOW_COST")
+ return PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
+
+ RTC_CHECK(false) << "Unexpected CandidateNetworkPolicy enum_name "
+ << enum_name;
+ return PeerConnectionInterface::kCandidateNetworkPolicyAll;
+}
+
+rtc::KeyType JavaToNativeKeyType(JNIEnv* jni,
+ const JavaRef<jobject>& j_key_type) {
+ std::string enum_name = GetJavaEnumName(jni, j_key_type);
+
+ if (enum_name == "RSA")
+ return rtc::KT_RSA;
+ if (enum_name == "ECDSA")
+ return rtc::KT_ECDSA;
+
+ RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name;
+ return rtc::KT_ECDSA;
+}
+
+PeerConnectionInterface::ContinualGatheringPolicy
+JavaToNativeContinualGatheringPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_gathering_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_gathering_policy);
+ if (enum_name == "GATHER_ONCE")
+ return PeerConnectionInterface::GATHER_ONCE;
+
+ if (enum_name == "GATHER_CONTINUALLY")
+ return PeerConnectionInterface::GATHER_CONTINUALLY;
+
+ RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name "
+ << enum_name;
+ return PeerConnectionInterface::GATHER_ONCE;
+}
+
+webrtc::PortPrunePolicy JavaToNativePortPrunePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_port_prune_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_port_prune_policy);
+ if (enum_name == "NO_PRUNE") {
+ return webrtc::NO_PRUNE;
+ }
+ if (enum_name == "PRUNE_BASED_ON_PRIORITY") {
+ return webrtc::PRUNE_BASED_ON_PRIORITY;
+ }
+ if (enum_name == "KEEP_FIRST_READY") {
+ return webrtc::KEEP_FIRST_READY;
+ }
+
+ RTC_CHECK(false) << " Unexpected PortPrunePolicy enum name " << enum_name;
+
+ return webrtc::NO_PRUNE;
+}
+
+PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_server_tls_cert_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_ice_server_tls_cert_policy);
+
+ if (enum_name == "TLS_CERT_POLICY_SECURE")
+ return PeerConnectionInterface::kTlsCertPolicySecure;
+
+ if (enum_name == "TLS_CERT_POLICY_INSECURE_NO_CHECK")
+ return PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
+
+ RTC_CHECK(false) << "Unexpected TlsCertPolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kTlsCertPolicySecure;
+}
+
+absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_preference) {
+ std::string enum_name = GetJavaEnumName(jni, j_network_preference);
+
+ if (enum_name == "UNKNOWN")
+ return absl::nullopt;
+
+ if (enum_name == "ETHERNET")
+ return rtc::ADAPTER_TYPE_ETHERNET;
+
+ if (enum_name == "WIFI")
+ return rtc::ADAPTER_TYPE_WIFI;
+
+ if (enum_name == "CELLULAR")
+ return rtc::ADAPTER_TYPE_CELLULAR;
+
+ if (enum_name == "VPN")
+ return rtc::ADAPTER_TYPE_VPN;
+
+ if (enum_name == "LOOPBACK")
+ return rtc::ADAPTER_TYPE_LOOPBACK;
+
+ RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name;
+ return absl::nullopt;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h
new file mode 100644
index 0000000000..4bdeea61c6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_
+#define SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_
+
+#include <vector>
+
+#include "api/data_channel_interface.h"
+#include "api/jsep.h"
+#include "api/jsep_ice_candidate.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtp_parameters.h"
+#include "rtc_base/ssl_identity.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+cricket::Candidate JavaToNativeCandidate(JNIEnv* jni,
+ const JavaRef<jobject>& j_candidate);
+
+ScopedJavaLocalRef<jobject> NativeToJavaCandidate(
+ JNIEnv* env,
+ const cricket::Candidate& candidate);
+
+ScopedJavaLocalRef<jobject> NativeToJavaIceCandidate(
+ JNIEnv* env,
+ const IceCandidateInterface& candidate);
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaCandidateArray(
+ JNIEnv* jni,
+ const std::vector<cricket::Candidate>& candidates);
+
+/*****************************************************
+ * Below are all things that go into RTCConfiguration.
+ *****************************************************/
+PeerConnectionInterface::IceTransportsType JavaToNativeIceTransportsType(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_transports_type);
+
+PeerConnectionInterface::BundlePolicy JavaToNativeBundlePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_bundle_policy);
+
+PeerConnectionInterface::RtcpMuxPolicy JavaToNativeRtcpMuxPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtcp_mux_policy);
+
+PeerConnectionInterface::TcpCandidatePolicy JavaToNativeTcpCandidatePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_tcp_candidate_policy);
+
+PeerConnectionInterface::CandidateNetworkPolicy
+JavaToNativeCandidateNetworkPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_candidate_network_policy);
+
+rtc::KeyType JavaToNativeKeyType(JNIEnv* jni,
+ const JavaRef<jobject>& j_key_type);
+
+PeerConnectionInterface::ContinualGatheringPolicy
+JavaToNativeContinualGatheringPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_gathering_policy);
+
+webrtc::PortPrunePolicy JavaToNativePortPrunePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_port_prune_policy);
+
+PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_server_tls_cert_policy);
+
+absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_preference);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc
new file mode 100644
index 0000000000..7b35ca051c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "rtc_base/logging.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+JNI_FUNCTION_DECLARATION(void,
+ Logging_nativeEnableLogToDebugOutput,
+ JNIEnv* jni,
+ jclass,
+ jint nativeSeverity) {
+ if (nativeSeverity >= rtc::LS_VERBOSE && nativeSeverity <= rtc::LS_NONE) {
+ rtc::LogMessage::LogToDebug(
+ static_cast<rtc::LoggingSeverity>(nativeSeverity));
+ }
+}
+
+JNI_FUNCTION_DECLARATION(void,
+ Logging_nativeEnableLogThreads,
+ JNIEnv* jni,
+ jclass) {
+ rtc::LogMessage::LogThreads(true);
+}
+
+JNI_FUNCTION_DECLARATION(void,
+ Logging_nativeEnableLogTimeStamps,
+ JNIEnv* jni,
+ jclass) {
+ rtc::LogMessage::LogTimestamps(true);
+}
+
+JNI_FUNCTION_DECLARATION(void,
+ Logging_nativeLog,
+ JNIEnv* jni,
+ jclass,
+ jint j_severity,
+ jstring j_tag,
+ jstring j_message) {
+ std::string message = JavaToStdString(jni, JavaParamRef<jstring>(j_message));
+ std::string tag = JavaToStdString(jni, JavaParamRef<jstring>(j_tag));
+ RTC_LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag.c_str())
+ << message;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc
new file mode 100644
index 0000000000..4e1a3ba406
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/media_constraints.h"
+
+#include <memory>
+
+#include "sdk/android/generated_peerconnection_jni/MediaConstraints_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+// Helper for translating a Java List<Pair<String, String>> into a native
+// MediaConstraints::Constraints.
+MediaConstraints::Constraints PopulateConstraintsFromJavaPairList(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_list) {
+ MediaConstraints::Constraints constraints;
+ for (const JavaRef<jobject>& entry : Iterable(env, j_list)) {
+ constraints.emplace_back(
+ JavaToStdString(env, Java_KeyValuePair_getKey(env, entry)),
+ JavaToStdString(env, Java_KeyValuePair_getValue(env, entry)));
+ }
+ return constraints;
+}
+
+} // namespace
+
+// Copies all needed data so the Java object is no longer needed at return.
+std::unique_ptr<MediaConstraints> JavaToNativeMediaConstraints(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_constraints) {
+ return std::make_unique<MediaConstraints>(
+ PopulateConstraintsFromJavaPairList(
+ env, Java_MediaConstraints_getMandatory(env, j_constraints)),
+ PopulateConstraintsFromJavaPairList(
+ env, Java_MediaConstraints_getOptional(env, j_constraints)));
+}
+
+} // namespace jni
+} // namespace webrtc
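
Both halves of a MediaConstraints object (mandatory and optional) are built by the same loop: walk the Java pair list once, copying each key and value into native strings so nothing Java-side has to stay alive afterwards. A sketch of that copy-out loop, with a plain vector standing in for the Java List<KeyValuePair> that Iterable() walks over JNI:

    #include <string>
    #include <utility>
    #include <vector>

    // Stand-in for MediaConstraints::Constraints: key/value string pairs.
    using Constraints = std::vector<std::pair<std::string, std::string>>;

    Constraints PopulateConstraints(
        const std::vector<std::pair<std::string, std::string>>& java_pairs) {
      Constraints constraints;
      for (const auto& entry : java_pairs) {
        // Copy both strings out; after the loop the source objects (the Java
        // pairs, in the real code) are no longer needed.
        constraints.emplace_back(entry.first, entry.second);
      }
      return constraints;
    }

    int main() {
      Constraints mandatory = PopulateConstraints(
          {{"OfferToReceiveAudio", "true"}, {"OfferToReceiveVideo", "false"}});
      return mandatory.size() == 2 ? 0 : 1;
    }
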
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h
new file mode 100644
index 0000000000..68cedc7f2d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_
+#define SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_
+
+#include <jni.h>
+#include <memory>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/media_constraints.h"
+
+namespace webrtc {
+namespace jni {
+
+std::unique_ptr<MediaConstraints> JavaToNativeMediaConstraints(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_constraints);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc
new file mode 100644
index 0000000000..e20f28f310
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_peerconnection_jni/MediaSource_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static ScopedJavaLocalRef<jobject> JNI_MediaSource_GetState(JNIEnv* jni,
+ jlong j_p) {
+ return Java_State_fromNativeIndex(
+ jni, reinterpret_cast<MediaSourceInterface*>(j_p)->state());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc
new file mode 100644
index 0000000000..20d59a6f8f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/media_stream.h"
+
+#include <memory>
+
+#include "sdk/android/generated_peerconnection_jni/MediaStream_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+JavaMediaStream::JavaMediaStream(
+ JNIEnv* env,
+ rtc::scoped_refptr<MediaStreamInterface> media_stream)
+ : j_media_stream_(
+ env,
+ Java_MediaStream_Constructor(env,
+ jlongFromPointer(media_stream.get()))) {
+ // Create an observer to update the Java stream when the native stream's set
+ // of tracks changes.
+ observer_.reset(new MediaStreamObserver(
+ media_stream.get(),
+ [this](AudioTrackInterface* audio_track,
+ MediaStreamInterface* media_stream) {
+ OnAudioTrackAddedToStream(audio_track, media_stream);
+ },
+ [this](AudioTrackInterface* audio_track,
+ MediaStreamInterface* media_stream) {
+ OnAudioTrackRemovedFromStream(audio_track, media_stream);
+ },
+ [this](VideoTrackInterface* video_track,
+ MediaStreamInterface* media_stream) {
+ OnVideoTrackAddedToStream(video_track, media_stream);
+ },
+ [this](VideoTrackInterface* video_track,
+ MediaStreamInterface* media_stream) {
+ OnVideoTrackRemovedFromStream(video_track, media_stream);
+ }));
+ for (rtc::scoped_refptr<AudioTrackInterface> track :
+ media_stream->GetAudioTracks()) {
+ Java_MediaStream_addNativeAudioTrack(env, j_media_stream_,
+ jlongFromPointer(track.release()));
+ }
+ for (rtc::scoped_refptr<VideoTrackInterface> track :
+ media_stream->GetVideoTracks()) {
+ Java_MediaStream_addNativeVideoTrack(env, j_media_stream_,
+ jlongFromPointer(track.release()));
+ }
+ // `j_media_stream` holds one reference. Corresponding Release() is in
+ // MediaStream_free, triggered by MediaStream.dispose().
+ media_stream.release();
+}
+
+JavaMediaStream::~JavaMediaStream() {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Remove the observer first, so it doesn't react to events during deletion.
+ observer_ = nullptr;
+ Java_MediaStream_dispose(env, j_media_stream_);
+}
+
+void JavaMediaStream::OnAudioTrackAddedToStream(AudioTrackInterface* track,
+ MediaStreamInterface* stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(env);
+ track->AddRef();
+ Java_MediaStream_addNativeAudioTrack(env, j_media_stream_,
+ jlongFromPointer(track));
+}
+
+void JavaMediaStream::OnVideoTrackAddedToStream(VideoTrackInterface* track,
+ MediaStreamInterface* stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(env);
+ track->AddRef();
+ Java_MediaStream_addNativeVideoTrack(env, j_media_stream_,
+ jlongFromPointer(track));
+}
+
+void JavaMediaStream::OnAudioTrackRemovedFromStream(
+ AudioTrackInterface* track,
+ MediaStreamInterface* stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(env);
+ Java_MediaStream_removeAudioTrack(env, j_media_stream_,
+ jlongFromPointer(track));
+}
+
+void JavaMediaStream::OnVideoTrackRemovedFromStream(
+ VideoTrackInterface* track,
+ MediaStreamInterface* stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(env);
+ Java_MediaStream_removeVideoTrack(env, j_media_stream_,
+ jlongFromPointer(track));
+}
+
+jclass GetMediaStreamClass(JNIEnv* env) {
+ return org_webrtc_MediaStream_clazz(env);
+}
+
+static jboolean JNI_MediaStream_AddAudioTrackToNativeStream(
+ JNIEnv* jni,
+ jlong pointer,
+ jlong j_audio_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
+ rtc::scoped_refptr<AudioTrackInterface>(
+ reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer)));
+}
+
+static jboolean JNI_MediaStream_AddVideoTrackToNativeStream(
+ JNIEnv* jni,
+ jlong pointer,
+ jlong j_video_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
+ rtc::scoped_refptr<VideoTrackInterface>(
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)));
+}
+
+static jboolean JNI_MediaStream_RemoveAudioTrack(JNIEnv* jni,
+ jlong pointer,
+ jlong j_audio_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+ rtc::scoped_refptr<AudioTrackInterface>(
+ reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer)));
+}
+
+static jboolean JNI_MediaStream_RemoveVideoTrack(JNIEnv* jni,
+ jlong pointer,
+ jlong j_video_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+ rtc::scoped_refptr<VideoTrackInterface>(
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)));
+}
+
+static ScopedJavaLocalRef<jstring> JNI_MediaStream_GetId(JNIEnv* jni,
+ jlong j_p) {
+ return NativeToJavaString(jni,
+ reinterpret_cast<MediaStreamInterface*>(j_p)->id());
+}
+
+} // namespace jni
+} // namespace webrtc
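
The constructor above hands references across the language boundary on purpose: each track.release() and the final media_stream.release() turn an owning scoped_refptr into a raw pointer packed into a jlong, and the matching Release() only happens in the native free function that Java's dispose() eventually triggers. A self-contained sketch of that handle-plus-refcount handoff, using a toy RefCounted type instead of the real rtc::RefCountInterface:

    #include <cstdint>

    // Minimal stand-in for a WebRTC ref-counted object.
    struct RefCounted {
      int ref_count = 1;
      void AddRef() { ++ref_count; }
      void Release() {
        if (--ref_count == 0)
          delete this;
      }
    };

    // Handing a reference to Java: take one extra reference and pack the
    // pointer into a 64-bit handle. In the real code the same effect comes
    // from jlongFromPointer(track.release()), which steals the reference a
    // scoped_refptr already holds instead of taking a new one.
    int64_t HandOffToJava(RefCounted* obj) {
      obj->AddRef();
      return reinterpret_cast<int64_t>(obj);
    }

    // The matching free call, triggered from Java (e.g. by dispose()).
    void FreeFromJava(int64_t handle) {
      reinterpret_cast<RefCounted*>(handle)->Release();
    }

    int main() {
      RefCounted* track = new RefCounted;     // ref_count == 1 (C++ side)
      int64_t handle = HandOffToJava(track);  // ref_count == 2 (Java handle)
      track->Release();                       // C++ side done: ref_count == 1
      FreeFromJava(handle);                   // Java dispose(): object deleted
      return 0;
    }
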
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h
new file mode 100644
index 0000000000..efa177c43e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_
+#define SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_
+
+#include <jni.h>
+#include <memory>
+
+#include "api/media_stream_interface.h"
+#include "pc/media_stream_observer.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class JavaMediaStream {
+ public:
+ explicit JavaMediaStream(
+ JNIEnv* env,
+ rtc::scoped_refptr<MediaStreamInterface> media_stream);
+ ~JavaMediaStream();
+
+ const ScopedJavaGlobalRef<jobject>& j_media_stream() {
+ return j_media_stream_;
+ }
+
+ private:
+ void OnAudioTrackAddedToStream(AudioTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnVideoTrackAddedToStream(VideoTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnAudioTrackRemovedFromStream(AudioTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnVideoTrackRemovedFromStream(VideoTrackInterface* track,
+ MediaStreamInterface* stream);
+
+ ScopedJavaGlobalRef<jobject> j_media_stream_;
+ std::unique_ptr<MediaStreamObserver> observer_;
+};
+
+jclass GetMediaStreamClass(JNIEnv* env);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_
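
The wiring between MediaStreamObserver and JavaMediaStream is done entirely with lambdas capturing `this`, one per add/remove event, so the private OnXxx member functions never need to be exposed. A reduced sketch of that wiring, with a toy observer standing in for pc/media_stream_observer.h:

    #include <functional>
    #include <iostream>
    #include <string>

    // Stand-in observer: stores callbacks and fires them when the native
    // stream's track set changes.
    struct MediaStreamObserverSketch {
      std::function<void(const std::string&)> on_audio_added;
      std::function<void(const std::string&)> on_audio_removed;
      void SimulateAudioAdded(const std::string& id) { on_audio_added(id); }
    };

    class JavaMediaStreamSketch {
     public:
      JavaMediaStreamSketch() {
        // Same shape as the real constructor: lambdas capture `this` and
        // forward to private member functions.
        observer_.on_audio_added = [this](const std::string& id) {
          OnAudioTrackAdded(id);
        };
        observer_.on_audio_removed = [this](const std::string& id) {
          OnAudioTrackRemoved(id);
        };
      }
      MediaStreamObserverSketch& observer() { return observer_; }

     private:
      void OnAudioTrackAdded(const std::string& id) {
        std::cout << "forwarding added track " << id << " to Java\n";
      }
      void OnAudioTrackRemoved(const std::string& id) {
        std::cout << "forwarding removed track " << id << " to Java\n";
      }
      MediaStreamObserverSketch observer_;
    };

    int main() {
      JavaMediaStreamSketch stream;
      stream.observer().SimulateAudioAdded("audio0");
    }
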
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc
new file mode 100644
index 0000000000..928f10c03a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_peerconnection_jni/MediaStreamTrack_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaMediaType(
+ JNIEnv* jni,
+ cricket::MediaType media_type) {
+ return Java_MediaType_fromNativeIndex(jni, media_type);
+}
+
+cricket::MediaType JavaToNativeMediaType(JNIEnv* jni,
+ const JavaRef<jobject>& j_media_type) {
+ return static_cast<cricket::MediaType>(
+ Java_MediaType_getNative(jni, j_media_type));
+}
+
+static ScopedJavaLocalRef<jstring> JNI_MediaStreamTrack_GetId(
+ JNIEnv* jni,
+ jlong j_p) {
+ return NativeToJavaString(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
+}
+
+static ScopedJavaLocalRef<jstring> JNI_MediaStreamTrack_GetKind(
+ JNIEnv* jni,
+ jlong j_p) {
+ return NativeToJavaString(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
+}
+
+static jboolean JNI_MediaStreamTrack_GetEnabled(JNIEnv* jni,
+ jlong j_p) {
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
+}
+
+static ScopedJavaLocalRef<jobject> JNI_MediaStreamTrack_GetState(
+ JNIEnv* jni,
+ jlong j_p) {
+ return Java_State_fromNativeIndex(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
+}
+
+static jboolean JNI_MediaStreamTrack_SetEnabled(JNIEnv* jni,
+ jlong j_p,
+ jboolean enabled) {
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->set_enabled(
+ enabled);
+}
+
+} // namespace jni
+} // namespace webrtc
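
Every getter and setter in this file has the same one-line shape: reinterpret_cast the jlong handle Java holds back to the interface pointer, then forward the call. A sketch of that handle-forwarding shape with a toy track type (Track, GetId, and SetEnabled are illustrative names, not WebRTC API):

    #include <cstdint>
    #include <iostream>
    #include <string>

    // Stand-in for MediaStreamTrackInterface.
    struct Track {
      std::string id;
      bool enabled = true;
    };

    // Cast the 64-bit handle back to the object and forward the call,
    // mirroring JNI_MediaStreamTrack_GetId above.
    std::string GetId(int64_t j_p) {
      return reinterpret_cast<Track*>(j_p)->id;
    }

    bool SetEnabled(int64_t j_p, bool enabled) {
      reinterpret_cast<Track*>(j_p)->enabled = enabled;
      return enabled;
    }

    int main() {
      Track track{"audio0"};
      int64_t handle = reinterpret_cast<int64_t>(&track);  // what Java holds
      std::cout << GetId(handle) << " enabled=" << SetEnabled(handle, false)
                << "\n";
    }
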
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h
new file mode 100644
index 0000000000..8bfe302db7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_
+#define SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_
+
+#include <jni.h>
+
+#include "api/media_types.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaMediaType(
+ JNIEnv* jni,
+ cricket::MediaType media_type);
+cricket::MediaType JavaToNativeMediaType(JNIEnv* jni,
+ const JavaRef<jobject>& j_media_type);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc
new file mode 100644
index 0000000000..d595c481f8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/owned_factory_and_threads.h"
+
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+OwnedFactoryAndThreads::OwnedFactoryAndThreads(
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread,
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface>& factory)
+ : socket_factory_(std::move(socket_factory)),
+ network_thread_(std::move(network_thread)),
+ worker_thread_(std::move(worker_thread)),
+ signaling_thread_(std::move(signaling_thread)),
+ factory_(factory) {}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h
new file mode 100644
index 0000000000..7dc9443ea5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_
+#define SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_
+
+#include <jni.h>
+#include <memory>
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace jni {
+
+// Helper struct for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown). This class takes ownership of its ctor's arguments to present a
+// single object for Java to hold and eventually free.
+class OwnedFactoryAndThreads {
+ public:
+ OwnedFactoryAndThreads(
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread,
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface>& factory);
+
+ ~OwnedFactoryAndThreads() = default;
+
+ PeerConnectionFactoryInterface* factory() { return factory_.get(); }
+ rtc::SocketFactory* socket_factory() { return socket_factory_.get(); }
+ rtc::Thread* network_thread() { return network_thread_.get(); }
+ rtc::Thread* signaling_thread() { return signaling_thread_.get(); }
+ rtc::Thread* worker_thread() { return worker_thread_.get(); }
+
+ private:
+  // Usually implemented by the SocketServer associated with the network
+  // thread, so it must outlive the network thread.
+ const std::unique_ptr<rtc::SocketFactory> socket_factory_;
+ const std::unique_ptr<rtc::Thread> network_thread_;
+ const std::unique_ptr<rtc::Thread> worker_thread_;
+ const std::unique_ptr<rtc::Thread> signaling_thread_;
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_
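
The member declaration order in OwnedFactoryAndThreads is load-bearing: C++ destroys non-static members in reverse declaration order, so the factory (declared last) is torn down first and the socket factory (declared first) last, giving it the required lifetime past the network thread. A small demonstration of that destruction-order rule with toy members:

    #include <iostream>
    #include <memory>

    struct Member {
      explicit Member(const char* name) : name_(name) {}
      ~Member() { std::cout << "destroying " << name_ << "\n"; }
      const char* name_;
    };

    // Same declaration order as OwnedFactoryAndThreads.
    struct OwnedSketch {
      std::unique_ptr<Member> socket_factory_ =
          std::make_unique<Member>("socket factory");
      std::unique_ptr<Member> network_thread_ =
          std::make_unique<Member>("network thread");
      std::unique_ptr<Member> worker_thread_ =
          std::make_unique<Member>("worker thread");
      std::unique_ptr<Member> signaling_thread_ =
          std::make_unique<Member>("signaling thread");
      std::unique_ptr<Member> factory_ = std::make_unique<Member>("factory");
    };

    int main() {
      OwnedSketch owned;
      // Destructors run in reverse declaration order: factory first, then
      // signaling, worker, and network threads, socket factory last.
    }
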
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc
new file mode 100644
index 0000000000..502763a2d0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc
@@ -0,0 +1,917 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Lifecycle notes: objects are owned where they will be called; in other words,
+// FooObservers are owned by C++-land, and user-callable objects (e.g.
+// PeerConnection and VideoTrack) are owned by Java-land.
+// When this file (or other files in this directory) allocates C++
+// RefCountInterfaces it AddRef()s an artificial ref simulating the jlong held
+// in Java-land, and then Release()s the ref in the respective free call.
+// Sometimes this AddRef is implicit in the construction of a scoped_refptr<>
+// which is then .release()d. Any persistent (non-local) references from C++ to
+// Java must be global or weak (in which case they must be checked before use)!
+//
+// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
+// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
+// call. In this file this is done in CHECK_EXCEPTION, making for much easier
+// debugging in case of failure (the alternative is to wait for control to
+// return to the Java frame that called code in this file, at which point it's
+// impossible to tell which JNI call broke).
+
+#include "sdk/android/src/jni/pc/peer_connection.h"
+
+#include <limits>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+#include "api/rtc_event_log_output_file.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h"
+#include "sdk/android/generated_peerconnection_jni/IceCandidateErrorEvent_jni.h"
+#include "sdk/android/generated_peerconnection_jni/PeerConnection_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/add_ice_candidate_observer.h"
+#include "sdk/android/src/jni/pc/crypto_options.h"
+#include "sdk/android/src/jni/pc/data_channel.h"
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+#include "sdk/android/src/jni/pc/media_constraints.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/rtc_certificate.h"
+#include "sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h"
+#include "sdk/android/src/jni/pc/rtp_sender.h"
+#include "sdk/android/src/jni/pc/sdp_observer.h"
+#include "sdk/android/src/jni/pc/session_description.h"
+#include "sdk/android/src/jni/pc/stats_observer.h"
+#include "sdk/android/src/jni/pc/turn_customizer.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+PeerConnectionInterface* ExtractNativePC(JNIEnv* jni,
+ const JavaRef<jobject>& j_pc) {
+ return reinterpret_cast<OwnedPeerConnection*>(
+ Java_PeerConnection_getNativeOwnedPeerConnection(jni, j_pc))
+ ->pc();
+}
+
+PeerConnectionInterface::IceServers JavaToNativeIceServers(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_servers) {
+ PeerConnectionInterface::IceServers ice_servers;
+ for (const JavaRef<jobject>& j_ice_server : Iterable(jni, j_ice_servers)) {
+ ScopedJavaLocalRef<jobject> j_ice_server_tls_cert_policy =
+ Java_IceServer_getTlsCertPolicy(jni, j_ice_server);
+ ScopedJavaLocalRef<jobject> urls =
+ Java_IceServer_getUrls(jni, j_ice_server);
+ ScopedJavaLocalRef<jstring> username =
+ Java_IceServer_getUsername(jni, j_ice_server);
+ ScopedJavaLocalRef<jstring> password =
+ Java_IceServer_getPassword(jni, j_ice_server);
+ PeerConnectionInterface::TlsCertPolicy tls_cert_policy =
+ JavaToNativeTlsCertPolicy(jni, j_ice_server_tls_cert_policy);
+ ScopedJavaLocalRef<jstring> hostname =
+ Java_IceServer_getHostname(jni, j_ice_server);
+ ScopedJavaLocalRef<jobject> tls_alpn_protocols =
+ Java_IceServer_getTlsAlpnProtocols(jni, j_ice_server);
+ ScopedJavaLocalRef<jobject> tls_elliptic_curves =
+ Java_IceServer_getTlsEllipticCurves(jni, j_ice_server);
+ PeerConnectionInterface::IceServer server;
+ server.urls = JavaListToNativeVector<std::string, jstring>(
+ jni, urls, &JavaToNativeString);
+ server.username = JavaToNativeString(jni, username);
+ server.password = JavaToNativeString(jni, password);
+ server.tls_cert_policy = tls_cert_policy;
+ server.hostname = JavaToNativeString(jni, hostname);
+ server.tls_alpn_protocols = JavaListToNativeVector<std::string, jstring>(
+ jni, tls_alpn_protocols, &JavaToNativeString);
+ server.tls_elliptic_curves = JavaListToNativeVector<std::string, jstring>(
+ jni, tls_elliptic_curves, &JavaToNativeString);
+ ice_servers.push_back(server);
+ }
+ return ice_servers;
+}
+
+SdpSemantics JavaToNativeSdpSemantics(JNIEnv* jni,
+ const JavaRef<jobject>& j_sdp_semantics) {
+ std::string enum_name = GetJavaEnumName(jni, j_sdp_semantics);
+
+ if (enum_name == "PLAN_B")
+ return SdpSemantics::kPlanB_DEPRECATED;
+
+ if (enum_name == "UNIFIED_PLAN")
+ return SdpSemantics::kUnifiedPlan;
+
+ RTC_DCHECK_NOTREACHED();
+ return SdpSemantics::kUnifiedPlan;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaCandidatePairChange(
+ JNIEnv* env,
+ const cricket::CandidatePairChangeEvent& event) {
+ const auto& selected_pair = event.selected_candidate_pair;
+ return Java_CandidatePairChangeEvent_Constructor(
+ env, NativeToJavaCandidate(env, selected_pair.local_candidate()),
+ NativeToJavaCandidate(env, selected_pair.remote_candidate()),
+ static_cast<int>(event.last_data_received_ms),
+ NativeToJavaString(env, event.reason),
+ static_cast<int>(event.estimated_disconnected_time_ms));
+}
+
+} // namespace
+
+ScopedJavaLocalRef<jobject> NativeToJavaAdapterType(JNIEnv* env,
+ int adapterType) {
+ return Java_AdapterType_fromNativeIndex(env, adapterType);
+}
+
+void JavaToNativeRTCConfiguration(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtc_config,
+ PeerConnectionInterface::RTCConfiguration* rtc_config) {
+ ScopedJavaLocalRef<jobject> j_ice_transports_type =
+ Java_RTCConfiguration_getIceTransportsType(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_bundle_policy =
+ Java_RTCConfiguration_getBundlePolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_rtcp_mux_policy =
+ Java_RTCConfiguration_getRtcpMuxPolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_rtc_certificate =
+ Java_RTCConfiguration_getCertificate(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_tcp_candidate_policy =
+ Java_RTCConfiguration_getTcpCandidatePolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_candidate_network_policy =
+ Java_RTCConfiguration_getCandidateNetworkPolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_ice_servers =
+ Java_RTCConfiguration_getIceServers(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_continual_gathering_policy =
+ Java_RTCConfiguration_getContinualGatheringPolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_turn_port_prune_policy =
+ Java_RTCConfiguration_getTurnPortPrunePolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_turn_customizer =
+ Java_RTCConfiguration_getTurnCustomizer(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_network_preference =
+ Java_RTCConfiguration_getNetworkPreference(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_sdp_semantics =
+ Java_RTCConfiguration_getSdpSemantics(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_crypto_options =
+ Java_RTCConfiguration_getCryptoOptions(jni, j_rtc_config);
+
+ rtc_config->type = JavaToNativeIceTransportsType(jni, j_ice_transports_type);
+ rtc_config->bundle_policy = JavaToNativeBundlePolicy(jni, j_bundle_policy);
+ rtc_config->rtcp_mux_policy =
+ JavaToNativeRtcpMuxPolicy(jni, j_rtcp_mux_policy);
+ if (!j_rtc_certificate.is_null()) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificate::FromPEM(
+ JavaToNativeRTCCertificatePEM(jni, j_rtc_certificate));
+ RTC_CHECK(certificate != nullptr) << "supplied certificate is malformed.";
+ rtc_config->certificates.push_back(certificate);
+ }
+ rtc_config->tcp_candidate_policy =
+ JavaToNativeTcpCandidatePolicy(jni, j_tcp_candidate_policy);
+ rtc_config->candidate_network_policy =
+ JavaToNativeCandidateNetworkPolicy(jni, j_candidate_network_policy);
+ rtc_config->servers = JavaToNativeIceServers(jni, j_ice_servers);
+ rtc_config->audio_jitter_buffer_max_packets =
+ Java_RTCConfiguration_getAudioJitterBufferMaxPackets(jni, j_rtc_config);
+ rtc_config->audio_jitter_buffer_fast_accelerate =
+ Java_RTCConfiguration_getAudioJitterBufferFastAccelerate(jni,
+ j_rtc_config);
+ rtc_config->ice_connection_receiving_timeout =
+ Java_RTCConfiguration_getIceConnectionReceivingTimeout(jni, j_rtc_config);
+ rtc_config->ice_backup_candidate_pair_ping_interval =
+ Java_RTCConfiguration_getIceBackupCandidatePairPingInterval(jni,
+ j_rtc_config);
+ rtc_config->continual_gathering_policy =
+ JavaToNativeContinualGatheringPolicy(jni, j_continual_gathering_policy);
+ rtc_config->ice_candidate_pool_size =
+ Java_RTCConfiguration_getIceCandidatePoolSize(jni, j_rtc_config);
+ rtc_config->prune_turn_ports =
+ Java_RTCConfiguration_getPruneTurnPorts(jni, j_rtc_config);
+ rtc_config->turn_port_prune_policy =
+ JavaToNativePortPrunePolicy(jni, j_turn_port_prune_policy);
+ rtc_config->presume_writable_when_fully_relayed =
+ Java_RTCConfiguration_getPresumeWritableWhenFullyRelayed(jni,
+ j_rtc_config);
+ rtc_config->surface_ice_candidates_on_ice_transport_type_changed =
+ Java_RTCConfiguration_getSurfaceIceCandidatesOnIceTransportTypeChanged(
+ jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_ice_check_interval_strong_connectivity =
+ Java_RTCConfiguration_getIceCheckIntervalStrongConnectivity(jni,
+ j_rtc_config);
+ rtc_config->ice_check_interval_strong_connectivity =
+ JavaToNativeOptionalInt(jni, j_ice_check_interval_strong_connectivity);
+ ScopedJavaLocalRef<jobject> j_ice_check_interval_weak_connectivity =
+ Java_RTCConfiguration_getIceCheckIntervalWeakConnectivity(jni,
+ j_rtc_config);
+ rtc_config->ice_check_interval_weak_connectivity =
+ JavaToNativeOptionalInt(jni, j_ice_check_interval_weak_connectivity);
+ ScopedJavaLocalRef<jobject> j_ice_check_min_interval =
+ Java_RTCConfiguration_getIceCheckMinInterval(jni, j_rtc_config);
+ rtc_config->ice_check_min_interval =
+ JavaToNativeOptionalInt(jni, j_ice_check_min_interval);
+ ScopedJavaLocalRef<jobject> j_ice_unwritable_timeout =
+ Java_RTCConfiguration_getIceUnwritableTimeout(jni, j_rtc_config);
+ rtc_config->ice_unwritable_timeout =
+ JavaToNativeOptionalInt(jni, j_ice_unwritable_timeout);
+ ScopedJavaLocalRef<jobject> j_ice_unwritable_min_checks =
+ Java_RTCConfiguration_getIceUnwritableMinChecks(jni, j_rtc_config);
+ rtc_config->ice_unwritable_min_checks =
+ JavaToNativeOptionalInt(jni, j_ice_unwritable_min_checks);
+ ScopedJavaLocalRef<jobject> j_stun_candidate_keepalive_interval =
+ Java_RTCConfiguration_getStunCandidateKeepaliveInterval(jni,
+ j_rtc_config);
+ rtc_config->stun_candidate_keepalive_interval =
+ JavaToNativeOptionalInt(jni, j_stun_candidate_keepalive_interval);
+ ScopedJavaLocalRef<jobject> j_stable_writable_connection_ping_interval_ms =
+ Java_RTCConfiguration_getStableWritableConnectionPingIntervalMs(
+ jni, j_rtc_config);
+ rtc_config->stable_writable_connection_ping_interval_ms =
+ JavaToNativeOptionalInt(jni,
+ j_stable_writable_connection_ping_interval_ms);
+ rtc_config->disable_ipv6_on_wifi =
+ Java_RTCConfiguration_getDisableIPv6OnWifi(jni, j_rtc_config);
+ rtc_config->max_ipv6_networks =
+ Java_RTCConfiguration_getMaxIPv6Networks(jni, j_rtc_config);
+
+ rtc_config->turn_customizer = GetNativeTurnCustomizer(jni, j_turn_customizer);
+
+ rtc_config->disable_ipv6 =
+ Java_RTCConfiguration_getDisableIpv6(jni, j_rtc_config);
+ rtc_config->media_config.enable_dscp =
+ Java_RTCConfiguration_getEnableDscp(jni, j_rtc_config);
+ rtc_config->media_config.video.enable_cpu_adaptation =
+ Java_RTCConfiguration_getEnableCpuOveruseDetection(jni, j_rtc_config);
+ rtc_config->media_config.video.suspend_below_min_bitrate =
+ Java_RTCConfiguration_getSuspendBelowMinBitrate(jni, j_rtc_config);
+ rtc_config->screencast_min_bitrate = JavaToNativeOptionalInt(
+ jni, Java_RTCConfiguration_getScreencastMinBitrate(jni, j_rtc_config));
+ rtc_config->combined_audio_video_bwe = JavaToNativeOptionalBool(
+ jni, Java_RTCConfiguration_getCombinedAudioVideoBwe(jni, j_rtc_config));
+ rtc_config->network_preference =
+ JavaToNativeNetworkPreference(jni, j_network_preference);
+ rtc_config->sdp_semantics = JavaToNativeSdpSemantics(jni, j_sdp_semantics);
+ rtc_config->active_reset_srtp_params =
+ Java_RTCConfiguration_getActiveResetSrtpParams(jni, j_rtc_config);
+ rtc_config->crypto_options =
+ JavaToNativeOptionalCryptoOptions(jni, j_crypto_options);
+
+ rtc_config->allow_codec_switching = JavaToNativeOptionalBool(
+ jni, Java_RTCConfiguration_getAllowCodecSwitching(jni, j_rtc_config));
+
+ rtc_config->offer_extmap_allow_mixed =
+ Java_RTCConfiguration_getOfferExtmapAllowMixed(jni, j_rtc_config);
+ rtc_config->enable_implicit_rollback =
+ Java_RTCConfiguration_getEnableImplicitRollback(jni, j_rtc_config);
+
+ ScopedJavaLocalRef<jstring> j_turn_logging_id =
+ Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config);
+ if (!IsNull(jni, j_turn_logging_id)) {
+ rtc_config->turn_logging_id = JavaToNativeString(jni, j_turn_logging_id);
+ }
+}
+
+rtc::KeyType GetRtcConfigKeyType(JNIEnv* env,
+ const JavaRef<jobject>& j_rtc_config) {
+ return JavaToNativeKeyType(
+ env, Java_RTCConfiguration_getKeyType(env, j_rtc_config));
+}
+
+PeerConnectionObserverJni::PeerConnectionObserverJni(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(jni, j_observer) {}
+
+PeerConnectionObserverJni::~PeerConnectionObserverJni() = default;
+
+void PeerConnectionObserverJni::OnIceCandidate(
+ const IceCandidateInterface* candidate) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceCandidate(env, j_observer_global_,
+ NativeToJavaIceCandidate(env, *candidate));
+}
+
+void PeerConnectionObserverJni::OnIceCandidateError(
+ const std::string& address,
+ int port,
+ const std::string& url,
+ int error_code,
+ const std::string& error_text) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> event = Java_IceCandidateErrorEvent_Constructor(
+ env, NativeToJavaString(env, address), port, NativeToJavaString(env, url),
+ error_code, NativeToJavaString(env, error_text));
+ Java_Observer_onIceCandidateError(env, j_observer_global_, event);
+}
+
+void PeerConnectionObserverJni::OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceCandidatesRemoved(
+ env, j_observer_global_, NativeToJavaCandidateArray(env, candidates));
+}
+
+void PeerConnectionObserverJni::OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onSignalingChange(
+ env, j_observer_global_,
+ Java_SignalingState_fromNativeIndex(env, new_state));
+}
+
+void PeerConnectionObserverJni::OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceConnectionChange(
+ env, j_observer_global_,
+ Java_IceConnectionState_fromNativeIndex(env, new_state));
+}
+
+void PeerConnectionObserverJni::OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onStandardizedIceConnectionChange(
+ env, j_observer_global_,
+ Java_IceConnectionState_fromNativeIndex(env, new_state));
+}
+
+void PeerConnectionObserverJni::OnConnectionChange(
+ PeerConnectionInterface::PeerConnectionState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onConnectionChange(env, j_observer_global_,
+ Java_PeerConnectionState_fromNativeIndex(
+ env, static_cast<int>(new_state)));
+}
+
+void PeerConnectionObserverJni::OnIceConnectionReceivingChange(bool receiving) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceConnectionReceivingChange(env, j_observer_global_,
+ receiving);
+}
+
+void PeerConnectionObserverJni::OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent& event) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onSelectedCandidatePairChanged(
+ env, j_observer_global_, NativeToJavaCandidatePairChange(env, event));
+}
+
+void PeerConnectionObserverJni::OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceGatheringChange(
+ env, j_observer_global_,
+ Java_IceGatheringState_fromNativeIndex(env, new_state));
+}
+
+void PeerConnectionObserverJni::OnAddStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onAddStream(
+ env, j_observer_global_,
+ GetOrCreateJavaStream(env, stream).j_media_stream());
+}
+
+void PeerConnectionObserverJni::OnRemoveStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get());
+ RTC_CHECK(it != remote_streams_.end())
+ << "unexpected stream: " << stream.get();
+ Java_Observer_onRemoveStream(env, j_observer_global_,
+ it->second.j_media_stream());
+ remote_streams_.erase(it);
+}
+
+void PeerConnectionObserverJni::OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> channel) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onDataChannel(env, j_observer_global_,
+ WrapNativeDataChannel(env, channel));
+}
+
+void PeerConnectionObserverJni::OnRenegotiationNeeded() {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onRenegotiationNeeded(env, j_observer_global_);
+}
+
+void PeerConnectionObserverJni::OnAddTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_rtp_receiver =
+ NativeToJavaRtpReceiver(env, receiver);
+ rtp_receivers_.emplace_back(env, j_rtp_receiver);
+
+ Java_Observer_onAddTrack(env, j_observer_global_, j_rtp_receiver,
+ NativeToJavaMediaStreamArray(env, streams));
+}
+
+void PeerConnectionObserverJni::OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_rtp_receiver =
+ NativeToJavaRtpReceiver(env, receiver);
+ rtp_receivers_.emplace_back(env, j_rtp_receiver);
+
+ Java_Observer_onRemoveTrack(env, j_observer_global_, j_rtp_receiver);
+}
+
+void PeerConnectionObserverJni::OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_rtp_transceiver =
+ NativeToJavaRtpTransceiver(env, transceiver);
+ rtp_transceivers_.emplace_back(env, j_rtp_transceiver);
+
+ Java_Observer_onTrack(env, j_observer_global_, j_rtp_transceiver);
+}
+
+// If the NativeToJavaStreamsMap contains the stream, return it.
+// Otherwise, create a new Java MediaStream.
+JavaMediaStream& PeerConnectionObserverJni::GetOrCreateJavaStream(
+ JNIEnv* env,
+ const rtc::scoped_refptr<MediaStreamInterface>& stream) {
+ NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get());
+ if (it == remote_streams_.end()) {
+ it = remote_streams_
+ .emplace(std::piecewise_construct,
+ std::forward_as_tuple(stream.get()),
+ std::forward_as_tuple(env, stream))
+ .first;
+ }
+ return it->second;
+}
+
+ScopedJavaLocalRef<jobjectArray>
+PeerConnectionObserverJni::NativeToJavaMediaStreamArray(
+ JNIEnv* jni,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ return NativeToJavaObjectArray(
+ jni, streams, GetMediaStreamClass(jni),
+ [this](JNIEnv* env, rtc::scoped_refptr<MediaStreamInterface> stream)
+ -> const ScopedJavaGlobalRef<jobject>& {
+ return GetOrCreateJavaStream(env, stream).j_media_stream();
+ });
+}
+
+OwnedPeerConnection::OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer)
+ : OwnedPeerConnection(peer_connection,
+ std::move(observer),
+ nullptr /* constraints */) {}
+
+OwnedPeerConnection::OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer,
+ std::unique_ptr<MediaConstraints> constraints)
+ : peer_connection_(peer_connection),
+ observer_(std::move(observer)),
+ constraints_(std::move(constraints)) {}
+
+OwnedPeerConnection::~OwnedPeerConnection() {
+ // Ensure that PeerConnection is destroyed before the observer.
+ peer_connection_ = nullptr;
+}
+
+static jlong JNI_PeerConnection_CreatePeerConnectionObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_observer) {
+ return jlongFromPointer(new PeerConnectionObserverJni(jni, j_observer));
+}
+
+static void JNI_PeerConnection_FreeOwnedPeerConnection(JNIEnv*, jlong j_p) {
+ delete reinterpret_cast<OwnedPeerConnection*>(j_p);
+}
+
+static jlong JNI_PeerConnection_GetNativePeerConnection(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ return jlongFromPointer(ExtractNativePC(jni, j_pc));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetLocalDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ PeerConnectionInterface* pc = ExtractNativePC(jni, j_pc);
+ // It's only safe to operate on SessionDescriptionInterface on the
+ // signaling thread, but `jni` may only be used on the current thread, so we
+ // must do this odd dance.
+ std::string sdp;
+ std::string type;
+ pc->signaling_thread()->Invoke<void>(RTC_FROM_HERE, [pc, &sdp, &type] {
+ const SessionDescriptionInterface* desc = pc->local_description();
+ if (desc) {
+ RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+ type = desc->type();
+ }
+ });
+ return sdp.empty() ? nullptr : NativeToJavaSessionDescription(jni, sdp, type);
+}
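
The "odd dance" in the comment is a synchronous cross-thread read: the lambda runs on the signaling thread, fills sdp and type through captured references, and Invoke blocks the calling thread until it finishes, so the JNI conversion can then happen back on the caller with its own JNIEnv. A reduced sketch of that blocking capture-by-reference pattern, with std::thread plus join standing in for rtc::Thread::Invoke:

    #include <iostream>
    #include <string>
    #include <thread>

    // Stand-in for Invoke<void>: run `task` on another thread and block the
    // caller until it completes, which is what makes capture-by-reference safe.
    template <typename Task>
    void BlockingInvoke(Task task) {
      std::thread worker(task);
      worker.join();
    }

    int main() {
      std::string sdp;
      std::string type;
      BlockingInvoke([&sdp, &type] {
        // In the real code this reads pc->local_description() on the
        // signaling thread, the only thread where that is safe.
        sdp = "v=0 ...";
        type = "offer";
      });
      // Back on the calling thread: the captured strings are filled in and
      // can now be converted with the caller's JNIEnv.
      std::cout << type << ": " << sdp << "\n";
    }
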
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetRemoteDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ PeerConnectionInterface* pc = ExtractNativePC(jni, j_pc);
+ // It's only safe to operate on SessionDescriptionInterface on the
+ // signaling thread, but `jni` may only be used on the current thread, so we
+ // must do this odd dance.
+ std::string sdp;
+ std::string type;
+ pc->signaling_thread()->Invoke<void>(RTC_FROM_HERE, [pc, &sdp, &type] {
+ const SessionDescriptionInterface* desc = pc->remote_description();
+ if (desc) {
+ RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+ type = desc->type();
+ }
+ });
+ return sdp.empty() ? nullptr : NativeToJavaSessionDescription(jni, sdp, type);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetCertificate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ const PeerConnectionInterface::RTCConfiguration rtc_config =
+ ExtractNativePC(jni, j_pc)->GetConfiguration();
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc_config.certificates[0];
+ return NativeToJavaRTCCertificatePEM(jni, certificate->ToPEM());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_CreateDataChannel(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_label,
+ const JavaParamRef<jobject>& j_init) {
+ DataChannelInit init = JavaToNativeDataChannelInit(jni, j_init);
+ auto result = ExtractNativePC(jni, j_pc)->CreateDataChannelOrError(
+ JavaToNativeString(jni, j_label), &init);
+ if (!result.ok()) {
+ return WrapNativeDataChannel(jni, nullptr);
+ }
+ return WrapNativeDataChannel(jni, result.MoveValue());
+}
+
+static void JNI_PeerConnection_CreateOffer(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_constraints) {
+ std::unique_ptr<MediaConstraints> constraints =
+ JavaToNativeMediaConstraints(jni, j_constraints);
+ auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
+ jni, j_observer, std::move(constraints));
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options);
+ ExtractNativePC(jni, j_pc)->CreateOffer(observer.get(), options);
+}
+
+static void JNI_PeerConnection_CreateAnswer(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_constraints) {
+ std::unique_ptr<MediaConstraints> constraints =
+ JavaToNativeMediaConstraints(jni, j_constraints);
+ auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
+ jni, j_observer, std::move(constraints));
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options);
+ ExtractNativePC(jni, j_pc)->CreateAnswer(observer.get(), options);
+}
+
+static void JNI_PeerConnection_SetLocalDescriptionAutomatically(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer) {
+ auto observer =
+ rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
+ ExtractNativePC(jni, j_pc)->SetLocalDescription(observer);
+}
+
+static void JNI_PeerConnection_SetLocalDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_sdp) {
+ auto observer =
+ rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
+ ExtractNativePC(jni, j_pc)->SetLocalDescription(
+ JavaToNativeSessionDescription(jni, j_sdp), observer);
+}
+
+static void JNI_PeerConnection_SetRemoteDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_sdp) {
+ auto observer =
+ rtc::make_ref_counted<SetRemoteSdpObserverJni>(jni, j_observer);
+ ExtractNativePC(jni, j_pc)->SetRemoteDescription(
+ JavaToNativeSessionDescription(jni, j_sdp), observer);
+}
+
+static void JNI_PeerConnection_RestartIce(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ ExtractNativePC(jni, j_pc)->RestartIce();
+}
+
+static void JNI_PeerConnection_SetAudioPlayout(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jboolean playout) {
+ ExtractNativePC(jni, j_pc)->SetAudioPlayout(playout);
+}
+
+static void JNI_PeerConnection_SetAudioRecording(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jboolean recording) {
+ ExtractNativePC(jni, j_pc)->SetAudioRecording(recording);
+}
+
+static jboolean JNI_PeerConnection_SetConfiguration(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_rtc_config) {
+  // The constraints stored in the OwnedPeerConnection object need to be
+  // merged into the RTCConfiguration again.
+ OwnedPeerConnection* owned_pc = reinterpret_cast<OwnedPeerConnection*>(
+ Java_PeerConnection_getNativeOwnedPeerConnection(jni, j_pc));
+ PeerConnectionInterface::RTCConfiguration rtc_config(
+ PeerConnectionInterface::RTCConfigurationType::kAggressive);
+ JavaToNativeRTCConfiguration(jni, j_rtc_config, &rtc_config);
+ if (owned_pc->constraints()) {
+ CopyConstraintsIntoRtcConfiguration(owned_pc->constraints(), &rtc_config);
+ }
+ return owned_pc->pc()->SetConfiguration(rtc_config).ok();
+}
+
+static jboolean JNI_PeerConnection_AddIceCandidate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_sdp_mid,
+ jint j_sdp_mline_index,
+ const JavaParamRef<jstring>& j_candidate_sdp) {
+ std::string sdp_mid = JavaToNativeString(jni, j_sdp_mid);
+ std::string sdp = JavaToNativeString(jni, j_candidate_sdp);
+ std::unique_ptr<IceCandidateInterface> candidate(
+ CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr));
+ return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
+}
+
+static void JNI_PeerConnection_AddIceCandidateWithObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_sdp_mid,
+ jint j_sdp_mline_index,
+ const JavaParamRef<jstring>& j_candidate_sdp,
+ const JavaParamRef<jobject>& j_observer) {
+ std::string sdp_mid = JavaToNativeString(jni, j_sdp_mid);
+ std::string sdp = JavaToNativeString(jni, j_candidate_sdp);
+ std::unique_ptr<IceCandidateInterface> candidate(
+ CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr));
+
+ rtc::scoped_refptr<AddIceCandidateObserverJni> observer(
+ new AddIceCandidateObserverJni(jni, j_observer));
+ ExtractNativePC(jni, j_pc)->AddIceCandidate(
+ std::move(candidate),
+ [observer](RTCError error) { observer->OnComplete(error); });
+}
+
+static jboolean JNI_PeerConnection_RemoveIceCandidates(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobjectArray>& j_candidates) {
+ std::vector<cricket::Candidate> candidates =
+ JavaToNativeVector<cricket::Candidate>(jni, j_candidates,
+ &JavaToNativeCandidate);
+ return ExtractNativePC(jni, j_pc)->RemoveIceCandidates(candidates);
+}
+
+static jboolean JNI_PeerConnection_AddLocalStream(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong native_stream) {
+ return ExtractNativePC(jni, j_pc)->AddStream(
+ reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+static void JNI_PeerConnection_RemoveLocalStream(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong native_stream) {
+ ExtractNativePC(jni, j_pc)->RemoveStream(
+ reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_CreateSender(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_kind,
+ const JavaParamRef<jstring>& j_stream_id) {
+ std::string kind = JavaToNativeString(jni, j_kind);
+ std::string stream_id = JavaToNativeString(jni, j_stream_id);
+ rtc::scoped_refptr<RtpSenderInterface> sender =
+ ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
+ return NativeToJavaRtpSender(jni, sender);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetSenders(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetSenders(),
+ &NativeToJavaRtpSender);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetReceivers(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetReceivers(),
+ &NativeToJavaRtpReceiver);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetTransceivers(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetTransceivers(),
+ &NativeToJavaRtpTransceiver);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTrack(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const jlong native_track,
+ const JavaParamRef<jobject>& j_stream_labels) {
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> result =
+ ExtractNativePC(jni, j_pc)->AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface>(
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
+ JavaListToNativeVector<std::string, jstring>(jni, j_stream_labels,
+ &JavaToNativeString));
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "Failed to add track: " << result.error().message();
+ return nullptr;
+ } else {
+ return NativeToJavaRtpSender(jni, result.MoveValue());
+ }
+}
+
+static jboolean JNI_PeerConnection_RemoveTrack(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong native_sender) {
+ return ExtractNativePC(jni, j_pc)
+ ->RemoveTrackOrError(rtc::scoped_refptr<RtpSenderInterface>(
+ reinterpret_cast<RtpSenderInterface*>(native_sender)))
+ .ok();
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTransceiverWithTrack(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong native_track,
+ const JavaParamRef<jobject>& j_init) {
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+ ExtractNativePC(jni, j_pc)->AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface>(
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
+ JavaToNativeRtpTransceiverInit(jni, j_init));
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "Failed to add transceiver: "
+ << result.error().message();
+ return nullptr;
+ } else {
+ return NativeToJavaRtpTransceiver(jni, result.MoveValue());
+ }
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTransceiverOfType(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_media_type,
+ const JavaParamRef<jobject>& j_init) {
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+ ExtractNativePC(jni, j_pc)->AddTransceiver(
+ JavaToNativeMediaType(jni, j_media_type),
+ JavaToNativeRtpTransceiverInit(jni, j_init));
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "Failed to add transceiver: "
+ << result.error().message();
+ return nullptr;
+ } else {
+ return NativeToJavaRtpTransceiver(jni, result.MoveValue());
+ }
+}
+
+static jboolean JNI_PeerConnection_OldGetStats(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ jlong native_track) {
+ auto observer = rtc::make_ref_counted<StatsObserverJni>(jni, j_observer);
+ return ExtractNativePC(jni, j_pc)->GetStats(
+ observer.get(),
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+ PeerConnectionInterface::kStatsOutputLevelStandard);
+}
+
+static void JNI_PeerConnection_NewGetStats(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_callback) {
+ auto callback =
+ rtc::make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
+ ExtractNativePC(jni, j_pc)->GetStats(callback.get());
+}
+
+static jboolean JNI_PeerConnection_SetBitrate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_min,
+ const JavaParamRef<jobject>& j_current,
+ const JavaParamRef<jobject>& j_max) {
+ BitrateSettings params;
+ params.min_bitrate_bps = JavaToNativeOptionalInt(jni, j_min);
+ params.start_bitrate_bps = JavaToNativeOptionalInt(jni, j_current);
+ params.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max);
+ return ExtractNativePC(jni, j_pc)->SetBitrate(params).ok();
+}
+
+static jboolean JNI_PeerConnection_StartRtcEventLog(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ int file_descriptor,
+ int max_size_bytes) {
+ // TODO(eladalon): It would be better to not allow negative values into PC.
+ const size_t max_size = (max_size_bytes < 0)
+ ? RtcEventLog::kUnlimitedOutput
+ : rtc::saturated_cast<size_t>(max_size_bytes);
+ FILE* f = fdopen(file_descriptor, "wb");
+ if (!f) {
+ close(file_descriptor);
+ return false;
+ }
+ return ExtractNativePC(jni, j_pc)->StartRtcEventLog(
+ std::make_unique<RtcEventLogOutputFile>(f, max_size));
+}
+
+static void JNI_PeerConnection_StopRtcEventLog(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ ExtractNativePC(jni, j_pc)->StopRtcEventLog();
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_SignalingState(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_pc) {
+ return Java_SignalingState_fromNativeIndex(
+ env, ExtractNativePC(env, j_pc)->signaling_state());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_IceConnectionState(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_pc) {
+ return Java_IceConnectionState_fromNativeIndex(
+ env, ExtractNativePC(env, j_pc)->ice_connection_state());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_ConnectionState(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_pc) {
+ return Java_PeerConnectionState_fromNativeIndex(
+ env,
+ static_cast<int>(ExtractNativePC(env, j_pc)->peer_connection_state()));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_IceGatheringState(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_pc) {
+ return Java_IceGatheringState_fromNativeIndex(
+ env, ExtractNativePC(env, j_pc)->ice_gathering_state());
+}
+
+static void JNI_PeerConnection_Close(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ ExtractNativePC(jni, j_pc)->Close();
+}
+
+} // namespace jni
+} // namespace webrtc
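
GetOrCreateJavaStream relies on a detail worth calling out: JavaMediaStream is neither copyable nor movable (its destructor disposes the Java object), so the map entry has to be built in place with std::piecewise_construct rather than with operator[] or insert. A standalone sketch of that emplace pattern with a toy non-copyable value type:

    #include <iostream>
    #include <map>
    #include <string>
    #include <utility>

    // Stand-in for JavaMediaStream: constructible in place, not copyable.
    struct JavaStreamSketch {
      JavaStreamSketch(int env, const std::string& stream) : name(stream) {
        (void)env;
      }
      JavaStreamSketch(const JavaStreamSketch&) = delete;
      JavaStreamSketch& operator=(const JavaStreamSketch&) = delete;
      std::string name;
    };

    std::map<const std::string*, JavaStreamSketch> remote_streams;

    JavaStreamSketch& GetOrCreate(int env, const std::string& stream) {
      auto it = remote_streams.find(&stream);
      if (it == remote_streams.end()) {
        // Construct key and value in place from separate argument tuples; no
        // temporary JavaStreamSketch is ever copied or moved.
        it = remote_streams
                 .emplace(std::piecewise_construct,
                          std::forward_as_tuple(&stream),
                          std::forward_as_tuple(env, stream))
                 .first;
      }
      return it->second;
    }

    int main() {
      std::string s = "stream1";
      std::cout << GetOrCreate(0, s).name << "\n";
      std::cout << (&GetOrCreate(0, s) == &GetOrCreate(0, s)) << "\n";  // 1
    }
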
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h
new file mode 100644
index 0000000000..9976e8e4f5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_
+#define SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "api/peer_connection_interface.h"
+#include "pc/media_stream_observer.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/media_constraints.h"
+#include "sdk/android/src/jni/pc/media_stream.h"
+#include "sdk/android/src/jni/pc/rtp_receiver.h"
+#include "sdk/android/src/jni/pc/rtp_transceiver.h"
+
+namespace webrtc {
+namespace jni {
+
+void JavaToNativeRTCConfiguration(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtc_config,
+ PeerConnectionInterface::RTCConfiguration* rtc_config);
+
+rtc::KeyType GetRtcConfigKeyType(JNIEnv* env,
+ const JavaRef<jobject>& j_rtc_config);
+
+ScopedJavaLocalRef<jobject> NativeToJavaAdapterType(JNIEnv* env,
+ int adapterType);
+
+// Adapter between the C++ PeerConnectionObserver interface and the Java
+// PeerConnection.Observer interface. Wraps an instance of the Java interface
+// and dispatches C++ callbacks to Java.
+class PeerConnectionObserverJni : public PeerConnectionObserver {
+ public:
+ PeerConnectionObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer);
+ ~PeerConnectionObserverJni() override;
+
+ // Implementation of PeerConnectionObserver interface, which propagates
+ // the callbacks to the Java observer.
+ void OnIceCandidate(const IceCandidateInterface* candidate) override;
+ void OnIceCandidateError(const std::string& address,
+ int port,
+ const std::string& url,
+ int error_code,
+ const std::string& error_text) override;
+
+ void OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) override;
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override;
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnConnectionChange(
+ PeerConnectionInterface::PeerConnectionState new_state) override;
+ void OnIceConnectionReceivingChange(bool receiving) override;
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override;
+ void OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent& event) override;
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+ void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+ void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> channel) override;
+ void OnRenegotiationNeeded() override;
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override;
+ void OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
+
+ private:
+ typedef std::map<MediaStreamInterface*, JavaMediaStream>
+ NativeToJavaStreamsMap;
+ typedef std::map<MediaStreamTrackInterface*, RtpReceiverInterface*>
+ NativeMediaStreamTrackToNativeRtpReceiver;
+
+  // If the NativeToJavaStreamsMap already contains the stream, return the
+  // cached entry. Otherwise, create a new Java MediaStream, add it to the
+  // map, and return it. The returned JavaMediaStream holds a global
+  // reference to the underlying Java object.
+ JavaMediaStream& GetOrCreateJavaStream(
+ JNIEnv* env,
+ const rtc::scoped_refptr<MediaStreamInterface>& stream);
+
+  // Converts an array of streams, creating or re-using Java streams as
+  // necessary.
+ ScopedJavaLocalRef<jobjectArray> NativeToJavaMediaStreamArray(
+ JNIEnv* jni,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
+
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+
+ // C++ -> Java remote streams.
+ NativeToJavaStreamsMap remote_streams_;
+ std::vector<JavaRtpReceiverGlobalOwner> rtp_receivers_;
+ // Holds a reference to the Java transceivers given to the AddTrack
+ // callback, so that the shared ownership by the Java object will be
+ // properly disposed.
+ std::vector<JavaRtpTransceiverGlobalOwner> rtp_transceivers_;
+};
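+
+// As an illustrative sketch (the real implementations live in
+// peer_connection.cc in this patch), a typical callback forwards to the
+// generated JNI stub roughly like this:
+//
+//   void PeerConnectionObserverJni::OnRenegotiationNeeded() {
+//     JNIEnv* env = AttachCurrentThreadIfNeeded();
+//     Java_Observer_onRenegotiationNeeded(env, j_observer_global_);
+//   }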
+
+// PeerConnection doesn't take ownership of the observer. In the Java API we
+// don't want the client to have to manually dispose of the observer, so this
+// wrapper class owns both objects and ties their lifetimes together.
+//
+// It also stores a reference to the deprecated PeerConnection constraints,
+// for now.
+class OwnedPeerConnection {
+ public:
+ OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer);
+ // Deprecated. PC constraints are deprecated.
+ OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer,
+ std::unique_ptr<MediaConstraints> constraints);
+ ~OwnedPeerConnection();
+
+ PeerConnectionInterface* pc() const { return peer_connection_.get(); }
+ const MediaConstraints* constraints() const { return constraints_.get(); }
+
+ private:
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection_;
+ std::unique_ptr<PeerConnectionObserver> observer_;
+ std::unique_ptr<MediaConstraints> constraints_;
+};
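+
+// A sketch of the intended round trip through Java (the creating and freeing
+// call sites live in peer_connection_factory.cc and peer_connection.cc):
+//
+//   jlong handle = jlongFromPointer(new OwnedPeerConnection(
+//       result.MoveValue(), std::move(observer), std::move(constraints)));
+//   // ... later, when the Java PeerConnection is disposed:
+//   delete reinterpret_cast<OwnedPeerConnection*>(handle);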
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc
new file mode 100644
index 0000000000..fafcad3caf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc
@@ -0,0 +1,550 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/peer_connection_factory.h"
+
+#include <memory>
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "media/base/media_engine.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/utility/include/jvm_android.h"
+// We don't depend on the audio processing module implementation.
+// The user may pass in a nullptr.
+#include "api/call/call_factory_interface.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "rtc_base/event_tracer.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/native_api/stacktrace/stacktrace.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/logging/log_sink.h"
+#include "sdk/android/src/jni/pc/android_network_monitor.h"
+#include "sdk/android/src/jni/pc/audio.h"
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+#include "sdk/android/src/jni/pc/owned_factory_and_threads.h"
+#include "sdk/android/src/jni/pc/peer_connection.h"
+#include "sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h"
+#include "sdk/android/src/jni/pc/video.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+// Take ownership of the raw pointer stored in the jlong and adopt it into an
+// rtc::scoped_refptr without performing an extra AddRef.
+template <typename T>
+rtc::scoped_refptr<T> TakeOwnershipOfRefPtr(jlong j_pointer) {
+ T* ptr = reinterpret_cast<T*>(j_pointer);
+ rtc::scoped_refptr<T> refptr;
+ refptr.swap(&ptr);
+ return refptr;
+}
+
+// Take ownership of the raw pointer stored in the jlong and wrap it in a
+// std::unique_ptr.
+template <typename T>
+std::unique_ptr<T> TakeOwnershipOfUniquePtr(jlong native_pointer) {
+ return std::unique_ptr<T>(reinterpret_cast<T*>(native_pointer));
+}
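+
+// For example, a ref-counted factory whose reference was release()d into a
+// jlong on the way to Java is re-adopted here without an extra AddRef (a
+// sketch; `j_ptr` stands for any such jlong parameter):
+//
+//   rtc::scoped_refptr<AudioDecoderFactory> factory =
+//       TakeOwnershipOfRefPtr<AudioDecoderFactory>(j_ptr);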
+
+typedef void (*JavaMethodPointer)(JNIEnv*, const JavaRef<jobject>&);
+
+// Post a message on the given thread that will call the Java method on the
+// given Java object.
+void PostJavaCallback(JNIEnv* env,
+ rtc::Thread* queue,
+ const rtc::Location& posted_from,
+ const JavaRef<jobject>& j_object,
+ JavaMethodPointer java_method_pointer) {
+ // One-off message handler that calls the Java method on the specified Java
+ // object before deleting itself.
+ class JavaAsyncCallback : public rtc::MessageHandler {
+ public:
+ JavaAsyncCallback(JNIEnv* env,
+ const JavaRef<jobject>& j_object,
+ JavaMethodPointer java_method_pointer)
+ : j_object_(env, j_object), java_method_pointer_(java_method_pointer) {}
+
+ void OnMessage(rtc::Message*) override {
+ java_method_pointer_(AttachCurrentThreadIfNeeded(), j_object_);
+      // The message has been delivered; clean up after ourselves.
+ delete this;
+ }
+
+ private:
+ ScopedJavaGlobalRef<jobject> j_object_;
+ JavaMethodPointer java_method_pointer_;
+ };
+
+ queue->Post(posted_from,
+ new JavaAsyncCallback(env, j_object, java_method_pointer));
+}
+
+absl::optional<PeerConnectionFactoryInterface::Options>
+JavaToNativePeerConnectionFactoryOptions(JNIEnv* jni,
+ const JavaRef<jobject>& j_options) {
+ if (j_options.is_null())
+ return absl::nullopt;
+
+ PeerConnectionFactoryInterface::Options native_options;
+
+  // This doesn't necessarily match the C++ version of this struct; feel free
+ // to add more parameters as necessary.
+ native_options.network_ignore_mask =
+ Java_Options_getNetworkIgnoreMask(jni, j_options);
+ native_options.disable_encryption =
+ Java_Options_getDisableEncryption(jni, j_options);
+ native_options.disable_network_monitor =
+ Java_Options_getDisableNetworkMonitor(jni, j_options);
+
+ return native_options;
+}
+
+// Place static objects into a container that is intentionally leaked so that
+// we avoid running a non-trivial destructor at program exit.
+struct StaticObjectContainer {
+ // Field trials initialization string
+ std::unique_ptr<std::string> field_trials_init_string;
+ // Set in PeerConnectionFactory_InjectLoggable().
+ std::unique_ptr<JNILogSink> jni_log_sink;
+};
+
+StaticObjectContainer& GetStaticObjects() {
+ static StaticObjectContainer* static_objects = new StaticObjectContainer();
+ return *static_objects;
+}
+
+ScopedJavaLocalRef<jobject> NativeToScopedJavaPeerConnectionFactory(
+ JNIEnv* env,
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread) {
+ OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
+ std::move(socket_factory), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread), pcf);
+
+ ScopedJavaLocalRef<jobject> j_pcf = Java_PeerConnectionFactory_Constructor(
+ env, NativeToJavaPointer(owned_factory));
+
+ PostJavaCallback(env, owned_factory->network_thread(), RTC_FROM_HERE, j_pcf,
+ &Java_PeerConnectionFactory_onNetworkThreadReady);
+ PostJavaCallback(env, owned_factory->worker_thread(), RTC_FROM_HERE, j_pcf,
+ &Java_PeerConnectionFactory_onWorkerThreadReady);
+ PostJavaCallback(env, owned_factory->signaling_thread(), RTC_FROM_HERE, j_pcf,
+ &Java_PeerConnectionFactory_onSignalingThreadReady);
+
+ return j_pcf;
+}
+
+PeerConnectionFactoryInterface* PeerConnectionFactoryFromJava(jlong j_p) {
+ return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
+}
+
+} // namespace
+
+// Note: Some of the video-specific PeerConnectionFactory methods are
+// implemented in "video.cc". This is done so that if an application
+// doesn't need video support, it can just link with "null_video.cc"
+// instead of "video.cc", which doesn't bring in the video-specific
+// dependencies.
+
+// Set in PeerConnectionFactory_initializeAndroidGlobals().
+static bool factory_static_initialized = false;
+
+jobject NativeToJavaPeerConnectionFactory(
+ JNIEnv* jni,
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread) {
+ return NativeToScopedJavaPeerConnectionFactory(
+ jni, pcf, std::move(socket_factory), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread))
+ .Release();
+}
+
+static void JNI_PeerConnectionFactory_InitializeAndroidGlobals(JNIEnv* jni) {
+ if (!factory_static_initialized) {
+ JVM::Initialize(GetJVM());
+ factory_static_initialized = true;
+ }
+}
+
+static void JNI_PeerConnectionFactory_InitializeFieldTrials(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_trials_init_string) {
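+  // The init string is a concatenation of "TrialName/GroupName/" pairs, e.g.
+  // (with hypothetical trial names):
+  //
+  //   "WebRTC-SomeFeature/Enabled/WebRTC-OtherFeature/Disabled/"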
+ std::unique_ptr<std::string>& field_trials_init_string =
+ GetStaticObjects().field_trials_init_string;
+
+ if (j_trials_init_string.is_null()) {
+ field_trials_init_string = nullptr;
+ field_trial::InitFieldTrialsFromString(nullptr);
+ return;
+ }
+ field_trials_init_string = std::make_unique<std::string>(
+ JavaToNativeString(jni, j_trials_init_string));
+ RTC_LOG(LS_INFO) << "initializeFieldTrials: " << *field_trials_init_string;
+ field_trial::InitFieldTrialsFromString(field_trials_init_string->c_str());
+}
+
+static void JNI_PeerConnectionFactory_InitializeInternalTracer(JNIEnv* jni) {
+ rtc::tracing::SetupInternalTracer();
+}
+
+static ScopedJavaLocalRef<jstring>
+JNI_PeerConnectionFactory_FindFieldTrialsFullName(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_name) {
+ return NativeToJavaString(
+ jni, field_trial::FindFullName(JavaToStdString(jni, j_name)));
+}
+
+static jboolean JNI_PeerConnectionFactory_StartInternalTracingCapture(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_event_tracing_filename) {
+ if (j_event_tracing_filename.is_null())
+ return false;
+
+ const char* init_string =
+ jni->GetStringUTFChars(j_event_tracing_filename.obj(), NULL);
+ RTC_LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+ bool ret = rtc::tracing::StartInternalCapture(init_string);
+ jni->ReleaseStringUTFChars(j_event_tracing_filename.obj(), init_string);
+ return ret;
+}
+
+static void JNI_PeerConnectionFactory_StopInternalTracingCapture(JNIEnv* jni) {
+ rtc::tracing::StopInternalCapture();
+}
+
+static void JNI_PeerConnectionFactory_ShutdownInternalTracer(JNIEnv* jni) {
+ rtc::tracing::ShutdownInternalTracer();
+}
+
+// The following parameters are optional:
+// `audio_device_module`, `jencoder_factory`, `jdecoder_factory`,
+// `audio_processor`, `fec_controller_factory`,
+// `network_state_predictor_factory`, `neteq_factory`.
+ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& jcontext,
+ const JavaParamRef<jobject>& joptions,
+ rtc::scoped_refptr<AudioDeviceModule> audio_device_module,
+ rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
+ rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
+ const JavaParamRef<jobject>& jencoder_factory,
+ const JavaParamRef<jobject>& jdecoder_factory,
+ rtc::scoped_refptr<AudioProcessing> audio_processor,
+ std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory,
+ std::unique_ptr<NetworkControllerFactoryInterface>
+ network_controller_factory,
+ std::unique_ptr<NetworkStatePredictorFactoryInterface>
+ network_state_predictor_factory,
+ std::unique_ptr<NetEqFactory> neteq_factory) {
+ // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+ // ThreadManager only WrapCurrentThread()s the thread where it is first
+ // created. Since the semantics around when auto-wrapping happens in
+ // webrtc/rtc_base/ are convoluted, we simply wrap here to avoid having to
+ // think about ramifications of auto-wrapping there.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ auto socket_server = std::make_unique<rtc::PhysicalSocketServer>();
+ auto network_thread = std::make_unique<rtc::Thread>(socket_server.get());
+ network_thread->SetName("network_thread", nullptr);
+ RTC_CHECK(network_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
+ worker_thread->SetName("worker_thread", nullptr);
+ RTC_CHECK(worker_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
+  signaling_thread->SetName("signaling_thread", nullptr);
+ RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";
+
+ const absl::optional<PeerConnectionFactoryInterface::Options> options =
+ JavaToNativePeerConnectionFactoryOptions(jni, joptions);
+
+ PeerConnectionFactoryDependencies dependencies;
+ // TODO(bugs.webrtc.org/13145): Also add socket_server.get() to the
+ // dependencies.
+ dependencies.network_thread = network_thread.get();
+ dependencies.worker_thread = worker_thread.get();
+ dependencies.signaling_thread = signaling_thread.get();
+ dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
+ dependencies.call_factory = CreateCallFactory();
+ dependencies.event_log_factory = std::make_unique<RtcEventLogFactory>(
+ dependencies.task_queue_factory.get());
+ dependencies.fec_controller_factory = std::move(fec_controller_factory);
+ dependencies.network_controller_factory =
+ std::move(network_controller_factory);
+ dependencies.network_state_predictor_factory =
+ std::move(network_state_predictor_factory);
+ dependencies.neteq_factory = std::move(neteq_factory);
+ if (!(options && options->disable_network_monitor)) {
+ dependencies.network_monitor_factory =
+ std::make_unique<AndroidNetworkMonitorFactory>();
+ }
+
+ cricket::MediaEngineDependencies media_dependencies;
+ media_dependencies.task_queue_factory = dependencies.task_queue_factory.get();
+ media_dependencies.adm = std::move(audio_device_module);
+ media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory);
+ media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory);
+ media_dependencies.audio_processing = std::move(audio_processor);
+ media_dependencies.video_encoder_factory =
+ absl::WrapUnique(CreateVideoEncoderFactory(jni, jencoder_factory));
+ media_dependencies.video_decoder_factory =
+ absl::WrapUnique(CreateVideoDecoderFactory(jni, jdecoder_factory));
+ dependencies.media_engine =
+ cricket::CreateMediaEngine(std::move(media_dependencies));
+
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory =
+ CreateModularPeerConnectionFactory(std::move(dependencies));
+
+ RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+ "WebRTC/libjingle init likely failed on this device";
+ // TODO(honghaiz): Maybe put the options as the argument of
+ // CreatePeerConnectionFactory.
+ if (options)
+ factory->SetOptions(*options);
+
+ return NativeToScopedJavaPeerConnectionFactory(
+ jni, factory, std::move(socket_server), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread));
+}
+
+static ScopedJavaLocalRef<jobject>
+JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& jcontext,
+ const JavaParamRef<jobject>& joptions,
+ jlong native_audio_device_module,
+ jlong native_audio_encoder_factory,
+ jlong native_audio_decoder_factory,
+ const JavaParamRef<jobject>& jencoder_factory,
+ const JavaParamRef<jobject>& jdecoder_factory,
+ jlong native_audio_processor,
+ jlong native_fec_controller_factory,
+ jlong native_network_controller_factory,
+ jlong native_network_state_predictor_factory,
+ jlong native_neteq_factory) {
+ rtc::scoped_refptr<AudioProcessing> audio_processor(
+ reinterpret_cast<AudioProcessing*>(native_audio_processor));
+ return CreatePeerConnectionFactoryForJava(
+ jni, jcontext, joptions,
+ rtc::scoped_refptr<AudioDeviceModule>(
+ reinterpret_cast<AudioDeviceModule*>(native_audio_device_module)),
+ TakeOwnershipOfRefPtr<AudioEncoderFactory>(native_audio_encoder_factory),
+ TakeOwnershipOfRefPtr<AudioDecoderFactory>(native_audio_decoder_factory),
+ jencoder_factory, jdecoder_factory,
+ audio_processor ? audio_processor : CreateAudioProcessing(),
+ TakeOwnershipOfUniquePtr<FecControllerFactoryInterface>(
+ native_fec_controller_factory),
+ TakeOwnershipOfUniquePtr<NetworkControllerFactoryInterface>(
+ native_network_controller_factory),
+ TakeOwnershipOfUniquePtr<NetworkStatePredictorFactoryInterface>(
+ native_network_state_predictor_factory),
+ TakeOwnershipOfUniquePtr<NetEqFactory>(native_neteq_factory));
+}
+
+static void JNI_PeerConnectionFactory_FreeFactory(JNIEnv*,
+ jlong j_p) {
+ delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+ field_trial::InitFieldTrialsFromString(nullptr);
+ GetStaticObjects().field_trials_init_string = nullptr;
+}
+
+static jlong JNI_PeerConnectionFactory_CreateLocalMediaStream(
+ JNIEnv* jni,
+ jlong native_factory,
+ const JavaParamRef<jstring>& label) {
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ PeerConnectionFactoryFromJava(native_factory)
+ ->CreateLocalMediaStream(JavaToStdString(jni, label)));
+ return jlongFromPointer(stream.release());
+}
+
+static jlong JNI_PeerConnectionFactory_CreateAudioSource(
+ JNIEnv* jni,
+ jlong native_factory,
+ const JavaParamRef<jobject>& j_constraints) {
+ std::unique_ptr<MediaConstraints> constraints =
+ JavaToNativeMediaConstraints(jni, j_constraints);
+ cricket::AudioOptions options;
+ CopyConstraintsIntoAudioOptions(constraints.get(), &options);
+ rtc::scoped_refptr<AudioSourceInterface> source(
+ PeerConnectionFactoryFromJava(native_factory)
+ ->CreateAudioSource(options));
+ return jlongFromPointer(source.release());
+}
+
+jlong JNI_PeerConnectionFactory_CreateAudioTrack(
+ JNIEnv* jni,
+ jlong native_factory,
+ const JavaParamRef<jstring>& id,
+ jlong native_source) {
+ rtc::scoped_refptr<AudioTrackInterface> track(
+ PeerConnectionFactoryFromJava(native_factory)
+ ->CreateAudioTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<AudioSourceInterface*>(native_source)));
+ return jlongFromPointer(track.release());
+}
+
+static jboolean JNI_PeerConnectionFactory_StartAecDump(
+ JNIEnv* jni,
+ jlong native_factory,
+ jint file_descriptor,
+ jint filesize_limit_bytes) {
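+  // fdopen() takes ownership of the descriptor on success; on failure we
+  // close it ourselves so it does not leak.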
+ FILE* f = fdopen(file_descriptor, "wb");
+ if (!f) {
+ close(file_descriptor);
+ return false;
+ }
+
+ return PeerConnectionFactoryFromJava(native_factory)
+ ->StartAecDump(f, filesize_limit_bytes);
+}
+
+static void JNI_PeerConnectionFactory_StopAecDump(JNIEnv* jni,
+ jlong native_factory) {
+ PeerConnectionFactoryFromJava(native_factory)->StopAecDump();
+}
+
+static jlong JNI_PeerConnectionFactory_CreatePeerConnection(
+ JNIEnv* jni,
+ jlong factory,
+ const JavaParamRef<jobject>& j_rtc_config,
+ const JavaParamRef<jobject>& j_constraints,
+ jlong observer_p,
+ const JavaParamRef<jobject>& j_sslCertificateVerifier) {
+ std::unique_ptr<PeerConnectionObserver> observer(
+ reinterpret_cast<PeerConnectionObserver*>(observer_p));
+
+ PeerConnectionInterface::RTCConfiguration rtc_config(
+ PeerConnectionInterface::RTCConfigurationType::kAggressive);
+ JavaToNativeRTCConfiguration(jni, j_rtc_config, &rtc_config);
+
+ if (rtc_config.certificates.empty()) {
+ // Generate non-default certificate.
+ rtc::KeyType key_type = GetRtcConfigKeyType(jni, j_rtc_config);
+ if (key_type != rtc::KT_DEFAULT) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificateGenerator::GenerateCertificate(
+ rtc::KeyParams(key_type), absl::nullopt);
+ if (!certificate) {
+ RTC_LOG(LS_ERROR) << "Failed to generate certificate. KeyType: "
+ << key_type;
+ return 0;
+ }
+ rtc_config.certificates.push_back(certificate);
+ }
+ }
+
+ std::unique_ptr<MediaConstraints> constraints;
+ if (!j_constraints.is_null()) {
+ constraints = JavaToNativeMediaConstraints(jni, j_constraints);
+ CopyConstraintsIntoRtcConfiguration(constraints.get(), &rtc_config);
+ }
+
+ PeerConnectionDependencies peer_connection_dependencies(observer.get());
+ if (!j_sslCertificateVerifier.is_null()) {
+ peer_connection_dependencies.tls_cert_verifier =
+ std::make_unique<SSLCertificateVerifierWrapper>(
+ jni, j_sslCertificateVerifier);
+ }
+
+ auto result =
+ PeerConnectionFactoryFromJava(factory)->CreatePeerConnectionOrError(
+ rtc_config, std::move(peer_connection_dependencies));
+ if (!result.ok())
+ return 0;
+
+ return jlongFromPointer(new OwnedPeerConnection(
+ result.MoveValue(), std::move(observer), std::move(constraints)));
+}
+
+static jlong JNI_PeerConnectionFactory_CreateVideoSource(
+ JNIEnv* jni,
+ jlong native_factory,
+ jboolean is_screencast,
+ jboolean align_timestamps) {
+ OwnedFactoryAndThreads* factory =
+ reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+ return jlongFromPointer(CreateVideoSource(jni, factory->signaling_thread(),
+ factory->worker_thread(),
+ is_screencast, align_timestamps));
+}
+
+static jlong JNI_PeerConnectionFactory_CreateVideoTrack(
+ JNIEnv* jni,
+ jlong native_factory,
+ const JavaParamRef<jstring>& id,
+ jlong native_source) {
+ rtc::scoped_refptr<VideoTrackInterface> track =
+ PeerConnectionFactoryFromJava(native_factory)
+ ->CreateVideoTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<VideoTrackSourceInterface*>(native_source));
+ return jlongFromPointer(track.release());
+}
+
+static jlong JNI_PeerConnectionFactory_GetNativePeerConnectionFactory(
+ JNIEnv* jni,
+ jlong native_factory) {
+ return jlongFromPointer(PeerConnectionFactoryFromJava(native_factory));
+}
+
+static void JNI_PeerConnectionFactory_InjectLoggable(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_logging,
+ jint nativeSeverity) {
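+  // Route native WebRTC log messages to the injected Java Loggable and
+  // silence the default debug-log output.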
+ std::unique_ptr<JNILogSink>& jni_log_sink = GetStaticObjects().jni_log_sink;
+
+ // If there is already a LogSink, remove it from LogMessage.
+ if (jni_log_sink) {
+ rtc::LogMessage::RemoveLogToStream(jni_log_sink.get());
+ }
+ jni_log_sink = std::make_unique<JNILogSink>(jni, j_logging);
+ rtc::LogMessage::AddLogToStream(
+ jni_log_sink.get(), static_cast<rtc::LoggingSeverity>(nativeSeverity));
+ rtc::LogMessage::LogToDebug(rtc::LS_NONE);
+}
+
+static void JNI_PeerConnectionFactory_DeleteLoggable(JNIEnv* jni) {
+ std::unique_ptr<JNILogSink>& jni_log_sink = GetStaticObjects().jni_log_sink;
+
+ if (jni_log_sink) {
+ rtc::LogMessage::RemoveLogToStream(jni_log_sink.get());
+ jni_log_sink.reset();
+ }
+}
+
+static void JNI_PeerConnectionFactory_PrintStackTrace(JNIEnv* env, jint tid) {
+ RTC_LOG(LS_WARNING) << StackTraceToString(GetStackTrace(tid));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h
new file mode 100644
index 0000000000..b5d5e5dcb7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
+#define SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
+
+#include <jni.h>
+
+#include <memory>
+
+#include "api/peer_connection_interface.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace jni {
+
+// Creates a Java PeerConnectionFactory wrapping the specified `pcf`.
+jobject NativeToJavaPeerConnectionFactory(
+ JNIEnv* jni,
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc
new file mode 100644
index 0000000000..f305324ac8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtc_certificate.h"
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+
+#include "rtc_base/ref_count.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "sdk/android/generated_peerconnection_jni/RtcCertificatePem_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::RTCCertificatePEM JavaToNativeRTCCertificatePEM(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtc_certificate) {
+ ScopedJavaLocalRef<jstring> privatekey_field =
+ Java_RtcCertificatePem_getPrivateKey(jni, j_rtc_certificate);
+ ScopedJavaLocalRef<jstring> certificate_field =
+ Java_RtcCertificatePem_getCertificate(jni, j_rtc_certificate);
+ return rtc::RTCCertificatePEM(JavaToNativeString(jni, privatekey_field),
+ JavaToNativeString(jni, certificate_field));
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRTCCertificatePEM(
+ JNIEnv* jni,
+ const rtc::RTCCertificatePEM& certificate) {
+ return Java_RtcCertificatePem_Constructor(
+ jni, NativeToJavaString(jni, certificate.private_key()),
+ NativeToJavaString(jni, certificate.certificate()));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_RtcCertificatePem_GenerateCertificate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_key_type,
+ jlong j_expires) {
+ rtc::KeyType key_type = JavaToNativeKeyType(jni, j_key_type);
+  const uint64_t expires = static_cast<uint64_t>(j_expires);
+  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+      rtc::RTCCertificateGenerator::GenerateCertificate(
+          rtc::KeyParams(key_type), expires);
+  // GenerateCertificate() can fail; return null to Java instead of
+  // dereferencing a null certificate.
+  if (!certificate)
+    return nullptr;
+  return NativeToJavaRTCCertificatePEM(jni, certificate->ToPEM());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h
new file mode 100644
index 0000000000..91a413cd37
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_
+
+#include "rtc_base/ref_count.h"
+#include "rtc_base/rtc_certificate.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::RTCCertificatePEM JavaToNativeRTCCertificatePEM(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtc_certificate);
+
+ScopedJavaLocalRef<jobject> NativeToJavaRTCCertificatePEM(
+ JNIEnv* env,
+ const rtc::RTCCertificatePEM& certificate);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
new file mode 100644
index 0000000000..b8eae739f9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h"
+
+#include <string>
+#include <vector>
+
+#include "rtc_base/string_encode.h"
+#include "sdk/android/generated_external_classes_jni/BigInteger_jni.h"
+#include "sdk/android/generated_peerconnection_jni/RTCStatsCollectorCallback_jni.h"
+#include "sdk/android/generated_peerconnection_jni/RTCStatsReport_jni.h"
+#include "sdk/android/generated_peerconnection_jni/RTCStats_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> NativeToJavaBigInteger(JNIEnv* env, uint64_t u) {
+ return JNI_BigInteger::Java_BigInteger_ConstructorJMBI_JLS(
+ env, NativeToJavaString(env, rtc::ToString(u)));
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaBigIntegerArray(
+ JNIEnv* env,
+ const std::vector<uint64_t>& container) {
+ return NativeToJavaObjectArray(
+ env, container, java_math_BigInteger_clazz(env), &NativeToJavaBigInteger);
+}
+
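+// Maps a native stats member to a boxed Java object: bool and int32 map to
+// Boolean and Integer, uint32 and int64 map to Long, uint64 goes through
+// BigInteger (Java has no unsigned 64-bit type), double and string map to
+// Double and String, and sequence/map members map to arrays and Maps of the
+// corresponding boxed types.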
+ScopedJavaLocalRef<jobject> MemberToJava(
+ JNIEnv* env,
+ const RTCStatsMemberInterface& member) {
+ switch (member.type()) {
+ case RTCStatsMemberInterface::kBool:
+ return NativeToJavaBoolean(env, *member.cast_to<RTCStatsMember<bool>>());
+
+ case RTCStatsMemberInterface::kInt32:
+ return NativeToJavaInteger(env,
+ *member.cast_to<RTCStatsMember<int32_t>>());
+
+ case RTCStatsMemberInterface::kUint32:
+ return NativeToJavaLong(env, *member.cast_to<RTCStatsMember<uint32_t>>());
+
+ case RTCStatsMemberInterface::kInt64:
+ return NativeToJavaLong(env, *member.cast_to<RTCStatsMember<int64_t>>());
+
+ case RTCStatsMemberInterface::kUint64:
+ return NativeToJavaBigInteger(
+ env, *member.cast_to<RTCStatsMember<uint64_t>>());
+
+ case RTCStatsMemberInterface::kDouble:
+ return NativeToJavaDouble(env, *member.cast_to<RTCStatsMember<double>>());
+
+ case RTCStatsMemberInterface::kString:
+ return NativeToJavaString(env,
+ *member.cast_to<RTCStatsMember<std::string>>());
+
+ case RTCStatsMemberInterface::kSequenceBool:
+ return NativeToJavaBooleanArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<bool>>>());
+
+ case RTCStatsMemberInterface::kSequenceInt32:
+ return NativeToJavaIntegerArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<int32_t>>>());
+
+ case RTCStatsMemberInterface::kSequenceUint32: {
+ const std::vector<uint32_t>& v =
+ *member.cast_to<RTCStatsMember<std::vector<uint32_t>>>();
+ return NativeToJavaLongArray(env,
+ std::vector<int64_t>(v.begin(), v.end()));
+ }
+ case RTCStatsMemberInterface::kSequenceInt64:
+ return NativeToJavaLongArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<int64_t>>>());
+
+ case RTCStatsMemberInterface::kSequenceUint64:
+ return NativeToJavaBigIntegerArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<uint64_t>>>());
+
+ case RTCStatsMemberInterface::kSequenceDouble:
+ return NativeToJavaDoubleArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<double>>>());
+
+ case RTCStatsMemberInterface::kSequenceString:
+ return NativeToJavaStringArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<std::string>>>());
+
+ case RTCStatsMemberInterface::kMapStringUint64:
+ return NativeToJavaMap(
+ env,
+ *member.cast_to<RTCStatsMember<std::map<std::string, uint64_t>>>(),
+ [](JNIEnv* env, const auto& entry) {
+ return std::make_pair(NativeToJavaString(env, entry.first),
+ NativeToJavaBigInteger(env, entry.second));
+ });
+
+ case RTCStatsMemberInterface::kMapStringDouble:
+ return NativeToJavaMap(
+ env, *member.cast_to<RTCStatsMember<std::map<std::string, double>>>(),
+ [](JNIEnv* env, const auto& entry) {
+ return std::make_pair(NativeToJavaString(env, entry.first),
+ NativeToJavaDouble(env, entry.second));
+ });
+ }
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtcStats(JNIEnv* env,
+ const RTCStats& stats) {
+ JavaMapBuilder builder(env);
+ for (auto* const member : stats.Members()) {
+ if (!member->is_defined())
+ continue;
+ builder.put(NativeToJavaString(env, member->name()),
+ MemberToJava(env, *member));
+ }
+ return Java_RTCStats_create(
+ env, stats.timestamp_us(), NativeToJavaString(env, stats.type()),
+ NativeToJavaString(env, stats.id()), builder.GetJavaMap());
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtcStatsReport(
+ JNIEnv* env,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ ScopedJavaLocalRef<jobject> j_stats_map =
+ NativeToJavaMap(env, *report, [](JNIEnv* env, const RTCStats& stats) {
+ return std::make_pair(NativeToJavaString(env, stats.id()),
+ NativeToJavaRtcStats(env, stats));
+ });
+ return Java_RTCStatsReport_create(env, report->timestamp_us(), j_stats_map);
+}
+
+} // namespace
+
+RTCStatsCollectorCallbackWrapper::RTCStatsCollectorCallbackWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_callback)
+ : j_callback_global_(jni, j_callback) {}
+
+RTCStatsCollectorCallbackWrapper::~RTCStatsCollectorCallbackWrapper() = default;
+
+void RTCStatsCollectorCallbackWrapper::OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ Java_RTCStatsCollectorCallback_onStatsDelivered(
+ jni, j_callback_global_, NativeToJavaRtcStatsReport(jni, report));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
new file mode 100644
index 0000000000..50fad1844d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_
+
+#include <jni.h>
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Adapter between the C++ RTCStatsCollectorCallback interface and the Java
+// RTCStatsCollectorCallback interface. Wraps an instance of the Java
+// interface and dispatches the delivered stats report from C++ back to Java.
+class RTCStatsCollectorCallbackWrapper : public RTCStatsCollectorCallback {
+ public:
+ RTCStatsCollectorCallbackWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& j_callback);
+ ~RTCStatsCollectorCallbackWrapper() override;
+
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_callback_global_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc
new file mode 100644
index 0000000000..4bd9ee0e1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtp_parameters.h"
+
+#include "sdk/android/generated_peerconnection_jni/RtpParameters_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+webrtc::DegradationPreference JavaToNativeDegradationPreference(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_degradation_preference) {
+ std::string enum_name = GetJavaEnumName(jni, j_degradation_preference);
+
+ if (enum_name == "DISABLED")
+ return webrtc::DegradationPreference::DISABLED;
+
+ if (enum_name == "MAINTAIN_FRAMERATE")
+ return webrtc::DegradationPreference::MAINTAIN_FRAMERATE;
+
+ if (enum_name == "MAINTAIN_RESOLUTION")
+ return webrtc::DegradationPreference::MAINTAIN_RESOLUTION;
+
+ if (enum_name == "BALANCED")
+ return webrtc::DegradationPreference::BALANCED;
+
+ RTC_CHECK(false) << "Unexpected DegradationPreference enum_name "
+ << enum_name;
+ return webrtc::DegradationPreference::DISABLED;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpEncodingParameter(
+ JNIEnv* env,
+ const RtpEncodingParameters& encoding) {
+ return Java_Encoding_Constructor(
+ env, NativeToJavaString(env, encoding.rid), encoding.active,
+ encoding.bitrate_priority, static_cast<int>(encoding.network_priority),
+ NativeToJavaInteger(env, encoding.max_bitrate_bps),
+ NativeToJavaInteger(env, encoding.min_bitrate_bps),
+ NativeToJavaInteger(env, encoding.max_framerate),
+ NativeToJavaInteger(env, encoding.num_temporal_layers),
+ NativeToJavaDouble(env, encoding.scale_resolution_down_by),
+ encoding.ssrc ? NativeToJavaLong(env, *encoding.ssrc) : nullptr,
+ encoding.adaptive_ptime);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpCodecParameter(
+ JNIEnv* env,
+ const RtpCodecParameters& codec) {
+ return Java_Codec_Constructor(env, codec.payload_type,
+ NativeToJavaString(env, codec.name),
+ NativeToJavaMediaType(env, codec.kind),
+ NativeToJavaInteger(env, codec.clock_rate),
+ NativeToJavaInteger(env, codec.num_channels),
+ NativeToJavaStringMap(env, codec.parameters));
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpRtcpParameters(
+ JNIEnv* env,
+ const RtcpParameters& rtcp) {
+ return Java_Rtcp_Constructor(env, NativeToJavaString(env, rtcp.cname),
+ rtcp.reduced_size);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpHeaderExtensionParameter(
+ JNIEnv* env,
+ const RtpExtension& extension) {
+ return Java_HeaderExtension_Constructor(
+ env, NativeToJavaString(env, extension.uri), extension.id,
+ extension.encrypt);
+}
+
+} // namespace
+
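+// Converts a Java RtpParameters.Encoding into its native counterpart. Fields
+// that are nullable on the Java side (rid, ssrc, and the boxed numeric
+// fields) map to optional native members, which are left unset when the Java
+// value is null.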
+RtpEncodingParameters JavaToNativeRtpEncodingParameters(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoding_parameters) {
+ RtpEncodingParameters encoding;
+ ScopedJavaLocalRef<jstring> j_rid =
+ Java_Encoding_getRid(jni, j_encoding_parameters);
+ if (!IsNull(jni, j_rid)) {
+ encoding.rid = JavaToNativeString(jni, j_rid);
+ }
+ encoding.active = Java_Encoding_getActive(jni, j_encoding_parameters);
+ ScopedJavaLocalRef<jobject> j_max_bitrate =
+ Java_Encoding_getMaxBitrateBps(jni, j_encoding_parameters);
+ encoding.bitrate_priority =
+ Java_Encoding_getBitratePriority(jni, j_encoding_parameters);
+ encoding.network_priority = static_cast<webrtc::Priority>(
+ Java_Encoding_getNetworkPriority(jni, j_encoding_parameters));
+ encoding.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max_bitrate);
+ ScopedJavaLocalRef<jobject> j_min_bitrate =
+ Java_Encoding_getMinBitrateBps(jni, j_encoding_parameters);
+ encoding.min_bitrate_bps = JavaToNativeOptionalInt(jni, j_min_bitrate);
+ ScopedJavaLocalRef<jobject> j_max_framerate =
+ Java_Encoding_getMaxFramerate(jni, j_encoding_parameters);
+ encoding.max_framerate = JavaToNativeOptionalInt(jni, j_max_framerate);
+ ScopedJavaLocalRef<jobject> j_num_temporal_layers =
+ Java_Encoding_getNumTemporalLayers(jni, j_encoding_parameters);
+ encoding.num_temporal_layers =
+ JavaToNativeOptionalInt(jni, j_num_temporal_layers);
+ ScopedJavaLocalRef<jobject> j_scale_resolution_down_by =
+ Java_Encoding_getScaleResolutionDownBy(jni, j_encoding_parameters);
+ encoding.scale_resolution_down_by =
+ JavaToNativeOptionalDouble(jni, j_scale_resolution_down_by);
+ encoding.adaptive_ptime =
+ Java_Encoding_getAdaptivePTime(jni, j_encoding_parameters);
+ ScopedJavaLocalRef<jobject> j_ssrc =
+ Java_Encoding_getSsrc(jni, j_encoding_parameters);
+ if (!IsNull(jni, j_ssrc))
+ encoding.ssrc = JavaToNativeLong(jni, j_ssrc);
+ return encoding;
+}
+
+RtpParameters JavaToNativeRtpParameters(JNIEnv* jni,
+ const JavaRef<jobject>& j_parameters) {
+ RtpParameters parameters;
+
+ ScopedJavaLocalRef<jstring> j_transaction_id =
+ Java_RtpParameters_getTransactionId(jni, j_parameters);
+ parameters.transaction_id = JavaToNativeString(jni, j_transaction_id);
+
+ ScopedJavaLocalRef<jobject> j_degradation_preference =
+ Java_RtpParameters_getDegradationPreference(jni, j_parameters);
+ if (!IsNull(jni, j_degradation_preference)) {
+ parameters.degradation_preference =
+ JavaToNativeDegradationPreference(jni, j_degradation_preference);
+ }
+
+ ScopedJavaLocalRef<jobject> j_rtcp =
+ Java_RtpParameters_getRtcp(jni, j_parameters);
+ ScopedJavaLocalRef<jstring> j_rtcp_cname = Java_Rtcp_getCname(jni, j_rtcp);
+ jboolean j_rtcp_reduced_size = Java_Rtcp_getReducedSize(jni, j_rtcp);
+ parameters.rtcp.cname = JavaToNativeString(jni, j_rtcp_cname);
+ parameters.rtcp.reduced_size = j_rtcp_reduced_size;
+
+ ScopedJavaLocalRef<jobject> j_header_extensions =
+ Java_RtpParameters_getHeaderExtensions(jni, j_parameters);
+ for (const JavaRef<jobject>& j_header_extension :
+ Iterable(jni, j_header_extensions)) {
+ RtpExtension header_extension;
+ header_extension.uri = JavaToStdString(
+ jni, Java_HeaderExtension_getUri(jni, j_header_extension));
+ header_extension.id = Java_HeaderExtension_getId(jni, j_header_extension);
+ header_extension.encrypt =
+ Java_HeaderExtension_getEncrypted(jni, j_header_extension);
+ parameters.header_extensions.push_back(header_extension);
+ }
+
+ // Convert encodings.
+ ScopedJavaLocalRef<jobject> j_encodings =
+ Java_RtpParameters_getEncodings(jni, j_parameters);
+ for (const JavaRef<jobject>& j_encoding_parameters :
+ Iterable(jni, j_encodings)) {
+ RtpEncodingParameters encoding =
+ JavaToNativeRtpEncodingParameters(jni, j_encoding_parameters);
+ parameters.encodings.push_back(encoding);
+ }
+
+ // Convert codecs.
+ ScopedJavaLocalRef<jobject> j_codecs =
+ Java_RtpParameters_getCodecs(jni, j_parameters);
+ for (const JavaRef<jobject>& j_codec : Iterable(jni, j_codecs)) {
+ RtpCodecParameters codec;
+ codec.payload_type = Java_Codec_getPayloadType(jni, j_codec);
+ codec.name = JavaToStdString(jni, Java_Codec_getName(jni, j_codec));
+ codec.kind = JavaToNativeMediaType(jni, Java_Codec_getKind(jni, j_codec));
+ codec.clock_rate =
+ JavaToNativeOptionalInt(jni, Java_Codec_getClockRate(jni, j_codec));
+ codec.num_channels =
+ JavaToNativeOptionalInt(jni, Java_Codec_getNumChannels(jni, j_codec));
+ auto parameters_map =
+ JavaToNativeStringMap(jni, Java_Codec_getParameters(jni, j_codec));
+ codec.parameters.insert(parameters_map.begin(), parameters_map.end());
+ parameters.codecs.push_back(codec);
+ }
+ return parameters;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpParameters(
+ JNIEnv* env,
+ const RtpParameters& parameters) {
+ return Java_RtpParameters_Constructor(
+ env, NativeToJavaString(env, parameters.transaction_id),
+ parameters.degradation_preference.has_value()
+ ? Java_DegradationPreference_fromNativeIndex(
+ env, static_cast<int>(*parameters.degradation_preference))
+ : nullptr,
+ NativeToJavaRtpRtcpParameters(env, parameters.rtcp),
+ NativeToJavaList(env, parameters.header_extensions,
+ &NativeToJavaRtpHeaderExtensionParameter),
+ NativeToJavaList(env, parameters.encodings,
+ &NativeToJavaRtpEncodingParameter),
+ NativeToJavaList(env, parameters.codecs, &NativeToJavaRtpCodecParameter));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h
new file mode 100644
index 0000000000..3bcd343fae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_
+
+#include <jni.h>
+
+#include "api/rtp_parameters.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+RtpEncodingParameters JavaToNativeRtpEncodingParameters(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_encoding_parameters);
+
+RtpParameters JavaToNativeRtpParameters(JNIEnv* jni,
+ const JavaRef<jobject>& j_parameters);
+ScopedJavaLocalRef<jobject> NativeToJavaRtpParameters(
+ JNIEnv* jni,
+ const RtpParameters& parameters);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc
new file mode 100644
index 0000000000..7a3600b424
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtp_receiver.h"
+
+#include "sdk/android/generated_peerconnection_jni/RtpReceiver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/rtp_parameters.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+// Adapter between the C++ RtpReceiverObserverInterface and the Java
+// RtpReceiver.Observer interface. Wraps an instance of the Java interface and
+// dispatches C++ callbacks to Java.
+class RtpReceiverObserverJni : public RtpReceiverObserverInterface {
+ public:
+ RtpReceiverObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer)
+ : j_observer_global_(env, j_observer) {}
+
+ ~RtpReceiverObserverJni() override = default;
+
+ void OnFirstPacketReceived(cricket::MediaType media_type) override {
+ JNIEnv* const env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onFirstPacketReceived(env, j_observer_global_,
+ NativeToJavaMediaType(env, media_type));
+ }
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+} // namespace
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpReceiver(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+  // The receiver is now owned by the Java object and will be freed from
+  // there.
+ return Java_RtpReceiver_Constructor(env,
+ jlongFromPointer(receiver.release()));
+}
+
+JavaRtpReceiverGlobalOwner::JavaRtpReceiverGlobalOwner(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_receiver)
+ : j_receiver_(env, j_receiver) {}
+
+JavaRtpReceiverGlobalOwner::JavaRtpReceiverGlobalOwner(
+ JavaRtpReceiverGlobalOwner&& other) = default;
+
+JavaRtpReceiverGlobalOwner::~JavaRtpReceiverGlobalOwner() {
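+  // j_receiver_ is null if this owner has been moved from; dispose at most
+  // once.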
+ if (j_receiver_.obj())
+ Java_RtpReceiver_dispose(AttachCurrentThreadIfNeeded(), j_receiver_);
+}
+
+static jlong JNI_RtpReceiver_GetTrack(JNIEnv* jni,
+ jlong j_rtp_receiver_pointer) {
+  // The returned reference is taken over by the Java MediaStreamTrack
+  // object, which shares ownership of the native track.
+ return jlongFromPointer(
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->track()
+ .release());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_RtpReceiver_GetParameters(
+ JNIEnv* jni,
+ jlong j_rtp_receiver_pointer) {
+ RtpParameters parameters =
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->GetParameters();
+ return NativeToJavaRtpParameters(jni, parameters);
+}
+
+static ScopedJavaLocalRef<jstring> JNI_RtpReceiver_GetId(
+ JNIEnv* jni,
+ jlong j_rtp_receiver_pointer) {
+ return NativeToJavaString(
+ jni,
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->id());
+}
+
+static jlong JNI_RtpReceiver_SetObserver(
+ JNIEnv* jni,
+ jlong j_rtp_receiver_pointer,
+ const JavaParamRef<jobject>& j_observer) {
+ RtpReceiverObserverJni* rtpReceiverObserver =
+ new RtpReceiverObserverJni(jni, j_observer);
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->SetObserver(rtpReceiverObserver);
+ return jlongFromPointer(rtpReceiverObserver);
+}
+
+static void JNI_RtpReceiver_UnsetObserver(JNIEnv* jni,
+ jlong j_rtp_receiver_pointer,
+ jlong j_observer_pointer) {
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->SetObserver(nullptr);
+  // Deleting a null pointer is a no-op, so no explicit check is needed.
+  delete reinterpret_cast<RtpReceiverObserverJni*>(j_observer_pointer);
+}
+
+static void JNI_RtpReceiver_SetFrameDecryptor(JNIEnv* jni,
+                                              jlong j_rtp_receiver_pointer,
+                                              jlong j_frame_decryptor_pointer) {
+  reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->SetFrameDecryptor(rtc::scoped_refptr<FrameDecryptorInterface>(
+ reinterpret_cast<FrameDecryptorInterface*>(
+ j_frame_decryptor_pointer)));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h
new file mode 100644
index 0000000000..ccef44b040
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_
+
+#include <jni.h>
+
+#include "api/rtp_receiver_interface.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpReceiver(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpReceiverInterface> receiver);
+
+// Takes ownership of the passed `j_receiver` and stores it as a global
+// reference. Will call dispose() in the dtor.
+class JavaRtpReceiverGlobalOwner {
+ public:
+ JavaRtpReceiverGlobalOwner(JNIEnv* env, const JavaRef<jobject>& j_receiver);
+ JavaRtpReceiverGlobalOwner(JavaRtpReceiverGlobalOwner&& other);
+ ~JavaRtpReceiverGlobalOwner();
+
+ private:
+ ScopedJavaGlobalRef<jobject> j_receiver_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc
new file mode 100644
index 0000000000..233a353654
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtp_sender.h"
+
+#include "sdk/android/generated_peerconnection_jni/RtpSender_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/rtp_parameters.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpSender(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpSenderInterface> sender) {
+ if (!sender)
+ return nullptr;
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+ return Java_RtpSender_Constructor(env, jlongFromPointer(sender.release()));
+}
+
+static jboolean JNI_RtpSender_SetTrack(JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ jlong j_track_pointer) {
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
+}
+
+jlong JNI_RtpSender_GetTrack(JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ // MediaStreamTrack will have shared ownership by the MediaStreamTrack Java
+ // object.
+ return jlongFromPointer(
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->track()
+ .release());
+}
+
+static void JNI_RtpSender_SetStreams(
+ JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ const JavaParamRef<jobject>& j_stream_labels) {
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetStreams(JavaListToNativeVector<std::string, jstring>(
+ jni, j_stream_labels, &JavaToNativeString));
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpSender_GetStreams(
+ JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ ScopedJavaLocalRef<jstring> (*convert_function)(JNIEnv*, const std::string&) =
+ &NativeToJavaString;
+ return NativeToJavaList(
+ jni,
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->stream_ids(),
+ convert_function);
+}
+
+jlong JNI_RtpSender_GetDtmfSender(JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ return jlongFromPointer(
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->GetDtmfSender()
+ .release());
+}
+
+jboolean JNI_RtpSender_SetParameters(
+ JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ const JavaParamRef<jobject>& j_parameters) {
+ if (IsNull(jni, j_parameters)) {
+ return false;
+ }
+ RtpParameters parameters = JavaToNativeRtpParameters(jni, j_parameters);
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetParameters(parameters)
+ .ok();
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpSender_GetParameters(
+ JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ RtpParameters parameters =
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->GetParameters();
+ return NativeToJavaRtpParameters(jni, parameters);
+}
+
+ScopedJavaLocalRef<jstring> JNI_RtpSender_GetId(JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ return NativeToJavaString(
+ jni, reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->id());
+}
+
+static void JNI_RtpSender_SetFrameEncryptor(JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ jlong j_frame_encryptor_pointer) {
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetFrameEncryptor(rtc::scoped_refptr<FrameEncryptorInterface>(
+ reinterpret_cast<FrameEncryptorInterface*>(
+ j_frame_encryptor_pointer)));
+}
+
+} // namespace jni
+} // namespace webrtc
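
NativeToJavaRtpSender releases the scoped_refptr into a raw pointer packed into a jlong, and Java hands that same jlong back to every JNI_RtpSender_* call, where reinterpret_cast recovers the object. A minimal sketch of the handle round trip, with a hypothetical manually refcounted Thing in place of RtpSenderInterface and the rtc::scoped_refptr machinery:

    #include <cstdint>

    // Hypothetical manually refcounted type.
    struct Thing {
      int refs = 1;
      void AddRef() { ++refs; }
      void Release() {
        if (--refs == 0) delete this;
      }
    };

    // Pack the raw pointer into a 64-bit handle for Java, as
    // jlongFromPointer(sender.release()) does above.
    int64_t ToHandle(Thing* thing) {
      return reinterpret_cast<int64_t>(thing);
    }

    // Recover the pointer when Java passes the handle back into a JNI call.
    Thing* FromHandle(int64_t handle) {
      return reinterpret_cast<Thing*>(handle);
    }

    // The Java wrapper's dispose() eventually drops the reference that was
    // released into the handle when the object crossed the boundary.
    void DisposeHandle(int64_t handle) {
      FromHandle(handle)->Release();
    }

The invariant is that dispose() runs exactly once per handle: a second call would double-release, and no call at all leaks the object.
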
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h
new file mode 100644
index 0000000000..d782ca915f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_
+
+#include <jni.h>
+
+#include "api/rtp_sender_interface.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpSender(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpSenderInterface> sender);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc
new file mode 100644
index 0000000000..1d468461f1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtp_transceiver.h"
+
+#include <string>
+
+#include "sdk/android/generated_peerconnection_jni/RtpTransceiver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/rtp_parameters.h"
+#include "sdk/android/src/jni/pc/rtp_receiver.h"
+#include "sdk/android/src/jni/pc/rtp_sender.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiverDirection(
+ JNIEnv* jni,
+ RtpTransceiverDirection rtp_transceiver_direction) {
+ return Java_RtpTransceiverDirection_fromNativeIndex(
+ jni, static_cast<int>(rtp_transceiver_direction));
+}
+
+} // namespace
+
+RtpTransceiverInit JavaToNativeRtpTransceiverInit(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_init) {
+ RtpTransceiverInit init;
+
+ // Convert the direction.
+ init.direction = static_cast<RtpTransceiverDirection>(
+ Java_RtpTransceiverInit_getDirectionNativeIndex(jni, j_init));
+
+ // Convert the stream ids.
+ ScopedJavaLocalRef<jobject> j_stream_ids =
+ Java_RtpTransceiverInit_getStreamIds(jni, j_init);
+ init.stream_ids = JavaListToNativeVector<std::string, jstring>(
+ jni, j_stream_ids, &JavaToNativeString);
+
+ // Convert the send encodings.
+ ScopedJavaLocalRef<jobject> j_send_encodings =
+ Java_RtpTransceiverInit_getSendEncodings(jni, j_init);
+ init.send_encodings = JavaListToNativeVector<RtpEncodingParameters, jobject>(
+ jni, j_send_encodings, &JavaToNativeRtpEncodingParameters);
+ return init;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiver(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+ if (!transceiver) {
+ return nullptr;
+ }
+ // Transceiver will now have shared ownership by the Java object.
+ return Java_RtpTransceiver_Constructor(
+ env, jlongFromPointer(transceiver.release()));
+}
+
+JavaRtpTransceiverGlobalOwner::JavaRtpTransceiverGlobalOwner(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_transceiver)
+ : j_transceiver_(env, j_transceiver) {}
+
+JavaRtpTransceiverGlobalOwner::JavaRtpTransceiverGlobalOwner(
+ JavaRtpTransceiverGlobalOwner&& other) = default;
+
+JavaRtpTransceiverGlobalOwner::~JavaRtpTransceiverGlobalOwner() {
+ if (j_transceiver_.obj()) {
+ Java_RtpTransceiver_dispose(AttachCurrentThreadIfNeeded(), j_transceiver_);
+ }
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetMediaType(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return NativeToJavaMediaType(
+ jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->media_type());
+}
+
+ScopedJavaLocalRef<jstring> JNI_RtpTransceiver_GetMid(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ absl::optional<std::string> mid =
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->mid();
+ return NativeToJavaString(jni, mid);
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetSender(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return NativeToJavaRtpSender(
+ jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->sender());
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetReceiver(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return NativeToJavaRtpReceiver(
+ jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->receiver());
+}
+
+jboolean JNI_RtpTransceiver_Stopped(JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->stopped();
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_Direction(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return NativeToJavaRtpTransceiverDirection(
+ jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->direction());
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_CurrentDirection(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ absl::optional<RtpTransceiverDirection> direction =
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->current_direction();
+ return direction ? NativeToJavaRtpTransceiverDirection(jni, *direction)
+ : nullptr;
+}
+
+void JNI_RtpTransceiver_StopInternal(JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->StopInternal();
+}
+
+void JNI_RtpTransceiver_StopStandard(JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->StopStandard();
+}
+
+jboolean JNI_RtpTransceiver_SetDirection(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer,
+ const base::android::JavaParamRef<jobject>& j_rtp_transceiver_direction) {
+ if (IsNull(jni, j_rtp_transceiver_direction)) {
+ return false;
+ }
+ RtpTransceiverDirection direction = static_cast<RtpTransceiverDirection>(
+ Java_RtpTransceiverDirection_getNativeIndex(jni,
+ j_rtp_transceiver_direction));
+ webrtc::RTCError error =
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->SetDirectionWithError(direction);
+ if (!error.ok()) {
+ RTC_LOG(LS_WARNING) << "SetDirection failed, code "
+ << ToString(error.type()) << ", message "
+ << error.message();
+ }
+ return error.ok();
+}
+
+} // namespace jni
+} // namespace webrtc
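
JNI_RtpTransceiver_SetDirection collapses the RTCError into a jboolean for Java and logs the error type and message on the native side, since only the ok/failed bit crosses the boundary. A minimal sketch of that convention, with a hypothetical Status type standing in for webrtc::RTCError:

    #include <iostream>
    #include <string>

    struct Status {
      bool ok;
      std::string message;
    };

    // Collapse a rich native status into the boolean that crosses JNI,
    // logging the detail on the native side, as SetDirection does above.
    bool ToJniResult(const Status& status, const char* op) {
      if (!status.ok) {
        std::cerr << op << " failed: " << status.message << '\n';
      }
      return status.ok;
    }
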
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h
new file mode 100644
index 0000000000..5b2d0121ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
+
+#include <jni.h>
+
+#include "api/rtp_transceiver_interface.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+RtpTransceiverInit JavaToNativeRtpTransceiverInit(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_init);
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiver(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver);
+
+// Takes ownership of the passed `j_transceiver` and stores it as a global
+// reference. Will call the Java transceiver's dispose() in the dtor.
+class JavaRtpTransceiverGlobalOwner {
+ public:
+ JavaRtpTransceiverGlobalOwner(JNIEnv* env,
+ const JavaRef<jobject>& j_transceiver);
+ JavaRtpTransceiverGlobalOwner(JavaRtpTransceiverGlobalOwner&& other);
+ ~JavaRtpTransceiverGlobalOwner();
+
+ private:
+ ScopedJavaGlobalRef<jobject> j_transceiver_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc
new file mode 100644
index 0000000000..c8b4345af4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/sdp_observer.h"
+
+#include <utility>
+
+#include "sdk/android/generated_peerconnection_jni/SdpObserver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/media_constraints.h"
+
+namespace webrtc {
+namespace jni {
+
+CreateSdpObserverJni::CreateSdpObserverJni(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_observer,
+ std::unique_ptr<MediaConstraints> constraints)
+ : j_observer_global_(env, j_observer),
+ constraints_(std::move(constraints)) {}
+
+CreateSdpObserverJni::~CreateSdpObserverJni() = default;
+
+void CreateSdpObserverJni::OnSuccess(SessionDescriptionInterface* desc) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ std::string sdp;
+ RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+ Java_SdpObserver_onCreateSuccess(
+ env, j_observer_global_,
+ NativeToJavaSessionDescription(env, sdp, desc->type()));
+ // OnSuccess transfers ownership of the description (there's a TODO to make
+ // it use unique_ptr...).
+ delete desc;
+}
+
+void CreateSdpObserverJni::OnFailure(webrtc::RTCError error) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_SdpObserver_onCreateFailure(env, j_observer_global_,
+ NativeToJavaString(env, error.message()));
+}
+
+SetLocalSdpObserverJni::SetLocalSdpObserverJni(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(env, j_observer) {}
+
+void SetLocalSdpObserverJni::OnSetLocalDescriptionComplete(RTCError error) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ if (error.ok()) {
+ Java_SdpObserver_onSetSuccess(env, j_observer_global_);
+ } else {
+ Java_SdpObserver_onSetFailure(env, j_observer_global_,
+ NativeToJavaString(env, error.message()));
+ }
+}
+
+SetRemoteSdpObserverJni::SetRemoteSdpObserverJni(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(env, j_observer) {}
+
+void SetRemoteSdpObserverJni::OnSetRemoteDescriptionComplete(RTCError error) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ if (error.ok()) {
+ Java_SdpObserver_onSetSuccess(env, j_observer_global_);
+ } else {
+ Java_SdpObserver_onSetFailure(env, j_observer_global_,
+ NativeToJavaString(env, error.message()));
+ }
+}
+
+} // namespace jni
+} // namespace webrtc
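
Every callback above begins with AttachCurrentThreadIfNeeded() because SDP completion can be signalled from native threads the JVM has never seen. A sketch of what such a helper boils down to, assuming Android's jni.h and a process-wide JavaVM* captured at JNI_OnLoad (the g_vm name is hypothetical):

    #include <jni.h>

    static JavaVM* g_vm = nullptr;  // assumed to be set in JNI_OnLoad

    JNIEnv* AttachIfNeeded() {
      JNIEnv* env = nullptr;
      // GetEnv succeeds when this thread is already attached to the JVM.
      if (g_vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) ==
          JNI_OK) {
        return env;
      }
      // Otherwise attach the thread (Android's jni.h takes JNIEnv**). A
      // production helper would also arrange a matching detach at thread exit.
      g_vm->AttachCurrentThread(&env, nullptr);
      return env;
    }
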
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h
new file mode 100644
index 0000000000..b33a3018c8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_
+
+#include <memory>
+#include <string>
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/session_description.h"
+#include "sdk/media_constraints.h"
+
+namespace webrtc {
+namespace jni {
+
+class CreateSdpObserverJni : public CreateSessionDescriptionObserver {
+ public:
+ CreateSdpObserverJni(JNIEnv* env,
+ const JavaRef<jobject>& j_observer,
+ std::unique_ptr<MediaConstraints> constraints);
+ ~CreateSdpObserverJni() override;
+
+ MediaConstraints* constraints() { return constraints_.get(); }
+
+ void OnSuccess(SessionDescriptionInterface* desc) override;
+ void OnFailure(RTCError error) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+ std::unique_ptr<MediaConstraints> constraints_;
+};
+
+class SetLocalSdpObserverJni : public SetLocalDescriptionObserverInterface {
+ public:
+ SetLocalSdpObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
+
+ ~SetLocalSdpObserverJni() override = default;
+
+ void OnSetLocalDescriptionComplete(RTCError error) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+class SetRemoteSdpObserverJni : public SetRemoteDescriptionObserverInterface {
+ public:
+ SetRemoteSdpObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
+
+ ~SetRemoteSdpObserverJni() override = default;
+
+ void OnSetRemoteDescriptionComplete(RTCError error) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc
new file mode 100644
index 0000000000..bbac721e51
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/session_description.h"
+
+#include <string>
+
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_peerconnection_jni/SessionDescription_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+std::unique_ptr<SessionDescriptionInterface> JavaToNativeSessionDescription(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_sdp) {
+ std::string std_type = JavaToStdString(
+ jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp));
+ std::string std_description =
+ JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp));
+ absl::optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
+ if (!sdp_type_maybe) {
+ RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type;
+ return nullptr;
+ }
+ return CreateSessionDescription(*sdp_type_maybe, std_description);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaSessionDescription(
+ JNIEnv* jni,
+ const std::string& sdp,
+ const std::string& type) {
+ return Java_SessionDescription_Constructor(
+ jni, Java_Type_fromCanonicalForm(jni, NativeToJavaString(jni, type)),
+ NativeToJavaString(jni, sdp));
+}
+
+} // namespace jni
+} // namespace webrtc
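
JavaToNativeSessionDescription validates the canonical type string before building anything and returns nullptr on unknown input instead of asserting. A minimal sketch of that parse-then-validate shape over the four canonical JSEP type strings, using std::optional in place of absl::optional:

    #include <optional>
    #include <string>

    enum class SdpKind { kOffer, kPrAnswer, kAnswer, kRollback };

    std::optional<SdpKind> ParseSdpKind(const std::string& type) {
      if (type == "offer") return SdpKind::kOffer;
      if (type == "pranswer") return SdpKind::kPrAnswer;
      if (type == "answer") return SdpKind::kAnswer;
      if (type == "rollback") return SdpKind::kRollback;
      return std::nullopt;  // the caller logs and returns nullptr, as above
    }
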
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h
new file mode 100644
index 0000000000..f0f49cb2ee
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_
+#define SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_
+
+#include <jni.h>
+#include <memory>
+#include <string>
+
+#include "api/jsep.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+std::unique_ptr<SessionDescriptionInterface> JavaToNativeSessionDescription(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_sdp);
+
+ScopedJavaLocalRef<jobject> NativeToJavaSessionDescription(
+ JNIEnv* jni,
+ const std::string& sdp,
+ const std::string& type);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc
new file mode 100644
index 0000000000..74ef3b8049
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h"
+#include "sdk/android/generated_peerconnection_jni/SSLCertificateVerifier_jni.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace webrtc {
+namespace jni {
+
+SSLCertificateVerifierWrapper::SSLCertificateVerifierWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& ssl_certificate_verifier)
+ : ssl_certificate_verifier_(jni, ssl_certificate_verifier) {}
+
+SSLCertificateVerifierWrapper::~SSLCertificateVerifierWrapper() = default;
+
+bool SSLCertificateVerifierWrapper::Verify(
+ const rtc::SSLCertificate& certificate) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ // Serialize the DER encoding of the cert into a jbyteArray.
+ rtc::Buffer cert_der_buffer;
+ certificate.ToDER(&cert_der_buffer);
+ ScopedJavaLocalRef<jbyteArray> jni_buffer(
+ jni, jni->NewByteArray(cert_der_buffer.size()));
+ jni->SetByteArrayRegion(
+ jni_buffer.obj(), 0, cert_der_buffer.size(),
+ reinterpret_cast<const jbyte*>(cert_der_buffer.data()));
+
+ return Java_SSLCertificateVerifier_verify(jni, ssl_certificate_verifier_,
+ jni_buffer);
+}
+
+} // namespace jni
+} // namespace webrtc
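
Verify() copies the DER bytes into a freshly allocated jbyteArray before calling into Java, so the Java layer never sees the native buffer. A minimal sketch of that copy using only standard JNI calls (env is assumed to be a valid, attached JNIEnv*):

    #include <jni.h>

    #include <cstdint>
    #include <vector>

    jbyteArray ToJavaByteArray(JNIEnv* env, const std::vector<uint8_t>& data) {
      jbyteArray array = env->NewByteArray(static_cast<jsize>(data.size()));
      if (array == nullptr) {
        return nullptr;  // an OutOfMemoryError is pending; the caller must bail
      }
      env->SetByteArrayRegion(array, 0, static_cast<jsize>(data.size()),
                              reinterpret_cast<const jbyte*>(data.data()));
      return array;
    }
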
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h
new file mode 100644
index 0000000000..8c883f445b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
+
+#include <jni.h>
+#include <vector>
+
+#include "rtc_base/ssl_certificate.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wrapper for the Java SSLCertificateVerifier class. Delegates method calls
+// through JNI and wraps the Java verifier inside SSLCertificateVerifierWrapper.
+class SSLCertificateVerifierWrapper : public rtc::SSLCertificateVerifier {
+ public:
+ SSLCertificateVerifierWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& ssl_certificate_verifier);
+ ~SSLCertificateVerifierWrapper() override;
+
+ bool Verify(const rtc::SSLCertificate& certificate) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> ssl_certificate_verifier_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc
new file mode 100644
index 0000000000..6d4a31df1c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/stats_observer.h"
+
+#include <vector>
+
+#include "sdk/android/generated_peerconnection_jni/StatsObserver_jni.h"
+#include "sdk/android/generated_peerconnection_jni/StatsReport_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> NativeToJavaStatsReportValue(
+ JNIEnv* env,
+ const rtc::scoped_refptr<StatsReport::Value>& value_ptr) {
+ // Should we use the '.name' enum value here instead of converting the
+ // name to a string?
+ return Java_Value_Constructor(
+ env, NativeToJavaString(env, value_ptr->display_name()),
+ NativeToJavaString(env, value_ptr->ToString()));
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaStatsReportValueArray(
+ JNIEnv* env,
+ const StatsReport::Values& value_map) {
+ // Ignore the keys and make an array out of the values.
+ std::vector<StatsReport::ValuePtr> values;
+ for (const auto& it : value_map)
+ values.push_back(it.second);
+ return NativeToJavaObjectArray(env, values,
+ org_webrtc_StatsReport_00024Value_clazz(env),
+ &NativeToJavaStatsReportValue);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaStatsReport(JNIEnv* env,
+ const StatsReport& report) {
+ return Java_StatsReport_Constructor(
+ env, NativeToJavaString(env, report.id()->ToString()),
+ NativeToJavaString(env, report.TypeToString()), report.timestamp(),
+ NativeToJavaStatsReportValueArray(env, report.values()));
+}
+
+} // namespace
+
+StatsObserverJni::StatsObserverJni(JNIEnv* jni,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(jni, j_observer) {}
+
+StatsObserverJni::~StatsObserverJni() = default;
+
+void StatsObserverJni::OnComplete(const StatsReports& reports) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobjectArray> j_reports =
+ NativeToJavaObjectArray(env, reports, org_webrtc_StatsReport_clazz(env),
+ [](JNIEnv* env, const StatsReport* report) {
+ return NativeToJavaStatsReport(env, *report);
+ });
+ Java_StatsObserver_onComplete(env, j_observer_global_, j_reports);
+}
+
+} // namespace jni
+} // namespace webrtc
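
NativeToJavaStatsReportValueArray drops the map keys and ships only the values, since each StatsReport::Value already carries its own display name. The flattening step in isolation, with plain standard-library types standing in and the JNI conversion elided:

    #include <map>
    #include <string>
    #include <vector>

    std::vector<std::string> ValuesOnly(
        const std::map<int, std::string>& value_map) {
      std::vector<std::string> values;
      values.reserve(value_map.size());
      for (const auto& entry : value_map) {
        values.push_back(entry.second);  // the key is redundant with the value
      }
      return values;
    }
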
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h
new file mode 100644
index 0000000000..0cfd43384b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Adapter that presents a Java StatsObserver as a C++ StatsObserver and
+// dispatches the callback from C++ back to Java.
+class StatsObserverJni : public StatsObserver {
+ public:
+ StatsObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer);
+ ~StatsObserverJni() override;
+
+ void OnComplete(const StatsReports& reports) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc
new file mode 100644
index 0000000000..5c93fcd7c0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/turn_customizer.h"
+#include "sdk/android/generated_peerconnection_jni/TurnCustomizer_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+TurnCustomizer* GetNativeTurnCustomizer(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_turn_customizer) {
+ if (IsNull(env, j_turn_customizer))
+ return nullptr;
+ return reinterpret_cast<webrtc::TurnCustomizer*>(
+ Java_TurnCustomizer_getNativeTurnCustomizer(env, j_turn_customizer));
+}
+
+static void JNI_TurnCustomizer_FreeTurnCustomizer(
+ JNIEnv* jni,
+ jlong j_turn_customizer_pointer) {
+ delete reinterpret_cast<TurnCustomizer*>(j_turn_customizer_pointer);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h
new file mode 100644
index 0000000000..359234fc76
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_
+#define SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_
+
+#include "api/turn_customizer.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+TurnCustomizer* GetNativeTurnCustomizer(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_turn_customizer);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc
new file mode 100644
index 0000000000..b955dbb1ef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/video.h"
+
+#include <jni.h>
+
+#include <memory>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/android_video_track_source.h"
+#include "sdk/android/src/jni/video_decoder_factory_wrapper.h"
+#include "sdk/android/src/jni/video_encoder_factory_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+
+VideoEncoderFactory* CreateVideoEncoderFactory(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder_factory) {
+ return IsNull(jni, j_encoder_factory)
+ ? nullptr
+ : new VideoEncoderFactoryWrapper(jni, j_encoder_factory);
+}
+
+VideoDecoderFactory* CreateVideoDecoderFactory(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_decoder_factory) {
+ return IsNull(jni, j_decoder_factory)
+ ? nullptr
+ : new VideoDecoderFactoryWrapper(jni, j_decoder_factory);
+}
+
+void* CreateVideoSource(JNIEnv* env,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ jboolean is_screencast,
+ jboolean align_timestamps) {
+ auto source = rtc::make_ref_counted<AndroidVideoTrackSource>(
+ signaling_thread, env, is_screencast, align_timestamps);
+ return source.release();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/video.h b/third_party/libwebrtc/sdk/android/src/jni/pc/video.h
new file mode 100644
index 0000000000..32bc6406a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/video.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_VIDEO_H_
+#define SDK_ANDROID_SRC_JNI_PC_VIDEO_H_
+
+#include <jni.h>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+class VideoEncoderFactory;
+class VideoDecoderFactory;
+} // namespace webrtc
+
+namespace webrtc {
+namespace jni {
+
+VideoEncoderFactory* CreateVideoEncoderFactory(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder_factory);
+
+VideoDecoderFactory* CreateVideoDecoderFactory(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_decoder_factory);
+
+void* CreateVideoSource(JNIEnv* env,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ jboolean is_screencast,
+ jboolean align_timestamps);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_VIDEO_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc
new file mode 100644
index 0000000000..1df8c7ade5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/scoped_java_ref_counted.h"
+
+#include "sdk/android/generated_base_jni/RefCounted_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+// static
+ScopedJavaRefCounted ScopedJavaRefCounted::Retain(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_object) {
+ Java_RefCounted_retain(jni, j_object);
+ CHECK_EXCEPTION(jni)
+ << "Unexpected java exception from java JavaRefCounted.retain()";
+ return Adopt(jni, j_object);
+}
+
+ScopedJavaRefCounted::~ScopedJavaRefCounted() {
+ if (!j_object_.is_null()) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ Java_RefCounted_release(jni, j_object_);
+ CHECK_EXCEPTION(jni)
+ << "Unexpected java exception from java RefCounted.release()";
+ }
+}
+
+} // namespace jni
+} // namespace webrtc
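
Adopt() takes over a reference the caller already owns, while Retain() bumps the count before taking it; both paths end in the same destructor-time release. A minimal sketch of the same move-only RAII shape over a hypothetical manually refcounted Resource, without the JNI plumbing:

    // Hypothetical refcounted resource standing in for the Java object.
    struct Resource {
      int refs = 1;
      void Retain() { ++refs; }
      void Release() {
        if (--refs == 0) delete this;
      }
    };

    class ScopedRef {
     public:
      // Adopt: take over a reference the caller already owns.
      static ScopedRef Adopt(Resource* r) { return ScopedRef(r); }
      // Retain: bump the count, then own the new reference.
      static ScopedRef Retain(Resource* r) {
        r->Retain();
        return ScopedRef(r);
      }
      ScopedRef(ScopedRef&& other) : r_(other.r_) { other.r_ = nullptr; }
      ScopedRef(const ScopedRef&) = delete;
      ScopedRef& operator=(const ScopedRef&) = delete;
      ~ScopedRef() {
        if (r_) r_->Release();
      }

     private:
      explicit ScopedRef(Resource* r) : r_(r) {}
      Resource* r_;
    };

Making the type move-only is what guarantees a single release: the moved-from object nulls its pointer and its destructor becomes a no-op.
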
diff --git a/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h
new file mode 100644
index 0000000000..3ea226259e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
+#define SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+// Holds a reference to a java object implementing the RefCounted interface, and
+// calls its release() method from the destructor.
+class ScopedJavaRefCounted {
+ public:
+ // Takes over the caller's reference.
+ static ScopedJavaRefCounted Adopt(JNIEnv* jni,
+ const JavaRef<jobject>& j_object) {
+ return ScopedJavaRefCounted(jni, j_object);
+ }
+
+ // Retains the Java object for the lifetime of this object.
+ static ScopedJavaRefCounted Retain(JNIEnv* jni,
+ const JavaRef<jobject>& j_object);
+ ScopedJavaRefCounted(ScopedJavaRefCounted&& other) = default;
+
+ ScopedJavaRefCounted(const ScopedJavaRefCounted& other) = delete;
+ ScopedJavaRefCounted& operator=(const ScopedJavaRefCounted&) = delete;
+
+ ~ScopedJavaRefCounted();
+
+ private:
+ // Adopts reference.
+ ScopedJavaRefCounted(JNIEnv* jni, const JavaRef<jobject>& j_object)
+ : j_object_(jni, j_object) {}
+
+ ScopedJavaGlobalRef<jobject> j_object_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc b/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc
new file mode 100644
index 0000000000..c0c5fd9d9f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "rtc_base/time_utils.h"
+#include "rtc_base/timestamp_aligner.h"
+#include "sdk/android/generated_video_jni/TimestampAligner_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_TimestampAligner_RtcTimeNanos(JNIEnv* env) {
+ return rtc::TimeNanos();
+}
+
+static jlong JNI_TimestampAligner_CreateTimestampAligner(JNIEnv* env) {
+ return jlongFromPointer(new rtc::TimestampAligner());
+}
+
+static void JNI_TimestampAligner_ReleaseTimestampAligner(
+ JNIEnv* env,
+ jlong timestamp_aligner) {
+ delete reinterpret_cast<rtc::TimestampAligner*>(timestamp_aligner);
+}
+
+static jlong JNI_TimestampAligner_TranslateTimestamp(
+ JNIEnv* env,
+ jlong timestamp_aligner,
+ jlong camera_time_ns) {
+ return reinterpret_cast<rtc::TimestampAligner*>(timestamp_aligner)
+ ->TranslateTimestamp(camera_time_ns / rtc::kNumNanosecsPerMicrosec,
+ rtc::TimeMicros()) *
+ rtc::kNumNanosecsPerMicrosec;
+}
+
+} // namespace jni
+} // namespace webrtc
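
TranslateTimestamp works in microseconds, so the shim above divides the camera timestamp by rtc::kNumNanosecsPerMicrosec on the way in and multiplies the result on the way out. The arithmetic in isolation, with the aligner reduced to a fixed offset (the real class estimates that offset from the stream of camera/system time pairs):

    #include <cstdint>

    constexpr int64_t kNanosPerMicro = 1000;

    // Stand-in for rtc::TimestampAligner::TranslateTimestamp: consumes and
    // produces microseconds.
    int64_t TranslateUs(int64_t camera_time_us, int64_t offset_us) {
      return camera_time_us + offset_us;
    }

    int64_t AlignNs(int64_t camera_time_ns, int64_t offset_us) {
      // ns -> us on entry, us -> ns on exit, mirroring the JNI shim above.
      return TranslateUs(camera_time_ns / kNanosPerMicro, offset_us) *
             kNanosPerMicro;
    }
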
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc
new file mode 100644
index 0000000000..a218a1d23f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_codec_info.h"
+
+#include "sdk/android/generated_video_jni/VideoCodecInfo_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni,
+ const JavaRef<jobject>& j_info) {
+ return SdpVideoFormat(
+ JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)),
+ JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info)));
+}
+
+ScopedJavaLocalRef<jobject> SdpVideoFormatToVideoCodecInfo(
+ JNIEnv* jni,
+ const SdpVideoFormat& format) {
+ ScopedJavaLocalRef<jobject> j_params =
+ NativeToJavaStringMap(jni, format.parameters);
+ return Java_VideoCodecInfo_Constructor(
+ jni, NativeToJavaString(jni, format.name), j_params);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h
new file mode 100644
index 0000000000..07b073086a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_
+
+#include <jni.h>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni,
+ const JavaRef<jobject>& info);
+ScopedJavaLocalRef<jobject> SdpVideoFormatToVideoCodecInfo(
+ JNIEnv* jni,
+ const SdpVideoFormat& format);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc
new file mode 100644
index 0000000000..e34d6d69e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_codec_status.h"
+
+#include "sdk/android/generated_video_jni/VideoCodecStatus_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+int32_t JavaToNativeVideoCodecStatus(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_video_codec_status) {
+ return Java_VideoCodecStatus_getNumber(env, j_video_codec_status);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h
new file mode 100644
index 0000000000..607bd46340
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_
+
+#include <jni.h>
+#include <stdint.h>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+int32_t JavaToNativeVideoCodecStatus(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_video_codec_status);
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc
new file mode 100644
index 0000000000..2d9240493a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_decoder_factory_wrapper.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_video_jni/VideoDecoderFactory_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+#include "sdk/android/src/jni/video_decoder_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+
+VideoDecoderFactoryWrapper::VideoDecoderFactoryWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& decoder_factory)
+ : decoder_factory_(jni, decoder_factory) {}
+VideoDecoderFactoryWrapper::~VideoDecoderFactoryWrapper() = default;
+
+std::unique_ptr<VideoDecoder> VideoDecoderFactoryWrapper::CreateVideoDecoder(
+ const SdpVideoFormat& format) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_codec_info =
+ SdpVideoFormatToVideoCodecInfo(jni, format);
+ ScopedJavaLocalRef<jobject> decoder = Java_VideoDecoderFactory_createDecoder(
+ jni, decoder_factory_, j_codec_info);
+ if (!decoder.obj())
+ return nullptr;
+ return JavaToNativeVideoDecoder(jni, decoder);
+}
+
+std::vector<SdpVideoFormat> VideoDecoderFactoryWrapper::GetSupportedFormats()
+ const {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ return JavaToNativeVector<SdpVideoFormat>(
+ env, Java_VideoDecoderFactory_getSupportedCodecs(env, decoder_factory_),
+ &VideoCodecInfoToSdpVideoFormat);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h
new file mode 100644
index 0000000000..2122fdc008
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_
+
+#include <jni.h>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wrapper for Java VideoDecoderFactory class. Delegates method calls through
+// JNI and wraps the decoder inside VideoDecoderWrapper.
+class VideoDecoderFactoryWrapper : public VideoDecoderFactory {
+ public:
+ VideoDecoderFactoryWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& decoder_factory);
+ ~VideoDecoderFactoryWrapper() override;
+
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> decoder_factory_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc
new file mode 100644
index 0000000000..a678280f69
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "api/video_codecs/video_decoder_software_fallback_wrapper.h"
+#include "sdk/android/generated_video_jni/VideoDecoderFallback_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_decoder_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_VideoDecoderFallback_CreateDecoder(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_fallback_decoder,
+ const JavaParamRef<jobject>& j_primary_decoder) {
+ std::unique_ptr<VideoDecoder> fallback_decoder =
+ JavaToNativeVideoDecoder(jni, j_fallback_decoder);
+ std::unique_ptr<VideoDecoder> primary_decoder =
+ JavaToNativeVideoDecoder(jni, j_primary_decoder);
+
+ VideoDecoder* nativeWrapper =
+ CreateVideoDecoderSoftwareFallbackWrapper(std::move(fallback_decoder),
+ std::move(primary_decoder))
+ .release();
+
+ return jlongFromPointer(nativeWrapper);
+}
+
+} // namespace jni
+} // namespace webrtc
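
CreateVideoDecoderSoftwareFallbackWrapper composes the two decoders into a single object that prefers the primary and switches to the fallback on hard errors. A much-simplified sketch of that delegation shape with a hypothetical Decoder interface; the real wrapper carries considerably more state (init status, codec settings, error codes):

    #include <cstddef>
    #include <cstdint>
    #include <memory>

    struct Decoder {
      virtual ~Decoder() = default;
      virtual bool Decode(const uint8_t* data, size_t size) = 0;
    };

    class FallbackDecoder : public Decoder {
     public:
      FallbackDecoder(std::unique_ptr<Decoder> fallback,
                      std::unique_ptr<Decoder> primary)
          : fallback_(std::move(fallback)), primary_(std::move(primary)) {}

      bool Decode(const uint8_t* data, size_t size) override {
        // Stay on the primary until it fails, then stick with the fallback.
        if (!using_fallback_ && primary_->Decode(data, size)) {
          return true;
        }
        using_fallback_ = true;
        return fallback_->Decode(data, size);
      }

     private:
      std::unique_ptr<Decoder> fallback_;
      std::unique_ptr<Decoder> primary_;
      bool using_fallback_ = false;
    };
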
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc
new file mode 100644
index 0000000000..328f8d8d4b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc
@@ -0,0 +1,273 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_decoder_wrapper.h"
+
+#include "api/video/render_resolution.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/utility/vp8_header_parser.h"
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/VideoDecoderWrapper_jni.h"
+#include "sdk/android/generated_video_jni/VideoDecoder_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/encoded_image.h"
+#include "sdk/android/src/jni/video_codec_status.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+// RTP timestamps are 90 kHz.
+const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec;
+
+template <typename Dst, typename Src>
+inline absl::optional<Dst> cast_optional(const absl::optional<Src>& value) {
+ return value ? absl::optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
+ : absl::nullopt;
+}
+} // namespace
+
+VideoDecoderWrapper::VideoDecoderWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& decoder)
+ : decoder_(jni, decoder),
+ implementation_name_(JavaToStdString(
+ jni,
+ Java_VideoDecoder_getImplementationName(jni, decoder))),
+ initialized_(false),
+ qp_parsing_enabled_(true) // QP parsing starts enabled; we disable it
+ // if the decoder provides QP values itself.
+{
+ decoder_thread_checker_.Detach();
+}
+
+VideoDecoderWrapper::~VideoDecoderWrapper() = default;
+
+bool VideoDecoderWrapper::Configure(const Settings& settings) {
+ RTC_DCHECK_RUN_ON(&decoder_thread_checker_);
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ decoder_settings_ = settings;
+ return ConfigureInternal(jni);
+}
+
+bool VideoDecoderWrapper::ConfigureInternal(JNIEnv* jni) {
+ RenderResolution resolution = decoder_settings_.max_render_resolution();
+ ScopedJavaLocalRef<jobject> settings =
+ Java_Settings_Constructor(jni, decoder_settings_.number_of_cores(),
+ resolution.Width(), resolution.Height());
+
+ ScopedJavaLocalRef<jobject> callback =
+ Java_VideoDecoderWrapper_createDecoderCallback(jni,
+ jlongFromPointer(this));
+
+ int32_t status = JavaToNativeVideoCodecStatus(
+ jni, Java_VideoDecoder_initDecode(jni, decoder_, settings, callback));
+ RTC_LOG(LS_INFO) << "initDecode: " << status;
+ if (status == WEBRTC_VIDEO_CODEC_OK) {
+ initialized_ = true;
+ }
+
+ // The decoder was reinitialized so re-enable the QP parsing in case it stops
+ // providing QP values.
+ qp_parsing_enabled_ = true;
+
+ return status == WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VideoDecoderWrapper::Decode(
+ const EncodedImage& image_param,
+ bool missing_frames,
+ int64_t render_time_ms) {
+ RTC_DCHECK_RUN_ON(&decoder_thread_checker_);
+ if (!initialized_) {
+ // Most likely initializing the codec failed.
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ // Make a mutable copy so we can modify the timestamp.
+ EncodedImage input_image(image_param);
+ // We use the RTP timestamp for capture time because capture_time_ms_ is always 0.
+ input_image.capture_time_ms_ =
+ input_image.Timestamp() / kNumRtpTicksPerMillisec;
+
+ FrameExtraInfo frame_extra_info;
+ frame_extra_info.timestamp_ns =
+ input_image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec;
+ frame_extra_info.timestamp_rtp = input_image.Timestamp();
+ frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_;
+ frame_extra_info.qp =
+ qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ frame_extra_infos_.push_back(frame_extra_info);
+ }
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> jinput_image =
+ NativeToJavaEncodedImage(env, input_image);
+ ScopedJavaLocalRef<jobject> decode_info;
+ ScopedJavaLocalRef<jobject> ret =
+ Java_VideoDecoder_decode(env, decoder_, jinput_image, decode_info);
+ return HandleReturnCode(env, ret, "decode");
+}
+
+int32_t VideoDecoderWrapper::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ RTC_DCHECK_RUNS_SERIALIZED(&callback_race_checker_);
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VideoDecoderWrapper::Release() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ int32_t status = JavaToNativeVideoCodecStatus(
+ jni, Java_VideoDecoder_release(jni, decoder_));
+ RTC_LOG(LS_INFO) << "release: " << status;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ frame_extra_infos_.clear();
+ }
+ initialized_ = false;
+ // It is allowed to reinitialize the codec on a different thread.
+ decoder_thread_checker_.Detach();
+ return status;
+}
+
+const char* VideoDecoderWrapper::ImplementationName() const {
+ return implementation_name_.c_str();
+}
+
+void VideoDecoderWrapper::OnDecodedFrame(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_frame,
+ const JavaRef<jobject>& j_decode_time_ms,
+ const JavaRef<jobject>& j_qp) {
+ RTC_DCHECK_RUNS_SERIALIZED(&callback_race_checker_);
+ const int64_t timestamp_ns = GetJavaVideoFrameTimestampNs(env, j_frame);
+
+ FrameExtraInfo frame_extra_info;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+
+ do {
+ if (frame_extra_infos_.empty()) {
+ RTC_LOG(LS_WARNING)
+ << "Java decoder produced an unexpected frame: " << timestamp_ns;
+ return;
+ }
+
+ frame_extra_info = frame_extra_infos_.front();
+ frame_extra_infos_.pop_front();
+ // The decoder might drop frames, so iterate through the queue until we
+ // find a matching timestamp.
+ } while (frame_extra_info.timestamp_ns != timestamp_ns);
+ }
+
+ VideoFrame frame =
+ JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp);
+ frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp);
+
+ absl::optional<int32_t> decoding_time_ms =
+ JavaToNativeOptionalInt(env, j_decode_time_ms);
+
+ absl::optional<uint8_t> decoder_qp =
+ cast_optional<uint8_t, int32_t>(JavaToNativeOptionalInt(env, j_qp));
+  // If the decoder provides QP values itself, there is no need to parse the
+  // bitstream; keep QP parsing enabled only while it does not.
+ qp_parsing_enabled_ = !decoder_qp.has_value();
+ callback_->Decoded(frame, decoding_time_ms,
+ decoder_qp ? decoder_qp : frame_extra_info.qp);
+}
+
+VideoDecoderWrapper::FrameExtraInfo::FrameExtraInfo() = default;
+VideoDecoderWrapper::FrameExtraInfo::FrameExtraInfo(const FrameExtraInfo&) =
+ default;
+VideoDecoderWrapper::FrameExtraInfo::~FrameExtraInfo() = default;
+
+int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni,
+ const JavaRef<jobject>& j_value,
+ const char* method_name) {
+ int32_t value = JavaToNativeVideoCodecStatus(jni, j_value);
+ if (value >= 0) { // OK or NO_OUTPUT
+ return value;
+ }
+
+ RTC_LOG(LS_WARNING) << method_name << ": " << value;
+ if (value == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE ||
+ value == WEBRTC_VIDEO_CODEC_UNINITIALIZED) { // Critical error.
+ RTC_LOG(LS_WARNING) << "Java decoder requested software fallback.";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ // Try resetting the codec.
+ if (Release() == WEBRTC_VIDEO_CODEC_OK && ConfigureInternal(jni)) {
+ RTC_LOG(LS_WARNING) << "Reset Java decoder.";
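+    // Returning ERROR rather than a fallback code fails only the current
+    // call; the freshly reset decoder stays in use.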
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ RTC_LOG(LS_WARNING) << "Unable to reset Java decoder.";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+}
+
+absl::optional<uint8_t> VideoDecoderWrapper::ParseQP(
+ const EncodedImage& input_image) {
+ if (input_image.qp_ != -1) {
+ return input_image.qp_;
+ }
+
+ absl::optional<uint8_t> qp;
+ switch (decoder_settings_.codec_type()) {
+ case kVideoCodecVP8: {
+ int qp_int;
+ if (vp8::GetQp(input_image.data(), input_image.size(), &qp_int)) {
+ qp = qp_int;
+ }
+ break;
+ }
+ case kVideoCodecVP9: {
+ int qp_int;
+ if (vp9::GetQp(input_image.data(), input_image.size(), &qp_int)) {
+ qp = qp_int;
+ }
+ break;
+ }
+ case kVideoCodecH264: {
+ h264_bitstream_parser_.ParseBitstream(input_image);
+ qp = h264_bitstream_parser_.GetLastSliceQp();
+ break;
+ }
+ default:
+ break; // Default is to not provide QP.
+ }
+ return qp;
+}
+
+std::unique_ptr<VideoDecoder> JavaToNativeVideoDecoder(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_decoder) {
+ const jlong native_decoder =
+ Java_VideoDecoder_createNativeVideoDecoder(jni, j_decoder);
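+  // createNativeVideoDecoder() returns a pointer when the Java object merely
+  // wraps a native decoder, and 0 when the decoder is implemented in Java
+  // and therefore needs to be wrapped.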
+ VideoDecoder* decoder;
+ if (native_decoder == 0) {
+ decoder = new VideoDecoderWrapper(jni, j_decoder);
+ } else {
+ decoder = reinterpret_cast<VideoDecoder*>(native_decoder);
+ }
+ return std::unique_ptr<VideoDecoder>(decoder);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h
new file mode 100644
index 0000000000..49d0fbf048
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
+
+#include <jni.h>
+
+#include <atomic>
+#include <deque>
+
+#include "api/sequence_checker.h"
+#include "api/video_codecs/video_decoder.h"
+#include "common_video/h264/h264_bitstream_parser.h"
+#include "rtc_base/race_checker.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wraps a Java decoder and delegates all calls to it.
+class VideoDecoderWrapper : public VideoDecoder {
+ public:
+ VideoDecoderWrapper(JNIEnv* jni, const JavaRef<jobject>& decoder);
+ ~VideoDecoderWrapper() override;
+
+ bool Configure(const Settings& settings) override;
+
+ int32_t Decode(const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) override;
+
+ int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) override;
+
+  // TODO(sakal): This is not always called on the correct thread. It is
+  // called from the VCMGenericDecoder destructor, which runs on a different
+  // thread, but the call is still safe and synchronous.
+ int32_t Release() override RTC_NO_THREAD_SAFETY_ANALYSIS;
+
+ const char* ImplementationName() const override;
+
+  // Wraps the frame in an AndroidVideoBuffer and passes it to the callback.
+ void OnDecodedFrame(JNIEnv* env,
+ const JavaRef<jobject>& j_frame,
+ const JavaRef<jobject>& j_decode_time_ms,
+ const JavaRef<jobject>& j_qp);
+
+ private:
+ struct FrameExtraInfo {
+ int64_t timestamp_ns; // Used as an identifier of the frame.
+
+ uint32_t timestamp_rtp;
+ int64_t timestamp_ntp;
+ absl::optional<uint8_t> qp;
+
+ FrameExtraInfo();
+ FrameExtraInfo(const FrameExtraInfo&);
+ ~FrameExtraInfo();
+ };
+
+ bool ConfigureInternal(JNIEnv* jni) RTC_RUN_ON(decoder_thread_checker_);
+
+ // Takes Java VideoCodecStatus, handles it and returns WEBRTC_VIDEO_CODEC_*
+ // status code.
+ int32_t HandleReturnCode(JNIEnv* jni,
+ const JavaRef<jobject>& j_value,
+ const char* method_name)
+ RTC_RUN_ON(decoder_thread_checker_);
+
+ absl::optional<uint8_t> ParseQP(const EncodedImage& input_image)
+ RTC_RUN_ON(decoder_thread_checker_);
+
+ const ScopedJavaGlobalRef<jobject> decoder_;
+ const std::string implementation_name_;
+
+ SequenceChecker decoder_thread_checker_;
+ // Callbacks must be executed sequentially on an arbitrary thread. We do not
+ // own this thread so a thread checker cannot be used.
+ rtc::RaceChecker callback_race_checker_;
+
+ // Initialized on Configure and immutable after that.
+ VideoDecoder::Settings decoder_settings_
+ RTC_GUARDED_BY(decoder_thread_checker_);
+
+ bool initialized_ RTC_GUARDED_BY(decoder_thread_checker_);
+ H264BitstreamParser h264_bitstream_parser_
+ RTC_GUARDED_BY(decoder_thread_checker_);
+
+ DecodedImageCallback* callback_ RTC_GUARDED_BY(callback_race_checker_);
+
+ // Accessed both on the decoder thread and the callback thread.
+ std::atomic<bool> qp_parsing_enabled_;
+ Mutex frame_extra_infos_lock_;
+ std::deque<FrameExtraInfo> frame_extra_infos_
+ RTC_GUARDED_BY(frame_extra_infos_lock_);
+};
+
+/* If the j_decoder is a wrapped native decoder, unwrap it. If it is not,
+ * wrap it in a VideoDecoderWrapper.
+ */
+std::unique_ptr<VideoDecoder> JavaToNativeVideoDecoder(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_decoder);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc
new file mode 100644
index 0000000000..7df129b360
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_encoder_factory_wrapper.h"
+
+#include "api/video/render_resolution.h"
+#include "api/video_codecs/video_encoder.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_video_jni/VideoEncoderFactory_jni.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+namespace {
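+// Adapts a Java VideoEncoderSelector to the native EncoderSelectorInterface:
+// each callback is forwarded through JNI and the returned VideoCodecInfo, if
+// any, is converted back to an SdpVideoFormat.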
+class VideoEncoderSelectorWrapper
+ : public VideoEncoderFactory::EncoderSelectorInterface {
+ public:
+ VideoEncoderSelectorWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& encoder_selector)
+ : encoder_selector_(jni, encoder_selector) {}
+
+ void OnCurrentEncoder(const SdpVideoFormat& format) override {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_codec_info =
+ SdpVideoFormatToVideoCodecInfo(jni, format);
+ Java_VideoEncoderSelector_onCurrentEncoder(jni, encoder_selector_,
+ j_codec_info);
+ }
+
+ absl::optional<SdpVideoFormat> OnAvailableBitrate(
+ const DataRate& rate) override {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> codec_info =
+ Java_VideoEncoderSelector_onAvailableBitrate(jni, encoder_selector_,
+ rate.kbps<int>());
+ if (codec_info.is_null()) {
+ return absl::nullopt;
+ }
+ return VideoCodecInfoToSdpVideoFormat(jni, codec_info);
+ }
+
+ absl::optional<SdpVideoFormat> OnResolutionChange(
+ const RenderResolution& resolution) override {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> codec_info =
+ Java_VideoEncoderSelector_onResolutionChange(
+ jni, encoder_selector_, resolution.Width(), resolution.Height());
+ if (codec_info.is_null()) {
+ return absl::nullopt;
+ }
+ return VideoCodecInfoToSdpVideoFormat(jni, codec_info);
+ }
+
+ absl::optional<SdpVideoFormat> OnEncoderBroken() override {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> codec_info =
+ Java_VideoEncoderSelector_onEncoderBroken(jni, encoder_selector_);
+ if (codec_info.is_null()) {
+ return absl::nullopt;
+ }
+ return VideoCodecInfoToSdpVideoFormat(jni, codec_info);
+ }
+
+ private:
+ const ScopedJavaGlobalRef<jobject> encoder_selector_;
+};
+
+} // namespace
+
+VideoEncoderFactoryWrapper::VideoEncoderFactoryWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& encoder_factory)
+ : encoder_factory_(jni, encoder_factory) {
+ const ScopedJavaLocalRef<jobjectArray> j_supported_codecs =
+ Java_VideoEncoderFactory_getSupportedCodecs(jni, encoder_factory);
+ supported_formats_ = JavaToNativeVector<SdpVideoFormat>(
+ jni, j_supported_codecs, &VideoCodecInfoToSdpVideoFormat);
+ const ScopedJavaLocalRef<jobjectArray> j_implementations =
+ Java_VideoEncoderFactory_getImplementations(jni, encoder_factory);
+ implementations_ = JavaToNativeVector<SdpVideoFormat>(
+ jni, j_implementations, &VideoCodecInfoToSdpVideoFormat);
+}
+VideoEncoderFactoryWrapper::~VideoEncoderFactoryWrapper() = default;
+
+std::unique_ptr<VideoEncoder> VideoEncoderFactoryWrapper::CreateVideoEncoder(
+ const SdpVideoFormat& format) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_codec_info =
+ SdpVideoFormatToVideoCodecInfo(jni, format);
+ ScopedJavaLocalRef<jobject> encoder = Java_VideoEncoderFactory_createEncoder(
+ jni, encoder_factory_, j_codec_info);
+ if (!encoder.obj())
+ return nullptr;
+ return JavaToNativeVideoEncoder(jni, encoder);
+}
+
+std::vector<SdpVideoFormat> VideoEncoderFactoryWrapper::GetSupportedFormats()
+ const {
+ return supported_formats_;
+}
+
+std::vector<SdpVideoFormat> VideoEncoderFactoryWrapper::GetImplementations()
+ const {
+ return implementations_;
+}
+
+std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>
+VideoEncoderFactoryWrapper::GetEncoderSelector() const {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> selector =
+ Java_VideoEncoderFactory_getEncoderSelector(jni, encoder_factory_);
+ if (selector.is_null()) {
+ return nullptr;
+ }
+
+ return std::make_unique<VideoEncoderSelectorWrapper>(jni, selector);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h
new file mode 100644
index 0000000000..2be6b1b33f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_
+
+#include <jni.h>
+#include <vector>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wrapper for Java VideoEncoderFactory class. Delegates method calls through
+// JNI and wraps the encoder inside VideoEncoderWrapper.
+class VideoEncoderFactoryWrapper : public VideoEncoderFactory {
+ public:
+ VideoEncoderFactoryWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& encoder_factory);
+ ~VideoEncoderFactoryWrapper() override;
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override;
+
+ // Returns a list of supported codecs in order of preference.
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+
+ std::vector<SdpVideoFormat> GetImplementations() const override;
+
+ std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> encoder_factory_;
+ std::vector<SdpVideoFormat> supported_formats_;
+ std::vector<SdpVideoFormat> implementations_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc
new file mode 100644
index 0000000000..d581572abf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "api/video_codecs/video_encoder_software_fallback_wrapper.h"
+#include "sdk/android/generated_video_jni/VideoEncoderFallback_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_VideoEncoderFallback_CreateEncoder(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_fallback_encoder,
+ const JavaParamRef<jobject>& j_primary_encoder) {
+ std::unique_ptr<VideoEncoder> fallback_encoder =
+ JavaToNativeVideoEncoder(jni, j_fallback_encoder);
+ std::unique_ptr<VideoEncoder> primary_encoder =
+ JavaToNativeVideoEncoder(jni, j_primary_encoder);
+
+ VideoEncoder* nativeWrapper =
+ CreateVideoEncoderSoftwareFallbackWrapper(std::move(fallback_encoder),
+ std::move(primary_encoder))
+ .release();
+
+ return jlongFromPointer(nativeWrapper);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc
new file mode 100644
index 0000000000..c23ab1e485
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc
@@ -0,0 +1,490 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+
+#include <utility>
+
+#include "common_video/h264/h264_common.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "modules/video_coding/svc/scalable_video_controller_no_layering.h"
+#include "modules/video_coding/utility/vp8_header_parser.h"
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/VideoEncoderWrapper_jni.h"
+#include "sdk/android/generated_video_jni/VideoEncoder_jni.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/encoded_image.h"
+#include "sdk/android/src/jni/video_codec_status.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder)
+ : encoder_(jni, j_encoder), int_array_class_(GetClass(jni, "[I")) {
+ initialized_ = false;
+ num_resets_ = 0;
+
+ // Fetch and update encoder info.
+ UpdateEncoderInfo(jni);
+}
+VideoEncoderWrapper::~VideoEncoderWrapper() = default;
+
+int VideoEncoderWrapper::InitEncode(const VideoCodec* codec_settings,
+ const Settings& settings) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ codec_settings_ = *codec_settings;
+ capabilities_ = settings.capabilities;
+ number_of_cores_ = settings.number_of_cores;
+ num_resets_ = 0;
+
+ return InitEncodeInternal(jni);
+}
+
+int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) {
+ bool automatic_resize_on;
+ switch (codec_settings_.codecType) {
+ case kVideoCodecVP8:
+ automatic_resize_on = codec_settings_.VP8()->automaticResizeOn;
+ break;
+ case kVideoCodecVP9:
+ automatic_resize_on = codec_settings_.VP9()->automaticResizeOn;
+ gof_.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1);
+ gof_idx_ = 0;
+ break;
+ default:
+ automatic_resize_on = true;
+ }
+
+ RTC_DCHECK(capabilities_);
+ ScopedJavaLocalRef<jobject> capabilities =
+ Java_Capabilities_Constructor(jni, capabilities_->loss_notification);
+
+ ScopedJavaLocalRef<jobject> settings = Java_Settings_Constructor(
+ jni, number_of_cores_, codec_settings_.width, codec_settings_.height,
+ static_cast<int>(codec_settings_.startBitrate),
+ static_cast<int>(codec_settings_.maxFramerate),
+ static_cast<int>(codec_settings_.numberOfSimulcastStreams),
+ automatic_resize_on, capabilities);
+
+ ScopedJavaLocalRef<jobject> callback =
+ Java_VideoEncoderWrapper_createEncoderCallback(jni,
+ jlongFromPointer(this));
+
+ int32_t status = JavaToNativeVideoCodecStatus(
+ jni, Java_VideoEncoder_initEncode(jni, encoder_, settings, callback));
+ RTC_LOG(LS_INFO) << "initEncode: " << status;
+
+  // Some of the encoder's properties depend on the settings and may change
+  // after initialization.
+ UpdateEncoderInfo(jni);
+
+ if (status == WEBRTC_VIDEO_CODEC_OK) {
+ initialized_ = true;
+ }
+ return status;
+}
+
+void VideoEncoderWrapper::UpdateEncoderInfo(JNIEnv* jni) {
+ encoder_info_.supports_native_handle = true;
+
+ encoder_info_.implementation_name = JavaToStdString(
+ jni, Java_VideoEncoder_getImplementationName(jni, encoder_));
+
+ encoder_info_.is_hardware_accelerated =
+ Java_VideoEncoder_isHardwareEncoder(jni, encoder_);
+
+ encoder_info_.scaling_settings = GetScalingSettingsInternal(jni);
+
+ encoder_info_.resolution_bitrate_limits = JavaToNativeResolutionBitrateLimits(
+ jni, Java_VideoEncoder_getResolutionBitrateLimits(jni, encoder_));
+
+ EncoderInfo info = GetEncoderInfoInternal(jni);
+ encoder_info_.requested_resolution_alignment =
+ info.requested_resolution_alignment;
+ encoder_info_.apply_alignment_to_all_simulcast_layers =
+ info.apply_alignment_to_all_simulcast_layers;
+}
+
+int32_t VideoEncoderWrapper::RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) {
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VideoEncoderWrapper::Release() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ int32_t status = JavaToNativeVideoCodecStatus(
+ jni, Java_VideoEncoder_release(jni, encoder_));
+ RTC_LOG(LS_INFO) << "release: " << status;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ frame_extra_infos_.clear();
+ }
+ initialized_ = false;
+
+ return status;
+}
+
+int32_t VideoEncoderWrapper::Encode(
+ const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) {
+ if (!initialized_) {
+ // Most likely initializing the codec failed.
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ // Construct encode info.
+ ScopedJavaLocalRef<jobjectArray> j_frame_types =
+ NativeToJavaFrameTypeArray(jni, *frame_types);
+ ScopedJavaLocalRef<jobject> encode_info =
+ Java_EncodeInfo_Constructor(jni, j_frame_types);
+
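+  // Record the capture and RTP timestamps so that OnEncodedFrame(), which
+  // matches output frames by capture time, can restore the RTP timestamp on
+  // the encoded image.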
+ FrameExtraInfo info;
+ info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec;
+ info.timestamp_rtp = frame.timestamp();
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ frame_extra_infos_.push_back(info);
+ }
+
+ ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame);
+ ScopedJavaLocalRef<jobject> ret =
+ Java_VideoEncoder_encode(jni, encoder_, j_frame, encode_info);
+ ReleaseJavaVideoFrame(jni, j_frame);
+ return HandleReturnCode(jni, ret, "encode");
+}
+
+void VideoEncoderWrapper::SetRates(const RateControlParameters& rc_parameters) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ ScopedJavaLocalRef<jobject> j_rc_parameters =
+ ToJavaRateControlParameters(jni, rc_parameters);
+ ScopedJavaLocalRef<jobject> ret =
+ Java_VideoEncoder_setRates(jni, encoder_, j_rc_parameters);
+ HandleReturnCode(jni, ret, "setRates");
+}
+
+VideoEncoder::EncoderInfo VideoEncoderWrapper::GetEncoderInfo() const {
+ return encoder_info_;
+}
+
+VideoEncoderWrapper::ScalingSettings
+VideoEncoderWrapper::GetScalingSettingsInternal(JNIEnv* jni) const {
+ ScopedJavaLocalRef<jobject> j_scaling_settings =
+ Java_VideoEncoder_getScalingSettings(jni, encoder_);
+ bool isOn =
+ Java_VideoEncoderWrapper_getScalingSettingsOn(jni, j_scaling_settings);
+
+ if (!isOn)
+ return ScalingSettings::kOff;
+
+ absl::optional<int> low = JavaToNativeOptionalInt(
+ jni,
+ Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings));
+ absl::optional<int> high = JavaToNativeOptionalInt(
+ jni,
+ Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings));
+
+ if (low && high)
+ return ScalingSettings(*low, *high);
+
+ switch (codec_settings_.codecType) {
+ case kVideoCodecVP8: {
+ // Same as in vp8_impl.cc.
+ static const int kLowVp8QpThreshold = 29;
+ static const int kHighVp8QpThreshold = 95;
+ return ScalingSettings(low.value_or(kLowVp8QpThreshold),
+ high.value_or(kHighVp8QpThreshold));
+ }
+ case kVideoCodecVP9: {
+      // QP is obtained from the VP9 bitstream, so it corresponds to the
+      // bitstream range of [0, 255] and not the user-level range of [0, 63].
+ static const int kLowVp9QpThreshold = 96;
+ static const int kHighVp9QpThreshold = 185;
+
+ return VideoEncoder::ScalingSettings(kLowVp9QpThreshold,
+ kHighVp9QpThreshold);
+ }
+ case kVideoCodecH264: {
+ // Same as in h264_encoder_impl.cc.
+ static const int kLowH264QpThreshold = 24;
+ static const int kHighH264QpThreshold = 37;
+ return ScalingSettings(low.value_or(kLowH264QpThreshold),
+ high.value_or(kHighH264QpThreshold));
+ }
+ default:
+ return ScalingSettings::kOff;
+ }
+}
+
+VideoEncoder::EncoderInfo VideoEncoderWrapper::GetEncoderInfoInternal(
+ JNIEnv* jni) const {
+ ScopedJavaLocalRef<jobject> j_encoder_info =
+ Java_VideoEncoder_getEncoderInfo(jni, encoder_);
+
+ jint requested_resolution_alignment =
+ Java_EncoderInfo_getRequestedResolutionAlignment(jni, j_encoder_info);
+
+ jboolean apply_alignment_to_all_simulcast_layers =
+ Java_EncoderInfo_getApplyAlignmentToAllSimulcastLayers(jni,
+ j_encoder_info);
+
+ VideoEncoder::EncoderInfo info;
+ info.requested_resolution_alignment = requested_resolution_alignment;
+ info.apply_alignment_to_all_simulcast_layers =
+ apply_alignment_to_all_simulcast_layers;
+
+ return info;
+}
+
+void VideoEncoderWrapper::OnEncodedFrame(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoded_image) {
+ EncodedImage frame = JavaToNativeEncodedImage(jni, j_encoded_image);
+ int64_t capture_time_ns =
+ GetJavaEncodedImageCaptureTimeNs(jni, j_encoded_image);
+
+ // Encoded frames are delivered in the order received, but some of them
+ // may be dropped, so remove records of frames older than the current
+ // one.
+ //
+ // NOTE: if the current frame is associated with Encoder A, in the time
+ // since this frame was received, Encoder A could have been
+ // Release()'ed, Encoder B InitEncode()'ed (due to reuse of Encoder A),
+ // and frames received by Encoder B. Thus there may be frame_extra_infos
+ // entries that don't belong to us, and we need to be careful not to
+ // remove them. Removing only those entries older than the current frame
+ // provides this guarantee.
+ FrameExtraInfo frame_extra_info;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ while (!frame_extra_infos_.empty() &&
+ frame_extra_infos_.front().capture_time_ns < capture_time_ns) {
+ frame_extra_infos_.pop_front();
+ }
+ if (frame_extra_infos_.empty() ||
+ frame_extra_infos_.front().capture_time_ns != capture_time_ns) {
+ RTC_LOG(LS_WARNING)
+ << "Java encoder produced an unexpected frame with timestamp: "
+ << capture_time_ns;
+ return;
+ }
+ frame_extra_info = frame_extra_infos_.front();
+ frame_extra_infos_.pop_front();
+ }
+
+  // This is a bit subtle: (i) we make a copy of `frame` so that we can write
+  // to the metadata, and (ii) we should avoid using the .data() method
+  // (including implicit conversion to ArrayView) on the copy, since that
+  // would trigger a copy operation on the underlying CopyOnWriteBuffer.
+ EncodedImage frame_copy = frame;
+
+ frame_copy.SetTimestamp(frame_extra_info.timestamp_rtp);
+ frame_copy.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec;
+
+ if (frame_copy.qp_ < 0)
+ frame_copy.qp_ = ParseQp(frame);
+
+ CodecSpecificInfo info(ParseCodecSpecificInfo(frame));
+
+ callback_->OnEncodedImage(frame_copy, &info);
+}
+
+int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni,
+ const JavaRef<jobject>& j_value,
+ const char* method_name) {
+ int32_t value = JavaToNativeVideoCodecStatus(jni, j_value);
+ if (value >= 0) { // OK or NO_OUTPUT
+ return value;
+ }
+
+ RTC_LOG(LS_WARNING) << method_name << ": " << value;
+ if (value == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE ||
+ value == WEBRTC_VIDEO_CODEC_UNINITIALIZED) { // Critical error.
+ RTC_LOG(LS_WARNING) << "Java encoder requested software fallback.";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ // Try resetting the codec.
+ if (Release() == WEBRTC_VIDEO_CODEC_OK &&
+ InitEncodeInternal(jni) == WEBRTC_VIDEO_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "Reset Java encoder.";
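+    // As in the decoder wrapper, ERROR fails only the current call while the
+    // freshly reset encoder stays in use.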
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ RTC_LOG(LS_WARNING) << "Unable to reset Java encoder.";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+}
+
+int VideoEncoderWrapper::ParseQp(rtc::ArrayView<const uint8_t> buffer) {
+ int qp;
+ bool success;
+ switch (codec_settings_.codecType) {
+ case kVideoCodecVP8:
+ success = vp8::GetQp(buffer.data(), buffer.size(), &qp);
+ break;
+ case kVideoCodecVP9:
+ success = vp9::GetQp(buffer.data(), buffer.size(), &qp);
+ break;
+ case kVideoCodecH264:
+ h264_bitstream_parser_.ParseBitstream(buffer);
+ qp = h264_bitstream_parser_.GetLastSliceQp().value_or(-1);
+ success = (qp >= 0);
+ break;
+ default: // Default is to not provide QP.
+ success = false;
+ break;
+ }
+ return success ? qp : -1; // -1 means unknown QP.
+}
+
+CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo(
+ const EncodedImage& frame) {
+ const bool key_frame = frame._frameType == VideoFrameType::kVideoFrameKey;
+
+ CodecSpecificInfo info;
+ // For stream with scalability, NextFrameConfig should be called before
+ // encoding and used to configure encoder, then passed here e.g. via
+ // FrameExtraInfo structure. But while this encoder wrapper uses only trivial
+ // scalability, NextFrameConfig can be called here.
+ auto layer_frames = svc_controller_.NextFrameConfig(/*reset=*/key_frame);
+ RTC_DCHECK_EQ(layer_frames.size(), 1);
+ info.generic_frame_info = svc_controller_.OnEncodeDone(layer_frames[0]);
+ if (key_frame) {
+ info.template_structure = svc_controller_.DependencyStructure();
+ info.template_structure->resolutions = {
+ RenderResolution(frame._encodedWidth, frame._encodedHeight)};
+ }
+
+ info.codecType = codec_settings_.codecType;
+
+ switch (codec_settings_.codecType) {
+ case kVideoCodecVP8:
+ info.codecSpecific.VP8.nonReference = false;
+ info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
+ info.codecSpecific.VP8.layerSync = false;
+ info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
+ break;
+ case kVideoCodecVP9:
+ if (key_frame) {
+ gof_idx_ = 0;
+ }
+      info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
+      info.codecSpecific.VP9.flexible_mode = false;
+      info.codecSpecific.VP9.ss_data_available = key_frame;
+ info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.gof_idx =
+ static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+ info.codecSpecific.VP9.num_spatial_layers = 1;
+ info.codecSpecific.VP9.first_frame_in_picture = true;
+ info.codecSpecific.VP9.spatial_layer_resolution_present = false;
+ if (info.codecSpecific.VP9.ss_data_available) {
+ info.codecSpecific.VP9.spatial_layer_resolution_present = true;
+ info.codecSpecific.VP9.width[0] = frame._encodedWidth;
+ info.codecSpecific.VP9.height[0] = frame._encodedHeight;
+ info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
+ }
+ break;
+ default:
+ break;
+ }
+
+ return info;
+}
+
+ScopedJavaLocalRef<jobject> VideoEncoderWrapper::ToJavaBitrateAllocation(
+ JNIEnv* jni,
+ const VideoBitrateAllocation& allocation) {
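+  // Marshal the allocation into a Java int[kMaxSpatialLayers][], where each
+  // row holds the per-temporal-layer bitrates of one spatial layer.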
+ ScopedJavaLocalRef<jobjectArray> j_allocation_array(
+ jni, jni->NewObjectArray(kMaxSpatialLayers, int_array_class_.obj(),
+ nullptr /* initial */));
+ for (int spatial_i = 0; spatial_i < kMaxSpatialLayers; ++spatial_i) {
+ std::array<int32_t, kMaxTemporalStreams> spatial_layer;
+ for (int temporal_i = 0; temporal_i < kMaxTemporalStreams; ++temporal_i) {
+ spatial_layer[temporal_i] = allocation.GetBitrate(spatial_i, temporal_i);
+ }
+
+ ScopedJavaLocalRef<jintArray> j_array_spatial_layer =
+ NativeToJavaIntArray(jni, spatial_layer);
+ jni->SetObjectArrayElement(j_allocation_array.obj(), spatial_i,
+ j_array_spatial_layer.obj());
+ }
+ return Java_BitrateAllocation_Constructor(jni, j_allocation_array);
+}
+
+ScopedJavaLocalRef<jobject> VideoEncoderWrapper::ToJavaRateControlParameters(
+ JNIEnv* jni,
+ const VideoEncoder::RateControlParameters& rc_parameters) {
+ ScopedJavaLocalRef<jobject> j_bitrate_allocation =
+ ToJavaBitrateAllocation(jni, rc_parameters.bitrate);
+
+ return Java_RateControlParameters_Constructor(jni, j_bitrate_allocation,
+ rc_parameters.framerate_fps);
+}
+
+std::unique_ptr<VideoEncoder> JavaToNativeVideoEncoder(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder) {
+ const jlong native_encoder =
+ Java_VideoEncoder_createNativeVideoEncoder(jni, j_encoder);
+ VideoEncoder* encoder;
+ if (native_encoder == 0) {
+ encoder = new VideoEncoderWrapper(jni, j_encoder);
+ } else {
+ encoder = reinterpret_cast<VideoEncoder*>(native_encoder);
+ }
+ return std::unique_ptr<VideoEncoder>(encoder);
+}
+
+std::vector<VideoEncoder::ResolutionBitrateLimits>
+JavaToNativeResolutionBitrateLimits(
+ JNIEnv* jni,
+ const JavaRef<jobjectArray>& j_bitrate_limits_array) {
+ std::vector<VideoEncoder::ResolutionBitrateLimits> resolution_bitrate_limits;
+
+ const jsize array_length = jni->GetArrayLength(j_bitrate_limits_array.obj());
+ for (int i = 0; i < array_length; ++i) {
+ ScopedJavaLocalRef<jobject> j_bitrate_limits = ScopedJavaLocalRef<jobject>(
+ jni, jni->GetObjectArrayElement(j_bitrate_limits_array.obj(), i));
+
+ jint frame_size_pixels =
+ Java_ResolutionBitrateLimits_getFrameSizePixels(jni, j_bitrate_limits);
+ jint min_start_bitrate_bps =
+ Java_ResolutionBitrateLimits_getMinStartBitrateBps(jni,
+ j_bitrate_limits);
+ jint min_bitrate_bps =
+ Java_ResolutionBitrateLimits_getMinBitrateBps(jni, j_bitrate_limits);
+ jint max_bitrate_bps =
+ Java_ResolutionBitrateLimits_getMaxBitrateBps(jni, j_bitrate_limits);
+
+ resolution_bitrate_limits.push_back(VideoEncoder::ResolutionBitrateLimits(
+ frame_size_pixels, min_start_bitrate_bps, min_bitrate_bps,
+ max_bitrate_bps));
+ }
+
+ return resolution_bitrate_limits;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h
new file mode 100644
index 0000000000..5c5aab7588
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_
+
+#include <jni.h>
+
+#include <deque>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/video_codecs/video_encoder.h"
+#include "common_video/h264/h264_bitstream_parser.h"
+#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
+#include "modules/video_coding/svc/scalable_video_controller_no_layering.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wraps a Java encoder and delegates all calls to it.
+class VideoEncoderWrapper : public VideoEncoder {
+ public:
+ VideoEncoderWrapper(JNIEnv* jni, const JavaRef<jobject>& j_encoder);
+ ~VideoEncoderWrapper() override;
+
+ int32_t InitEncode(const VideoCodec* codec_settings,
+ const Settings& settings) override;
+
+ int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) override;
+
+ int32_t Release() override;
+
+ int32_t Encode(const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) override;
+
+ void SetRates(const RateControlParameters& rc_parameters) override;
+
+ EncoderInfo GetEncoderInfo() const override;
+
+ // Should only be called by JNI.
+ void OnEncodedFrame(JNIEnv* jni,
+ const JavaRef<jobject>& j_encoded_image);
+
+ private:
+ struct FrameExtraInfo {
+ int64_t capture_time_ns; // Used as an identifier of the frame.
+
+ uint32_t timestamp_rtp;
+ };
+
+ int32_t InitEncodeInternal(JNIEnv* jni);
+
+ // Takes Java VideoCodecStatus, handles it and returns WEBRTC_VIDEO_CODEC_*
+ // status code.
+ int32_t HandleReturnCode(JNIEnv* jni,
+ const JavaRef<jobject>& j_value,
+ const char* method_name);
+
+ int ParseQp(rtc::ArrayView<const uint8_t> buffer);
+
+ CodecSpecificInfo ParseCodecSpecificInfo(const EncodedImage& frame);
+
+ ScopedJavaLocalRef<jobject> ToJavaBitrateAllocation(
+ JNIEnv* jni,
+ const VideoBitrateAllocation& allocation);
+
+ ScopedJavaLocalRef<jobject> ToJavaRateControlParameters(
+ JNIEnv* jni,
+ const VideoEncoder::RateControlParameters& rc_parameters);
+
+ void UpdateEncoderInfo(JNIEnv* jni);
+
+ ScalingSettings GetScalingSettingsInternal(JNIEnv* jni) const;
+ std::vector<ResolutionBitrateLimits> GetResolutionBitrateLimits(
+ JNIEnv* jni) const;
+
+ VideoEncoder::EncoderInfo GetEncoderInfoInternal(JNIEnv* jni) const;
+
+ const ScopedJavaGlobalRef<jobject> encoder_;
+ const ScopedJavaGlobalRef<jclass> int_array_class_;
+
+ // Modified both on the encoder thread and the callback thread.
+ Mutex frame_extra_infos_lock_;
+ std::deque<FrameExtraInfo> frame_extra_infos_
+ RTC_GUARDED_BY(frame_extra_infos_lock_);
+ EncodedImageCallback* callback_;
+ bool initialized_;
+ int num_resets_;
+ absl::optional<VideoEncoder::Capabilities> capabilities_;
+ int number_of_cores_;
+ VideoCodec codec_settings_;
+ EncoderInfo encoder_info_;
+ H264BitstreamParser h264_bitstream_parser_;
+
+ // Fills frame dependencies in codec-agnostic format.
+ ScalableVideoControllerNoLayering svc_controller_;
+ // VP9 variables to populate codec specific structure.
+ GofInfoVP9 gof_; // Contains each frame's temporal information for
+ // non-flexible VP9 mode.
+ size_t gof_idx_;
+};
+
+/* If the j_encoder is a wrapped native encoder, unwrap it. If it is not,
+ * wrap it in a VideoEncoderWrapper.
+ */
+std::unique_ptr<VideoEncoder> JavaToNativeVideoEncoder(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder);
+
+bool IsHardwareVideoEncoder(JNIEnv* jni, const JavaRef<jobject>& j_encoder);
+
+std::vector<VideoEncoder::ResolutionBitrateLimits>
+JavaToNativeResolutionBitrateLimits(
+ JNIEnv* jni,
+ const JavaRef<jobjectArray>& j_bitrate_limits_array);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc b/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc
new file mode 100644
index 0000000000..121b34fa94
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc
@@ -0,0 +1,319 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_frame.h"
+
+#include "api/scoped_refptr.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/VideoFrame_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+class AndroidVideoBuffer : public VideoFrameBuffer {
+ public:
+ // Creates a native VideoFrameBuffer from a Java VideoFrame.Buffer.
+ static rtc::scoped_refptr<AndroidVideoBuffer> Create(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ // Similar to the Create() above, but adopts and takes ownership of the Java
+ // VideoFrame.Buffer. I.e. retain() will not be called, but release() will be
+ // called when the returned AndroidVideoBuffer is destroyed.
+ static rtc::scoped_refptr<AndroidVideoBuffer> Adopt(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ ~AndroidVideoBuffer() override;
+
+ const ScopedJavaGlobalRef<jobject>& video_frame_buffer() const;
+
+ // Crops a region defined by `crop_x`, `crop_y`, `crop_width` and
+ // `crop_height`. Scales it to size `scale_width` x `scale_height`.
+ rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int crop_x,
+ int crop_y,
+ int crop_width,
+ int crop_height,
+ int scale_width,
+ int scale_height) override;
+
+ protected:
+ // Should not be called directly. Adopts the Java VideoFrame.Buffer. Use
+ // Create() or Adopt() instead for clarity.
+ AndroidVideoBuffer(JNIEnv* jni, const JavaRef<jobject>& j_video_frame_buffer);
+
+ private:
+ Type type() const override;
+ int width() const override;
+ int height() const override;
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+ const int width_;
+ const int height_;
+ // Holds a VideoFrame.Buffer.
+ const ScopedJavaGlobalRef<jobject> j_video_frame_buffer_;
+};
+
+class AndroidVideoI420Buffer : public I420BufferInterface {
+ public:
+ // Creates a native VideoFrameBuffer from a Java VideoFrame.I420Buffer.
+ static rtc::scoped_refptr<AndroidVideoI420Buffer> Create(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ // Adopts and takes ownership of the Java VideoFrame.Buffer. I.e. retain()
+ // will not be called, but release() will be called when the returned
+ // AndroidVideoBuffer is destroyed.
+ static rtc::scoped_refptr<AndroidVideoI420Buffer> Adopt(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ protected:
+ // Should not be called directly. Adopts the buffer. Use Adopt() instead for
+ // clarity.
+ AndroidVideoI420Buffer(JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer);
+ ~AndroidVideoI420Buffer() override;
+
+ private:
+ const uint8_t* DataY() const override { return data_y_; }
+ const uint8_t* DataU() const override { return data_u_; }
+ const uint8_t* DataV() const override { return data_v_; }
+
+ int StrideY() const override { return stride_y_; }
+ int StrideU() const override { return stride_u_; }
+ int StrideV() const override { return stride_v_; }
+
+ int width() const override { return width_; }
+ int height() const override { return height_; }
+
+ const int width_;
+ const int height_;
+ // Holds a VideoFrame.I420Buffer.
+ const ScopedJavaGlobalRef<jobject> j_video_frame_buffer_;
+
+ const uint8_t* data_y_;
+ const uint8_t* data_u_;
+ const uint8_t* data_v_;
+ int stride_y_;
+ int stride_u_;
+ int stride_v_;
+};
+
+rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Create(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ Java_Buffer_retain(jni, j_video_frame_buffer);
+ return AndroidVideoI420Buffer::Adopt(jni, width, height,
+ j_video_frame_buffer);
+}
+
+rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Adopt(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ RTC_DCHECK_EQ(
+ static_cast<Type>(Java_Buffer_getBufferType(jni, j_video_frame_buffer)),
+ Type::kI420);
+ return rtc::make_ref_counted<AndroidVideoI420Buffer>(jni, width, height,
+ j_video_frame_buffer);
+}
+
+AndroidVideoI420Buffer::AndroidVideoI420Buffer(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer)
+ : width_(width),
+ height_(height),
+ j_video_frame_buffer_(jni, j_video_frame_buffer) {
+ ScopedJavaLocalRef<jobject> j_data_y =
+ Java_I420Buffer_getDataY(jni, j_video_frame_buffer);
+ ScopedJavaLocalRef<jobject> j_data_u =
+ Java_I420Buffer_getDataU(jni, j_video_frame_buffer);
+ ScopedJavaLocalRef<jobject> j_data_v =
+ Java_I420Buffer_getDataV(jni, j_video_frame_buffer);
+
+ data_y_ =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_y.obj()));
+ data_u_ =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_u.obj()));
+ data_v_ =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_v.obj()));
+
+ stride_y_ = Java_I420Buffer_getStrideY(jni, j_video_frame_buffer);
+ stride_u_ = Java_I420Buffer_getStrideU(jni, j_video_frame_buffer);
+ stride_v_ = Java_I420Buffer_getStrideV(jni, j_video_frame_buffer);
+}
+
+AndroidVideoI420Buffer::~AndroidVideoI420Buffer() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ Java_Buffer_release(jni, j_video_frame_buffer_);
+}
+
+} // namespace
+
+int64_t GetJavaVideoFrameTimestampNs(JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame) {
+ return Java_VideoFrame_getTimestampNs(jni, j_video_frame);
+}
+
+rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ RTC_DCHECK_EQ(
+ static_cast<Type>(Java_Buffer_getBufferType(jni, j_video_frame_buffer)),
+ Type::kNative);
+ return rtc::make_ref_counted<AndroidVideoBuffer>(jni, j_video_frame_buffer);
+}
+
+rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Create(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ Java_Buffer_retain(jni, j_video_frame_buffer);
+ return Adopt(jni, j_video_frame_buffer);
+}
+
+AndroidVideoBuffer::AndroidVideoBuffer(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer)
+ : width_(Java_Buffer_getWidth(jni, j_video_frame_buffer)),
+ height_(Java_Buffer_getHeight(jni, j_video_frame_buffer)),
+ j_video_frame_buffer_(jni, j_video_frame_buffer) {}
+
+AndroidVideoBuffer::~AndroidVideoBuffer() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ Java_Buffer_release(jni, j_video_frame_buffer_);
+}
+
+const ScopedJavaGlobalRef<jobject>& AndroidVideoBuffer::video_frame_buffer()
+ const {
+ return j_video_frame_buffer_;
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> AndroidVideoBuffer::CropAndScale(
+ int crop_x,
+ int crop_y,
+ int crop_width,
+ int crop_height,
+ int scale_width,
+ int scale_height) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ return Adopt(jni, Java_Buffer_cropAndScale(jni, j_video_frame_buffer_, crop_x,
+ crop_y, crop_width, crop_height,
+ scale_width, scale_height));
+}
+
+VideoFrameBuffer::Type AndroidVideoBuffer::type() const {
+ return Type::kNative;
+}
+
+int AndroidVideoBuffer::width() const {
+ return width_;
+}
+
+int AndroidVideoBuffer::height() const {
+ return height_;
+}
+
+rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_i420_buffer =
+ Java_Buffer_toI420(jni, j_video_frame_buffer_);
+  // If the I420 conversion fails, propagate the nullptr.
+ if (j_i420_buffer.is_null()) {
+ return nullptr;
+ }
+
+  // We don't need to retain the buffer because toI420() returns a new object
+  // whose ownership we take over.
+ return AndroidVideoI420Buffer::Adopt(jni, width_, height_, j_i420_buffer);
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ VideoFrameBuffer::Type type = static_cast<VideoFrameBuffer::Type>(
+ Java_Buffer_getBufferType(jni, j_video_frame_buffer));
+ switch (type) {
+ case VideoFrameBuffer::Type::kI420: {
+ const int width = Java_Buffer_getWidth(jni, j_video_frame_buffer);
+ const int height = Java_Buffer_getHeight(jni, j_video_frame_buffer);
+ return AndroidVideoI420Buffer::Create(jni, width, height,
+ j_video_frame_buffer);
+ }
+ case VideoFrameBuffer::Type::kNative:
+ return AndroidVideoBuffer::Create(jni, j_video_frame_buffer);
+ default:
+ RTC_CHECK_NOTREACHED();
+ }
+}
+
+VideoFrame JavaToNativeFrame(JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame,
+ uint32_t timestamp_rtp) {
+ ScopedJavaLocalRef<jobject> j_video_frame_buffer =
+ Java_VideoFrame_getBuffer(jni, j_video_frame);
+ int rotation = Java_VideoFrame_getRotation(jni, j_video_frame);
+ int64_t timestamp_ns = Java_VideoFrame_getTimestampNs(jni, j_video_frame);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ JavaToNativeFrameBuffer(jni, j_video_frame_buffer);
+ return VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_timestamp_rtp(timestamp_rtp)
+ .set_timestamp_ms(timestamp_ns / rtc::kNumNanosecsPerMillisec)
+ .set_rotation(static_cast<VideoRotation>(rotation))
+ .build();
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
+ const VideoFrame& frame) {
+ rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
+
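+  // A native buffer is handed straight to Java with an extra retain to
+  // balance the release the Java frame performs later; any other buffer type
+  // is converted to I420 and wrapped.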
+ if (buffer->type() == VideoFrameBuffer::Type::kNative) {
+ AndroidVideoBuffer* android_buffer =
+ static_cast<AndroidVideoBuffer*>(buffer.get());
+ ScopedJavaLocalRef<jobject> j_video_frame_buffer(
+ jni, android_buffer->video_frame_buffer());
+ Java_Buffer_retain(jni, j_video_frame_buffer);
+ return Java_VideoFrame_Constructor(
+ jni, j_video_frame_buffer, static_cast<jint>(frame.rotation()),
+ static_cast<jlong>(frame.timestamp_us() *
+ rtc::kNumNanosecsPerMicrosec));
+ } else {
+ return Java_VideoFrame_Constructor(
+ jni, WrapI420Buffer(jni, buffer->ToI420()),
+ static_cast<jint>(frame.rotation()),
+ static_cast<jlong>(frame.timestamp_us() *
+ rtc::kNumNanosecsPerMicrosec));
+ }
+}
+
+void ReleaseJavaVideoFrame(JNIEnv* jni, const JavaRef<jobject>& j_video_frame) {
+ Java_VideoFrame_release(jni, j_video_frame);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_frame.h b/third_party/libwebrtc/sdk/android/src/jni/video_frame.h
new file mode 100644
index 0000000000..9b916de40b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_frame.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_
+
+#include <jni.h>
+
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+VideoFrame JavaToNativeFrame(JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame,
+ uint32_t timestamp_rtp);
+
+// NOTE: Returns a new video frame that has to be released by calling
+// ReleaseJavaVideoFrame.
+ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
+ const VideoFrame& frame);
+void ReleaseJavaVideoFrame(JNIEnv* jni, const JavaRef<jobject>& j_video_frame);
+
+int64_t GetJavaVideoFrameTimestampNs(JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc
new file mode 100644
index 0000000000..14321084d0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_sink.h"
+
+#include "sdk/android/generated_video_jni/VideoSink_jni.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+VideoSinkWrapper::VideoSinkWrapper(JNIEnv* jni, const JavaRef<jobject>& j_sink)
+ : j_sink_(jni, j_sink) {}
+
+VideoSinkWrapper::~VideoSinkWrapper() {}
+
+void VideoSinkWrapper::OnFrame(const VideoFrame& frame) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame);
+ Java_VideoSink_onFrame(jni, j_sink_, j_frame);
+ ReleaseJavaVideoFrame(jni, j_frame);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_sink.h b/third_party/libwebrtc/sdk/android/src/jni/video_sink.h
new file mode 100644
index 0000000000..f16545434b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_sink.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_
+
+#include <jni.h>
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class VideoSinkWrapper : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ VideoSinkWrapper(JNIEnv* jni, const JavaRef<jobject>& j_sink);
+ ~VideoSinkWrapper() override;
+
+ private:
+ void OnFrame(const VideoFrame& frame) override;
+
+ const ScopedJavaGlobalRef<jobject> j_sink_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_track.cc b/third_party/libwebrtc/sdk/android/src/jni/video_track.cc
new file mode 100644
index 0000000000..70bedc12cf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_track.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_video_jni/VideoTrack_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_sink.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_VideoTrack_AddSink(JNIEnv* jni,
+ jlong j_native_track,
+ jlong j_native_sink) {
+ reinterpret_cast<VideoTrackInterface*>(j_native_track)
+ ->AddOrUpdateSink(
+ reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(j_native_sink),
+ rtc::VideoSinkWants());
+}
+
+static void JNI_VideoTrack_RemoveSink(JNIEnv* jni,
+ jlong j_native_track,
+ jlong j_native_sink) {
+ reinterpret_cast<VideoTrackInterface*>(j_native_track)
+ ->RemoveSink(reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(
+ j_native_sink));
+}
+
+static jlong JNI_VideoTrack_WrapSink(JNIEnv* jni,
+ const JavaParamRef<jobject>& sink) {
+ return jlongFromPointer(new VideoSinkWrapper(jni, sink));
+}
+
+static void JNI_VideoTrack_FreeSink(JNIEnv* jni,
+ jlong j_native_sink) {
+ delete reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(j_native_sink);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc
new file mode 100644
index 0000000000..8b34495dc2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "sdk/android/generated_libvpx_vp8_jni/LibvpxVp8Decoder_jni.h"
+#include "sdk/android/generated_libvpx_vp8_jni/LibvpxVp8Encoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibvpxVp8Encoder_CreateEncoder(JNIEnv* jni) {
+ return jlongFromPointer(VP8Encoder::Create().release());
+}
+
+static jlong JNI_LibvpxVp8Decoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(VP8Decoder::Create().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc
new file mode 100644
index 0000000000..ad9ca793ce
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Decoder_jni.h"
+#include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Encoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibvpxVp9Encoder_CreateEncoder(JNIEnv* jni) {
+ return jlongFromPointer(VP9Encoder::Create().release());
+}
+
+static jboolean JNI_LibvpxVp9Encoder_IsSupported(JNIEnv* jni) {
+ return !SupportedVP9Codecs().empty();
+}
+
+static jlong JNI_LibvpxVp9Decoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(VP9Decoder::Create().release());
+}
+
+static jboolean JNI_LibvpxVp9Decoder_IsSupported(JNIEnv* jni) {
+ return !SupportedVP9Codecs().empty();
+}
+
+} // namespace jni
+} // namespace webrtc
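
[Editor's note] Both codec files follow the same factory pattern: Create() builds a std::unique_ptr, release() drops ownership, and the raw pointer travels to Java inside a jlong; the VP9 IsSupported() entries simply report whether libvpx was built with VP9 (SupportedVP9Codecs() non-empty). A hypothetical Java-side sketch of the ownership contract (class and method names invented for illustration, not the generated bindings):

// Hypothetical sketch: whoever receives the jlong owns a released
// std::unique_ptr and must hand it back to native code exactly once so the
// native side can delete it; Java itself never frees the pointer.
abstract class WrappedNativeCodecHandle {
  private long nativeCodec; // e.g. from JNI_LibvpxVp8Encoder_CreateEncoder

  WrappedNativeCodecHandle(long nativeCodec) {
    this.nativeCodec = nativeCodec;
  }

  /** Transfers ownership of the native pointer; callable once. */
  long releaseNative() {
    long ptr = nativeCodec;
    nativeCodec = 0;
    return ptr;
  }
}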
diff --git a/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc
new file mode 100644
index 0000000000..f2c543e8c2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
+
+#include "sdk/android/generated_video_jni/WrappedNativeI420Buffer_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// TODO(magjed): Write a test for this function.
+ScopedJavaLocalRef<jobject> WrapI420Buffer(
+ JNIEnv* jni,
+ const rtc::scoped_refptr<I420BufferInterface>& i420_buffer) {
+ ScopedJavaLocalRef<jobject> y_buffer =
+ NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataY()),
+ i420_buffer->StrideY() * i420_buffer->height());
+ ScopedJavaLocalRef<jobject> u_buffer =
+ NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataU()),
+ i420_buffer->StrideU() * i420_buffer->ChromaHeight());
+ ScopedJavaLocalRef<jobject> v_buffer =
+ NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataV()),
+ i420_buffer->StrideV() * i420_buffer->ChromaHeight());
+
+ return Java_WrappedNativeI420Buffer_Constructor(
+ jni, i420_buffer->width(), i420_buffer->height(), y_buffer,
+ i420_buffer->StrideY(), u_buffer, i420_buffer->StrideU(), v_buffer,
+ i420_buffer->StrideV(), jlongFromPointer(i420_buffer.get()));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h
new file mode 100644
index 0000000000..70ad062cc6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_
+#define SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_
+
+#include <jni.h>
+
+#include "api/video/video_frame_buffer.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+// This function wraps the C++ I420 buffer and returns a Java
+// VideoFrame.I420Buffer as a jobject.
+ScopedJavaLocalRef<jobject> WrapI420Buffer(
+ JNIEnv* jni,
+ const rtc::scoped_refptr<I420BufferInterface>& i420_buffer);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_
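
[Editor's note] WrapI420Buffer exposes each plane as a direct ByteBuffer sized stride times plane height, where the chroma height rounds up for odd frame heights (that is what I420BufferInterface::ChromaHeight() returns). A small Java sketch of the same arithmetic, mirroring the capacities the wrapper hands to Java:

// Sketch: expected direct-buffer capacities for a wrapped I420 frame.
final class I420PlaneSizes {
  static int ySize(int strideY, int height) {
    return strideY * height;
  }

  static int chromaSize(int strideUv, int height) {
    int chromaHeight = (height + 1) / 2; // rounds up for odd heights
    return strideUv * chromaHeight;
  }
}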
diff --git a/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc b/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc
new file mode 100644
index 0000000000..e812bc9527
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "sdk/android/generated_video_jni/YuvHelper_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"
+
+namespace webrtc {
+namespace jni {
+
+void JNI_YuvHelper_CopyPlane(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src,
+ jint src_stride,
+ const JavaParamRef<jobject>& j_dst,
+ jint dst_stride,
+ jint width,
+ jint height) {
+ const uint8_t* src =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src.obj()));
+ uint8_t* dst =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst.obj()));
+
+ libyuv::CopyPlane(src, src_stride, dst, dst_stride, width, height);
+}
+
+void JNI_YuvHelper_I420Copy(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src_y,
+ jint src_stride_y,
+ const JavaParamRef<jobject>& j_src_u,
+ jint src_stride_u,
+ const JavaParamRef<jobject>& j_src_v,
+ jint src_stride_v,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v,
+ jint width,
+ jint height) {
+ const uint8_t* src_y =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj()));
+ const uint8_t* src_u =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj()));
+ const uint8_t* src_v =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ libyuv::I420Copy(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u,
+ dst_v, dst_stride_v, width, height);
+}
+
+static void JNI_YuvHelper_I420ToNV12(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src_y,
+ jint src_stride_y,
+ const JavaParamRef<jobject>& j_src_u,
+ jint src_stride_u,
+ const JavaParamRef<jobject>& j_src_v,
+ jint src_stride_v,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_uv,
+ jint dst_stride_uv,
+ jint width,
+ jint height) {
+ const uint8_t* src_y =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj()));
+ const uint8_t* src_u =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj()));
+ const uint8_t* src_v =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_uv =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_uv.obj()));
+
+ libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
+ width, height);
+}
+
+void JNI_YuvHelper_I420Rotate(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src_y,
+ jint src_stride_y,
+ const JavaParamRef<jobject>& j_src_u,
+ jint src_stride_u,
+ const JavaParamRef<jobject>& j_src_v,
+ jint src_stride_v,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v,
+ jint src_width,
+ jint src_height,
+ jint rotation_mode) {
+ const uint8_t* src_y =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj()));
+ const uint8_t* src_u =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj()));
+ const uint8_t* src_v =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ libyuv::I420Rotate(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u,
+ dst_v, dst_stride_v, src_width, src_height,
+ static_cast<libyuv::RotationMode>(rotation_mode));
+}
+
+void JNI_YuvHelper_ABGRToI420(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src,
+ jint src_stride,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v,
+ jint src_width,
+ jint src_height) {
+ const uint8_t* src =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ libyuv::ABGRToI420(src, src_stride, dst_y, dst_stride_y, dst_u, dst_stride_u,
+ dst_v, dst_stride_v, src_width, src_height);
+}
+
+} // namespace jni
+} // namespace webrtc
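
[Editor's note] Every entry point above fetches plane pointers with GetDirectBufferAddress, so the Java YuvHelper counterparts only work with direct ByteBuffers; heap-backed buffers yield null addresses. A sketch of calling the Java side (assumes even width/height and that org.webrtc.YuvHelper mirrors the JNI signatures above):

// Sketch: packing tightly-strided I420 planes into freshly allocated NV12
// planes. All buffers must be allocateDirect(), per the JNI contract.
import java.nio.ByteBuffer;
import org.webrtc.YuvHelper;

final class Nv12Convert {
  static ByteBuffer[] i420ToNv12(ByteBuffer srcY, ByteBuffer srcU, ByteBuffer srcV,
      int width, int height) {
    int chromaHeight = height / 2;
    ByteBuffer dstY = ByteBuffer.allocateDirect(width * height);
    ByteBuffer dstUv = ByteBuffer.allocateDirect(width * chromaHeight); // interleaved U/V
    YuvHelper.I420ToNV12(srcY, /* srcStrideY= */ width, srcU, /* srcStrideU= */ width / 2,
        srcV, /* srcStrideV= */ width / 2, dstY, /* dstStrideY= */ width,
        dstUv, /* dstStrideUV= */ width, width, height);
    return new ByteBuffer[] {dstY, dstUv};
  }
}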
diff --git a/third_party/libwebrtc/sdk/android/tests/resources/robolectric.properties b/third_party/libwebrtc/sdk/android/tests/resources/robolectric.properties
new file mode 100644
index 0000000000..a9bc625b18
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/resources/robolectric.properties
@@ -0,0 +1 @@
+sdk=21,25,26
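
[Editor's note] Robolectric's `sdk` property runs each test class below against every listed API level, so these suites execute three times: on API 21, 25 and 26.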
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/AndroidVideoDecoderTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/AndroidVideoDecoderTest.java
new file mode 100644
index 0000000000..535187e99e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/AndroidVideoDecoderTest.java
@@ -0,0 +1,432 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaFormat;
+import android.os.Handler;
+import androidx.test.runner.AndroidJUnit4;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.InOrder;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.robolectric.annotation.Config;
+import org.webrtc.EncodedImage.FrameType;
+import org.webrtc.FakeMediaCodecWrapper.State;
+import org.webrtc.VideoDecoder.DecodeInfo;
+import org.webrtc.VideoFrame.I420Buffer;
+import org.webrtc.VideoFrame.TextureBuffer.Type;
+
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class AndroidVideoDecoderTest {
+ private static final VideoDecoder.Settings TEST_DECODER_SETTINGS =
+ new VideoDecoder.Settings(/* numberOfCores= */ 1, /* width= */ 640, /* height= */ 480);
+ private static final int COLOR_FORMAT = CodecCapabilities.COLOR_FormatYUV420Planar;
+ private static final long POLL_DELAY_MS = 10;
+ private static final long DELIVER_DECODED_IMAGE_DELAY_MS = 10;
+
+ private static final byte[] ENCODED_TEST_DATA = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
+
+ private class TestDecoder extends AndroidVideoDecoder {
+ private final Object deliverDecodedFrameLock = new Object();
+ private boolean deliverDecodedFrameDone = true;
+
+ public TestDecoder(MediaCodecWrapperFactory mediaCodecFactory, String codecName,
+ VideoCodecMimeType codecType, int colorFormat, EglBase.Context sharedContext) {
+ super(mediaCodecFactory, codecName, codecType, colorFormat, sharedContext);
+ }
+
+ public void waitDeliverDecodedFrame() throws InterruptedException {
+ synchronized (deliverDecodedFrameLock) {
+ deliverDecodedFrameDone = false;
+ deliverDecodedFrameLock.notifyAll();
+ while (!deliverDecodedFrameDone) {
+ deliverDecodedFrameLock.wait();
+ }
+ }
+ }
+
+ @SuppressWarnings("WaitNotInLoop") // This method is called inside a loop.
+ @Override
+ protected void deliverDecodedFrame() {
+ synchronized (deliverDecodedFrameLock) {
+ if (deliverDecodedFrameDone) {
+ try {
+ deliverDecodedFrameLock.wait(DELIVER_DECODED_IMAGE_DELAY_MS);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ return;
+ }
+ }
+ if (deliverDecodedFrameDone) {
+ return;
+ }
+ super.deliverDecodedFrame();
+ deliverDecodedFrameDone = true;
+ deliverDecodedFrameLock.notifyAll();
+ }
+ }
+
+ @Override
+ protected SurfaceTextureHelper createSurfaceTextureHelper() {
+ return mockSurfaceTextureHelper;
+ }
+
+ @Override
+ protected void releaseSurface() {}
+
+ @Override
+ protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) {
+ int chromaHeight = (height + 1) / 2;
+ int strideUV = (width + 1) / 2;
+ int yPos = 0;
+ int uPos = yPos + width * height;
+ int vPos = uPos + strideUV * chromaHeight;
+
+ ByteBuffer buffer = ByteBuffer.allocateDirect(width * height + 2 * strideUV * chromaHeight);
+
+ buffer.position(yPos);
+ buffer.limit(uPos);
+ ByteBuffer dataY = buffer.slice();
+
+ buffer.position(uPos);
+ buffer.limit(vPos);
+ ByteBuffer dataU = buffer.slice();
+
+ buffer.position(vPos);
+ buffer.limit(vPos + strideUV * chromaHeight);
+ ByteBuffer dataV = buffer.slice();
+
+ return JavaI420Buffer.wrap(width, height, dataY, width, dataU, strideUV, dataV, strideUV,
+ /* releaseCallback= */ null);
+ }
+
+ @Override
+ protected void copyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+ for (int y = 0; y < height; y++) {
+ for (int x = 0; x < width; x++) {
+ dst.put(y * dstStride + x, src.get(y * srcStride + x));
+ }
+ }
+ }
+ }
+
+ private class TestDecoderBuilder {
+ private VideoCodecMimeType codecType = VideoCodecMimeType.VP8;
+ private boolean useSurface = true;
+
+ public TestDecoderBuilder setCodecType(VideoCodecMimeType codecType) {
+ this.codecType = codecType;
+ return this;
+ }
+
+ public TestDecoderBuilder setUseSurface(boolean useSurface) {
+ this.useSurface = useSurface;
+ return this;
+ }
+
+ public TestDecoder build() {
+ return new TestDecoder((String name)
+ -> fakeMediaCodecWrapper,
+ /* codecName= */ "org.webrtc.testdecoder", codecType, COLOR_FORMAT,
+ useSurface ? mockEglBaseContext : null);
+ }
+ }
+
+ private static class FakeDecoderCallback implements VideoDecoder.Callback {
+ public final List<VideoFrame> decodedFrames;
+
+ public FakeDecoderCallback() {
+ decodedFrames = new ArrayList<>();
+ }
+
+ @Override
+ public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
+ frame.retain();
+ decodedFrames.add(frame);
+ }
+
+ public void release() {
+ for (VideoFrame frame : decodedFrames) frame.release();
+ decodedFrames.clear();
+ }
+ }
+
+ private EncodedImage createTestEncodedImage() {
+ return EncodedImage.builder()
+ .setBuffer(ByteBuffer.wrap(ENCODED_TEST_DATA), null)
+ .setFrameType(FrameType.VideoFrameKey)
+ .createEncodedImage();
+ }
+
+ @Mock private EglBase.Context mockEglBaseContext;
+ @Mock private SurfaceTextureHelper mockSurfaceTextureHelper;
+ @Mock private VideoDecoder.Callback mockDecoderCallback;
+ private FakeMediaCodecWrapper fakeMediaCodecWrapper;
+ private FakeDecoderCallback fakeDecoderCallback;
+
+ @Before
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
+ when(mockSurfaceTextureHelper.getSurfaceTexture())
+ .thenReturn(new SurfaceTexture(/*texName=*/0));
+ MediaFormat inputFormat = new MediaFormat();
+ MediaFormat outputFormat = new MediaFormat();
+ // TODO(sakal): Add more details to output format as needed.
+ fakeMediaCodecWrapper = spy(new FakeMediaCodecWrapper(inputFormat, outputFormat));
+ fakeDecoderCallback = new FakeDecoderCallback();
+ }
+
+ @After
+ public void cleanUp() {
+ fakeDecoderCallback.release();
+ }
+
+ @Test
+ public void testInit() {
+ // Set-up.
+ AndroidVideoDecoder decoder =
+ new TestDecoderBuilder().setCodecType(VideoCodecMimeType.VP8).build();
+
+ // Test.
+ assertThat(decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback))
+ .isEqualTo(VideoCodecStatus.OK);
+
+ // Verify.
+ assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.EXECUTING_RUNNING);
+
+ MediaFormat mediaFormat = fakeMediaCodecWrapper.getConfiguredFormat();
+ assertThat(mediaFormat).isNotNull();
+ assertThat(mediaFormat.getInteger(MediaFormat.KEY_WIDTH))
+ .isEqualTo(TEST_DECODER_SETTINGS.width);
+ assertThat(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
+ .isEqualTo(TEST_DECODER_SETTINGS.height);
+ assertThat(mediaFormat.getString(MediaFormat.KEY_MIME))
+ .isEqualTo(VideoCodecMimeType.VP8.mimeType());
+ }
+
+ @Test
+ public void testRelease() {
+ // Set-up.
+ AndroidVideoDecoder decoder = new TestDecoderBuilder().build();
+ decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
+
+ // Test.
+ assertThat(decoder.release()).isEqualTo(VideoCodecStatus.OK);
+
+ // Verify.
+ assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
+ }
+
+ @Test
+ public void testReleaseMultipleTimes() {
+ // Set-up.
+ AndroidVideoDecoder decoder = new TestDecoderBuilder().build();
+ decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
+
+ // Test.
+ assertThat(decoder.release()).isEqualTo(VideoCodecStatus.OK);
+ assertThat(decoder.release()).isEqualTo(VideoCodecStatus.OK);
+
+ // Verify.
+ assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
+ }
+
+ @Test
+ public void testDecodeQueuesData() {
+ // Set-up.
+ AndroidVideoDecoder decoder = new TestDecoderBuilder().build();
+ decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
+
+ // Test.
+ assertThat(decoder.decode(createTestEncodedImage(),
+ new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0)))
+ .isEqualTo(VideoCodecStatus.OK);
+
+ // Verify.
+ ArgumentCaptor<Integer> indexCaptor = ArgumentCaptor.forClass(Integer.class);
+ ArgumentCaptor<Integer> offsetCaptor = ArgumentCaptor.forClass(Integer.class);
+ ArgumentCaptor<Integer> sizeCaptor = ArgumentCaptor.forClass(Integer.class);
+ verify(fakeMediaCodecWrapper)
+ .queueInputBuffer(indexCaptor.capture(), offsetCaptor.capture(), sizeCaptor.capture(),
+ /* presentationTimeUs= */ anyLong(),
+ /* flags= */ eq(0));
+
+ ByteBuffer inputBuffer = fakeMediaCodecWrapper.getInputBuffer(indexCaptor.getValue());
+ CodecTestHelper.assertEqualContents(
+ ENCODED_TEST_DATA, inputBuffer, offsetCaptor.getValue(), sizeCaptor.getValue());
+ }
+
+ @Test
+ public void testDeliversOutputByteBuffers() throws InterruptedException {
+ final byte[] testOutputData = CodecTestHelper.generateRandomData(
+ TEST_DECODER_SETTINGS.width * TEST_DECODER_SETTINGS.height * 3 / 2);
+ final I420Buffer expectedDeliveredBuffer = CodecTestHelper.wrapI420(
+ TEST_DECODER_SETTINGS.width, TEST_DECODER_SETTINGS.height, testOutputData);
+
+ // Set-up.
+ TestDecoder decoder = new TestDecoderBuilder().setUseSurface(/* useSurface = */ false).build();
+ decoder.initDecode(TEST_DECODER_SETTINGS, fakeDecoderCallback);
+ decoder.decode(createTestEncodedImage(),
+ new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0));
+ fakeMediaCodecWrapper.addOutputData(
+ testOutputData, /* presentationTimestampUs= */ 0, /* flags= */ 0);
+
+ // Test.
+ decoder.waitDeliverDecodedFrame();
+
+ // Verify.
+ assertThat(fakeDecoderCallback.decodedFrames).hasSize(1);
+ VideoFrame videoFrame = fakeDecoderCallback.decodedFrames.get(0);
+ assertThat(videoFrame).isNotNull();
+ assertThat(videoFrame.getRotatedWidth()).isEqualTo(TEST_DECODER_SETTINGS.width);
+ assertThat(videoFrame.getRotatedHeight()).isEqualTo(TEST_DECODER_SETTINGS.height);
+ assertThat(videoFrame.getRotation()).isEqualTo(0);
+ I420Buffer deliveredBuffer = videoFrame.getBuffer().toI420();
+ assertThat(deliveredBuffer.getDataY()).isEqualTo(expectedDeliveredBuffer.getDataY());
+ assertThat(deliveredBuffer.getDataU()).isEqualTo(expectedDeliveredBuffer.getDataU());
+ assertThat(deliveredBuffer.getDataV()).isEqualTo(expectedDeliveredBuffer.getDataV());
+ }
+
+ @Test
+ public void testRendersOutputTexture() throws InterruptedException {
+ // Set-up.
+ TestDecoder decoder = new TestDecoderBuilder().build();
+ decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
+ decoder.decode(createTestEncodedImage(),
+ new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0));
+ int bufferIndex =
+ fakeMediaCodecWrapper.addOutputTexture(/* presentationTimestampUs= */ 0, /* flags= */ 0);
+
+ // Test.
+ decoder.waitDeliverDecodedFrame();
+
+ // Verify.
+ verify(fakeMediaCodecWrapper).releaseOutputBuffer(bufferIndex, /* render= */ true);
+ }
+
+ @Test
+ public void testSurfaceTextureStall_FramesDropped() throws InterruptedException {
+ final int numFrames = 10;
+ // Maximum number of frames the decoder can keep queued on the output side.
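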
+ final int maxQueuedBuffers = 3;
+
+ // Set-up.
+ TestDecoder decoder = new TestDecoderBuilder().build();
+ decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
+
+ // Test.
+ int[] bufferIndices = new int[numFrames];
+ for (int i = 0; i < numFrames; i++) {
+ decoder.decode(createTestEncodedImage(),
+ new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0));
+ bufferIndices[i] =
+ fakeMediaCodecWrapper.addOutputTexture(/* presentationTimestampUs= */ 0, /* flags= */ 0);
+ decoder.waitDeliverDecodedFrame();
+ }
+
+ // Verify.
+ InOrder releaseOrder = inOrder(fakeMediaCodecWrapper);
+ releaseOrder.verify(fakeMediaCodecWrapper)
+ .releaseOutputBuffer(bufferIndices[0], /* render= */ true);
+ for (int i = 1; i < numFrames - maxQueuedBuffers; i++) {
+ releaseOrder.verify(fakeMediaCodecWrapper)
+ .releaseOutputBuffer(bufferIndices[i], /* render= */ false);
+ }
+ }
+
+ @Test
+ public void testDeliversRenderedBuffers() throws InterruptedException {
+ // Set-up.
+ TestDecoder decoder = new TestDecoderBuilder().build();
+ decoder.initDecode(TEST_DECODER_SETTINGS, fakeDecoderCallback);
+ decoder.decode(createTestEncodedImage(),
+ new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0));
+ fakeMediaCodecWrapper.addOutputTexture(/* presentationTimestampUs= */ 0, /* flags= */ 0);
+
+ // Render the output buffer.
+ decoder.waitDeliverDecodedFrame();
+
+ ArgumentCaptor<VideoSink> videoSinkCaptor = ArgumentCaptor.forClass(VideoSink.class);
+ verify(mockSurfaceTextureHelper).startListening(videoSinkCaptor.capture());
+
+ // Test.
+ Runnable releaseCallback = mock(Runnable.class);
+ VideoFrame.TextureBuffer outputTextureBuffer =
+ new TextureBufferImpl(TEST_DECODER_SETTINGS.width, TEST_DECODER_SETTINGS.height, Type.OES,
+ /* id= */ 0,
+ /* transformMatrix= */ new Matrix(),
+ /* toI420Handler= */ new Handler(), new YuvConverter(), releaseCallback);
+ VideoFrame outputVideoFrame =
+ new VideoFrame(outputTextureBuffer, /* rotation= */ 0, /* timestampNs= */ 0);
+ videoSinkCaptor.getValue().onFrame(outputVideoFrame);
+ outputVideoFrame.release();
+
+ // Verify.
+ assertThat(fakeDecoderCallback.decodedFrames).hasSize(1);
+ VideoFrame videoFrame = fakeDecoderCallback.decodedFrames.get(0);
+ assertThat(videoFrame).isNotNull();
+ assertThat(videoFrame.getBuffer()).isEqualTo(outputTextureBuffer);
+
+ fakeDecoderCallback.release();
+
+ verify(releaseCallback).run();
+ }
+
+ @Test
+ public void testConfigureExceptionTriggerSWFallback() {
+ // Set-up.
+ doThrow(new IllegalStateException("Fake error"))
+ .when(fakeMediaCodecWrapper)
+ .configure(any(), any(), any(), anyInt());
+
+ AndroidVideoDecoder decoder = new TestDecoderBuilder().build();
+
+ // Test.
+ assertThat(decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback))
+ .isEqualTo(VideoCodecStatus.FALLBACK_SOFTWARE);
+ }
+
+ @Test
+ public void testStartExceptionTriggerSWFallback() {
+ // Set-up.
+ doThrow(new IllegalStateException("Fake error")).when(fakeMediaCodecWrapper).start();
+
+ AndroidVideoDecoder decoder = new TestDecoderBuilder().build();
+
+ // Test.
+ assertThat(decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback))
+ .isEqualTo(VideoCodecStatus.FALLBACK_SOFTWARE);
+ }
+}
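
[Editor's note] The waitDeliverDecodedFrame()/deliverDecodedFrame() pair above is a wait/notify rendezvous: the test thread requests exactly one delivery and blocks until the decoder's output thread has performed it, while the output thread parks briefly between requests. The same handshake, reduced to its skeleton (illustrative):

// Sketch: one-shot work gate between a requesting thread and a polling worker.
final class OneShotGate {
  private final Object lock = new Object();
  private boolean done = true;

  void requestAndAwait() throws InterruptedException {
    synchronized (lock) {
      done = false;
      lock.notifyAll();         // wake a worker parked in tryPerform()
      while (!done) {
        lock.wait();
      }
    }
  }

  void tryPerform(Runnable work, long pollMs) {
    synchronized (lock) {
      if (done) {
        try {
          lock.wait(pollMs);    // park briefly until a request arrives
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          return;
        }
      }
      if (done) {
        return;                 // timed out: no request pending
      }
      work.run();
      done = true;
      lock.notifyAll();
    }
  }
}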
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CameraEnumerationTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CameraEnumerationTest.java
new file mode 100644
index 0000000000..2c33992f99
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CameraEnumerationTest.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.webrtc.CameraEnumerationAndroid.getClosestSupportedFramerateRange;
+
+import androidx.test.runner.AndroidJUnit4;
+import java.util.Arrays;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat.FramerateRange;
+
+/**
+ * Tests for CameraEnumerationAndroid.
+ */
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class CameraEnumerationTest {
+ @Test
+ public void testGetClosestSupportedFramerateRange() {
+ assertEquals(new FramerateRange(10000, 30000),
+ getClosestSupportedFramerateRange(
+ Arrays.asList(new FramerateRange(10000, 30000), new FramerateRange(30000, 30000)),
+ 30 /* requestedFps */));
+
+ assertEquals(new FramerateRange(10000, 20000),
+ getClosestSupportedFramerateRange(
+ Arrays.asList(new FramerateRange(0, 30000), new FramerateRange(10000, 20000),
+ new FramerateRange(14000, 16000), new FramerateRange(15000, 15000)),
+ 15 /* requestedFps */));
+
+ assertEquals(new FramerateRange(10000, 20000),
+ getClosestSupportedFramerateRange(
+ Arrays.asList(new FramerateRange(15000, 15000), new FramerateRange(10000, 20000),
+ new FramerateRange(10000, 30000)),
+ 10 /* requestedFps */));
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CodecTestHelper.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CodecTestHelper.java
new file mode 100644
index 0000000000..08a10707f8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CodecTestHelper.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assertWithMessage;
+
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+/**
+ * Helper methods for {@link HardwareVideoEncoderTest} and {@link AndroidVideoDecoderTest}.
+ */
+class CodecTestHelper {
+ static void assertEqualContents(byte[] expected, ByteBuffer actual, int offset, int size) {
+ assertThat(size).isEqualTo(expected.length);
+ assertThat(actual.capacity()).isAtLeast(offset + size);
+ for (int i = 0; i < expected.length; i++) {
+ assertWithMessage("At index: " + i).that(actual.get(offset + i)).isEqualTo(expected[i]);
+ }
+ }
+
+ static byte[] generateRandomData(int length) {
+ Random random = new Random();
+ byte[] data = new byte[length];
+ random.nextBytes(data);
+ return data;
+ }
+
+ static VideoFrame.I420Buffer wrapI420(int width, int height, byte[] data) {
+ final int posY = 0;
+ final int posU = width * height;
+ final int posV = posU + width * height / 4;
+ final int endV = posV + width * height / 4;
+
+ ByteBuffer buffer = ByteBuffer.allocateDirect(data.length);
+ buffer.put(data);
+
+ buffer.limit(posU);
+ buffer.position(posY);
+ ByteBuffer dataY = buffer.slice();
+
+ buffer.limit(posV);
+ buffer.position(posU);
+ ByteBuffer dataU = buffer.slice();
+
+ buffer.limit(endV);
+ buffer.position(posV);
+ ByteBuffer dataV = buffer.slice();
+
+ return JavaI420Buffer.wrap(width, height, dataY, width, dataU, width / 2, dataV, width / 2,
+ /* releaseCallback= */ null);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CryptoOptionsTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CryptoOptionsTest.java
new file mode 100644
index 0000000000..c6cd2c2008
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/CryptoOptionsTest.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import androidx.test.runner.AndroidJUnit4;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+import org.webrtc.CryptoOptions;
+
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class CryptoOptionsTest {
+ // Validates that the builder defaults every option to false.
+ @Test
+ public void testBuilderDefaultsAreFalse() {
+ CryptoOptions cryptoOptions = CryptoOptions.builder().createCryptoOptions();
+ assertThat(cryptoOptions.getSrtp().getEnableGcmCryptoSuites()).isFalse();
+ assertThat(cryptoOptions.getSrtp().getEnableAes128Sha1_32CryptoCipher()).isFalse();
+ assertThat(cryptoOptions.getSrtp().getEnableEncryptedRtpHeaderExtensions()).isFalse();
+ assertThat(cryptoOptions.getSFrame().getRequireFrameEncryption()).isFalse();
+ }
+
+ // Validates the builder sets the correct parameters.
+ @Test
+ public void testBuilderCorrectlyInitializingGcmCrypto() {
+ CryptoOptions cryptoOptions =
+ CryptoOptions.builder().setEnableGcmCryptoSuites(true).createCryptoOptions();
+ assertThat(cryptoOptions.getSrtp().getEnableGcmCryptoSuites()).isTrue();
+ assertThat(cryptoOptions.getSrtp().getEnableAes128Sha1_32CryptoCipher()).isFalse();
+ assertThat(cryptoOptions.getSrtp().getEnableEncryptedRtpHeaderExtensions()).isFalse();
+ assertThat(cryptoOptions.getSFrame().getRequireFrameEncryption()).isFalse();
+ }
+
+ @Test
+ public void testBuilderCorrectlyInitializingAes128Sha1_32CryptoCipher() {
+ CryptoOptions cryptoOptions =
+ CryptoOptions.builder().setEnableAes128Sha1_32CryptoCipher(true).createCryptoOptions();
+ assertThat(cryptoOptions.getSrtp().getEnableGcmCryptoSuites()).isFalse();
+ assertThat(cryptoOptions.getSrtp().getEnableAes128Sha1_32CryptoCipher()).isTrue();
+ assertThat(cryptoOptions.getSrtp().getEnableEncryptedRtpHeaderExtensions()).isFalse();
+ assertThat(cryptoOptions.getSFrame().getRequireFrameEncryption()).isFalse();
+ }
+
+ @Test
+ public void testBuilderCorrectlyInitializingEncryptedRtpHeaderExtensions() {
+ CryptoOptions cryptoOptions =
+ CryptoOptions.builder().setEnableEncryptedRtpHeaderExtensions(true).createCryptoOptions();
+ assertThat(cryptoOptions.getSrtp().getEnableGcmCryptoSuites()).isFalse();
+ assertThat(cryptoOptions.getSrtp().getEnableAes128Sha1_32CryptoCipher()).isFalse();
+ assertThat(cryptoOptions.getSrtp().getEnableEncryptedRtpHeaderExtensions()).isTrue();
+ assertThat(cryptoOptions.getSFrame().getRequireFrameEncryption()).isFalse();
+ }
+
+ @Test
+ public void testBuilderCorrectlyInitializingRequireFrameEncryption() {
+ CryptoOptions cryptoOptions =
+ CryptoOptions.builder().setRequireFrameEncryption(true).createCryptoOptions();
+ assertThat(cryptoOptions.getSrtp().getEnableGcmCryptoSuites()).isFalse();
+ assertThat(cryptoOptions.getSrtp().getEnableAes128Sha1_32CryptoCipher()).isFalse();
+ assertThat(cryptoOptions.getSrtp().getEnableEncryptedRtpHeaderExtensions()).isFalse();
+ assertThat(cryptoOptions.getSFrame().getRequireFrameEncryption()).isTrue();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/FakeMediaCodecWrapper.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/FakeMediaCodecWrapper.java
new file mode 100644
index 0000000000..fb7aba4700
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/FakeMediaCodecWrapper.java
@@ -0,0 +1,321 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Fake MediaCodec that implements the basic state machine.
+ *
+ * @note This class is only intended for single-threaded tests and is not thread-safe.
+ */
+public class FakeMediaCodecWrapper implements MediaCodecWrapper {
+ private static final int NUM_INPUT_BUFFERS = 10;
+ private static final int NUM_OUTPUT_BUFFERS = 10;
+ private static final int MAX_ENCODED_DATA_SIZE_BYTES = 1_000;
+
+ /**
+ * MediaCodec state as defined by:
+ * https://developer.android.com/reference/android/media/MediaCodec.html
+ */
+ public enum State {
+ STOPPED_CONFIGURED(Primary.STOPPED),
+ STOPPED_UNINITIALIZED(Primary.STOPPED),
+ STOPPED_ERROR(Primary.STOPPED),
+ EXECUTING_FLUSHED(Primary.EXECUTING),
+ EXECUTING_RUNNING(Primary.EXECUTING),
+ EXECUTING_END_OF_STREAM(Primary.EXECUTING),
+ RELEASED(Primary.RELEASED);
+
+ public enum Primary { STOPPED, EXECUTING, RELEASED }
+
+ private final Primary primary;
+
+ State(Primary primary) {
+ this.primary = primary;
+ }
+
+ public Primary getPrimary() {
+ return primary;
+ }
+ }
+
+ /** Represents an output buffer that will be returned by dequeueOutputBuffer. */
+ public static class QueuedOutputBufferInfo {
+ private int index;
+ private int offset;
+ private int size;
+ private long presentationTimeUs;
+ private int flags;
+
+ private QueuedOutputBufferInfo(
+ int index, int offset, int size, long presentationTimeUs, int flags) {
+ this.index = index;
+ this.offset = offset;
+ this.size = size;
+ this.presentationTimeUs = presentationTimeUs;
+ this.flags = flags;
+ }
+
+ public static QueuedOutputBufferInfo create(
+ int index, int offset, int size, long presentationTimeUs, int flags) {
+ return new QueuedOutputBufferInfo(index, offset, size, presentationTimeUs, flags);
+ }
+
+ public int getIndex() {
+ return index;
+ }
+
+ public int getOffset() {
+ return offset;
+ }
+
+ public int getSize() {
+ return size;
+ }
+
+ public long getPresentationTimeUs() {
+ return presentationTimeUs;
+ }
+
+ public int getFlags() {
+ return flags;
+ }
+ }
+
+ private State state = State.STOPPED_UNINITIALIZED;
+ private @Nullable MediaFormat configuredFormat;
+ private int configuredFlags;
+ private final MediaFormat inputFormat;
+ private final MediaFormat outputFormat;
+ private final ByteBuffer[] inputBuffers = new ByteBuffer[NUM_INPUT_BUFFERS];
+ private final ByteBuffer[] outputBuffers = new ByteBuffer[NUM_OUTPUT_BUFFERS];
+ private final boolean[] inputBufferReserved = new boolean[NUM_INPUT_BUFFERS];
+ private final boolean[] outputBufferReserved = new boolean[NUM_OUTPUT_BUFFERS];
+ private final List<QueuedOutputBufferInfo> queuedOutputBuffers = new ArrayList<>();
+
+ public FakeMediaCodecWrapper(MediaFormat inputFormat, MediaFormat outputFormat) {
+ this.inputFormat = inputFormat;
+ this.outputFormat = outputFormat;
+ }
+
+ /** Returns the current simulated state of MediaCodec. */
+ public State getState() {
+ return state;
+ }
+
+ /** Gets the last configured media format passed to configure. */
+ public @Nullable MediaFormat getConfiguredFormat() {
+ return configuredFormat;
+ }
+
+ /** Returns the last flags passed to configure. */
+ public int getConfiguredFlags() {
+ return configuredFlags;
+ }
+
+ /**
+ * Adds a texture buffer that will be returned by dequeueOutputBuffer. Returns the index of
+ * the buffer.
+ */
+ public int addOutputTexture(long presentationTimestampUs, int flags) {
+ int index = getFreeOutputBuffer();
+ queuedOutputBuffers.add(QueuedOutputBufferInfo.create(
+ index, /* offset= */ 0, /* size= */ 0, presentationTimestampUs, flags));
+ return index;
+ }
+
+ /**
+ * Adds a byte buffer that will be returned by dequeueOutputBuffer. Returns the index of
+ * the buffer.
+ */
+ public int addOutputData(byte[] data, long presentationTimestampUs, int flags) {
+ int index = getFreeOutputBuffer();
+ ByteBuffer outputBuffer = outputBuffers[index];
+
+ outputBuffer.clear();
+ outputBuffer.put(data);
+ outputBuffer.rewind();
+
+ queuedOutputBuffers.add(QueuedOutputBufferInfo.create(
+ index, /* offset= */ 0, data.length, presentationTimestampUs, flags));
+ return index;
+ }
+
+ /**
+ * Returns the first output buffer that is not reserved and reserves it. It stays reserved
+ * until released with releaseOutputBuffer.
+ */
+ private int getFreeOutputBuffer() {
+ for (int i = 0; i < NUM_OUTPUT_BUFFERS; i++) {
+ if (!outputBufferReserved[i]) {
+ outputBufferReserved[i] = true;
+ return i;
+ }
+ }
+ throw new RuntimeException("All output buffers reserved!");
+ }
+
+ @Override
+ public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
+ if (state != State.STOPPED_UNINITIALIZED) {
+ throw new IllegalStateException("Expected state STOPPED_UNINITIALIZED but was " + state);
+ }
+ state = State.STOPPED_CONFIGURED;
+ configuredFormat = format;
+ configuredFlags = flags;
+
+ final int width = configuredFormat.getInteger(MediaFormat.KEY_WIDTH);
+ final int height = configuredFormat.getInteger(MediaFormat.KEY_HEIGHT);
+ final int yuvSize = width * height * 3 / 2;
+ final int inputBufferSize;
+ final int outputBufferSize;
+
+ if ((flags & MediaCodec.CONFIGURE_FLAG_ENCODE) != 0) {
+ final int colorFormat = configuredFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+
+ inputBufferSize = colorFormat == CodecCapabilities.COLOR_FormatSurface ? 0 : yuvSize;
+ outputBufferSize = MAX_ENCODED_DATA_SIZE_BYTES;
+ } else {
+ inputBufferSize = MAX_ENCODED_DATA_SIZE_BYTES;
+ outputBufferSize = surface != null ? 0 : yuvSize;
+ }
+
+ for (int i = 0; i < inputBuffers.length; i++) {
+ inputBuffers[i] = ByteBuffer.allocateDirect(inputBufferSize);
+ }
+ for (int i = 0; i < outputBuffers.length; i++) {
+ outputBuffers[i] = ByteBuffer.allocateDirect(outputBufferSize);
+ }
+ }
+
+ @Override
+ public void start() {
+ if (state != State.STOPPED_CONFIGURED) {
+ throw new IllegalStateException("Expected state STOPPED_CONFIGURED but was " + state);
+ }
+ state = State.EXECUTING_RUNNING;
+ }
+
+ @Override
+ public void flush() {
+ if (state.getPrimary() != State.Primary.EXECUTING) {
+ throw new IllegalStateException("Expected state EXECUTING but was " + state);
+ }
+ state = State.EXECUTING_FLUSHED;
+ }
+
+ @Override
+ public void stop() {
+ if (state.getPrimary() != State.Primary.EXECUTING) {
+ throw new IllegalStateException("Expected state EXECUTING but was " + state);
+ }
+ state = State.STOPPED_UNINITIALIZED;
+ }
+
+ @Override
+ public void release() {
+ state = State.RELEASED;
+ }
+
+ @Override
+ public int dequeueInputBuffer(long timeoutUs) {
+ if (state != State.EXECUTING_FLUSHED && state != State.EXECUTING_RUNNING) {
+ throw new IllegalStateException(
+ "Expected state EXECUTING_FLUSHED or EXECUTING_RUNNING but was " + state);
+ }
+ state = State.EXECUTING_RUNNING;
+
+ for (int i = 0; i < NUM_INPUT_BUFFERS; i++) {
+ if (!inputBufferReserved[i]) {
+ inputBufferReserved[i] = true;
+ return i;
+ }
+ }
+ return MediaCodec.INFO_TRY_AGAIN_LATER;
+ }
+
+ @Override
+ public void queueInputBuffer(
+ int index, int offset, int size, long presentationTimeUs, int flags) {
+ if (state.getPrimary() != State.Primary.EXECUTING) {
+ throw new IllegalStateException("Expected state EXECUTING but was " + state);
+ }
+ if (flags != 0) {
+ throw new UnsupportedOperationException(
+ "Flags are not implemented in FakeMediaCodecWrapper.");
+ }
+ }
+
+ @Override
+ public int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs) {
+ if (state.getPrimary() != State.Primary.EXECUTING) {
+ throw new IllegalStateException("Expected state EXECUTING but was " + state);
+ }
+
+ if (queuedOutputBuffers.isEmpty()) {
+ return MediaCodec.INFO_TRY_AGAIN_LATER;
+ }
+ QueuedOutputBufferInfo outputBufferInfo = queuedOutputBuffers.remove(/* index= */ 0);
+ info.set(outputBufferInfo.getOffset(), outputBufferInfo.getSize(),
+ outputBufferInfo.getPresentationTimeUs(), outputBufferInfo.getFlags());
+ return outputBufferInfo.getIndex();
+ }
+
+ @Override
+ public void releaseOutputBuffer(int index, boolean render) {
+ if (state.getPrimary() != State.Primary.EXECUTING) {
+ throw new IllegalStateException("Expected state EXECUTING but was " + state);
+ }
+ if (!outputBufferReserved[index]) {
+ throw new RuntimeException("Released output buffer was not in use.");
+ }
+ outputBufferReserved[index] = false;
+ }
+
+ @Override
+ public ByteBuffer getInputBuffer(int index) {
+ return inputBuffers[index];
+ }
+
+ @Override
+ public ByteBuffer getOutputBuffer(int index) {
+ return outputBuffers[index];
+ }
+
+ @Override
+ public MediaFormat getInputFormat() {
+ return inputFormat;
+ }
+
+ @Override
+ public MediaFormat getOutputFormat() {
+ return outputFormat;
+ }
+
+ @Override
+ public Surface createInputSurface() {
+ return new Surface(new SurfaceTexture(/* texName= */ 0));
+ }
+
+ @Override
+ public void setParameters(Bundle params) {}
+}
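
[Editor's note] A sketch of driving the fake through the MediaCodec state machine it models (STOPPED_UNINITIALIZED -> STOPPED_CONFIGURED -> EXECUTING_RUNNING). The format must define KEY_WIDTH and KEY_HEIGHT, the payload must fit within both the 1000-byte input buffers and the YUV-sized output buffers, and output only appears after the test seeds it:

// Sketch: one decode-path round trip against FakeMediaCodecWrapper.
import android.media.MediaCodec;
import android.media.MediaFormat;
import java.nio.ByteBuffer;

final class FakeCodecDemo {
  static ByteBuffer roundTrip(FakeMediaCodecWrapper codec, MediaFormat format, byte[] payload) {
    codec.configure(format, /* surface= */ null, /* crypto= */ null, /* flags= */ 0);
    codec.start();

    int in = codec.dequeueInputBuffer(/* timeoutUs= */ 0);
    codec.getInputBuffer(in).put(payload);
    codec.queueInputBuffer(in, 0, payload.length, /* presentationTimeUs= */ 0, /* flags= */ 0);

    // The fake produces no output on its own; the test seeds it explicitly.
    codec.addOutputData(payload, /* presentationTimestampUs= */ 0, /* flags= */ 0);
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int out = codec.dequeueOutputBuffer(info, /* timeoutUs= */ 0);
    ByteBuffer data = codec.getOutputBuffer(out);
    codec.releaseOutputBuffer(out, /* render= */ false);
    return data;
  }
}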
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/FramerateBitrateAdjusterTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/FramerateBitrateAdjusterTest.java
new file mode 100644
index 0000000000..5fcf9c87d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/FramerateBitrateAdjusterTest.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2021 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import androidx.test.runner.AndroidJUnit4;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+import org.webrtc.VideoEncoder.ScalingSettings;
+
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class FramerateBitrateAdjusterTest {
+ @Test
+ public void getAdjustedFramerate_alwaysReturnsDefault() {
+ FramerateBitrateAdjuster bitrateAdjuster = new FramerateBitrateAdjuster();
+ bitrateAdjuster.setTargets(1000, 15);
+ assertThat(bitrateAdjuster.getAdjustedFramerateFps()).isEqualTo(30.0);
+ }
+
+ @Test
+ public void getAdjustedBitrate_defaultFramerate_returnsTargetBitrate() {
+ FramerateBitrateAdjuster bitrateAdjuster = new FramerateBitrateAdjuster();
+ bitrateAdjuster.setTargets(1000, 30);
+ assertThat(bitrateAdjuster.getAdjustedBitrateBps()).isEqualTo(1000);
+ }
+
+ @Test
+ public void getAdjustedBitrate_nonDefaultFramerate_returnsAdjustedBitrate() {
+ FramerateBitrateAdjuster bitrateAdjuster = new FramerateBitrateAdjuster();
+ bitrateAdjuster.setTargets(1000, 7.5);
+ // The target framerate is 4x smaller than the adjusted one (30 fps), so the adjusted
+ // bitrate should be 4x larger than the target one.
+ assertThat(bitrateAdjuster.getAdjustedBitrateBps()).isEqualTo(4000);
+ }
+}
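
[Editor's note] The three tests pin down FramerateBitrateAdjuster's behavior: the reported framerate is always 30 fps, and the bitrate is scaled by 30 / targetFps so the per-frame budget is preserved. Written out as a sketch (not the real class):

// Sketch of the scaling the tests above verify.
final class FramerateScaling {
  static final double DEFAULT_FPS = 30.0;

  static int adjustedBitrateBps(int targetBitrateBps, double targetFps) {
    return (int) (targetBitrateBps * DEFAULT_FPS / targetFps);
  }
}
// adjustedBitrateBps(1000, 30.0) == 1000 and adjustedBitrateBps(1000, 7.5) == 4000,
// matching the two bitrate test cases.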
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/GlGenericDrawerTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/GlGenericDrawerTest.java
new file mode 100644
index 0000000000..fdb8f4bf08
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/GlGenericDrawerTest.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+
+import androidx.test.runner.AndroidJUnit4;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+import org.webrtc.GlShader;
+
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class GlGenericDrawerTest {
+ // Simplest possible valid generic fragment shader.
+ private static final String FRAGMENT_SHADER = "void main() {\n"
+ + " gl_FragColor = sample(tc);\n"
+ + "}\n";
+ private static final int TEXTURE_ID = 3;
+ private static final float[] TEX_MATRIX =
+ new float[] {1, 2, 3, 4, -1, -2, -3, -4, 0, 0, 1, 0, 0, 0, 0, 1};
+ private static final int FRAME_WIDTH = 640;
+ private static final int FRAME_HEIGHT = 480;
+ private static final int VIEWPORT_X = 3;
+ private static final int VIEWPORT_Y = 5;
+ private static final int VIEWPORT_WIDTH = 500;
+ private static final int VIEWPORT_HEIGHT = 500;
+
+ // Replace OpenGLES GlShader dependency with a mock.
+ private class GlGenericDrawerForTest extends GlGenericDrawer {
+ public GlGenericDrawerForTest(String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ super(genericFragmentSource, shaderCallbacks);
+ }
+
+ @Override
+ GlShader createShader(ShaderType shaderType) {
+ return mockedShader;
+ }
+ }
+
+ private GlShader mockedShader;
+ private GlGenericDrawer glGenericDrawer;
+ private GlGenericDrawer.ShaderCallbacks mockedCallbacks;
+
+ @Before
+ public void setUp() {
+ mockedShader = mock(GlShader.class);
+ mockedCallbacks = mock(GlGenericDrawer.ShaderCallbacks.class);
+ glGenericDrawer = new GlGenericDrawerForTest(FRAGMENT_SHADER, mockedCallbacks);
+ }
+
+ @After
+ public void tearDown() {
+ verifyNoMoreInteractions(mockedCallbacks);
+ }
+
+ @Test
+ public void testOesFragmentShader() {
+ final String expectedOesFragmentShader = "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 tc;\n"
+ + "uniform samplerExternalOES tex;\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(tex, tc);\n"
+ + "}\n";
+ final String oesFragmentShader =
+ GlGenericDrawer.createFragmentShaderString(FRAGMENT_SHADER, GlGenericDrawer.ShaderType.OES);
+ assertEquals(expectedOesFragmentShader, oesFragmentShader);
+ }
+
+ @Test
+ public void testRgbFragmentShader() {
+ final String expectedRgbFragmentShader = "precision mediump float;\n"
+ + "varying vec2 tc;\n"
+ + "uniform sampler2D tex;\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(tex, tc);\n"
+ + "}\n";
+ final String rgbFragmentShader =
+ GlGenericDrawer.createFragmentShaderString(FRAGMENT_SHADER, GlGenericDrawer.ShaderType.RGB);
+ assertEquals(expectedRgbFragmentShader, rgbFragmentShader);
+ }
+
+ @Test
+ public void testYuvFragmentShader() {
+ final String expectedYuvFragmentShader = "precision mediump float;\n"
+ + "varying vec2 tc;\n"
+ + "uniform sampler2D y_tex;\n"
+ + "uniform sampler2D u_tex;\n"
+ + "uniform sampler2D v_tex;\n"
+ + "vec4 sample(vec2 p) {\n"
+ + " float y = texture2D(y_tex, p).r * 1.16438;\n"
+ + " float u = texture2D(u_tex, p).r;\n"
+ + " float v = texture2D(v_tex, p).r;\n"
+ + " return vec4(y + 1.59603 * v - 0.874202,\n"
+ + " y - 0.391762 * u - 0.812968 * v + 0.531668,\n"
+ + " y + 2.01723 * u - 1.08563, 1);\n"
+ + "}\n"
+ + "void main() {\n"
+ + " gl_FragColor = sample(tc);\n"
+ + "}\n";
+ final String yuvFragmentShader =
+ GlGenericDrawer.createFragmentShaderString(FRAGMENT_SHADER, GlGenericDrawer.ShaderType.YUV);
+ assertEquals(expectedYuvFragmentShader, yuvFragmentShader);
+ }
+
+ @Test
+ public void testShaderCallbacksOneRgbFrame() {
+ glGenericDrawer.drawRgb(TEXTURE_ID, TEX_MATRIX, FRAME_WIDTH, FRAME_HEIGHT, VIEWPORT_X,
+ VIEWPORT_Y, VIEWPORT_WIDTH, VIEWPORT_HEIGHT);
+
+ verify(mockedCallbacks).onNewShader(mockedShader);
+ verify(mockedCallbacks)
+ .onPrepareShader(
+ mockedShader, TEX_MATRIX, FRAME_WIDTH, FRAME_HEIGHT, VIEWPORT_WIDTH, VIEWPORT_HEIGHT);
+ }
+
+ @Test
+ public void testShaderCallbacksTwoRgbFrames() {
+ glGenericDrawer.drawRgb(TEXTURE_ID, TEX_MATRIX, FRAME_WIDTH, FRAME_HEIGHT, VIEWPORT_X,
+ VIEWPORT_Y, VIEWPORT_WIDTH, VIEWPORT_HEIGHT);
+ glGenericDrawer.drawRgb(TEXTURE_ID, TEX_MATRIX, FRAME_WIDTH, FRAME_HEIGHT, VIEWPORT_X,
+ VIEWPORT_Y, VIEWPORT_WIDTH, VIEWPORT_HEIGHT);
+
+ // Expect only one shader to be created, but two frames to be drawn.
+ verify(mockedCallbacks, times(1)).onNewShader(mockedShader);
+ verify(mockedCallbacks, times(2))
+ .onPrepareShader(
+ mockedShader, TEX_MATRIX, FRAME_WIDTH, FRAME_HEIGHT, VIEWPORT_WIDTH, VIEWPORT_HEIGHT);
+ }
+
+ @Test
+ public void testShaderCallbacksChangingShaderType() {
+ glGenericDrawer.drawRgb(TEXTURE_ID, TEX_MATRIX, FRAME_WIDTH, FRAME_HEIGHT, VIEWPORT_X,
+ VIEWPORT_Y, VIEWPORT_WIDTH, VIEWPORT_HEIGHT);
+ glGenericDrawer.drawOes(TEXTURE_ID, TEX_MATRIX, FRAME_WIDTH, FRAME_HEIGHT, VIEWPORT_X,
+ VIEWPORT_Y, VIEWPORT_WIDTH, VIEWPORT_HEIGHT);
+
+ // Expect two shaders to be created, and two frames to be drawn.
+ verify(mockedCallbacks, times(2)).onNewShader(mockedShader);
+ verify(mockedCallbacks, times(2))
+ .onPrepareShader(
+ mockedShader, TEX_MATRIX, FRAME_WIDTH, FRAME_HEIGHT, VIEWPORT_WIDTH, VIEWPORT_HEIGHT);
+ }
+}
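
[Editor's note] Read together, the three expected strings document the generation rule: for the OES and RGB shader types, the generic `sample(tc)` call is rewritten to a direct `texture2D(tex, tc)` lookup and the matching sampler declaration is prepended; for the YUV type, the `sample()` call is kept and a `vec4 sample(vec2 p)` implementation performing the YUV-to-RGB conversion is prepended instead.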
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java
new file mode 100644
index 0000000000..bd4a642f00
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java
@@ -0,0 +1,370 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import androidx.test.runner.AndroidJUnit4;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.robolectric.annotation.Config;
+import org.webrtc.EncodedImage;
+import org.webrtc.EncodedImage.FrameType;
+import org.webrtc.FakeMediaCodecWrapper.State;
+import org.webrtc.VideoCodecStatus;
+import org.webrtc.VideoEncoder;
+import org.webrtc.VideoEncoder.BitrateAllocation;
+import org.webrtc.VideoEncoder.CodecSpecificInfo;
+import org.webrtc.VideoEncoder.EncodeInfo;
+import org.webrtc.VideoEncoder.Settings;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoFrame.Buffer;
+import org.webrtc.VideoFrame.I420Buffer;
+
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class HardwareVideoEncoderTest {
+ private static final VideoEncoder.Settings TEST_ENCODER_SETTINGS = new Settings(
+ /* numberOfCores= */ 1,
+ /* width= */ 640,
+ /* height= */ 480,
+ /* startBitrate= */ 10000,
+ /* maxFramerate= */ 30,
+ /* numberOfSimulcastStreams= */ 1,
+ /* automaticResizeOn= */ true,
+ /* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */));
+ private static final long POLL_DELAY_MS = 10;
+ private static final long DELIVER_ENCODED_IMAGE_DELAY_MS = 10;
+ private static final EncodeInfo ENCODE_INFO_KEY_FRAME =
+ new EncodeInfo(new FrameType[] {FrameType.VideoFrameKey});
+ private static final EncodeInfo ENCODE_INFO_DELTA_FRAME =
+ new EncodeInfo(new FrameType[] {FrameType.VideoFrameDelta});
+
+ private static class TestEncoder extends HardwareVideoEncoder {
+ private final Object deliverEncodedImageLock = new Object();
+ private boolean deliverEncodedImageDone = true;
+
+ TestEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
+ Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
+ BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
+ super(mediaCodecWrapperFactory, codecName, codecType, surfaceColorFormat, yuvColorFormat,
+ params, keyFrameIntervalSec, forceKeyFrameIntervalMs, bitrateAdjuster, sharedContext);
+ }
+
+ public void waitDeliverEncodedImage() throws InterruptedException {
+ synchronized (deliverEncodedImageLock) {
+ deliverEncodedImageDone = false;
+ deliverEncodedImageLock.notifyAll();
+ while (!deliverEncodedImageDone) {
+ deliverEncodedImageLock.wait();
+ }
+ }
+ }
+
+ @SuppressWarnings("WaitNotInLoop") // This method is called inside a loop.
+ @Override
+ protected void deliverEncodedImage() {
+ synchronized (deliverEncodedImageLock) {
+ if (deliverEncodedImageDone) {
+ try {
+ deliverEncodedImageLock.wait(DELIVER_ENCODED_IMAGE_DELAY_MS);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ return;
+ }
+ }
+ if (deliverEncodedImageDone) {
+ return;
+ }
+ super.deliverEncodedImage();
+ deliverEncodedImageDone = true;
+ deliverEncodedImageLock.notifyAll();
+ }
+ }
+
+ @Override
+ protected void fillInputBuffer(ByteBuffer buffer, Buffer videoFrameBuffer) {
+ I420Buffer i420Buffer = videoFrameBuffer.toI420();
+ buffer.put(i420Buffer.getDataY());
+ buffer.put(i420Buffer.getDataU());
+ buffer.put(i420Buffer.getDataV());
+ buffer.flip();
+ i420Buffer.release();
+ }
+ }
+
+ private class TestEncoderBuilder {
+ private VideoCodecMimeType codecType = VideoCodecMimeType.VP8;
+ private BitrateAdjuster bitrateAdjuster = new BaseBitrateAdjuster();
+
+ public TestEncoderBuilder setCodecType(VideoCodecMimeType codecType) {
+ this.codecType = codecType;
+ return this;
+ }
+
+ public TestEncoderBuilder setBitrateAdjuster(BitrateAdjuster bitrateAdjuster) {
+ this.bitrateAdjuster = bitrateAdjuster;
+ return this;
+ }
+
+ public TestEncoder build() {
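+      // The MediaCodecWrapperFactory lambda ignores the requested codec name
+      // and always returns the shared FakeMediaCodecWrapper under test.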
+ return new TestEncoder((String name)
+ -> fakeMediaCodecWrapper,
+ "org.webrtc.testencoder", codecType,
+ /* surfaceColorFormat= */ null,
+ /* yuvColorFormat= */ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+ /* params= */ new HashMap<>(),
+ /* keyFrameIntervalSec= */ 0,
+ /* forceKeyFrameIntervalMs= */ 0, bitrateAdjuster,
+ /* sharedContext= */ null);
+ }
+ }
+
+ private VideoFrame createTestVideoFrame(long timestampNs) {
+ byte[] i420 = CodecTestHelper.generateRandomData(
+ TEST_ENCODER_SETTINGS.width * TEST_ENCODER_SETTINGS.height * 3 / 2);
+ final VideoFrame.I420Buffer testBuffer =
+ CodecTestHelper.wrapI420(TEST_ENCODER_SETTINGS.width, TEST_ENCODER_SETTINGS.height, i420);
+ return new VideoFrame(testBuffer, /* rotation= */ 0, timestampNs);
+ }
+
+ @Mock VideoEncoder.Callback mockEncoderCallback;
+ private FakeMediaCodecWrapper fakeMediaCodecWrapper;
+
+ @Before
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
+ MediaFormat inputFormat = new MediaFormat();
+ MediaFormat outputFormat = new MediaFormat();
+ // TODO(sakal): Add more details to output format as needed.
+ fakeMediaCodecWrapper = spy(new FakeMediaCodecWrapper(inputFormat, outputFormat));
+ }
+
+ @Test
+ public void testInit() {
+ // Set-up.
+ HardwareVideoEncoder encoder =
+ new TestEncoderBuilder().setCodecType(VideoCodecMimeType.VP8).build();
+
+ // Test.
+ assertThat(encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback))
+ .isEqualTo(VideoCodecStatus.OK);
+
+ // Verify.
+ assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.EXECUTING_RUNNING);
+
+ MediaFormat mediaFormat = fakeMediaCodecWrapper.getConfiguredFormat();
+ assertThat(mediaFormat).isNotNull();
+ assertThat(mediaFormat.getInteger(MediaFormat.KEY_WIDTH))
+ .isEqualTo(TEST_ENCODER_SETTINGS.width);
+ assertThat(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
+ .isEqualTo(TEST_ENCODER_SETTINGS.height);
+ assertThat(mediaFormat.getString(MediaFormat.KEY_MIME))
+ .isEqualTo(VideoCodecMimeType.VP8.mimeType());
+
+ assertThat(fakeMediaCodecWrapper.getConfiguredFlags())
+ .isEqualTo(MediaCodec.CONFIGURE_FLAG_ENCODE);
+ }
+
+ @Test
+ public void testEncodeByteBuffer() {
+ // Set-up.
+ HardwareVideoEncoder encoder = new TestEncoderBuilder().build();
+ encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
+
+ // Test.
+ byte[] i420 = CodecTestHelper.generateRandomData(
+ TEST_ENCODER_SETTINGS.width * TEST_ENCODER_SETTINGS.height * 3 / 2);
+ final VideoFrame.I420Buffer testBuffer =
+ CodecTestHelper.wrapI420(TEST_ENCODER_SETTINGS.width, TEST_ENCODER_SETTINGS.height, i420);
+ final VideoFrame testFrame =
+ new VideoFrame(testBuffer, /* rotation= */ 0, /* timestampNs= */ 0);
+ assertThat(encoder.encode(testFrame, new EncodeInfo(new FrameType[] {FrameType.VideoFrameKey})))
+ .isEqualTo(VideoCodecStatus.OK);
+
+ // Verify.
+ ArgumentCaptor<Integer> indexCaptor = ArgumentCaptor.forClass(Integer.class);
+ ArgumentCaptor<Integer> offsetCaptor = ArgumentCaptor.forClass(Integer.class);
+ ArgumentCaptor<Integer> sizeCaptor = ArgumentCaptor.forClass(Integer.class);
+ verify(fakeMediaCodecWrapper)
+ .queueInputBuffer(indexCaptor.capture(), offsetCaptor.capture(), sizeCaptor.capture(),
+ anyLong(), anyInt());
+ ByteBuffer buffer = fakeMediaCodecWrapper.getInputBuffer(indexCaptor.getValue());
+ CodecTestHelper.assertEqualContents(
+ i420, buffer, offsetCaptor.getValue(), sizeCaptor.getValue());
+ }
+
+ @Test
+ public void testDeliversOutputData() throws InterruptedException {
+ // Set-up.
+ TestEncoder encoder = new TestEncoderBuilder().build();
+ encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
+ encoder.encode(createTestVideoFrame(/* timestampNs= */ 42), ENCODE_INFO_KEY_FRAME);
+
+ // Test.
+ byte[] outputData = CodecTestHelper.generateRandomData(100);
+ fakeMediaCodecWrapper.addOutputData(outputData,
+ /* presentationTimestampUs= */ 0,
+ /* flags= */ MediaCodec.BUFFER_FLAG_SYNC_FRAME);
+
+ encoder.waitDeliverEncodedImage();
+
+ // Verify.
+ ArgumentCaptor<EncodedImage> videoFrameCaptor = ArgumentCaptor.forClass(EncodedImage.class);
+ verify(mockEncoderCallback)
+ .onEncodedFrame(videoFrameCaptor.capture(), any(CodecSpecificInfo.class));
+
+ EncodedImage videoFrame = videoFrameCaptor.getValue();
+ assertThat(videoFrame).isNotNull();
+ assertThat(videoFrame.encodedWidth).isEqualTo(TEST_ENCODER_SETTINGS.width);
+ assertThat(videoFrame.encodedHeight).isEqualTo(TEST_ENCODER_SETTINGS.height);
+ assertThat(videoFrame.rotation).isEqualTo(0);
+ assertThat(videoFrame.captureTimeNs).isEqualTo(42);
+ assertThat(videoFrame.frameType).isEqualTo(FrameType.VideoFrameKey);
+ CodecTestHelper.assertEqualContents(
+ outputData, videoFrame.buffer, /* offset= */ 0, videoFrame.buffer.capacity());
+ }
+
+ @Test
+ public void testRelease() {
+ // Set-up.
+ HardwareVideoEncoder encoder = new TestEncoderBuilder().build();
+ encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
+
+ // Test.
+ assertThat(encoder.release()).isEqualTo(VideoCodecStatus.OK);
+
+ // Verify.
+ assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
+ }
+
+ @Test
+ public void testReleaseMultipleTimes() {
+ // Set-up.
+ HardwareVideoEncoder encoder = new TestEncoderBuilder().build();
+ encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
+
+ // Test.
+ assertThat(encoder.release()).isEqualTo(VideoCodecStatus.OK);
+ assertThat(encoder.release()).isEqualTo(VideoCodecStatus.OK);
+
+ // Verify.
+ assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
+ }
+
+ @Test
+ public void testFramerateWithFramerateBitrateAdjuster() {
+    // Enable FramerateBitrateAdjuster and initialize the encoder with a frame rate of 15fps.
+    // Verify that our initial frame rate setting is ignored and the media encoder is initialized
+    // with 30fps (the FramerateBitrateAdjuster default).
+ HardwareVideoEncoder encoder =
+ new TestEncoderBuilder().setBitrateAdjuster(new FramerateBitrateAdjuster()).build();
+ encoder.initEncode(
+ new Settings(
+ /* numberOfCores= */ 1,
+ /* width= */ 640,
+ /* height= */ 480,
+ /* startBitrate= */ 10000,
+ /* maxFramerate= */ 15,
+ /* numberOfSimulcastStreams= */ 1,
+ /* automaticResizeOn= */ true,
+ /* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */)),
+ mockEncoderCallback);
+
+ MediaFormat mediaFormat = fakeMediaCodecWrapper.getConfiguredFormat();
+ assertThat(mediaFormat.getFloat(MediaFormat.KEY_FRAME_RATE)).isEqualTo(30.0f);
+ }
+
+ @Test
+ public void testBitrateWithFramerateBitrateAdjuster() throws InterruptedException {
+ // Enable FramerateBitrateAdjuster and change frame rate while encoding video. Verify that
+ // bitrate setting passed to media encoder is adjusted to compensate for changes in frame rate.
+ TestEncoder encoder =
+ new TestEncoderBuilder().setBitrateAdjuster(new FramerateBitrateAdjuster()).build();
+ encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
+
+ encoder.encode(createTestVideoFrame(/* timestampNs= */ 0), ENCODE_INFO_KEY_FRAME);
+
+ // Reduce frame rate by half.
+ BitrateAllocation bitrateAllocation = new BitrateAllocation(
+ /* bitratesBbs= */ new int[][] {new int[] {TEST_ENCODER_SETTINGS.startBitrate}});
+ encoder.setRateAllocation(bitrateAllocation, TEST_ENCODER_SETTINGS.maxFramerate / 2);
+
+ // Generate output to trigger bitrate update in encoder wrapper.
+ fakeMediaCodecWrapper.addOutputData(
+ CodecTestHelper.generateRandomData(100), /* presentationTimestampUs= */ 0, /* flags= */ 0);
+ encoder.waitDeliverEncodedImage();
+
+ // Frame rate has been reduced by half. Verify that bitrate doubled.
+ ArgumentCaptor<Bundle> bundleCaptor = ArgumentCaptor.forClass(Bundle.class);
+ verify(fakeMediaCodecWrapper, times(2)).setParameters(bundleCaptor.capture());
+ Bundle params = bundleCaptor.getAllValues().get(1);
+ assertThat(params.containsKey(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE)).isTrue();
+ assertThat(params.getInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE))
+ .isEqualTo(TEST_ENCODER_SETTINGS.startBitrate * 2);
+ }
+
+ @Test
+ public void testTimestampsWithFramerateBitrateAdjuster() throws InterruptedException {
+ // Enable FramerateBitrateAdjuster and change frame rate while encoding video. Verify that
+ // encoder ignores changes in frame rate and calculates frame timestamps based on fixed frame
+ // rate 30fps.
+ TestEncoder encoder =
+ new TestEncoderBuilder().setBitrateAdjuster(new FramerateBitrateAdjuster()).build();
+ encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
+
+ encoder.encode(createTestVideoFrame(/* timestampNs= */ 0), ENCODE_INFO_KEY_FRAME);
+
+    // Reduce frame rate by half.
+ BitrateAllocation bitrateAllocation = new BitrateAllocation(
+ /* bitratesBbs= */ new int[][] {new int[] {TEST_ENCODER_SETTINGS.startBitrate}});
+ encoder.setRateAllocation(bitrateAllocation, TEST_ENCODER_SETTINGS.maxFramerate / 2);
+
+ // Encoder is allowed to buffer up to 2 frames. Generate output to avoid frame dropping.
+ fakeMediaCodecWrapper.addOutputData(
+ CodecTestHelper.generateRandomData(100), /* presentationTimestampUs= */ 0, /* flags= */ 0);
+ encoder.waitDeliverEncodedImage();
+
+ encoder.encode(createTestVideoFrame(/* timestampNs= */ 1), ENCODE_INFO_DELTA_FRAME);
+ encoder.encode(createTestVideoFrame(/* timestampNs= */ 2), ENCODE_INFO_DELTA_FRAME);
+
+ ArgumentCaptor<Long> timestampCaptor = ArgumentCaptor.forClass(Long.class);
+ verify(fakeMediaCodecWrapper, times(3))
+ .queueInputBuffer(
+ /* index= */ anyInt(),
+ /* offset= */ anyInt(),
+ /* size= */ anyInt(), timestampCaptor.capture(),
+ /* flags= */ anyInt());
+
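+    // With FramerateBitrateAdjuster the encoder stamps frames at the fixed
+    // 30fps default, so successive timestamps advance by 1/30 s expressed in
+    // microseconds (the unit queueInputBuffer expects).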
+    long frameDurationUs = SECONDS.toMicros(1) / 30;
+    assertThat(timestampCaptor.getAllValues())
+        .containsExactly(0L, frameDurationUs, 2 * frameDurationUs);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/IceCandidateTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/IceCandidateTest.java
new file mode 100644
index 0000000000..437e146415
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/IceCandidateTest.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2019 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import androidx.test.runner.AndroidJUnit4;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+import org.webrtc.IceCandidate;
+
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class IceCandidateTest {
+ @Test
+ public void testIceCandidateEquals() {
+ IceCandidate c1 = new IceCandidate(
+ "audio", 0, "candidate:1532086002 1 udp 2122194687 192.168.86.144 37138 typ host");
+ IceCandidate c2 = new IceCandidate(
+ "audio", 0, "candidate:1532086002 1 udp 2122194687 192.168.86.144 37138 typ host");
+
+    // c3 differs from c1 by sdpMid.
+ IceCandidate c3 = new IceCandidate(
+ "video", 0, "candidate:1532086002 1 udp 2122194687 192.168.86.144 37138 typ host");
+    // c4 differs from c1 by sdpMLineIndex.
+ IceCandidate c4 = new IceCandidate(
+ "audio", 1, "candidate:1532086002 1 udp 2122194687 192.168.86.144 37138 typ host");
+    // c5 differs from c1 by sdp.
+ IceCandidate c5 = new IceCandidate(
+ "audio", 0, "candidate:1532086002 1 udp 2122194687 192.168.86.144 37139 typ host");
+
+ assertThat(c1.equals(c2)).isTrue();
+ assertThat(c2.equals(c1)).isTrue();
+ assertThat(c1.equals(null)).isFalse();
+ assertThat(c1.equals(c3)).isFalse();
+ assertThat(c1.equals(c4)).isFalse();
+ assertThat(c5.equals(c1)).isFalse();
+
+ Object o2 = c2;
+ assertThat(c1.equals(o2)).isTrue();
+ assertThat(o2.equals(c1)).isTrue();
+ }
+}
\ No newline at end of file
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/RefCountDelegateTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/RefCountDelegateTest.java
new file mode 100644
index 0000000000..eafd722a17
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/RefCountDelegateTest.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+
+import androidx.test.runner.AndroidJUnit4;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.robolectric.annotation.Config;
+
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class RefCountDelegateTest {
+ @Mock Runnable mockReleaseCallback;
+ private RefCountDelegate refCountDelegate;
+
+ @Before
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
+
+ refCountDelegate = new RefCountDelegate(mockReleaseCallback);
+ }
+
+ @Test
+ public void testReleaseRunsReleaseCallback() {
+ refCountDelegate.release();
+ verify(mockReleaseCallback).run();
+ }
+
+ @Test
+ public void testRetainIncreasesRefCount() {
+ refCountDelegate.retain();
+
+ refCountDelegate.release();
+ verify(mockReleaseCallback, never()).run();
+
+ refCountDelegate.release();
+ verify(mockReleaseCallback).run();
+ }
+
+ @Test(expected = IllegalStateException.class)
+ public void testReleaseAfterFreeThrowsIllegalStateException() {
+ refCountDelegate.release();
+ refCountDelegate.release();
+ }
+
+ @Test(expected = IllegalStateException.class)
+ public void testRetainAfterFreeThrowsIllegalStateException() {
+ refCountDelegate.release();
+ refCountDelegate.retain();
+ }
+
+ @Test
+ public void testSafeRetainBeforeFreeReturnsTrueAndIncreasesRefCount() {
+ assertThat(refCountDelegate.safeRetain()).isTrue();
+
+ refCountDelegate.release();
+ verify(mockReleaseCallback, never()).run();
+
+ refCountDelegate.release();
+ verify(mockReleaseCallback).run();
+ }
+
+ @Test
+ public void testSafeRetainAfterFreeReturnsFalse() {
+ refCountDelegate.release();
+ assertThat(refCountDelegate.safeRetain()).isFalse();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/ScalingSettingsTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/ScalingSettingsTest.java
new file mode 100644
index 0000000000..18db61d2dc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/ScalingSettingsTest.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+
+import androidx.test.runner.AndroidJUnit4;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+import org.webrtc.VideoEncoder.ScalingSettings;
+
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE)
+public class ScalingSettingsTest {
+ @Test
+ public void testToString() {
+ assertEquals("[ 1, 2 ]", new ScalingSettings(1, 2).toString());
+ assertEquals("OFF", ScalingSettings.OFF.toString());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/audio/LowLatencyAudioBufferManagerTest.java b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/audio/LowLatencyAudioBufferManagerTest.java
new file mode 100644
index 0000000000..2575cea60e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/tests/src/org/webrtc/audio/LowLatencyAudioBufferManagerTest.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.AdditionalMatchers.gt;
+import static org.mockito.AdditionalMatchers.lt;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.media.AudioTrack;
+import android.os.Build;
+import androidx.test.runner.AndroidJUnit4;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.robolectric.annotation.Config;
+import org.webrtc.audio.LowLatencyAudioBufferManager;
+
+/**
+ * Tests for LowLatencyAudioBufferManager.
+ */
+@RunWith(AndroidJUnit4.class)
+@Config(manifest = Config.NONE, sdk = Build.VERSION_CODES.O)
+public class LowLatencyAudioBufferManagerTest {
+ @Mock private AudioTrack mockAudioTrack;
+ private LowLatencyAudioBufferManager bufferManager;
+
+ @Before
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
+ bufferManager = new LowLatencyAudioBufferManager();
+ }
+
+ @Test
+ public void testBufferSizeDecrease() {
+ when(mockAudioTrack.getUnderrunCount()).thenReturn(0);
+ when(mockAudioTrack.getBufferSizeInFrames()).thenReturn(100);
+ when(mockAudioTrack.getPlaybackRate()).thenReturn(1000);
+ for (int i = 0; i < 9; i++) {
+ bufferManager.maybeAdjustBufferSize(mockAudioTrack);
+ }
+ // Check that the buffer size was not changed yet.
+ verify(mockAudioTrack, times(0)).setBufferSizeInFrames(anyInt());
+ // After the 10th call without underruns, we expect the buffer size to decrease.
+ bufferManager.maybeAdjustBufferSize(mockAudioTrack);
+ // The expected size is 10ms below the existing size, which works out to 100 - (1000 / 100)
+ // = 90.
+ verify(mockAudioTrack, times(1)).setBufferSizeInFrames(90);
+ }
+
+ @Test
+ public void testBufferSizeNeverBelow10ms() {
+ when(mockAudioTrack.getUnderrunCount()).thenReturn(0);
+ when(mockAudioTrack.getBufferSizeInFrames()).thenReturn(11);
+ when(mockAudioTrack.getPlaybackRate()).thenReturn(1000);
+ for (int i = 0; i < 10; i++) {
+ bufferManager.maybeAdjustBufferSize(mockAudioTrack);
+ }
+ // Check that the buffer size was not set to a value below 10 ms.
+ verify(mockAudioTrack, times(0)).setBufferSizeInFrames(lt(10));
+ }
+
+ @Test
+ public void testUnderrunBehavior() {
+ when(mockAudioTrack.getUnderrunCount()).thenReturn(1);
+ when(mockAudioTrack.getBufferSizeInFrames()).thenReturn(100);
+ when(mockAudioTrack.getPlaybackRate()).thenReturn(1000);
+ bufferManager.maybeAdjustBufferSize(mockAudioTrack);
+    // Check that the buffer size was increased after the underrun.
+ verify(mockAudioTrack, times(1)).setBufferSizeInFrames(gt(100));
+ when(mockAudioTrack.getUnderrunCount()).thenReturn(0);
+ for (int i = 0; i < 10; i++) {
+ bufferManager.maybeAdjustBufferSize(mockAudioTrack);
+ }
+ // Check that the buffer size was not changed again, even though there were no underruns for
+ // 10 calls.
+ verify(mockAudioTrack, times(1)).setBufferSizeInFrames(anyInt());
+ }
+
+ @Test
+ public void testBufferIncrease() {
+ when(mockAudioTrack.getBufferSizeInFrames()).thenReturn(100);
+ when(mockAudioTrack.getPlaybackRate()).thenReturn(1000);
+ for (int i = 1; i < 30; i++) {
+ when(mockAudioTrack.getUnderrunCount()).thenReturn(i);
+ bufferManager.maybeAdjustBufferSize(mockAudioTrack);
+ }
+ // Check that the buffer size was not increased more than 5 times.
+ verify(mockAudioTrack, times(5)).setBufferSizeInFrames(gt(100));
+ }
+}
diff --git a/third_party/libwebrtc/sdk/base_objc_gn/moz.build b/third_party/libwebrtc/sdk/base_objc_gn/moz.build
new file mode 100644
index 0000000000..95464447d3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/base_objc_gn/moz.build
@@ -0,0 +1,74 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+CMFLAGS += [
+ "-fobjc-arc"
+]
+
+CMMFLAGS += [
+ "-fobjc-arc"
+]
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_AVX2"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MAC"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_POSIX"] = True
+DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+DEFINES["__STDC_CONSTANT_MACROS"] = True
+DEFINES["__STDC_FORMAT_MACROS"] = True
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/sdk/objc/",
+ "/third_party/libwebrtc/sdk/objc/base/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m",
+ "/third_party/libwebrtc/sdk/objc/base/RTCLogging.mm",
+ "/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m",
+ "/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m",
+ "/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m",
+ "/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m",
+ "/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+Library("base_objc_gn")
diff --git a/third_party/libwebrtc/sdk/helpers_objc_gn/moz.build b/third_party/libwebrtc/sdk/helpers_objc_gn/moz.build
new file mode 100644
index 0000000000..234fe71d1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/helpers_objc_gn/moz.build
@@ -0,0 +1,70 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+CMFLAGS += [
+ "-fobjc-arc"
+]
+
+CMMFLAGS += [
+ "-fobjc-arc"
+]
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_AVX2"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MAC"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_POSIX"] = True
+DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+DEFINES["__STDC_CONSTANT_MACROS"] = True
+DEFINES["__STDC_FORMAT_MACROS"] = True
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/sdk/objc/",
+ "/third_party/libwebrtc/sdk/objc/base/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm",
+ "/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm",
+ "/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+Library("helpers_objc_gn")
diff --git a/third_party/libwebrtc/sdk/media_constraints.cc b/third_party/libwebrtc/sdk/media_constraints.cc
new file mode 100644
index 0000000000..c77bf88929
--- /dev/null
+++ b/third_party/libwebrtc/sdk/media_constraints.cc
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/media_constraints.h"
+
+#include "absl/types/optional.h"
+#include "api/peer_connection_interface.h"
+
+namespace webrtc {
+namespace {
+
+// Find the highest-priority instance of the T-valued constraint named by
+// `key` and return its value as `value`. `constraints` can be null.
+// If `mandatory_constraints` is non-null, it is incremented if the key appears
+// among the mandatory constraints.
+// Returns true if the key was found and has a valid value for type T.
+// If the key appears multiple times as an optional constraint, appearances
+// after the first are ignored.
+// Note: Because this uses FindFirst, repeated optional constraints whose
+// first instance has an unrecognized value are not handled precisely in
+// accordance with the specification.
+template <typename T>
+bool FindConstraint(const MediaConstraints* constraints,
+ const std::string& key,
+ T* value,
+ size_t* mandatory_constraints) {
+ std::string string_value;
+ if (!FindConstraint(constraints, key, &string_value, mandatory_constraints)) {
+ return false;
+ }
+ return rtc::FromString(string_value, value);
+}
+
+// Specialization for std::string, since a string doesn't need conversion.
+template <>
+bool FindConstraint(const MediaConstraints* constraints,
+ const std::string& key,
+ std::string* value,
+ size_t* mandatory_constraints) {
+ if (!constraints) {
+ return false;
+ }
+ if (constraints->GetMandatory().FindFirst(key, value)) {
+ if (mandatory_constraints) {
+ ++*mandatory_constraints;
+ }
+ return true;
+ }
+ if (constraints->GetOptional().FindFirst(key, value)) {
+ return true;
+ }
+ return false;
+}
+
+bool FindConstraint(const MediaConstraints* constraints,
+ const std::string& key,
+ bool* value,
+ size_t* mandatory_constraints) {
+ return FindConstraint<bool>(constraints, key, value, mandatory_constraints);
+}
+
+bool FindConstraint(const MediaConstraints* constraints,
+ const std::string& key,
+ int* value,
+ size_t* mandatory_constraints) {
+ return FindConstraint<int>(constraints, key, value, mandatory_constraints);
+}
+
+// Converts a constraint (mandatory takes precedence over optional) to an
+// absl::optional.
+template <typename T>
+void ConstraintToOptional(const MediaConstraints* constraints,
+ const std::string& key,
+ absl::optional<T>* value_out) {
+ T value;
+ bool present = FindConstraint<T>(constraints, key, &value, nullptr);
+ if (present) {
+ *value_out = value;
+ }
+}
+} // namespace
+
+const char MediaConstraints::kValueTrue[] = "true";
+const char MediaConstraints::kValueFalse[] = "false";
+
+// Constraints declared as static members in mediastreaminterface.h
+
+// Audio constraints.
+const char MediaConstraints::kGoogEchoCancellation[] = "googEchoCancellation";
+const char MediaConstraints::kAutoGainControl[] = "googAutoGainControl";
+const char MediaConstraints::kNoiseSuppression[] = "googNoiseSuppression";
+const char MediaConstraints::kHighpassFilter[] = "googHighpassFilter";
+const char MediaConstraints::kAudioMirroring[] = "googAudioMirroring";
+const char MediaConstraints::kAudioNetworkAdaptorConfig[] =
+ "googAudioNetworkAdaptorConfig";
+const char MediaConstraints::kInitAudioRecordingOnSend[] =
+ "InitAudioRecordingOnSend";
+
+// Constraint keys for CreateOffer / CreateAnswer defined in W3C specification.
+const char MediaConstraints::kOfferToReceiveAudio[] = "OfferToReceiveAudio";
+const char MediaConstraints::kOfferToReceiveVideo[] = "OfferToReceiveVideo";
+const char MediaConstraints::kVoiceActivityDetection[] =
+ "VoiceActivityDetection";
+const char MediaConstraints::kIceRestart[] = "IceRestart";
+// Google specific constraint for BUNDLE enable/disable.
+const char MediaConstraints::kUseRtpMux[] = "googUseRtpMUX";
+
+// Below constraints should be used during PeerConnection construction.
+// Google-specific constraint keys.
+const char MediaConstraints::kEnableDscp[] = "googDscp";
+const char MediaConstraints::kEnableIPv6[] = "googIPv6";
+const char MediaConstraints::kEnableVideoSuspendBelowMinBitrate[] =
+ "googSuspendBelowMinBitrate";
+const char MediaConstraints::kCombinedAudioVideoBwe[] =
+ "googCombinedAudioVideoBwe";
+const char MediaConstraints::kScreencastMinBitrate[] =
+ "googScreencastMinBitrate";
+// TODO(ronghuawu): Remove once cpu overuse detection is stable.
+const char MediaConstraints::kCpuOveruseDetection[] = "googCpuOveruseDetection";
+
+const char MediaConstraints::kRawPacketizationForVideoEnabled[] =
+ "googRawPacketizationForVideoEnabled";
+
+const char MediaConstraints::kNumSimulcastLayers[] = "googNumSimulcastLayers";
+
+// Set `value` to the value associated with the first appearance of `key`, or
+// return false if `key` is not found.
+bool MediaConstraints::Constraints::FindFirst(const std::string& key,
+ std::string* value) const {
+ for (Constraints::const_iterator iter = begin(); iter != end(); ++iter) {
+ if (iter->key == key) {
+ *value = iter->value;
+ return true;
+ }
+ }
+ return false;
+}
+
+void CopyConstraintsIntoRtcConfiguration(
+ const MediaConstraints* constraints,
+ PeerConnectionInterface::RTCConfiguration* configuration) {
+ // Copy info from constraints into configuration, if present.
+ if (!constraints) {
+ return;
+ }
+
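+  // Note the polarity flip below: the constraint enables IPv6 while the
+  // configuration field disables it.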
+ bool enable_ipv6;
+ if (FindConstraint(constraints, MediaConstraints::kEnableIPv6, &enable_ipv6,
+ nullptr)) {
+ configuration->disable_ipv6 = !enable_ipv6;
+ }
+ FindConstraint(constraints, MediaConstraints::kEnableDscp,
+ &configuration->media_config.enable_dscp, nullptr);
+ FindConstraint(constraints, MediaConstraints::kCpuOveruseDetection,
+ &configuration->media_config.video.enable_cpu_adaptation,
+ nullptr);
+ // Find Suspend Below Min Bitrate constraint.
+ FindConstraint(
+ constraints, MediaConstraints::kEnableVideoSuspendBelowMinBitrate,
+ &configuration->media_config.video.suspend_below_min_bitrate, nullptr);
+ ConstraintToOptional<int>(constraints,
+ MediaConstraints::kScreencastMinBitrate,
+ &configuration->screencast_min_bitrate);
+ ConstraintToOptional<bool>(constraints,
+ MediaConstraints::kCombinedAudioVideoBwe,
+ &configuration->combined_audio_video_bwe);
+}
+
+void CopyConstraintsIntoAudioOptions(const MediaConstraints* constraints,
+ cricket::AudioOptions* options) {
+ if (!constraints) {
+ return;
+ }
+
+ ConstraintToOptional<bool>(constraints,
+ MediaConstraints::kGoogEchoCancellation,
+ &options->echo_cancellation);
+ ConstraintToOptional<bool>(constraints, MediaConstraints::kAutoGainControl,
+ &options->auto_gain_control);
+ ConstraintToOptional<bool>(constraints, MediaConstraints::kNoiseSuppression,
+ &options->noise_suppression);
+ ConstraintToOptional<bool>(constraints, MediaConstraints::kHighpassFilter,
+ &options->highpass_filter);
+ ConstraintToOptional<bool>(constraints, MediaConstraints::kAudioMirroring,
+ &options->stereo_swapping);
+ ConstraintToOptional<std::string>(
+ constraints, MediaConstraints::kAudioNetworkAdaptorConfig,
+ &options->audio_network_adaptor_config);
+ // When `kAudioNetworkAdaptorConfig` is defined, it both means that audio
+ // network adaptor is desired, and provides the config string.
+ if (options->audio_network_adaptor_config) {
+ options->audio_network_adaptor = true;
+ }
+ ConstraintToOptional<bool>(constraints,
+ MediaConstraints::kInitAudioRecordingOnSend,
+ &options->init_recording_on_send);
+}
+
+bool CopyConstraintsIntoOfferAnswerOptions(
+ const MediaConstraints* constraints,
+ PeerConnectionInterface::RTCOfferAnswerOptions* offer_answer_options) {
+ if (!constraints) {
+ return true;
+ }
+
+ bool value = false;
+ size_t mandatory_constraints_satisfied = 0;
+
+ if (FindConstraint(constraints, MediaConstraints::kOfferToReceiveAudio,
+ &value, &mandatory_constraints_satisfied)) {
+ offer_answer_options->offer_to_receive_audio =
+ value ? PeerConnectionInterface::RTCOfferAnswerOptions::
+ kOfferToReceiveMediaTrue
+ : 0;
+ }
+
+ if (FindConstraint(constraints, MediaConstraints::kOfferToReceiveVideo,
+ &value, &mandatory_constraints_satisfied)) {
+ offer_answer_options->offer_to_receive_video =
+ value ? PeerConnectionInterface::RTCOfferAnswerOptions::
+ kOfferToReceiveMediaTrue
+ : 0;
+ }
+ if (FindConstraint(constraints, MediaConstraints::kVoiceActivityDetection,
+ &value, &mandatory_constraints_satisfied)) {
+ offer_answer_options->voice_activity_detection = value;
+ }
+ if (FindConstraint(constraints, MediaConstraints::kUseRtpMux, &value,
+ &mandatory_constraints_satisfied)) {
+ offer_answer_options->use_rtp_mux = value;
+ }
+ if (FindConstraint(constraints, MediaConstraints::kIceRestart, &value,
+ &mandatory_constraints_satisfied)) {
+ offer_answer_options->ice_restart = value;
+ }
+
+ if (FindConstraint(constraints,
+ MediaConstraints::kRawPacketizationForVideoEnabled, &value,
+ &mandatory_constraints_satisfied)) {
+ offer_answer_options->raw_packetization_for_video = value;
+ }
+
+ int layers;
+ if (FindConstraint(constraints, MediaConstraints::kNumSimulcastLayers,
+ &layers, &mandatory_constraints_satisfied)) {
+ offer_answer_options->num_simulcast_layers = layers;
+ }
+
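+  // Every mandatory constraint must have been recognized above; optional
+  // constraints never affect the return value.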
+ return mandatory_constraints_satisfied == constraints->GetMandatory().size();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/media_constraints.h b/third_party/libwebrtc/sdk/media_constraints.h
new file mode 100644
index 0000000000..c946e4fab1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/media_constraints.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Implementation of the W3C constraints spec is the responsibility of the
+// browser. Chrome no longer uses the constraints API declared here, and it
+// will be removed from WebRTC.
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=9239
+
+#ifndef SDK_MEDIA_CONSTRAINTS_H_
+#define SDK_MEDIA_CONSTRAINTS_H_
+
+#include <stddef.h>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/audio_options.h"
+#include "api/peer_connection_interface.h"
+
+namespace webrtc {
+
+// Class representing constraints, as used by the Android and Objective-C APIs.
+//
+// Constraints may be either "mandatory", which means that unless satisfied,
+// the method taking the constraints should fail, or "optional", which means
+// they might not be satisfied.
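+//
+// A minimal usage sketch (illustrative only, mirroring the style of the unit
+// tests in this directory):
+//
+//   MediaConstraints constraints(
+//       {MediaConstraints::Constraint(MediaConstraints::kOfferToReceiveAudio,
+//                                     MediaConstraints::kValueTrue)},
+//       /* optional= */ {});
+//   PeerConnectionInterface::RTCOfferAnswerOptions options;
+//   bool ok = CopyConstraintsIntoOfferAnswerOptions(&constraints, &options);
+//   // `ok` is true only if every mandatory constraint was recognized.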
+class MediaConstraints {
+ public:
+ struct Constraint {
+ Constraint() {}
+ Constraint(const std::string& key, const std::string value)
+ : key(key), value(value) {}
+ std::string key;
+ std::string value;
+ };
+
+ class Constraints : public std::vector<Constraint> {
+ public:
+ Constraints() = default;
+ Constraints(std::initializer_list<Constraint> l)
+ : std::vector<Constraint>(l) {}
+
+ bool FindFirst(const std::string& key, std::string* value) const;
+ };
+
+ MediaConstraints() = default;
+ MediaConstraints(Constraints mandatory, Constraints optional)
+ : mandatory_(std::move(mandatory)), optional_(std::move(optional)) {}
+
+ // Constraint keys used by a local audio source.
+
+ // These keys are google specific.
+ static const char kGoogEchoCancellation[]; // googEchoCancellation
+
+ static const char kAutoGainControl[]; // googAutoGainControl
+ static const char kNoiseSuppression[]; // googNoiseSuppression
+ static const char kHighpassFilter[]; // googHighpassFilter
+ static const char kAudioMirroring[]; // googAudioMirroring
+ static const char
+ kAudioNetworkAdaptorConfig[]; // googAudioNetworkAdaptorConfig
+  static const char kInitAudioRecordingOnSend[]; // InitAudioRecordingOnSend
+
+ // Constraint keys for CreateOffer / CreateAnswer
+ // Specified by the W3C PeerConnection spec
+ static const char kOfferToReceiveVideo[]; // OfferToReceiveVideo
+ static const char kOfferToReceiveAudio[]; // OfferToReceiveAudio
+ static const char kVoiceActivityDetection[]; // VoiceActivityDetection
+ static const char kIceRestart[]; // IceRestart
+ // These keys are google specific.
+ static const char kUseRtpMux[]; // googUseRtpMUX
+
+ // Constraints values.
+ static const char kValueTrue[]; // true
+ static const char kValueFalse[]; // false
+
+ // PeerConnection constraint keys.
+ // Google-specific constraint keys.
+ // Temporary pseudo-constraint for enabling DSCP through JS.
+ static const char kEnableDscp[]; // googDscp
+ // Constraint to enable IPv6 through JS.
+ static const char kEnableIPv6[]; // googIPv6
+ // Temporary constraint to enable suspend below min bitrate feature.
+ static const char kEnableVideoSuspendBelowMinBitrate[];
+ // googSuspendBelowMinBitrate
+ // Constraint to enable combined audio+video bandwidth estimation.
+ static const char kCombinedAudioVideoBwe[]; // googCombinedAudioVideoBwe
+ static const char kScreencastMinBitrate[]; // googScreencastMinBitrate
+ static const char kCpuOveruseDetection[]; // googCpuOveruseDetection
+
+ // Constraint to enable negotiating raw RTP packetization using attribute
+ // "a=packetization:<payload_type> raw" in the SDP for all video payload.
+ static const char kRawPacketizationForVideoEnabled[];
+
+ // Specifies number of simulcast layers for all video tracks
+ // with a Plan B offer/answer
+ // (see RTCOfferAnswerOptions::num_simulcast_layers).
+ static const char kNumSimulcastLayers[];
+
+ ~MediaConstraints() = default;
+
+ const Constraints& GetMandatory() const { return mandatory_; }
+ const Constraints& GetOptional() const { return optional_; }
+
+ private:
+ const Constraints mandatory_ = {};
+ const Constraints optional_ = {};
+};
+
+// Copy all relevant constraints into an RTCConfiguration object.
+void CopyConstraintsIntoRtcConfiguration(
+ const MediaConstraints* constraints,
+ PeerConnectionInterface::RTCConfiguration* configuration);
+
+// Copy all relevant constraints into an AudioOptions object.
+void CopyConstraintsIntoAudioOptions(const MediaConstraints* constraints,
+ cricket::AudioOptions* options);
+
+bool CopyConstraintsIntoOfferAnswerOptions(
+ const MediaConstraints* constraints,
+ PeerConnectionInterface::RTCOfferAnswerOptions* offer_answer_options);
+
+} // namespace webrtc
+
+#endif // SDK_MEDIA_CONSTRAINTS_H_
diff --git a/third_party/libwebrtc/sdk/media_constraints_unittest.cc b/third_party/libwebrtc/sdk/media_constraints_unittest.cc
new file mode 100644
index 0000000000..478ba98a2d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/media_constraints_unittest.cc
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/media_constraints.h"
+
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+// Checks all settings touched by CopyConstraintsIntoRtcConfiguration,
+// plus audio_jitter_buffer_max_packets.
+bool Matches(const PeerConnectionInterface::RTCConfiguration& a,
+ const PeerConnectionInterface::RTCConfiguration& b) {
+ return a.disable_ipv6 == b.disable_ipv6 &&
+ a.audio_jitter_buffer_max_packets ==
+ b.audio_jitter_buffer_max_packets &&
+ a.screencast_min_bitrate == b.screencast_min_bitrate &&
+ a.combined_audio_video_bwe == b.combined_audio_video_bwe &&
+ a.media_config == b.media_config;
+}
+
+TEST(MediaConstraints, CopyConstraintsIntoRtcConfiguration) {
+ const MediaConstraints constraints_empty;
+ PeerConnectionInterface::RTCConfiguration old_configuration;
+ PeerConnectionInterface::RTCConfiguration configuration;
+
+ CopyConstraintsIntoRtcConfiguration(&constraints_empty, &configuration);
+ EXPECT_TRUE(Matches(old_configuration, configuration));
+
+  const MediaConstraints constraints_enable_ipv6(
+      {MediaConstraints::Constraint(MediaConstraints::kEnableIPv6, "true")},
+      {});
+  CopyConstraintsIntoRtcConfiguration(&constraints_enable_ipv6, &configuration);
+ EXPECT_FALSE(configuration.disable_ipv6);
+ const MediaConstraints constraints_disable_ipv6(
+ {MediaConstraints::Constraint(MediaConstraints::kEnableIPv6, "false")},
+ {});
+ CopyConstraintsIntoRtcConfiguration(&constraints_disable_ipv6,
+ &configuration);
+ EXPECT_TRUE(configuration.disable_ipv6);
+
+ const MediaConstraints constraints_screencast(
+ {MediaConstraints::Constraint(MediaConstraints::kScreencastMinBitrate,
+ "27")},
+ {});
+ CopyConstraintsIntoRtcConfiguration(&constraints_screencast, &configuration);
+ EXPECT_TRUE(configuration.screencast_min_bitrate);
+ EXPECT_EQ(27, *(configuration.screencast_min_bitrate));
+
+ // An empty set of constraints will not overwrite
+ // values that are already present.
+ configuration = old_configuration;
+ configuration.audio_jitter_buffer_max_packets = 34;
+ CopyConstraintsIntoRtcConfiguration(&constraints_empty, &configuration);
+ EXPECT_EQ(34, configuration.audio_jitter_buffer_max_packets);
+}
+
+} // namespace
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/DEPS b/third_party/libwebrtc/sdk/objc/DEPS
new file mode 100644
index 0000000000..4cff92caf1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/DEPS
@@ -0,0 +1,18 @@
+include_rules = [
+ "+base",
+ "+components",
+ "+helpers",
+ "+sdk",
+ "+common_video/h264",
+ "+common_video/include",
+ "+common_video/libyuv/include",
+ "+logging/rtc_event_log/rtc_event_log_factory.h",
+ "+media",
+ "+modules/video_coding",
+ "+pc",
+ "+system_wrappers",
+ "+modules/audio_device",
+ "+modules/audio_processing",
+ "+native",
+ "+third_party/libyuv",
+]
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h
new file mode 100644
index 0000000000..3ec1b613ef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "helpers/NSString+StdString.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h
new file mode 100644
index 0000000000..e5e376b0bc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "helpers/scoped_cftyperef.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h
new file mode 100644
index 0000000000..529aa8dcf5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/peerconnection/RTCConfiguration+Native.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h
new file mode 100644
index 0000000000..222e06ef33
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/peerconnection/RTCPeerConnectionFactory+Native.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h
new file mode 100644
index 0000000000..136d7003c6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/renderer/opengl/RTCDefaultShader.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h
new file mode 100644
index 0000000000..4ba1caa41d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/renderer/opengl/RTCNV12TextureCache.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h
new file mode 100644
index 0000000000..21281f36ac
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/video_codec/nalu_rewriter.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/api/audio_device_module.h b/third_party/libwebrtc/sdk/objc/Framework/Native/api/audio_device_module.h
new file mode 100644
index 0000000000..7b448024de
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/api/audio_device_module.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/api/audio_device_module.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_decoder_factory.h b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_decoder_factory.h
new file mode 100644
index 0000000000..ca9371c54d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_decoder_factory.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/api/video_decoder_factory.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_encoder_factory.h b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_encoder_factory.h
new file mode 100644
index 0000000000..35e1e6c99f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_encoder_factory.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/api/video_encoder_factory.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_frame_buffer.h b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_frame_buffer.h
new file mode 100644
index 0000000000..0e862cfa07
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_frame_buffer.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/api/video_frame_buffer.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_decoder_factory.h b/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_decoder_factory.h
new file mode 100644
index 0000000000..bd8513c342
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_decoder_factory.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/src/objc_video_decoder_factory.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_encoder_factory.h b/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_encoder_factory.h
new file mode 100644
index 0000000000..b6bd650a4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_encoder_factory.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/src/objc_video_encoder_factory.h"
diff --git a/third_party/libwebrtc/sdk/objc/Info.plist b/third_party/libwebrtc/sdk/objc/Info.plist
new file mode 100644
index 0000000000..38c437e7fe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Info.plist
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>WebRTC</string>
+ <key>CFBundleIdentifier</key>
+ <string>org.webrtc.WebRTC</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>WebRTC</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/sdk/objc/OWNERS b/third_party/libwebrtc/sdk/objc/OWNERS
new file mode 100644
index 0000000000..6af9062b2d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/OWNERS
@@ -0,0 +1,9 @@
+# Normal code changes.
+kthelgason@webrtc.org
+andersc@webrtc.org
+peterhanspers@webrtc.org
+denicija@webrtc.org
+
+# Rubberstamps of e.g. reverts and critical bug fixes.
+magjed@webrtc.org
+tkchin@webrtc.org
diff --git a/third_party/libwebrtc/sdk/objc/README.md b/third_party/libwebrtc/sdk/objc/README.md
new file mode 100644
index 0000000000..ff294a266f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/README.md
@@ -0,0 +1,37 @@
+# WebRTC Obj-C SDK
+
+This directory contains the Obj-C SDK for WebRTC. This includes wrappers for the
+C++ PeerConnection API and some platform specific components for iOS and macOS.
+
+## Organization
+
+- api/
+
+ Wrappers around classes and functions in the C++ API for creating and
+ configuring peer connections, etc.
+
+- base/
+
+ This directory contains some base protocols and classes that are used by both
+ the platform specific components and the SDK wrappers.
+
+- components/
+
+  These are the platform specific components. They handle audio, capture and
+  render video, encode and decode with the platform's hardware codec
+  implementation, and represent video frames in the platform's native format.
+
+- helpers/
+
+ These files are not WebRTC specific, but are general helper classes and
+ utilities for the Cocoa platforms.
+
+- native/
+
+ APIs for wrapping the platform specific components and using them with the
+ C++ API.
+
+- unittests/
+
+ This directory contains the tests.
diff --git a/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter+Private.h
new file mode 100644
index 0000000000..9b123d2d05
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter+Private.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoRendererAdapter.h"
+
+#import "base/RTCVideoRenderer.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCVideoRendererAdapter ()
+
+/**
+ * The Objective-C video renderer passed to this adapter during construction.
+ * Calls made to the rtc::VideoSinkInterface will be adapted and passed to
+ * this video renderer.
+ */
+@property(nonatomic, readonly) id<RTC_OBJC_TYPE(RTCVideoRenderer)> videoRenderer;
+
+/**
+ * The native VideoSinkInterface surface exposed by this adapter. Calls made
+ * to this interface will be adapted and passed to the RTCVideoRenderer supplied
+ * during construction. This pointer is unsafe and owned by this class.
+ */
+@property(nonatomic, readonly) rtc::VideoSinkInterface<webrtc::VideoFrame> *nativeVideoRenderer;
+
+/** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.h b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.h
new file mode 100644
index 0000000000..b0b6f04488
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*
+ * Creates an rtc::VideoSinkInterface surface for an RTCVideoRenderer. The
+ * rtc::VideoSinkInterface is used by WebRTC rendering code; this adapter
+ * forwards calls made to that interface to the RTCVideoRenderer supplied
+ * during construction.
+ */
+@interface RTCVideoRendererAdapter : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.mm b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.mm
new file mode 100644
index 0000000000..ef02f72f60
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.mm
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoRendererAdapter+Private.h"
+#import "base/RTCVideoFrame.h"
+
+#include <memory>
+
+#include "sdk/objc/native/api/video_frame.h"
+
+namespace webrtc {
+
+class VideoRendererAdapter
+ : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ VideoRendererAdapter(RTCVideoRendererAdapter* adapter) {
+ adapter_ = adapter;
+ size_ = CGSizeZero;
+ }
+
+ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
+ RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
+
+ CGSize current_size = (videoFrame.rotation % 180 == 0)
+ ? CGSizeMake(videoFrame.width, videoFrame.height)
+ : CGSizeMake(videoFrame.height, videoFrame.width);
+
+ if (!CGSizeEqualToSize(size_, current_size)) {
+ size_ = current_size;
+ [adapter_.videoRenderer setSize:size_];
+ }
+ [adapter_.videoRenderer renderFrame:videoFrame];
+ }
+
+ private:
+ __weak RTCVideoRendererAdapter *adapter_;
+ CGSize size_;
+};
+}
+
+@implementation RTCVideoRendererAdapter {
+ std::unique_ptr<webrtc::VideoRendererAdapter> _adapter;
+}
+
+@synthesize videoRenderer = _videoRenderer;
+
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer {
+ NSParameterAssert(videoRenderer);
+ if (self = [super init]) {
+ _videoRenderer = videoRenderer;
+ _adapter.reset(new webrtc::VideoRendererAdapter(self));
+ }
+ return self;
+}
+
+- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)nativeVideoRenderer {
+ return _adapter.get();
+}
+
+@end
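For orientation, a minimal Objective-C++ sketch of how an adapter like this is typically wired up; `myRenderer` and `nativeVideoTrack` are hypothetical names, not part of this change:

```objc
// Wrap an application-supplied id<RTCVideoRenderer> and register the native
// sink with a webrtc::VideoTrackInterface (hypothetical wiring).
RTCVideoRendererAdapter *adapter =
    [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:myRenderer];
nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer,
                                  rtc::VideoSinkWants());
// Keep the adapter alive for as long as the sink is registered:
// nativeVideoRenderer is an unsafe pointer owned by the adapter.
```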
diff --git a/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.h b/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.h
new file mode 100644
index 0000000000..c1aeb825cb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCLogging.h"
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef void (^RTCCallbackLoggerMessageHandler)(NSString *message);
+typedef void (^RTCCallbackLoggerMessageAndSeverityHandler)(NSString *message,
+ RTCLoggingSeverity severity);
+
+// This class intercepts WebRTC logs and forwards them to a registered block.
+// This class is not thread-safe.
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCallbackLogger) : NSObject
+
+// The severity level to capture. The default is kRTCLoggingSeverityInfo.
+@property(nonatomic, assign) RTCLoggingSeverity severity;
+
+// The callback handler will be called on the same thread that performs the
+// logging, so if the handler may be slow it is a good idea to dispatch the
+// work to another queue.
+- (void)start:(nullable RTCCallbackLoggerMessageHandler)handler;
+- (void)startWithMessageAndSeverityHandler:
+ (nullable RTCCallbackLoggerMessageAndSeverityHandler)handler;
+
+- (void)stop;
+
+@end
+
+NS_ASSUME_NONNULL_END
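A short usage sketch of the logger, dispatching off the logging thread as the comment above recommends; the serial `logQueue` is an app-side assumption:

```objc
RTC_OBJC_TYPE(RTCCallbackLogger) *logger =
    [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
logger.severity = RTCLoggingSeverityWarning;
[logger startWithMessageAndSeverityHandler:^(NSString *message,
                                             RTCLoggingSeverity severity) {
  // Hand the message off so slow handling never blocks WebRTC's logging thread.
  dispatch_async(logQueue, ^{
    NSLog(@"[webrtc %ld] %@", (long)severity, message);
  });
}];
// ...later, when logging is no longer needed (also invoked from -dealloc):
[logger stop];
```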
diff --git a/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.mm b/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.mm
new file mode 100644
index 0000000000..ba6fe1b1cc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.mm
@@ -0,0 +1,151 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCallbackLogger.h"
+
+#import "helpers/NSString+StdString.h"
+
+#include <memory>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/log_sinks.h"
+#include "rtc_base/logging.h"
+
+namespace {
+
+class CallbackLogSink final : public rtc::LogSink {
+ public:
+ CallbackLogSink(RTCCallbackLoggerMessageHandler callbackHandler)
+ : callback_handler_(callbackHandler) {}
+
+ void OnLogMessage(const std::string& message) override {
+ OnLogMessage(absl::string_view(message));
+ }
+
+ void OnLogMessage(absl::string_view message) override {
+ if (callback_handler_) {
+ callback_handler_([NSString stringForAbslStringView:message]);
+ }
+ }
+
+ private:
+ RTCCallbackLoggerMessageHandler callback_handler_;
+};
+
+class CallbackWithSeverityLogSink final : public rtc::LogSink {
+ public:
+ CallbackWithSeverityLogSink(RTCCallbackLoggerMessageAndSeverityHandler callbackHandler)
+ : callback_handler_(callbackHandler) {}
+
+ void OnLogMessage(const std::string& message) override { RTC_DCHECK_NOTREACHED(); }
+
+ void OnLogMessage(const std::string& message, rtc::LoggingSeverity severity) override {
+ OnLogMessage(absl::string_view(message), severity);
+ }
+
+ void OnLogMessage(absl::string_view message, rtc::LoggingSeverity severity) override {
+ if (callback_handler_) {
+ RTCLoggingSeverity loggingSeverity = NativeSeverityToObjcSeverity(severity);
+ callback_handler_([NSString stringForAbslStringView:message], loggingSeverity);
+ }
+ }
+
+ private:
+ static RTCLoggingSeverity NativeSeverityToObjcSeverity(rtc::LoggingSeverity severity) {
+ switch (severity) {
+ case rtc::LS_VERBOSE:
+ return RTCLoggingSeverityVerbose;
+ case rtc::LS_INFO:
+ return RTCLoggingSeverityInfo;
+ case rtc::LS_WARNING:
+ return RTCLoggingSeverityWarning;
+ case rtc::LS_ERROR:
+ return RTCLoggingSeverityError;
+ case rtc::LS_NONE:
+ return RTCLoggingSeverityNone;
+ }
+ }
+
+ RTCCallbackLoggerMessageAndSeverityHandler callback_handler_;
+};
+
+}
+
+@implementation RTC_OBJC_TYPE (RTCCallbackLogger) {
+ BOOL _hasStarted;
+ std::unique_ptr<rtc::LogSink> _logSink;
+}
+
+@synthesize severity = _severity;
+
+- (instancetype)init {
+ self = [super init];
+ if (self != nil) {
+ _severity = RTCLoggingSeverityInfo;
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self stop];
+}
+
+- (void)start:(nullable RTCCallbackLoggerMessageHandler)handler {
+ if (_hasStarted) {
+ return;
+ }
+
+ _logSink.reset(new CallbackLogSink(handler));
+
+ rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
+ _hasStarted = YES;
+}
+
+- (void)startWithMessageAndSeverityHandler:
+ (nullable RTCCallbackLoggerMessageAndSeverityHandler)handler {
+ if (_hasStarted) {
+ return;
+ }
+
+ _logSink.reset(new CallbackWithSeverityLogSink(handler));
+
+ rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
+ _hasStarted = YES;
+}
+
+- (void)stop {
+ if (!_hasStarted) {
+ return;
+ }
+ RTC_DCHECK(_logSink);
+ rtc::LogMessage::RemoveLogToStream(_logSink.get());
+ _hasStarted = NO;
+ _logSink.reset();
+}
+
+#pragma mark - Private
+
+- (rtc::LoggingSeverity)rtcSeverity {
+ switch (_severity) {
+ case RTCLoggingSeverityVerbose:
+ return rtc::LS_VERBOSE;
+ case RTCLoggingSeverityInfo:
+ return rtc::LS_INFO;
+ case RTCLoggingSeverityWarning:
+ return rtc::LS_WARNING;
+ case RTCLoggingSeverityError:
+ return rtc::LS_ERROR;
+ case RTCLoggingSeverityNone:
+ return rtc::LS_NONE;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource+Private.h
new file mode 100644
index 0000000000..2c333f9d73
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource+Private.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSource.h"
+
+#import "RTCMediaSource+Private.h"
+
+@interface RTC_OBJC_TYPE (RTCAudioSource)
+()
+
+ /**
+ * The AudioSourceInterface object passed to this RTCAudioSource during
+ * construction.
+ */
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
+
+/** Initialize an RTCAudioSource from a native AudioSourceInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeAudioSource:(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource
+ NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type NS_UNAVAILABLE;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.h
new file mode 100644
index 0000000000..9272fdf2d8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMediaSource.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioSource) : RTC_OBJC_TYPE(RTCMediaSource)
+
+- (instancetype)init NS_UNAVAILABLE;
+
+// Sets the volume for this RTCAudioSource. `volume` is a gain value in the
+// range [0, 10].
+// Temporary fix to be able to modify the volume of remote audio tracks.
+// TODO(kthelgason): Property stays here temporarily until a proper volume-api
+// is available on the surface exposed by webrtc.
+@property(nonatomic, assign) double volume;
+
+@end
+
+NS_ASSUME_NONNULL_END
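A hedged sketch of the intended use of `volume`, assuming a `remoteAudioTrack` obtained elsewhere (for example, from a received RTCRtpReceiver):

```objc
// Adjust the gain applied to a remote track's audio source.
RTC_OBJC_TYPE(RTCAudioSource) *source = remoteAudioTrack.source;
source.volume = 5.0;  // gain value in the range [0, 10]
```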
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.mm
new file mode 100644
index 0000000000..1541045099
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.mm
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSource+Private.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCAudioSource) {
+}
+
+@synthesize volume = _volume;
+@synthesize nativeAudioSource = _nativeAudioSource;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeAudioSource:
+ (rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource {
+ RTC_DCHECK(factory);
+ RTC_DCHECK(nativeAudioSource);
+
+ if (self = [super initWithFactory:factory
+ nativeMediaSource:nativeAudioSource
+ type:RTCMediaSourceTypeAudio]) {
+ _nativeAudioSource = nativeAudioSource;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+- (NSString *)description {
+ NSString *stateString = [[self class] stringForState:self.state];
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCAudioSource)( %p ): %@", self, stateString];
+}
+
+- (void)setVolume:(double)volume {
+ _volume = volume;
+ _nativeAudioSource->SetVolume(volume);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h
new file mode 100644
index 0000000000..6495500484
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioTrack.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@interface RTC_OBJC_TYPE (RTCAudioTrack)
+()
+
+ /** AudioTrackInterface created or passed in at construction. */
+ @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
+
+/** Initialize an RTCAudioTrack with a source and a track id. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ source:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+ trackId:(NSString *)trackId;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.h
new file mode 100644
index 0000000000..95eb5d3d48
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMacros.h"
+#import "RTCMediaStreamTrack.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioSource);
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack)
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** The audio source for this audio track. */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source;
+
+@end
+
+NS_ASSUME_NONNULL_END
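Since `-init` is unavailable, audio tracks are created through the peer connection factory. A minimal sketch, assuming a configured RTCPeerConnectionFactory named `factory`:

```objc
// Create a local audio source and wrap it in a track.
RTC_OBJC_TYPE(RTCAudioSource) *source = [factory audioSourceWithConstraints:nil];
RTC_OBJC_TYPE(RTCAudioTrack) *track =
    [factory audioTrackWithSource:source trackId:@"audio0"];
track.isEnabled = YES;
```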
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.mm
new file mode 100644
index 0000000000..5c1736f436
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.mm
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioTrack+Private.h"
+
+#import "RTCAudioSource+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCAudioTrack)
+
+@synthesize source = _source;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ source:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+ trackId:(NSString *)trackId {
+ RTC_DCHECK(factory);
+ RTC_DCHECK(source);
+ RTC_DCHECK(trackId.length);
+
+ std::string nativeId = [NSString stdStringForString:trackId];
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
+ factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource.get());
+ if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
+ _source = source;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ type:(RTCMediaStreamTrackType)type {
+ NSParameterAssert(factory);
+ NSParameterAssert(nativeTrack);
+ NSParameterAssert(type == RTCMediaStreamTrackTypeAudio);
+ return [super initWithFactory:factory nativeTrack:nativeTrack type:type];
+}
+
+- (RTC_OBJC_TYPE(RTCAudioSource) *)source {
+ if (!_source) {
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source(self.nativeAudioTrack->GetSource());
+ if (source) {
+ _source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory
+ nativeAudioSource:source];
+ }
+ }
+ return _source;
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
+ return rtc::scoped_refptr<webrtc::AudioTrackInterface>(
+ static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get()));
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.h
new file mode 100644
index 0000000000..5ac8984d4a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCertificate) : NSObject <NSCopying>
+
+/** Private key in PEM. */
+@property(nonatomic, readonly, copy) NSString *private_key;
+
+/** Public key in an x509 cert encoded in PEM. */
+@property(nonatomic, readonly, copy) NSString *certificate;
+
+/**
+ * Initialize an RTCCertificate with PEM strings for private_key and certificate.
+ */
+- (instancetype)initWithPrivateKey:(NSString *)private_key
+ certificate:(NSString *)certificate NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Generate a new certificate for reuse.
+ *
+ * Optional dictionary of parameters. Defaults to KeyType ECDSA if none are
+ * provided.
+ * - name: "ECDSA" or "RSASSA-PKCS1-v1_5"
+ */
++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params;
+
+@end
+
+NS_ASSUME_NONNULL_END
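A small sketch of the params dictionary. Only "name" is documented in the header, but the implementation in RTCCertificate.mm also reads an optional "expires" NSNumber that is forwarded to the native generator:

```objc
NSDictionary *params = @{@"name" : @"RSASSA-PKCS1-v1_5", @"expires" : @100000};
RTC_OBJC_TYPE(RTCCertificate) *cert =
    [RTC_OBJC_TYPE(RTCCertificate) generateCertificateWithParams:params];
if (cert == nil) {
  // Generation failed; the method returns nil (and logs an error).
}
```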
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.mm
new file mode 100644
index 0000000000..e5c33e407c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.mm
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCertificate.h"
+
+#import "base/RTCLogging.h"
+
+#include "rtc_base/logging.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/ssl_identity.h"
+
+@implementation RTC_OBJC_TYPE (RTCCertificate)
+
+@synthesize private_key = _private_key;
+@synthesize certificate = _certificate;
+
+- (id)copyWithZone:(NSZone *)zone {
+ id copy = [[[self class] alloc] initWithPrivateKey:[self.private_key copyWithZone:zone]
+ certificate:[self.certificate copyWithZone:zone]];
+ return copy;
+}
+
+- (instancetype)initWithPrivateKey:(NSString *)private_key certificate:(NSString *)certificate {
+ if (self = [super init]) {
+ _private_key = [private_key copy];
+ _certificate = [certificate copy];
+ }
+ return self;
+}
+
++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params {
+ rtc::KeyType keyType = rtc::KT_ECDSA;
+ NSString *keyTypeString = [params valueForKey:@"name"];
+ if (keyTypeString && [keyTypeString isEqualToString:@"RSASSA-PKCS1-v1_5"]) {
+ keyType = rtc::KT_RSA;
+ }
+
+ NSNumber *expires = [params valueForKey:@"expires"];
+ rtc::scoped_refptr<rtc::RTCCertificate> cc_certificate = nullptr;
+ if (expires != nil) {
+ uint64_t expirationTimestamp = [expires unsignedLongLongValue];
+ cc_certificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
+ expirationTimestamp);
+ } else {
+ cc_certificate =
+ rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType), absl::nullopt);
+ }
+ if (!cc_certificate) {
+ RTCLogError(@"Failed to generate certificate.");
+ return nullptr;
+ }
+  // Grab the PEMs and create an Obj-C RTCCertificate.
+ rtc::RTCCertificatePEM pem = cc_certificate->ToPEM();
+ std::string pem_private_key = pem.private_key();
+ std::string pem_certificate = pem.certificate();
+ RTC_LOG(LS_INFO) << "CERT PEM ";
+ RTC_LOG(LS_INFO) << pem_certificate;
+
+ RTC_OBJC_TYPE(RTCCertificate) *cert =
+ [[RTC_OBJC_TYPE(RTCCertificate) alloc] initWithPrivateKey:@(pem_private_key.c_str())
+ certificate:@(pem_certificate.c_str())];
+ return cert;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Native.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Native.h
new file mode 100644
index 0000000000..07c0da6041
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Native.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCConfiguration.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCConfiguration)
+()
+
+ /** Optional TurnCustomizer.
+ * With this class one can modify outgoing TURN messages.
+ * The object passed in must remain valid until PeerConnection::Close() is
+ * called.
+ */
+ @property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Private.h
new file mode 100644
index 0000000000..70a6532dbc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Private.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCConfiguration.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCConfiguration)
+()
+
+ + (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy
+ : (RTCIceTransportPolicy)policy;
+
++ (RTCIceTransportPolicy)transportPolicyForTransportsType:
+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeType;
+
++ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy;
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
+ (RTCBundlePolicy)policy;
+
++ (RTCBundlePolicy)bundlePolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy;
+
++ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy;
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
+ (RTCRtcpMuxPolicy)policy;
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy;
+
++ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy;
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpCandidatePolicyForPolicy:
+ (RTCTcpCandidatePolicy)policy;
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy;
+
++ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy;
+
++ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeCandidateNetworkPolicyForPolicy:
+ (RTCCandidateNetworkPolicy)policy;
+
++ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy;
+
++ (NSString *)stringForCandidateNetworkPolicy:(RTCCandidateNetworkPolicy)policy;
+
++ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTCEncryptionKeyType)keyType;
+
++ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
+
++ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics;
+
++ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
+
+/**
+ * RTCConfiguration struct representation of this RTCConfiguration.
+ * This is needed to pass to the underlying C++ APIs.
+ */
+- (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
+
+- (instancetype)initWithNativeConfiguration:
+ (const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.h
new file mode 100644
index 0000000000..345bf179bc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.h
@@ -0,0 +1,273 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCertificate.h"
+#import "RTCCryptoOptions.h"
+#import "RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCIceServer);
+
+/**
+ * Represents the ICE transport policy. This exposes the same states as the
+ * C++ API, which includes one more state than the W3C spec defines.
+ */
+typedef NS_ENUM(NSInteger, RTCIceTransportPolicy) {
+ RTCIceTransportPolicyNone,
+ RTCIceTransportPolicyRelay,
+ RTCIceTransportPolicyNoHost,
+ RTCIceTransportPolicyAll
+};
+
+/** Represents the bundle policy. */
+typedef NS_ENUM(NSInteger, RTCBundlePolicy) {
+ RTCBundlePolicyBalanced,
+ RTCBundlePolicyMaxCompat,
+ RTCBundlePolicyMaxBundle
+};
+
+/** Represents the rtcp mux policy. */
+typedef NS_ENUM(NSInteger, RTCRtcpMuxPolicy) { RTCRtcpMuxPolicyNegotiate, RTCRtcpMuxPolicyRequire };
+
+/** Represents the tcp candidate policy. */
+typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) {
+ RTCTcpCandidatePolicyEnabled,
+ RTCTcpCandidatePolicyDisabled
+};
+
+/** Represents the candidate network policy. */
+typedef NS_ENUM(NSInteger, RTCCandidateNetworkPolicy) {
+ RTCCandidateNetworkPolicyAll,
+ RTCCandidateNetworkPolicyLowCost
+};
+
+/** Represents the continual gathering policy. */
+typedef NS_ENUM(NSInteger, RTCContinualGatheringPolicy) {
+ RTCContinualGatheringPolicyGatherOnce,
+ RTCContinualGatheringPolicyGatherContinually
+};
+
+/** Represents the encryption key type. */
+typedef NS_ENUM(NSInteger, RTCEncryptionKeyType) {
+ RTCEncryptionKeyTypeRSA,
+ RTCEncryptionKeyTypeECDSA,
+};
+
+/** Represents the chosen SDP semantics for the RTCPeerConnection. */
+typedef NS_ENUM(NSInteger, RTCSdpSemantics) {
+ // TODO(https://crbug.com/webrtc/13528): Remove support for Plan B.
+ RTCSdpSemanticsPlanB,
+ RTCSdpSemanticsUnifiedPlan,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCConfiguration) : NSObject
+
+/** If true, allows DSCP codes to be set on outgoing packets, configured using
+ * networkPriority field of RTCRtpEncodingParameters. Defaults to false.
+ */
+@property(nonatomic, assign) BOOL enableDscp;
+
+/** An array of Ice Servers available to be used by ICE. */
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCIceServer) *> *iceServers;
+
+/** An RTCCertificate for reuse. */
+@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCertificate) * certificate;
+
+/** Which candidates the ICE agent is allowed to use. The W3C calls it
+ * `iceTransportPolicy`, while in C++ it is called `type`. */
+@property(nonatomic, assign) RTCIceTransportPolicy iceTransportPolicy;
+
+/** The media-bundling policy to use when gathering ICE candidates. */
+@property(nonatomic, assign) RTCBundlePolicy bundlePolicy;
+
+/** The rtcp-mux policy to use when gathering ICE candidates. */
+@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy;
+@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy;
+@property(nonatomic, assign) RTCCandidateNetworkPolicy candidateNetworkPolicy;
+@property(nonatomic, assign) RTCContinualGatheringPolicy continualGatheringPolicy;
+
+/** If set to YES, don't gather IPv6 ICE candidates.
+ * Default is NO.
+ */
+@property(nonatomic, assign) BOOL disableIPV6;
+
+/** If set to YES, don't gather IPv6 ICE candidates on Wi-Fi.
+ * Only intended to be used on specific devices. Certain phones disable IPv6
+ * when the screen is turned off and it would be better to just disable the
+ * IPv6 ICE candidates on Wi-Fi in those cases.
+ * Default is NO.
+ */
+@property(nonatomic, assign) BOOL disableIPV6OnWiFi;
+
+/** By default, the PeerConnection will use a limited number of IPv6 network
+ * interfaces, in order to avoid too many ICE candidate pairs being created
+ * and delaying ICE completion.
+ *
+ * Can be set to INT_MAX to effectively disable the limit.
+ */
+@property(nonatomic, assign) int maxIPv6Networks;
+
+/** Exclude link-local network interfaces
+ * from consideration when gathering ICE candidates.
+ * Defaults to NO.
+ */
+@property(nonatomic, assign) BOOL disableLinkLocalNetworks;
+
+@property(nonatomic, assign) int audioJitterBufferMaxPackets;
+@property(nonatomic, assign) BOOL audioJitterBufferFastAccelerate;
+@property(nonatomic, assign) int iceConnectionReceivingTimeout;
+@property(nonatomic, assign) int iceBackupCandidatePairPingInterval;
+
+/** Key type used to generate SSL identity. Default is ECDSA. */
+@property(nonatomic, assign) RTCEncryptionKeyType keyType;
+
+/** ICE candidate pool size as defined in JSEP. Default is 0. */
+@property(nonatomic, assign) int iceCandidatePoolSize;
+
+/** Prune turn ports on the same network to the same turn server.
+ * Default is NO.
+ */
+@property(nonatomic, assign) BOOL shouldPruneTurnPorts;
+
+/** If set to YES, this means the ICE transport should presume TURN-to-TURN
+ * candidate pairs will succeed, even before a binding response is received.
+ */
+@property(nonatomic, assign) BOOL shouldPresumeWritableWhenFullyRelayed;
+
+/* This flag is only effective when `continualGatheringPolicy` is
+ * RTCContinualGatheringPolicyGatherContinually.
+ *
+ * If YES, when the ICE transport type is changed so that new types of ICE
+ * candidates are allowed (e.g. from RTCIceTransportPolicyRelay to
+ * RTCIceTransportPolicyAll), candidates that were already gathered by the
+ * ICE transport but did not match the previous transport type, and were
+ * therefore not observed by PeerConnectionDelegateAdapter, will be surfaced
+ * to the delegate.
+ */
+@property(nonatomic, assign) BOOL shouldSurfaceIceCandidatesOnIceTransportTypeChanged;
+
+/** If set to non-nil, controls the minimal interval between consecutive ICE
+ * check packets.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceCheckMinInterval;
+
+/**
+ * Configure the SDP semantics used by this PeerConnection. By default, this
+ * is RTCSdpSemanticsUnifiedPlan which is compliant to the WebRTC 1.0
+ * specification. It is possible to override this with the deprecated
+ * RTCSdpSemanticsPlanB SDP format, but note that RTCSdpSemanticsPlanB will be
+ * deleted at some future date, see https://crbug.com/webrtc/13528.
+ *
+ * RTCSdpSemanticsUnifiedPlan will cause RTCPeerConnection to create offers and
+ * answers with multiple m= sections where each m= section maps to one
+ * RTCRtpSender and one RTCRtpReceiver (an RTCRtpTransceiver), either both audio
+ * or both video. This will also cause RTCPeerConnection to ignore all but the
+ * first a=ssrc lines that form a Plan B stream.
+ *
+ * RTCSdpSemanticsPlanB will cause RTCPeerConnection to create offers and
+ * answers with at most one audio and one video m= section with multiple
+ * RTCRtpSenders and RTCRtpReceivers specified as multiple a=ssrc lines within
+ * the section. This will also cause RTCPeerConnection to ignore all but the
+ * first m= section of the same media type.
+ */
+@property(nonatomic, assign) RTCSdpSemantics sdpSemantics;
+
+/** Actively reset the SRTP parameters when the DTLS transports underneath are
+ * changed after offer/answer negotiation. This is only intended to be a
+ * workaround for crbug.com/835958
+ */
+@property(nonatomic, assign) BOOL activeResetSrtpParams;
+
+/** If the remote side supports mid-stream codec switches, then allow encoder
+ * switching to be performed.
+ */
+
+@property(nonatomic, assign) BOOL allowCodecSwitching;
+
+/**
+ * Defines advanced optional cryptographic settings related to SRTP and
+ * frame encryption for native WebRTC. Setting this will overwrite any
+ * options set through the PeerConnectionFactory (which is deprecated).
+ */
+@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCryptoOptions) * cryptoOptions;
+
+/**
+ * An optional string that will be attached to the TURN_ALLOCATE_REQUEST,
+ * which can be used to correlate client logs with backend logs.
+ */
+@property(nonatomic, nullable, copy) NSString *turnLoggingId;
+
+/**
+ * Time interval between audio RTCP reports.
+ */
+@property(nonatomic, assign) int rtcpAudioReportIntervalMs;
+
+/**
+ * Time interval between video RTCP reports.
+ */
+@property(nonatomic, assign) int rtcpVideoReportIntervalMs;
+
+/**
+ * Allow implicit rollback of local description when remote description
+ * conflicts with local description.
+ * See: https://w3c.github.io/webrtc-pc/#dom-peerconnection-setremotedescription
+ */
+@property(nonatomic, assign) BOOL enableImplicitRollback;
+
+/**
+ * Control if "a=extmap-allow-mixed" is included in the offer.
+ * See: https://www.chromestatus.com/feature/6269234631933952
+ */
+@property(nonatomic, assign) BOOL offerExtmapAllowMixed;
+
+/**
+ * Defines the interval applied to ALL candidate pairs
+ * when ICE is strongly connected, and it overrides the
+ * default value of this interval in the ICE implementation.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceCheckIntervalStrongConnectivity;
+
+/**
+ * Defines the interval applied to ALL candidate pairs when ICE is weakly
+ * connected, and it overrides the default value of this interval in the
+ * ICE implementation.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceCheckIntervalWeakConnectivity;
+
+/**
+ * The min time period for which a candidate pair must wait for response to
+ * connectivity checks before it becomes unwritable. This parameter
+ * overrides the default value in the ICE implementation if set.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceUnwritableTimeout;
+
+/**
+ * The min number of connectivity checks that a candidate pair must send
+ * without receiving a response before it becomes unwritable. This parameter
+ * overrides the default value in the ICE implementation if set.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceUnwritableMinChecks;
+
+/**
+ * The min time period for which a candidate pair must wait for response to
+ * connectivity checks before it becomes inactive. This parameter overrides the
+ * default value in the ICE implementation if set.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceInactiveTimeout;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
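A minimal configuration sketch showing how these properties are typically set before creating a peer connection; the STUN URL is a placeholder:

```objc
RTC_OBJC_TYPE(RTCConfiguration) *config =
    [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.iceServers = @[ [[RTC_OBJC_TYPE(RTCIceServer) alloc]
    initWithURLStrings:@[ @"stun:stun.example.org:3478" ]] ];
config.iceTransportPolicy = RTCIceTransportPolicyAll;
config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually;
// Unified Plan is already the default; set explicitly here for clarity.
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
```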
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm
new file mode 100644
index 0000000000..859aa8ad76
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm
@@ -0,0 +1,549 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCConfiguration+Private.h"
+
+#include <memory>
+
+#import "RTCCertificate.h"
+#import "RTCConfiguration+Native.h"
+#import "RTCIceServer+Private.h"
+#import "base/RTCLogging.h"
+
+#include "rtc_base/checks.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/ssl_identity.h"
+
+@implementation RTC_OBJC_TYPE (RTCConfiguration)
+
+@synthesize enableDscp = _enableDscp;
+@synthesize iceServers = _iceServers;
+@synthesize certificate = _certificate;
+@synthesize iceTransportPolicy = _iceTransportPolicy;
+@synthesize bundlePolicy = _bundlePolicy;
+@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
+@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
+@synthesize candidateNetworkPolicy = _candidateNetworkPolicy;
+@synthesize continualGatheringPolicy = _continualGatheringPolicy;
+@synthesize disableIPV6 = _disableIPV6;
+@synthesize disableIPV6OnWiFi = _disableIPV6OnWiFi;
+@synthesize maxIPv6Networks = _maxIPv6Networks;
+@synthesize disableLinkLocalNetworks = _disableLinkLocalNetworks;
+@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
+@synthesize audioJitterBufferFastAccelerate = _audioJitterBufferFastAccelerate;
+@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
+@synthesize iceBackupCandidatePairPingInterval =
+ _iceBackupCandidatePairPingInterval;
+@synthesize keyType = _keyType;
+@synthesize iceCandidatePoolSize = _iceCandidatePoolSize;
+@synthesize shouldPruneTurnPorts = _shouldPruneTurnPorts;
+@synthesize shouldPresumeWritableWhenFullyRelayed =
+ _shouldPresumeWritableWhenFullyRelayed;
+@synthesize shouldSurfaceIceCandidatesOnIceTransportTypeChanged =
+ _shouldSurfaceIceCandidatesOnIceTransportTypeChanged;
+@synthesize iceCheckMinInterval = _iceCheckMinInterval;
+@synthesize sdpSemantics = _sdpSemantics;
+@synthesize turnCustomizer = _turnCustomizer;
+@synthesize activeResetSrtpParams = _activeResetSrtpParams;
+@synthesize allowCodecSwitching = _allowCodecSwitching;
+@synthesize cryptoOptions = _cryptoOptions;
+@synthesize turnLoggingId = _turnLoggingId;
+@synthesize rtcpAudioReportIntervalMs = _rtcpAudioReportIntervalMs;
+@synthesize rtcpVideoReportIntervalMs = _rtcpVideoReportIntervalMs;
+@synthesize enableImplicitRollback = _enableImplicitRollback;
+@synthesize offerExtmapAllowMixed = _offerExtmapAllowMixed;
+@synthesize iceCheckIntervalStrongConnectivity = _iceCheckIntervalStrongConnectivity;
+@synthesize iceCheckIntervalWeakConnectivity = _iceCheckIntervalWeakConnectivity;
+@synthesize iceUnwritableTimeout = _iceUnwritableTimeout;
+@synthesize iceUnwritableMinChecks = _iceUnwritableMinChecks;
+@synthesize iceInactiveTimeout = _iceInactiveTimeout;
+
+- (instancetype)init {
+ // Copy defaults.
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ return [self initWithNativeConfiguration:config];
+}
+
+- (instancetype)initWithNativeConfiguration:
+ (const webrtc::PeerConnectionInterface::RTCConfiguration &)config {
+ if (self = [super init]) {
+ _enableDscp = config.dscp();
+ NSMutableArray *iceServers = [NSMutableArray array];
+ for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
+ RTC_OBJC_TYPE(RTCIceServer) *iceServer =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:server];
+ [iceServers addObject:iceServer];
+ }
+ _iceServers = iceServers;
+ if (!config.certificates.empty()) {
+ rtc::scoped_refptr<rtc::RTCCertificate> native_cert;
+ native_cert = config.certificates[0];
+ rtc::RTCCertificatePEM native_pem = native_cert->ToPEM();
+ _certificate = [[RTC_OBJC_TYPE(RTCCertificate) alloc]
+ initWithPrivateKey:@(native_pem.private_key().c_str())
+ certificate:@(native_pem.certificate().c_str())];
+ }
+ _iceTransportPolicy =
+ [[self class] transportPolicyForTransportsType:config.type];
+ _bundlePolicy =
+ [[self class] bundlePolicyForNativePolicy:config.bundle_policy];
+ _rtcpMuxPolicy =
+ [[self class] rtcpMuxPolicyForNativePolicy:config.rtcp_mux_policy];
+ _tcpCandidatePolicy = [[self class] tcpCandidatePolicyForNativePolicy:
+ config.tcp_candidate_policy];
+ _candidateNetworkPolicy = [[self class]
+ candidateNetworkPolicyForNativePolicy:config.candidate_network_policy];
+ webrtc::PeerConnectionInterface::ContinualGatheringPolicy nativePolicy =
+ config.continual_gathering_policy;
+ _continualGatheringPolicy =
+ [[self class] continualGatheringPolicyForNativePolicy:nativePolicy];
+ _disableIPV6 = config.disable_ipv6;
+ _disableIPV6OnWiFi = config.disable_ipv6_on_wifi;
+ _maxIPv6Networks = config.max_ipv6_networks;
+ _disableLinkLocalNetworks = config.disable_link_local_networks;
+ _audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
+ _audioJitterBufferFastAccelerate = config.audio_jitter_buffer_fast_accelerate;
+ _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
+ _iceBackupCandidatePairPingInterval =
+ config.ice_backup_candidate_pair_ping_interval;
+ _keyType = RTCEncryptionKeyTypeECDSA;
+ _iceCandidatePoolSize = config.ice_candidate_pool_size;
+ _shouldPruneTurnPorts = config.prune_turn_ports;
+ _shouldPresumeWritableWhenFullyRelayed =
+ config.presume_writable_when_fully_relayed;
+ _shouldSurfaceIceCandidatesOnIceTransportTypeChanged =
+ config.surface_ice_candidates_on_ice_transport_type_changed;
+ if (config.ice_check_min_interval) {
+ _iceCheckMinInterval =
+ [NSNumber numberWithInt:*config.ice_check_min_interval];
+ }
+ _sdpSemantics = [[self class] sdpSemanticsForNativeSdpSemantics:config.sdp_semantics];
+ _turnCustomizer = config.turn_customizer;
+ _activeResetSrtpParams = config.active_reset_srtp_params;
+ if (config.crypto_options) {
+ _cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc]
+ initWithSrtpEnableGcmCryptoSuites:config.crypto_options->srtp
+ .enable_gcm_crypto_suites
+ srtpEnableAes128Sha1_32CryptoCipher:config.crypto_options->srtp
+ .enable_aes128_sha1_32_crypto_cipher
+ srtpEnableEncryptedRtpHeaderExtensions:config.crypto_options->srtp
+ .enable_encrypted_rtp_header_extensions
+ sframeRequireFrameEncryption:config.crypto_options->sframe
+ .require_frame_encryption];
+ }
+ _turnLoggingId = [NSString stringWithUTF8String:config.turn_logging_id.c_str()];
+ _rtcpAudioReportIntervalMs = config.audio_rtcp_report_interval_ms();
+ _rtcpVideoReportIntervalMs = config.video_rtcp_report_interval_ms();
+ _allowCodecSwitching = config.allow_codec_switching.value_or(false);
+ _enableImplicitRollback = config.enable_implicit_rollback;
+ _offerExtmapAllowMixed = config.offer_extmap_allow_mixed;
+ _iceCheckIntervalStrongConnectivity =
+ config.ice_check_interval_strong_connectivity.has_value() ?
+ [NSNumber numberWithInt:*config.ice_check_interval_strong_connectivity] :
+ nil;
+ _iceCheckIntervalWeakConnectivity = config.ice_check_interval_weak_connectivity.has_value() ?
+ [NSNumber numberWithInt:*config.ice_check_interval_weak_connectivity] :
+ nil;
+ _iceUnwritableTimeout = config.ice_unwritable_timeout.has_value() ?
+ [NSNumber numberWithInt:*config.ice_unwritable_timeout] :
+ nil;
+ _iceUnwritableMinChecks = config.ice_unwritable_min_checks.has_value() ?
+ [NSNumber numberWithInt:*config.ice_unwritable_min_checks] :
+ nil;
+ _iceInactiveTimeout = config.ice_inactive_timeout.has_value() ?
+ [NSNumber numberWithInt:*config.ice_inactive_timeout] :
+ nil;
+ }
+ return self;
+}
+
+- (NSString *)description {
+ static NSString *formatString = @"RTC_OBJC_TYPE(RTCConfiguration): "
+ @"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n"
+ @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n%d\n}\n";
+
+ return [NSString
+ stringWithFormat:formatString,
+ _iceServers,
+ [[self class] stringForTransportPolicy:_iceTransportPolicy],
+ [[self class] stringForBundlePolicy:_bundlePolicy],
+ [[self class] stringForRtcpMuxPolicy:_rtcpMuxPolicy],
+ [[self class] stringForTcpCandidatePolicy:_tcpCandidatePolicy],
+ [[self class] stringForCandidateNetworkPolicy:_candidateNetworkPolicy],
+ [[self class] stringForContinualGatheringPolicy:_continualGatheringPolicy],
+ [[self class] stringForSdpSemantics:_sdpSemantics],
+ _audioJitterBufferMaxPackets,
+ _audioJitterBufferFastAccelerate,
+ _iceConnectionReceivingTimeout,
+ _iceBackupCandidatePairPingInterval,
+ _iceCandidatePoolSize,
+ _shouldPruneTurnPorts,
+ _shouldPresumeWritableWhenFullyRelayed,
+ _shouldSurfaceIceCandidatesOnIceTransportTypeChanged,
+ _iceCheckMinInterval,
+ _disableLinkLocalNetworks,
+ _disableIPV6,
+ _disableIPV6OnWiFi,
+ _maxIPv6Networks,
+ _activeResetSrtpParams,
+ _enableDscp,
+ _enableImplicitRollback];
+}
+
+#pragma mark - Private
+
+- (webrtc::PeerConnectionInterface::RTCConfiguration *)
+ createNativeConfiguration {
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
+ nativeConfig(new webrtc::PeerConnectionInterface::RTCConfiguration(
+ webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive));
+
+ nativeConfig->set_dscp(_enableDscp);
+ for (RTC_OBJC_TYPE(RTCIceServer) * iceServer in _iceServers) {
+ nativeConfig->servers.push_back(iceServer.nativeServer);
+ }
+ nativeConfig->type =
+ [[self class] nativeTransportsTypeForTransportPolicy:_iceTransportPolicy];
+ nativeConfig->bundle_policy =
+ [[self class] nativeBundlePolicyForPolicy:_bundlePolicy];
+ nativeConfig->rtcp_mux_policy =
+ [[self class] nativeRtcpMuxPolicyForPolicy:_rtcpMuxPolicy];
+ nativeConfig->tcp_candidate_policy =
+ [[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy];
+ nativeConfig->candidate_network_policy = [[self class]
+ nativeCandidateNetworkPolicyForPolicy:_candidateNetworkPolicy];
+ nativeConfig->continual_gathering_policy = [[self class]
+ nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy];
+ nativeConfig->disable_ipv6 = _disableIPV6;
+ nativeConfig->disable_ipv6_on_wifi = _disableIPV6OnWiFi;
+ nativeConfig->max_ipv6_networks = _maxIPv6Networks;
+ nativeConfig->disable_link_local_networks = _disableLinkLocalNetworks;
+ nativeConfig->audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
+ nativeConfig->audio_jitter_buffer_fast_accelerate =
+ _audioJitterBufferFastAccelerate ? true : false;
+ nativeConfig->ice_connection_receiving_timeout =
+ _iceConnectionReceivingTimeout;
+ nativeConfig->ice_backup_candidate_pair_ping_interval =
+ _iceBackupCandidatePairPingInterval;
+ rtc::KeyType keyType =
+ [[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
+ if (_certificate != nullptr) {
+    // If a PEM certificate was configured, use it.
+ RTC_LOG(LS_INFO) << "Have configured cert - using it.";
+ std::string pem_private_key = [[_certificate private_key] UTF8String];
+ std::string pem_certificate = [[_certificate certificate] UTF8String];
+ rtc::RTCCertificatePEM pem = rtc::RTCCertificatePEM(pem_private_key, pem_certificate);
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate = rtc::RTCCertificate::FromPEM(pem);
+ RTC_LOG(LS_INFO) << "Created cert from PEM strings.";
+ if (!certificate) {
+ RTC_LOG(LS_ERROR) << "Failed to generate certificate from PEM.";
+ return nullptr;
+ }
+ nativeConfig->certificates.push_back(certificate);
+ } else {
+ RTC_LOG(LS_INFO) << "Don't have configured cert.";
+ // Generate non-default certificate.
+ if (keyType != rtc::KT_DEFAULT) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
+ absl::optional<uint64_t>());
+ if (!certificate) {
+ RTCLogError(@"Failed to generate certificate.");
+ return nullptr;
+ }
+ nativeConfig->certificates.push_back(certificate);
+ }
+ }
+ nativeConfig->ice_candidate_pool_size = _iceCandidatePoolSize;
+ nativeConfig->prune_turn_ports = _shouldPruneTurnPorts ? true : false;
+ nativeConfig->presume_writable_when_fully_relayed =
+ _shouldPresumeWritableWhenFullyRelayed ? true : false;
+ nativeConfig->surface_ice_candidates_on_ice_transport_type_changed =
+ _shouldSurfaceIceCandidatesOnIceTransportTypeChanged ? true : false;
+ if (_iceCheckMinInterval != nil) {
+ nativeConfig->ice_check_min_interval = absl::optional<int>(_iceCheckMinInterval.intValue);
+ }
+ nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics];
+ if (_turnCustomizer) {
+ nativeConfig->turn_customizer = _turnCustomizer;
+ }
+ nativeConfig->active_reset_srtp_params = _activeResetSrtpParams ? true : false;
+ if (_cryptoOptions) {
+ webrtc::CryptoOptions nativeCryptoOptions;
+ nativeCryptoOptions.srtp.enable_gcm_crypto_suites =
+ _cryptoOptions.srtpEnableGcmCryptoSuites ? true : false;
+ nativeCryptoOptions.srtp.enable_aes128_sha1_32_crypto_cipher =
+ _cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher ? true : false;
+ nativeCryptoOptions.srtp.enable_encrypted_rtp_header_extensions =
+ _cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions ? true : false;
+ nativeCryptoOptions.sframe.require_frame_encryption =
+ _cryptoOptions.sframeRequireFrameEncryption ? true : false;
+ nativeConfig->crypto_options = absl::optional<webrtc::CryptoOptions>(nativeCryptoOptions);
+ }
+ nativeConfig->turn_logging_id = [_turnLoggingId UTF8String];
+ nativeConfig->set_audio_rtcp_report_interval_ms(_rtcpAudioReportIntervalMs);
+ nativeConfig->set_video_rtcp_report_interval_ms(_rtcpVideoReportIntervalMs);
+ nativeConfig->allow_codec_switching = _allowCodecSwitching;
+ nativeConfig->enable_implicit_rollback = _enableImplicitRollback;
+ nativeConfig->offer_extmap_allow_mixed = _offerExtmapAllowMixed;
+ if (_iceCheckIntervalStrongConnectivity != nil) {
+ nativeConfig->ice_check_interval_strong_connectivity =
+ absl::optional<int>(_iceCheckIntervalStrongConnectivity.intValue);
+ }
+ if (_iceCheckIntervalWeakConnectivity != nil) {
+ nativeConfig->ice_check_interval_weak_connectivity =
+ absl::optional<int>(_iceCheckIntervalWeakConnectivity.intValue);
+ }
+ if (_iceUnwritableTimeout != nil) {
+ nativeConfig->ice_unwritable_timeout = absl::optional<int>(_iceUnwritableTimeout.intValue);
+ }
+ if (_iceUnwritableMinChecks != nil) {
+ nativeConfig->ice_unwritable_min_checks = absl::optional<int>(_iceUnwritableMinChecks.intValue);
+ }
+ if (_iceInactiveTimeout != nil) {
+ nativeConfig->ice_inactive_timeout = absl::optional<int>(_iceInactiveTimeout.intValue);
+ }
+ return nativeConfig.release();
+}
+
++ (webrtc::PeerConnectionInterface::IceTransportsType)
+ nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy {
+ switch (policy) {
+ case RTCIceTransportPolicyNone:
+ return webrtc::PeerConnectionInterface::kNone;
+ case RTCIceTransportPolicyRelay:
+ return webrtc::PeerConnectionInterface::kRelay;
+ case RTCIceTransportPolicyNoHost:
+ return webrtc::PeerConnectionInterface::kNoHost;
+ case RTCIceTransportPolicyAll:
+ return webrtc::PeerConnectionInterface::kAll;
+ }
+}
+
++ (RTCIceTransportPolicy)transportPolicyForTransportsType:
+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeType {
+ switch (nativeType) {
+ case webrtc::PeerConnectionInterface::kNone:
+ return RTCIceTransportPolicyNone;
+ case webrtc::PeerConnectionInterface::kRelay:
+ return RTCIceTransportPolicyRelay;
+ case webrtc::PeerConnectionInterface::kNoHost:
+ return RTCIceTransportPolicyNoHost;
+ case webrtc::PeerConnectionInterface::kAll:
+ return RTCIceTransportPolicyAll;
+ }
+}
+
++ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy {
+ switch (policy) {
+ case RTCIceTransportPolicyNone:
+ return @"NONE";
+ case RTCIceTransportPolicyRelay:
+ return @"RELAY";
+ case RTCIceTransportPolicyNoHost:
+ return @"NO_HOST";
+ case RTCIceTransportPolicyAll:
+ return @"ALL";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
+ (RTCBundlePolicy)policy {
+ switch (policy) {
+ case RTCBundlePolicyBalanced:
+ return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
+ case RTCBundlePolicyMaxCompat:
+ return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
+ case RTCBundlePolicyMaxBundle:
+ return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
+ }
+}
+
++ (RTCBundlePolicy)bundlePolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
+ return RTCBundlePolicyBalanced;
+ case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
+ return RTCBundlePolicyMaxCompat;
+ case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
+ return RTCBundlePolicyMaxBundle;
+ }
+}
+
++ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy {
+ switch (policy) {
+ case RTCBundlePolicyBalanced:
+ return @"BALANCED";
+ case RTCBundlePolicyMaxCompat:
+ return @"MAX_COMPAT";
+ case RTCBundlePolicyMaxBundle:
+ return @"MAX_BUNDLE";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
+ (RTCRtcpMuxPolicy)policy {
+ switch (policy) {
+ case RTCRtcpMuxPolicyNegotiate:
+ return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+ case RTCRtcpMuxPolicyRequire:
+ return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ }
+}
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
+ return RTCRtcpMuxPolicyNegotiate;
+ case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
+ return RTCRtcpMuxPolicyRequire;
+ }
+}
+
++ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy {
+ switch (policy) {
+ case RTCRtcpMuxPolicyNegotiate:
+ return @"NEGOTIATE";
+ case RTCRtcpMuxPolicyRequire:
+ return @"REQUIRE";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
+ nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy {
+ switch (policy) {
+ case RTCTcpCandidatePolicyEnabled:
+ return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+ case RTCTcpCandidatePolicyDisabled:
+ return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)
+ nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy {
+ switch (policy) {
+ case RTCCandidateNetworkPolicyAll:
+ return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll;
+ case RTCCandidateNetworkPolicyLowCost:
+ return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
+ }
+}
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
+ return RTCTcpCandidatePolicyEnabled;
+ case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
+ return RTCTcpCandidatePolicyDisabled;
+ }
+}
+
++ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy {
+ switch (policy) {
+ case RTCTcpCandidatePolicyEnabled:
+ return @"TCP_ENABLED";
+ case RTCTcpCandidatePolicyDisabled:
+ return @"TCP_DISABLED";
+ }
+}
+
++ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll:
+ return RTCCandidateNetworkPolicyAll;
+ case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost:
+ return RTCCandidateNetworkPolicyLowCost;
+ }
+}
+
++ (NSString *)stringForCandidateNetworkPolicy:
+ (RTCCandidateNetworkPolicy)policy {
+ switch (policy) {
+ case RTCCandidateNetworkPolicyAll:
+ return @"CANDIDATE_ALL_NETWORKS";
+ case RTCCandidateNetworkPolicyLowCost:
+ return @"CANDIDATE_LOW_COST_NETWORKS";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)
+ nativeContinualGatheringPolicyForPolicy:
+ (RTCContinualGatheringPolicy)policy {
+ switch (policy) {
+ case RTCContinualGatheringPolicyGatherOnce:
+ return webrtc::PeerConnectionInterface::GATHER_ONCE;
+ case RTCContinualGatheringPolicyGatherContinually:
+ return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY;
+ }
+}
+
++ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::GATHER_ONCE:
+ return RTCContinualGatheringPolicyGatherOnce;
+ case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY:
+ return RTCContinualGatheringPolicyGatherContinually;
+ }
+}
+
++ (NSString *)stringForContinualGatheringPolicy:
+ (RTCContinualGatheringPolicy)policy {
+ switch (policy) {
+ case RTCContinualGatheringPolicyGatherOnce:
+ return @"GATHER_ONCE";
+ case RTCContinualGatheringPolicyGatherContinually:
+ return @"GATHER_CONTINUALLY";
+ }
+}
+
++ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:
+ (RTCEncryptionKeyType)keyType {
+ switch (keyType) {
+ case RTCEncryptionKeyTypeRSA:
+ return rtc::KT_RSA;
+ case RTCEncryptionKeyTypeECDSA:
+ return rtc::KT_ECDSA;
+ }
+}
+
++ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics {
+ switch (sdpSemantics) {
+ case RTCSdpSemanticsPlanB:
+ return webrtc::SdpSemantics::kPlanB_DEPRECATED;
+ case RTCSdpSemanticsUnifiedPlan:
+ return webrtc::SdpSemantics::kUnifiedPlan;
+ }
+}
+
++ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics {
+ switch (sdpSemantics) {
+ case webrtc::SdpSemantics::kPlanB_DEPRECATED:
+ return RTCSdpSemanticsPlanB;
+ case webrtc::SdpSemantics::kUnifiedPlan:
+ return RTCSdpSemanticsUnifiedPlan;
+ }
+}
+
++ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics {
+ switch (sdpSemantics) {
+ case RTCSdpSemanticsPlanB:
+ return @"PLAN_B";
+ case RTCSdpSemanticsUnifiedPlan:
+ return @"UNIFIED_PLAN";
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.h
new file mode 100644
index 0000000000..7894c8d50c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Objective-C bindings for webrtc::CryptoOptions. This API had to be flattened
+ * as Objective-C doesn't support nested structures.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCryptoOptions) : NSObject
+
+/**
+ * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
+ * if both sides enable it
+ */
+@property(nonatomic, assign) BOOL srtpEnableGcmCryptoSuites;
+/**
+ * If set to true, the (potentially insecure) crypto cipher
+ * kSrtpAes128CmSha1_32 will be included in the list of supported ciphers
+ * during negotiation. It will only be used if both peers support it and no
+ * other ciphers get preferred.
+ */
+@property(nonatomic, assign) BOOL srtpEnableAes128Sha1_32CryptoCipher;
+/**
+ * If set to true, encrypted RTP header extensions as defined in RFC 6904
+ * will be negotiated. They will only be used if both peers support them.
+ */
+@property(nonatomic, assign) BOOL srtpEnableEncryptedRtpHeaderExtensions;
+
+/**
+ * If set, all RtpSenders must have a FrameEncryptor attached to them before
+ * they are allowed to send packets. All RtpReceivers must have a
+ * FrameDecryptor attached to them before they are able to receive packets.
+ */
+@property(nonatomic, assign) BOOL sframeRequireFrameEncryption;
+
+/**
+ * Initializes CryptoOptions with all possible options set explicitly. This
+ * is done when converting from a native RTCConfiguration.crypto_options.
+ */
+- (instancetype)initWithSrtpEnableGcmCryptoSuites:(BOOL)srtpEnableGcmCryptoSuites
+ srtpEnableAes128Sha1_32CryptoCipher:(BOOL)srtpEnableAes128Sha1_32CryptoCipher
+ srtpEnableEncryptedRtpHeaderExtensions:(BOOL)srtpEnableEncryptedRtpHeaderExtensions
+ sframeRequireFrameEncryption:(BOOL)sframeRequireFrameEncryption
+ NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)init NS_UNAVAILABLE;
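+
+/**
+ * A minimal usage sketch (not part of the upstream sources): constructing
+ * crypto options with only GCM suites enabled. Attaching them through a
+ * `cryptoOptions` property is an assumption based on the RTCConfiguration
+ * conversion code above.
+ *
+ *   RTC_OBJC_TYPE(RTCCryptoOptions) *options =
+ *       [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc]
+ *              initWithSrtpEnableGcmCryptoSuites:YES
+ *            srtpEnableAes128Sha1_32CryptoCipher:NO
+ *         srtpEnableEncryptedRtpHeaderExtensions:NO
+ *                   sframeRequireFrameEncryption:NO];
+ *   config.cryptoOptions = options;  // `config` is a hypothetical RTCConfiguration.
+ */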
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.mm
new file mode 100644
index 0000000000..fbaa1de58d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.mm
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCryptoOptions.h"
+
+@implementation RTC_OBJC_TYPE (RTCCryptoOptions)
+
+@synthesize srtpEnableGcmCryptoSuites = _srtpEnableGcmCryptoSuites;
+@synthesize srtpEnableAes128Sha1_32CryptoCipher = _srtpEnableAes128Sha1_32CryptoCipher;
+@synthesize srtpEnableEncryptedRtpHeaderExtensions = _srtpEnableEncryptedRtpHeaderExtensions;
+@synthesize sframeRequireFrameEncryption = _sframeRequireFrameEncryption;
+
+- (instancetype)initWithSrtpEnableGcmCryptoSuites:(BOOL)srtpEnableGcmCryptoSuites
+ srtpEnableAes128Sha1_32CryptoCipher:(BOOL)srtpEnableAes128Sha1_32CryptoCipher
+ srtpEnableEncryptedRtpHeaderExtensions:(BOOL)srtpEnableEncryptedRtpHeaderExtensions
+ sframeRequireFrameEncryption:(BOOL)sframeRequireFrameEncryption {
+ if (self = [super init]) {
+ _srtpEnableGcmCryptoSuites = srtpEnableGcmCryptoSuites;
+ _srtpEnableAes128Sha1_32CryptoCipher = srtpEnableAes128Sha1_32CryptoCipher;
+ _srtpEnableEncryptedRtpHeaderExtensions = srtpEnableEncryptedRtpHeaderExtensions;
+ _sframeRequireFrameEncryption = sframeRequireFrameEncryption;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel+Private.h
new file mode 100644
index 0000000000..2cdbdabec6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel+Private.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannel.h"
+
+#include "api/data_channel_interface.h"
+#include "api/scoped_refptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface RTC_OBJC_TYPE (RTCDataBuffer)
+()
+
+ /**
+  * The native DataBuffer representation of this RTCDataBuffer object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+ @property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;
+
+/** Initialize an RTCDataBuffer from a native DataBuffer. */
+- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer &)nativeBuffer;
+
+@end
+
+@interface RTC_OBJC_TYPE (RTCDataChannel)
+()
+
+ /** Initialize an RTCDataChannel from a native DataChannelInterface. */
+ - (instancetype)initWithFactory
+ : (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeDataChannel
+ : (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState:
+ (RTCDataChannelState)state;
+
++ (RTCDataChannelState)dataChannelStateForNativeState:
+ (webrtc::DataChannelInterface::DataState)nativeState;
+
++ (NSString *)stringForState:(RTCDataChannelState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.h
new file mode 100644
index 0000000000..89eb58bc3f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AvailabilityMacros.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDataBuffer) : NSObject
+
+/** NSData representation of the underlying buffer. */
+@property(nonatomic, readonly) NSData *data;
+
+/** Indicates whether `data` contains UTF-8 or binary data. */
+@property(nonatomic, readonly) BOOL isBinary;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Initialize an RTCDataBuffer from NSData. `isBinary` indicates whether `data`
+ * contains UTF-8 or binary data.
+ */
+- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary;
+
+@end
+
+@class RTC_OBJC_TYPE(RTCDataChannel);
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCDataChannelDelegate)<NSObject>
+
+ /** The data channel state changed. */
+ - (void)dataChannelDidChangeState : (RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel;
+
+/** The data channel successfully received a data buffer. */
+- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+ didReceiveMessageWithBuffer:(RTC_OBJC_TYPE(RTCDataBuffer) *)buffer;
+
+@optional
+/** The data channel's `bufferedAmount` changed. */
+- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+ didChangeBufferedAmount:(uint64_t)amount;
+
+@end
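+
+/**
+ * A minimal delegate sketch (illustrative, not part of the upstream sources);
+ * MyObserver is a hypothetical class conforming to
+ * RTC_OBJC_TYPE(RTCDataChannelDelegate).
+ *
+ *   @implementation MyObserver
+ *   - (void)dataChannelDidChangeState:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel {
+ *     NSLog(@"state: %ld", (long)dataChannel.readyState);
+ *   }
+ *   - (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+ *       didReceiveMessageWithBuffer:(RTC_OBJC_TYPE(RTCDataBuffer) *)buffer {
+ *     if (!buffer.isBinary) {
+ *       NSString *text = [[NSString alloc] initWithData:buffer.data
+ *                                              encoding:NSUTF8StringEncoding];
+ *       NSLog(@"received: %@", text);
+ *     }
+ *   }
+ *   @end
+ */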
+
+/** Represents the state of the data channel. */
+typedef NS_ENUM(NSInteger, RTCDataChannelState) {
+ RTCDataChannelStateConnecting,
+ RTCDataChannelStateOpen,
+ RTCDataChannelStateClosing,
+ RTCDataChannelStateClosed,
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDataChannel) : NSObject
+
+/**
+ * A label that can be used to distinguish this data channel from other data
+ * channel objects.
+ */
+@property(nonatomic, readonly) NSString *label;
+
+/** Whether the data channel can send messages in unreliable mode. */
+@property(nonatomic, readonly) BOOL isReliable DEPRECATED_ATTRIBUTE;
+
+/** Returns whether this data channel is ordered or not. */
+@property(nonatomic, readonly) BOOL isOrdered;
+
+/** Deprecated. Use maxPacketLifeTime. */
+@property(nonatomic, readonly) NSUInteger maxRetransmitTime DEPRECATED_ATTRIBUTE;
+
+/**
+ * The length of the time window (in milliseconds) during which transmissions
+ * and retransmissions may occur in unreliable mode.
+ */
+@property(nonatomic, readonly) uint16_t maxPacketLifeTime;
+
+/**
+ * The maximum number of retransmissions that are attempted in unreliable mode.
+ */
+@property(nonatomic, readonly) uint16_t maxRetransmits;
+
+/**
+ * The name of the sub-protocol used with this data channel, if any. Otherwise
+ * this returns an empty string.
+ */
+@property(nonatomic, readonly) NSString *protocol;
+
+/**
+ * Returns whether this data channel was negotiated by the application or not.
+ */
+@property(nonatomic, readonly) BOOL isNegotiated;
+
+/** Deprecated. Use channelId. */
+@property(nonatomic, readonly) NSInteger streamId DEPRECATED_ATTRIBUTE;
+
+/** The identifier for this data channel. */
+@property(nonatomic, readonly) int channelId;
+
+/** The state of the data channel. */
+@property(nonatomic, readonly) RTCDataChannelState readyState;
+
+/**
+ * The number of bytes of application data that have been queued using
+ * `sendData:` but that have not yet been transmitted to the network.
+ */
+@property(nonatomic, readonly) uint64_t bufferedAmount;
+
+/** The delegate for this data channel. */
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCDataChannelDelegate)> delegate;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Closes the data channel. */
+- (void)close;
+
+/** Attempt to send `data` on this data channel's underlying data transport. */
+- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data;
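+
+/**
+ * A usage sketch (illustrative, not part of the upstream sources): sending a
+ * UTF-8 text message once the channel is open. `channel` is assumed to be an
+ * RTC_OBJC_TYPE(RTCDataChannel) obtained from a peer connection.
+ *
+ *   if (channel.readyState == RTCDataChannelStateOpen) {
+ *     NSData *payload = [@"hello" dataUsingEncoding:NSUTF8StringEncoding];
+ *     RTC_OBJC_TYPE(RTCDataBuffer) *buffer =
+ *         [[RTC_OBJC_TYPE(RTCDataBuffer) alloc] initWithData:payload isBinary:NO];
+ *     BOOL queued = [channel sendData:buffer];  // NO means the send failed.
+ *   }
+ */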
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.mm
new file mode 100644
index 0000000000..4a79cefdb4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.mm
@@ -0,0 +1,220 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannel+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+#include <memory>
+
+namespace webrtc {
+
+class DataChannelDelegateAdapter : public DataChannelObserver {
+ public:
+ DataChannelDelegateAdapter(RTC_OBJC_TYPE(RTCDataChannel) * channel) { channel_ = channel; }
+
+ void OnStateChange() override {
+ [channel_.delegate dataChannelDidChangeState:channel_];
+ }
+
+ void OnMessage(const DataBuffer& buffer) override {
+ RTC_OBJC_TYPE(RTCDataBuffer) *data_buffer =
+ [[RTC_OBJC_TYPE(RTCDataBuffer) alloc] initWithNativeBuffer:buffer];
+ [channel_.delegate dataChannel:channel_
+ didReceiveMessageWithBuffer:data_buffer];
+ }
+
+ void OnBufferedAmountChange(uint64_t previousAmount) override {
+ id<RTC_OBJC_TYPE(RTCDataChannelDelegate)> delegate = channel_.delegate;
+ SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
+ }
+ }
+
+ private:
+ __weak RTC_OBJC_TYPE(RTCDataChannel) * channel_;
+};
+}
+
+@implementation RTC_OBJC_TYPE (RTCDataBuffer) {
+ std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
+}
+
+- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
+ NSParameterAssert(data);
+ if (self = [super init]) {
+ rtc::CopyOnWriteBuffer buffer(
+ reinterpret_cast<const uint8_t*>(data.bytes), data.length);
+ _dataBuffer.reset(new webrtc::DataBuffer(buffer, isBinary));
+ }
+ return self;
+}
+
+- (NSData *)data {
+ return [NSData dataWithBytes:_dataBuffer->data.data()
+ length:_dataBuffer->data.size()];
+}
+
+- (BOOL)isBinary {
+ return _dataBuffer->binary;
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer {
+ if (self = [super init]) {
+ _dataBuffer.reset(new webrtc::DataBuffer(nativeBuffer));
+ }
+ return self;
+}
+
+- (const webrtc::DataBuffer *)nativeDataBuffer {
+ return _dataBuffer.get();
+}
+
+@end
+
+@implementation RTC_OBJC_TYPE (RTCDataChannel) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
+ std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
+ BOOL _isObserverRegistered;
+}
+
+@synthesize delegate = _delegate;
+
+- (void)dealloc {
+ // Handles unregistering the observer properly. We need to do this because
+ // there may still be other references to the underlying data channel.
+ _nativeDataChannel->UnregisterObserver();
+}
+
+- (NSString *)label {
+ return [NSString stringForStdString:_nativeDataChannel->label()];
+}
+
+- (BOOL)isReliable {
+ return _nativeDataChannel->reliable();
+}
+
+- (BOOL)isOrdered {
+ return _nativeDataChannel->ordered();
+}
+
+- (NSUInteger)maxRetransmitTime {
+ return self.maxPacketLifeTime;
+}
+
+- (uint16_t)maxPacketLifeTime {
+ return _nativeDataChannel->maxRetransmitTime();
+}
+
+- (uint16_t)maxRetransmits {
+ return _nativeDataChannel->maxRetransmits();
+}
+
+- (NSString *)protocol {
+ return [NSString stringForStdString:_nativeDataChannel->protocol()];
+}
+
+- (BOOL)isNegotiated {
+ return _nativeDataChannel->negotiated();
+}
+
+- (NSInteger)streamId {
+ return self.channelId;
+}
+
+- (int)channelId {
+ return _nativeDataChannel->id();
+}
+
+- (RTCDataChannelState)readyState {
+ return [[self class] dataChannelStateForNativeState:
+ _nativeDataChannel->state()];
+}
+
+- (uint64_t)bufferedAmount {
+ return _nativeDataChannel->buffered_amount();
+}
+
+- (void)close {
+ _nativeDataChannel->Close();
+}
+
+- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data {
+ return _nativeDataChannel->Send(*data.nativeDataBuffer);
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDataChannel):\n%ld\n%@\n%@",
+ (long)self.channelId,
+ self.label,
+ [[self class] stringForState:self.readyState]];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeDataChannel:
+ (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
+ NSParameterAssert(nativeDataChannel);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeDataChannel = nativeDataChannel;
+ _observer.reset(new webrtc::DataChannelDelegateAdapter(self));
+ _nativeDataChannel->RegisterObserver(_observer.get());
+ }
+ return self;
+}
+
++ (webrtc::DataChannelInterface::DataState)
+ nativeDataChannelStateForState:(RTCDataChannelState)state {
+ switch (state) {
+ case RTCDataChannelStateConnecting:
+ return webrtc::DataChannelInterface::DataState::kConnecting;
+ case RTCDataChannelStateOpen:
+ return webrtc::DataChannelInterface::DataState::kOpen;
+ case RTCDataChannelStateClosing:
+ return webrtc::DataChannelInterface::DataState::kClosing;
+ case RTCDataChannelStateClosed:
+ return webrtc::DataChannelInterface::DataState::kClosed;
+ }
+}
+
++ (RTCDataChannelState)dataChannelStateForNativeState:
+ (webrtc::DataChannelInterface::DataState)nativeState {
+ switch (nativeState) {
+ case webrtc::DataChannelInterface::DataState::kConnecting:
+ return RTCDataChannelStateConnecting;
+ case webrtc::DataChannelInterface::DataState::kOpen:
+ return RTCDataChannelStateOpen;
+ case webrtc::DataChannelInterface::DataState::kClosing:
+ return RTCDataChannelStateClosing;
+ case webrtc::DataChannelInterface::DataState::kClosed:
+ return RTCDataChannelStateClosed;
+ }
+}
+
++ (NSString *)stringForState:(RTCDataChannelState)state {
+ switch (state) {
+ case RTCDataChannelStateConnecting:
+ return @"Connecting";
+ case RTCDataChannelStateOpen:
+ return @"Open";
+ case RTCDataChannelStateClosing:
+ return @"Closing";
+ case RTCDataChannelStateClosed:
+ return @"Closed";
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h
new file mode 100644
index 0000000000..5aef10fcef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannelConfiguration.h"
+
+#include "api/data_channel_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration)
+()
+
+ @property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h
new file mode 100644
index 0000000000..9459ae0a13
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AvailabilityMacros.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration) : NSObject
+
+/** Set to YES if ordered delivery is required. */
+@property(nonatomic, assign) BOOL isOrdered;
+
+/** Deprecated. Use maxPacketLifeTime. */
+@property(nonatomic, assign) NSInteger maxRetransmitTimeMs DEPRECATED_ATTRIBUTE;
+
+/**
+ * Max period in milliseconds in which retransmissions will be sent. After this
+ * time, no more retransmissions will be sent. -1 if unset.
+ */
+@property(nonatomic, assign) int maxPacketLifeTime;
+
+/** The max number of retransmissions. -1 if unset. */
+@property(nonatomic, assign) int maxRetransmits;
+
+/** Set to YES if the channel has been externally negotiated and we do not send
+ * in-band signaling in the form of an "open" message.
+ */
+@property(nonatomic, assign) BOOL isNegotiated;
+
+/** Deprecated. Use channelId. */
+@property(nonatomic, assign) int streamId DEPRECATED_ATTRIBUTE;
+
+/** The id of the data channel. */
+@property(nonatomic, assign) int channelId;
+
+/** Set by the application and opaque to the WebRTC implementation. */
+@property(nonatomic) NSString* protocol;
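+
+/**
+ * A configuration sketch (illustrative, not part of the upstream sources):
+ * an externally negotiated, unordered channel with a bounded retransmit count.
+ *
+ *   RTC_OBJC_TYPE(RTCDataChannelConfiguration) *config =
+ *       [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
+ *   config.isOrdered = NO;
+ *   config.maxRetransmits = 3;
+ *   config.isNegotiated = YES;
+ *   config.channelId = 1;  // Both peers must agree on this id out of band.
+ */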
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm
new file mode 100644
index 0000000000..bf775b1afd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannelConfiguration+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCDataChannelConfiguration)
+
+@synthesize nativeDataChannelInit = _nativeDataChannelInit;
+
+- (BOOL)isOrdered {
+ return _nativeDataChannelInit.ordered;
+}
+
+- (void)setIsOrdered:(BOOL)isOrdered {
+ _nativeDataChannelInit.ordered = isOrdered;
+}
+
+- (NSInteger)maxRetransmitTimeMs {
+ return self.maxPacketLifeTime;
+}
+
+- (void)setMaxRetransmitTimeMs:(NSInteger)maxRetransmitTimeMs {
+ self.maxPacketLifeTime = maxRetransmitTimeMs;
+}
+
+- (int)maxPacketLifeTime {
+  // Mirror the maxRetransmits getter: dereferencing the optional while it is
+  // unset is undefined, and the header documents -1 for the unset case.
+  if (_nativeDataChannelInit.maxRetransmitTime) {
+    return *_nativeDataChannelInit.maxRetransmitTime;
+  }
+  return -1;
+}
+
+- (void)setMaxPacketLifeTime:(int)maxPacketLifeTime {
+ _nativeDataChannelInit.maxRetransmitTime = maxPacketLifeTime;
+}
+
+- (int)maxRetransmits {
+ if (_nativeDataChannelInit.maxRetransmits) {
+ return *_nativeDataChannelInit.maxRetransmits;
+ } else {
+ return -1;
+ }
+}
+
+- (void)setMaxRetransmits:(int)maxRetransmits {
+ _nativeDataChannelInit.maxRetransmits = maxRetransmits;
+}
+
+- (NSString *)protocol {
+ return [NSString stringForStdString:_nativeDataChannelInit.protocol];
+}
+
+- (void)setProtocol:(NSString *)protocol {
+ _nativeDataChannelInit.protocol = [NSString stdStringForString:protocol];
+}
+
+- (BOOL)isNegotiated {
+ return _nativeDataChannelInit.negotiated;
+}
+
+- (void)setIsNegotiated:(BOOL)isNegotiated {
+ _nativeDataChannelInit.negotiated = isNegotiated;
+}
+
+- (int)streamId {
+ return self.channelId;
+}
+
+- (void)setStreamId:(int)streamId {
+ self.channelId = streamId;
+}
+
+- (int)channelId {
+ return _nativeDataChannelInit.id;
+}
+
+- (void)setChannelId:(int)channelId {
+ _nativeDataChannelInit.id = channelId;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h
new file mode 100644
index 0000000000..49a62164cd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDtmfSender.h"
+
+#include "api/dtmf_sender_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCDtmfSender) : NSObject <RTC_OBJC_TYPE(RTCDtmfSender)>
+
+@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Initialize an RTCDtmfSender with a native DtmfSenderInterface. */
+- (instancetype)initWithNativeDtmfSender:
+ (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.h
new file mode 100644
index 0000000000..0f1b6ba4da
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCDtmfSender)<NSObject>
+
+ /**
+ * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise
+ * returns false. To be able to send DTMF, the associated RTCRtpSender must be
+ * able to send packets, and a "telephone-event" codec must be negotiated.
+ */
+ @property(nonatomic, readonly) BOOL canInsertDtmf;
+
+/**
+ * Queues a task that sends the DTMF tones. The tones parameter is treated
+ * as a series of characters. The characters 0 through 9, A through D, #, and *
+ * generate the associated DTMF tones. The characters a to d are equivalent
+ * to A to D. The character ',' indicates a delay of 2 seconds before
+ * processing the next character in the tones parameter.
+ *
+ * Unrecognized characters are ignored.
+ *
+ * @param duration The parameter indicates the duration to use for each
+ * character passed in the tones parameter. The duration cannot be more
+ * than 6000 ms or less than 70 ms.
+ *
+ * @param interToneGap The parameter indicates the gap between tones.
+ * This parameter must be at least 50 ms but should be as short as
+ * possible.
+ *
+ * If InsertDtmf is called on the same object while an existing task for this
+ * object to generate DTMF is still running, the previous task is canceled.
+ * Returns true on success and false on failure.
+ */
+- (BOOL)insertDtmf:(nonnull NSString *)tones
+ duration:(NSTimeInterval)duration
+ interToneGap:(NSTimeInterval)interToneGap;
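+
+/**
+ * A usage sketch (illustrative, not part of the upstream sources). Note that
+ * `duration` and `interToneGap` are NSTimeInterval values in seconds, even
+ * though the limits above are documented in milliseconds; `sender` is assumed
+ * to conform to RTC_OBJC_TYPE(RTCDtmfSender).
+ *
+ *   if (sender.canInsertDtmf) {
+ *     // 100 ms per tone, 70 ms between tones.
+ *     [sender insertDtmf:@"1234#" duration:0.1 interToneGap:0.07];
+ *   }
+ */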
+
+/** The tones remaining to be played out. */
+- (nonnull NSString *)remainingTones;
+
+/**
+ * The current tone duration value. This value will be the value last set via the
+ * insertDtmf method, or the default value of 100 ms if insertDtmf was never called.
+ */
+- (NSTimeInterval)duration;
+
+/**
+ * The current value of the between-tone gap. This value will be the value last set
+ * via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
+ * called.
+ */
+- (NSTimeInterval)interToneGap;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.mm
new file mode 100644
index 0000000000..ee3b79cd37
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.mm
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDtmfSender+Private.h"
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/time_utils.h"
+
+@implementation RTC_OBJC_TYPE (RTCDtmfSender) {
+ rtc::scoped_refptr<webrtc::DtmfSenderInterface> _nativeDtmfSender;
+}
+
+- (BOOL)canInsertDtmf {
+ return _nativeDtmfSender->CanInsertDtmf();
+}
+
+- (BOOL)insertDtmf:(nonnull NSString *)tones
+ duration:(NSTimeInterval)duration
+ interToneGap:(NSTimeInterval)interToneGap {
+ RTC_DCHECK(tones != nil);
+
+ int durationMs = static_cast<int>(duration * rtc::kNumMillisecsPerSec);
+ int interToneGapMs = static_cast<int>(interToneGap * rtc::kNumMillisecsPerSec);
+ return _nativeDtmfSender->InsertDtmf(
+ [NSString stdStringForString:tones], durationMs, interToneGapMs);
+}
+
+- (nonnull NSString *)remainingTones {
+ return [NSString stringForStdString:_nativeDtmfSender->tones()];
+}
+
+- (NSTimeInterval)duration {
+ return static_cast<NSTimeInterval>(_nativeDtmfSender->duration()) / rtc::kNumMillisecsPerSec;
+}
+
+- (NSTimeInterval)interToneGap {
+ return static_cast<NSTimeInterval>(_nativeDtmfSender->inter_tone_gap()) /
+ rtc::kNumMillisecsPerSec;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDtmfSender) {\n remainingTones: %@\n "
+ @"duration: %f sec\n interToneGap: %f sec\n}",
+ [self remainingTones],
+ [self duration],
+ [self interToneGap]];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
+ return _nativeDtmfSender;
+}
+
+- (instancetype)initWithNativeDtmfSender:
+ (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
+ NSParameterAssert(nativeDtmfSender);
+ if (self = [super init]) {
+ _nativeDtmfSender = nativeDtmfSender;
+ RTCLogInfo(
+ @"RTC_OBJC_TYPE(RTCDtmfSender)(%p): created DTMF sender: %@", self, self.description);
+ }
+ return self;
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h
new file mode 100644
index 0000000000..a078b0aded
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "base/RTCEncodedImage.h"
+
+#include "api/video/encoded_image.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/* Interfaces for converting to/from internal C++ formats. */
+@interface RTC_OBJC_TYPE (RTCEncodedImage)
+(Private)
+
+ - (instancetype)initWithNativeEncodedImage : (const webrtc::EncodedImage &)encodedImage;
+- (webrtc::EncodedImage)nativeEncodedImage;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
new file mode 100644
index 0000000000..7f8ae739e0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCEncodedImage+Private.h"
+
+#import <objc/runtime.h>
+
+#include "rtc_base/numerics/safe_conversions.h"
+
+namespace {
+// An implementation of EncodedImageBufferInterface that doesn't perform any copies.
+class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface {
+ public:
+ static rtc::scoped_refptr<ObjCEncodedImageBuffer> Create(NSData *data) {
+ return rtc::make_ref_counted<ObjCEncodedImageBuffer>(data);
+ }
+ const uint8_t *data() const override { return static_cast<const uint8_t *>(data_.bytes); }
+ // TODO(bugs.webrtc.org/9378): delete this non-const data method.
+ uint8_t *data() override {
+ return const_cast<uint8_t *>(static_cast<const uint8_t *>(data_.bytes));
+ }
+ size_t size() const override { return data_.length; }
+
+ protected:
+ explicit ObjCEncodedImageBuffer(NSData *data) : data_(data) {}
+ ~ObjCEncodedImageBuffer() {}
+
+ NSData *data_;
+};
+}
+
+// A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable with associated
+// objects.
+@interface RTCWrappedEncodedImageBuffer : NSObject
+@property(nonatomic) rtc::scoped_refptr<webrtc::EncodedImageBufferInterface> buffer;
+- (instancetype)initWithEncodedImageBuffer:
+ (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer;
+@end
+@implementation RTCWrappedEncodedImageBuffer
+@synthesize buffer = _buffer;
+- (instancetype)initWithEncodedImageBuffer:
+ (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer {
+ self = [super init];
+ if (self) {
+ _buffer = buffer;
+ }
+ return self;
+}
+@end
+
+@implementation RTC_OBJC_TYPE (RTCEncodedImage)
+(Private)
+
+ - (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)encodedData {
+ RTCWrappedEncodedImageBuffer *wrappedBuffer =
+ objc_getAssociatedObject(self, @selector(encodedData));
+ return wrappedBuffer.buffer;
+}
+
+- (void)setEncodedData:(rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer {
+ return objc_setAssociatedObject(
+ self,
+ @selector(encodedData),
+ [[RTCWrappedEncodedImageBuffer alloc] initWithEncodedImageBuffer:buffer],
+ OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+}
+
+- (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage {
+ if (self = [super init]) {
+    // A reference to the encodedData must be stored so that it's kept alive as
+    // long as self.buffer references its underlying data.
+ self.encodedData = encodedImage.GetEncodedData();
+ // Wrap the buffer in NSData without copying, do not take ownership.
+ self.buffer = [NSData dataWithBytesNoCopy:self.encodedData->data()
+ length:encodedImage.size()
+ freeWhenDone:NO];
+ self.encodedWidth = rtc::dchecked_cast<int32_t>(encodedImage._encodedWidth);
+ self.encodedHeight = rtc::dchecked_cast<int32_t>(encodedImage._encodedHeight);
+ self.timeStamp = encodedImage.Timestamp();
+ self.captureTimeMs = encodedImage.capture_time_ms_;
+ self.ntpTimeMs = encodedImage.ntp_time_ms_;
+ self.flags = encodedImage.timing_.flags;
+ self.encodeStartMs = encodedImage.timing_.encode_start_ms;
+ self.encodeFinishMs = encodedImage.timing_.encode_finish_ms;
+ self.frameType = static_cast<RTCFrameType>(encodedImage._frameType);
+ self.rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
+ self.qp = @(encodedImage.qp_);
+ self.contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
+ RTCVideoContentTypeScreenshare :
+ RTCVideoContentTypeUnspecified;
+ }
+
+ return self;
+}
+
+- (webrtc::EncodedImage)nativeEncodedImage {
+  // Build a native EncodedImage that shares the underlying buffer without copying.
+ webrtc::EncodedImage encodedImage;
+ if (self.encodedData) {
+ encodedImage.SetEncodedData(self.encodedData);
+ } else if (self.buffer) {
+ encodedImage.SetEncodedData(ObjCEncodedImageBuffer::Create(self.buffer));
+ }
+ encodedImage.set_size(self.buffer.length);
+ encodedImage._encodedWidth = rtc::dchecked_cast<uint32_t>(self.encodedWidth);
+ encodedImage._encodedHeight = rtc::dchecked_cast<uint32_t>(self.encodedHeight);
+ encodedImage.SetTimestamp(self.timeStamp);
+ encodedImage.capture_time_ms_ = self.captureTimeMs;
+ encodedImage.ntp_time_ms_ = self.ntpTimeMs;
+ encodedImage.timing_.flags = self.flags;
+ encodedImage.timing_.encode_start_ms = self.encodeStartMs;
+ encodedImage.timing_.encode_finish_ms = self.encodeFinishMs;
+ encodedImage._frameType = webrtc::VideoFrameType(self.frameType);
+ encodedImage.rotation_ = webrtc::VideoRotation(self.rotation);
+ encodedImage.qp_ = self.qp ? self.qp.intValue : -1;
+ encodedImage.content_type_ = (self.contentType == RTCVideoContentTypeScreenshare) ?
+ webrtc::VideoContentType::SCREENSHARE :
+ webrtc::VideoContentType::UNSPECIFIED;
+
+ return encodedImage;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h
new file mode 100644
index 0000000000..1f290d8a66
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+/** The only valid value for the following keys, if set, is kRTCFieldTrialEnabledValue. */
+RTC_EXTERN NSString * const kRTCFieldTrialAudioForceNoTWCCKey;
+RTC_EXTERN NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey;
+RTC_EXTERN NSString * const kRTCFieldTrialSendSideBweWithOverheadKey;
+RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03AdvertisedKey;
+RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03Key;
+RTC_EXTERN NSString * const kRTCFieldTrialH264HighProfileKey;
+RTC_EXTERN NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey;
+RTC_EXTERN NSString * const kRTCFieldTrialUseNWPathMonitor;
+
+/** The valid value for field trials above. */
+RTC_EXTERN NSString * const kRTCFieldTrialEnabledValue;
+
+/** Initialize field trials using a dictionary mapping field trial keys to their
+ * values. See above for valid keys and values. Must be called before any other
+ * call into WebRTC. See: webrtc/system_wrappers/include/field_trial.h
+ */
+RTC_EXTERN void RTCInitFieldTrialDictionary(NSDictionary<NSString *, NSString *> *fieldTrials);
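+
+/**
+ * A usage sketch (illustrative, not part of the upstream sources): enabling
+ * the H264 high-profile trial before any other WebRTC call is made.
+ *
+ *   RTCInitFieldTrialDictionary(@{
+ *     kRTCFieldTrialH264HighProfileKey : kRTCFieldTrialEnabledValue,
+ *   });
+ */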
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm
new file mode 100644
index 0000000000..852aeeec84
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCFieldTrials.h"
+
+#include <memory>
+
+#import "base/RTCLogging.h"
+
+#include "system_wrappers/include/field_trial.h"
+
+NSString * const kRTCFieldTrialAudioForceNoTWCCKey = @"WebRTC-Audio-ForceNoTWCC";
+NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC";
+NSString * const kRTCFieldTrialSendSideBweWithOverheadKey = @"WebRTC-SendSideBwe-WithOverhead";
+NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised";
+NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03";
+NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile";
+NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey =
+ @"WebRTC-Audio-MinimizeResamplingOnMobile";
+NSString * const kRTCFieldTrialUseNWPathMonitor = @"WebRTC-Network-UseNWPathMonitor";
+NSString * const kRTCFieldTrialEnabledValue = @"Enabled";
+
+// InitFieldTrialsFromString stores the char*, so the char array must outlive
+// the application.
+static char *gFieldTrialInitString = nullptr;
+
+void RTCInitFieldTrialDictionary(NSDictionary<NSString *, NSString *> *fieldTrials) {
+ if (!fieldTrials) {
+ RTCLogWarning(@"No fieldTrials provided.");
+ return;
+ }
+ // Assemble the keys and values into the field trial string.
+ // We don't perform any extra format checking. That should be done by the underlying WebRTC calls.
+ NSMutableString *fieldTrialInitString = [NSMutableString string];
+ for (NSString *key in fieldTrials) {
+ NSString *fieldTrialEntry = [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]];
+ [fieldTrialInitString appendString:fieldTrialEntry];
+ }
+ size_t len = fieldTrialInitString.length + 1;
+ if (gFieldTrialInitString != nullptr) {
+ delete[] gFieldTrialInitString;
+ }
+ gFieldTrialInitString = new char[len];
+ if (![fieldTrialInitString getCString:gFieldTrialInitString
+ maxLength:len
+ encoding:NSUTF8StringEncoding]) {
+ RTCLogError(@"Failed to convert field trial string.");
+ return;
+ }
+ webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString);
+}
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.h
new file mode 100644
index 0000000000..cb397c9633
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) {
+ RTCFileLoggerSeverityVerbose,
+ RTCFileLoggerSeverityInfo,
+ RTCFileLoggerSeverityWarning,
+ RTCFileLoggerSeverityError
+};
+
+typedef NS_ENUM(NSUInteger, RTCFileLoggerRotationType) {
+ RTCFileLoggerTypeCall,
+ RTCFileLoggerTypeApp,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+// This class intercepts WebRTC logs and saves them to a file. The file size
+// will not exceed the given maximum byte size. When the maximum byte size is
+// reached, logs are rotated according to the specified rotationType.
+// For RTCFileLoggerTypeCall, logs from the beginning and the end
+// are preserved while the middle section is overwritten.
+// For RTCFileLoggerTypeApp, the oldest log is overwritten.
+// This class is not thread-safe.
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCFileLogger) : NSObject
+
+// The severity level to capture. The default is RTCFileLoggerSeverityInfo.
+@property(nonatomic, assign) RTCFileLoggerSeverity severity;
+
+// The rotation type for this file logger. The default is
+// RTCFileLoggerTypeCall.
+@property(nonatomic, readonly) RTCFileLoggerRotationType rotationType;
+
+// Disables buffering disk writes. Should be set before `start`. Buffering
+// is enabled by default for performance.
+@property(nonatomic, assign) BOOL shouldDisableBuffering;
+
+// Default constructor provides default settings for dir path, file size and
+// rotation type.
+- (instancetype)init;
+
+// Create file logger with default rotation type.
+- (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize;
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize
+ rotationType:(RTCFileLoggerRotationType)rotationType NS_DESIGNATED_INITIALIZER;
+
+// Starts writing WebRTC logs to disk if not already started. Overwrites any
+// existing file(s).
+- (void)start;
+
+// Stops writing WebRTC logs to disk. This method is also called on dealloc.
+- (void)stop;
+
+// Returns the current contents of the logs, or nil if the logger is running
+// (i.e. start has been called without a matching stop).
+- (nullable NSData *)logData;
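+
+/**
+ * A usage sketch (illustrative, not part of the upstream sources): capturing
+ * logs for a session and reading them back after stopping.
+ *
+ *   RTC_OBJC_TYPE(RTCFileLogger) *logger =
+ *       [[RTC_OBJC_TYPE(RTCFileLogger) alloc] init];
+ *   logger.severity = RTCFileLoggerSeverityWarning;
+ *   [logger start];
+ *   // ... run the call ...
+ *   [logger stop];
+ *   NSData *logs = [logger logData];  // nil while the logger is running.
+ */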
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.mm
new file mode 100644
index 0000000000..9562245611
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.mm
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCFileLogger.h"
+
+#include <memory>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/file_rotating_stream.h"
+#include "rtc_base/log_sinks.h"
+#include "rtc_base/logging.h"
+
+NSString *const kDefaultLogDirName = @"webrtc_logs";
+NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
+const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
+
+@implementation RTC_OBJC_TYPE (RTCFileLogger) {
+ BOOL _hasStarted;
+ NSString *_dirPath;
+ NSUInteger _maxFileSize;
+ std::unique_ptr<rtc::FileRotatingLogSink> _logSink;
+}
+
+@synthesize severity = _severity;
+@synthesize rotationType = _rotationType;
+@synthesize shouldDisableBuffering = _shouldDisableBuffering;
+
+- (instancetype)init {
+ NSArray *paths = NSSearchPathForDirectoriesInDomains(
+ NSDocumentDirectory, NSUserDomainMask, YES);
+ NSString *documentsDirPath = [paths firstObject];
+ NSString *defaultDirPath =
+ [documentsDirPath stringByAppendingPathComponent:kDefaultLogDirName];
+ return [self initWithDirPath:defaultDirPath
+ maxFileSize:kDefaultMaxFileSize];
+}
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize {
+ return [self initWithDirPath:dirPath
+ maxFileSize:maxFileSize
+ rotationType:RTCFileLoggerTypeCall];
+}
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize
+ rotationType:(RTCFileLoggerRotationType)rotationType {
+ NSParameterAssert(dirPath.length);
+ NSParameterAssert(maxFileSize);
+ if (self = [super init]) {
+ BOOL isDir = NO;
+ NSFileManager *fileManager = [NSFileManager defaultManager];
+ if ([fileManager fileExistsAtPath:dirPath isDirectory:&isDir]) {
+ if (!isDir) {
+ // Bail if something already exists there.
+ return nil;
+ }
+ } else {
+ if (![fileManager createDirectoryAtPath:dirPath
+ withIntermediateDirectories:NO
+ attributes:nil
+ error:nil]) {
+ // Bail if we failed to create a directory.
+ return nil;
+ }
+ }
+ _dirPath = dirPath;
+ _maxFileSize = maxFileSize;
+ _severity = RTCFileLoggerSeverityInfo;
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self stop];
+}
+
+- (void)start {
+ if (_hasStarted) {
+ return;
+ }
+ switch (_rotationType) {
+ case RTCFileLoggerTypeApp:
+ _logSink.reset(
+ new rtc::FileRotatingLogSink(_dirPath.UTF8String,
+ kRTCFileLoggerRotatingLogPrefix,
+ _maxFileSize,
+ _maxFileSize / 10));
+ break;
+ case RTCFileLoggerTypeCall:
+ _logSink.reset(
+ new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
+ _maxFileSize));
+ break;
+ }
+ if (!_logSink->Init()) {
+ RTC_LOG(LS_ERROR) << "Failed to open log files at path: " << _dirPath.UTF8String;
+ _logSink.reset();
+ return;
+ }
+ if (_shouldDisableBuffering) {
+ _logSink->DisableBuffering();
+ }
+ rtc::LogMessage::LogThreads(true);
+ rtc::LogMessage::LogTimestamps(true);
+ rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
+ _hasStarted = YES;
+}
+
+- (void)stop {
+ if (!_hasStarted) {
+ return;
+ }
+ RTC_DCHECK(_logSink);
+ rtc::LogMessage::RemoveLogToStream(_logSink.get());
+ _hasStarted = NO;
+ _logSink.reset();
+}
+
+- (nullable NSData *)logData {
+ if (_hasStarted) {
+ return nil;
+ }
+ NSMutableData* logData = [NSMutableData data];
+ std::unique_ptr<rtc::FileRotatingStreamReader> stream;
+ switch(_rotationType) {
+ case RTCFileLoggerTypeApp:
+ stream = std::make_unique<rtc::FileRotatingStreamReader>(_dirPath.UTF8String,
+ kRTCFileLoggerRotatingLogPrefix);
+ break;
+ case RTCFileLoggerTypeCall:
+ stream = std::make_unique<rtc::CallSessionFileRotatingStreamReader>(_dirPath.UTF8String);
+ break;
+ }
+ size_t bufferSize = stream->GetSize();
+ if (bufferSize == 0) {
+ return logData;
+ }
+  // Allocate memory using malloc so we can pass it directly to NSData without
+ // copying.
+ std::unique_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
+ size_t read = stream->ReadAll(buffer.get(), bufferSize);
+ logData = [[NSMutableData alloc] initWithBytesNoCopy:buffer.release()
+ length:read];
+ return logData;
+}
+
+#pragma mark - Private
+
+- (rtc::LoggingSeverity)rtcSeverity {
+ switch (_severity) {
+ case RTCFileLoggerSeverityVerbose:
+ return rtc::LS_VERBOSE;
+ case RTCFileLoggerSeverityInfo:
+ return rtc::LS_INFO;
+ case RTCFileLoggerSeverityWarning:
+ return rtc::LS_WARNING;
+ case RTCFileLoggerSeverityError:
+ return rtc::LS_ERROR;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h
new file mode 100644
index 0000000000..409e16b608
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate.h"
+
+#include <memory>
+
+#include "api/jsep.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCIceCandidate)
+()
+
+ /**
+ * The native IceCandidateInterface representation of this RTCIceCandidate
+ * object. This is needed to pass to the underlying C++ APIs.
+ */
+ @property(nonatomic, readonly) std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
+
+/**
+ * Initialize an RTCIceCandidate from a native IceCandidateInterface. No
+ * ownership is taken of the native candidate.
+ */
+- (instancetype)initWithNativeCandidate:(const webrtc::IceCandidateInterface *)candidate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.h
new file mode 100644
index 0000000000..f84843af6c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCIceCandidate) : NSObject
+
+/**
+ * If present, the identifier of the "media stream identification" for the media
+ * component this candidate is associated with.
+ */
+@property(nonatomic, readonly, nullable) NSString *sdpMid;
+
+/**
+ * The index (starting at zero) of the media description this candidate is
+ * associated with in the SDP.
+ */
+@property(nonatomic, readonly) int sdpMLineIndex;
+
+/** The SDP string for this candidate. */
+@property(nonatomic, readonly) NSString *sdp;
+
+/** The URL of the ICE server from which this candidate was gathered. */
+@property(nonatomic, readonly, nullable) NSString *serverUrl;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Initialize an RTCIceCandidate from SDP.
+ */
+- (instancetype)initWithSdp:(NSString *)sdp
+ sdpMLineIndex:(int)sdpMLineIndex
+ sdpMid:(nullable NSString *)sdpMid NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
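
A short sketch of constructing a candidate from a line received over an application signaling channel (the SDP string, mid, and m-line index below are illustrative placeholders):

    NSString *sdp =
        @"candidate:842163049 1 udp 1677729535 203.0.113.7 46154 typ srflx";
    RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
        [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp
                                               sdpMLineIndex:0
                                                      sdpMid:@"0"];
    // Typically forwarded to the peer connection, e.g. via
    // -[RTC_OBJC_TYPE(RTCPeerConnection) addIceCandidate:completionHandler:].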
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.mm
new file mode 100644
index 0000000000..48385ef5b4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.mm
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate+Private.h"
+
+#include <memory>
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCIceCandidate)
+
+@synthesize sdpMid = _sdpMid;
+@synthesize sdpMLineIndex = _sdpMLineIndex;
+@synthesize sdp = _sdp;
+@synthesize serverUrl = _serverUrl;
+
+- (instancetype)initWithSdp:(NSString *)sdp
+ sdpMLineIndex:(int)sdpMLineIndex
+ sdpMid:(NSString *)sdpMid {
+ NSParameterAssert(sdp.length);
+ if (self = [super init]) {
+ _sdpMid = [sdpMid copy];
+ _sdpMLineIndex = sdpMLineIndex;
+ _sdp = [sdp copy];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceCandidate):\n%@\n%d\n%@\n%@",
+ _sdpMid,
+ _sdpMLineIndex,
+ _sdp,
+ _serverUrl];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeCandidate:
+ (const webrtc::IceCandidateInterface *)candidate {
+ NSParameterAssert(candidate);
+ std::string sdp;
+ candidate->ToString(&sdp);
+
+ RTC_OBJC_TYPE(RTCIceCandidate) *rtcCandidate =
+ [self initWithSdp:[NSString stringForStdString:sdp]
+ sdpMLineIndex:candidate->sdp_mline_index()
+ sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
+ rtcCandidate->_serverUrl = [NSString stringForStdString:candidate->server_url()];
+ return rtcCandidate;
+}
+
+- (std::unique_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
+ webrtc::SdpParseError error;
+
+ webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
+ _sdpMid.stdString, _sdpMLineIndex, _sdp.stdString, &error);
+
+ if (!candidate) {
+ RTCLog(@"Failed to create ICE candidate: %s\nline: %s",
+ error.description.c_str(),
+ error.line.c_str());
+ }
+
+ return std::unique_ptr<webrtc::IceCandidateInterface>(candidate);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h
new file mode 100644
index 0000000000..8502da08a8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidateErrorEvent.h"
+
+#include <string>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCIceCandidateErrorEvent)
+()
+
+ - (instancetype)initWithAddress : (const std::string&)address port : (const int)port url
+ : (const std::string&)url errorCode : (const int)errorCode errorText
+ : (const std::string&)errorText;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h
new file mode 100644
index 0000000000..e0906fdbdd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCIceCandidateErrorEvent) : NSObject
+
+/** The local IP address used to communicate with the STUN or TURN server. */
+@property(nonatomic, readonly) NSString *address;
+
+/** The port used to communicate with the STUN or TURN server. */
+@property(nonatomic, readonly) int port;
+
+/** The STUN or TURN URL that identifies the STUN or TURN server for which the failure occurred. */
+@property(nonatomic, readonly) NSString *url;
+
+/** The numeric STUN error code returned by the STUN or TURN server. If no host candidate can reach
+ * the server, errorCode is set to the value 701, which is outside the STUN error code range.
+ * This error is fired only once per server URL while the RTCIceGatheringState is "gathering". */
+@property(nonatomic, readonly) int errorCode;
+
+/** The STUN reason text returned by the STUN or TURN server. If the server could not be reached,
+ * errorText will be set to an implementation-specific value providing details about the error. */
+@property(nonatomic, readonly) NSString *errorText;
+
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
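
The SDK surfaces these events through the peer connection's delegate; the sketch below assumes the optional didFailToGatherIceCandidate: callback declared alongside the other RTCPeerConnectionDelegate methods:

    - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
        didFailToGatherIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event {
      // errorCode 701 means no host candidate could reach the server.
      NSLog(@"ICE gathering failed for %@ (%@:%d): %d %@",
            event.url, event.address, event.port, event.errorCode, event.errorText);
    }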
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.mm
new file mode 100644
index 0000000000..573e30642b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.mm
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidateErrorEvent+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCIceCandidateErrorEvent)
+
+@synthesize address = _address;
+@synthesize port = _port;
+@synthesize url = _url;
+@synthesize errorCode = _errorCode;
+@synthesize errorText = _errorText;
+
+- (instancetype)init {
+ return [super init];
+}
+
+- (instancetype)initWithAddress:(const std::string&)address
+ port:(const int)port
+ url:(const std::string&)url
+ errorCode:(const int)errorCode
+ errorText:(const std::string&)errorText {
+ if (self = [self init]) {
+ _address = [NSString stringForStdString:address];
+ _port = port;
+ _url = [NSString stringForStdString:url];
+ _errorCode = errorCode;
+ _errorText = [NSString stringForStdString:errorText];
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer+Private.h
new file mode 100644
index 0000000000..3eee819965
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer+Private.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCIceServer)
+()
+
+ /**
+ * IceServer struct representation of this RTCIceServer object's data.
+ * This is needed to pass to the underlying C++ APIs.
+ */
+ @property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer;
+
+/** Initialize an RTCIceServer from a native IceServer. */
+- (instancetype)initWithNativeServer:(webrtc::PeerConnectionInterface::IceServer)nativeServer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.h
new file mode 100644
index 0000000000..7ddcbc1a1f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) {
+ RTCTlsCertPolicySecure,
+ RTCTlsCertPolicyInsecureNoCheck
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCIceServer) : NSObject
+
+/** URI(s) for this server represented as NSStrings. */
+@property(nonatomic, readonly) NSArray<NSString *> *urlStrings;
+
+/** Username to use if this RTCIceServer object is a TURN server. */
+@property(nonatomic, readonly, nullable) NSString *username;
+
+/** Credential to use if this RTCIceServer object is a TURN server. */
+@property(nonatomic, readonly, nullable) NSString *credential;
+
+/**
+ * TLS certificate policy to use if this RTCIceServer object is a TURN server.
+ */
+@property(nonatomic, readonly) RTCTlsCertPolicy tlsCertPolicy;
+
+/**
+ If the URIs in `urls` only contain IP addresses, this field can be used
+ to indicate the hostname, which may be necessary for TLS (using the SNI
+ extension). If `urls` itself contains the hostname, this isn't necessary.
+ */
+@property(nonatomic, readonly, nullable) NSString *hostname;
+
+/** List of protocols to be used in the TLS ALPN extension. */
+@property(nonatomic, readonly) NSArray<NSString *> *tlsAlpnProtocols;
+
+/**
+ List of elliptic curves to be used in the TLS elliptic curves extension.
+ Only curve names supported by OpenSSL should be used (e.g. "P-256", "X25519").
+ */
+@property(nonatomic, readonly) NSArray<NSString *> *tlsEllipticCurves;
+
+- (nonnull instancetype)init NS_UNAVAILABLE;
+
+/** Convenience initializer for a server with no authentication (e.g. STUN). */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, an optional username,
+ * and an optional credential. The TLS certificate policy defaults to secure.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, optional username,
+ * optional credential, and TLS cert policy.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, optional username,
+ * optional credential, TLS cert policy and hostname.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(nullable NSString *)hostname;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, optional username,
+ * optional credential, TLS cert policy, hostname and ALPN protocols.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(nullable NSString *)hostname
+ tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, optional username,
+ * optional credential, TLS cert policy, hostname, ALPN protocols and
+ * elliptic curves.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(nullable NSString *)hostname
+ tlsAlpnProtocols:(nullable NSArray<NSString *> *)tlsAlpnProtocols
+ tlsEllipticCurves:(nullable NSArray<NSString *> *)tlsEllipticCurves
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
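
Each convenience initializer above chains down to the designated initializer, defaulting to RTCTlsCertPolicySecure, a nil hostname, and empty ALPN/elliptic-curve lists. A typical configuration sketch (the server URLs and credentials are placeholders):

    RTC_OBJC_TYPE(RTCIceServer) *stun = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
        initWithURLStrings:@[ @"stun:stun.example.org" ]];
    RTC_OBJC_TYPE(RTCIceServer) *turn = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
        initWithURLStrings:@[ @"turn:turn.example.org:3478" ]
                  username:@"user"
                credential:@"secret"];
    // Both are normally assigned to RTCConfiguration.iceServers before
    // creating the peer connection.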
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.mm
new file mode 100644
index 0000000000..19a0a7e9e8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.mm
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCIceServer)
+
+@synthesize urlStrings = _urlStrings;
+@synthesize username = _username;
+@synthesize credential = _credential;
+@synthesize tlsCertPolicy = _tlsCertPolicy;
+@synthesize hostname = _hostname;
+@synthesize tlsAlpnProtocols = _tlsAlpnProtocols;
+@synthesize tlsEllipticCurves = _tlsEllipticCurves;
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings {
+ return [self initWithURLStrings:urlStrings
+ username:nil
+ credential:nil];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential {
+ return [self initWithURLStrings:urlStrings
+ username:username
+ credential:credential
+ tlsCertPolicy:RTCTlsCertPolicySecure];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
+ return [self initWithURLStrings:urlStrings
+ username:username
+ credential:credential
+ tlsCertPolicy:tlsCertPolicy
+ hostname:nil];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(NSString *)hostname {
+ return [self initWithURLStrings:urlStrings
+ username:username
+ credential:credential
+ tlsCertPolicy:tlsCertPolicy
+ hostname:hostname
+ tlsAlpnProtocols:[NSArray array]];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(NSString *)hostname
+ tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols {
+ return [self initWithURLStrings:urlStrings
+ username:username
+ credential:credential
+ tlsCertPolicy:tlsCertPolicy
+ hostname:hostname
+ tlsAlpnProtocols:tlsAlpnProtocols
+ tlsEllipticCurves:[NSArray array]];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(NSString *)hostname
+ tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols
+ tlsEllipticCurves:(NSArray<NSString *> *)tlsEllipticCurves {
+ NSParameterAssert(urlStrings.count);
+ if (self = [super init]) {
+ _urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
+ _username = [username copy];
+ _credential = [credential copy];
+ _tlsCertPolicy = tlsCertPolicy;
+ _hostname = [hostname copy];
+ _tlsAlpnProtocols = [[NSArray alloc] initWithArray:tlsAlpnProtocols copyItems:YES];
+ _tlsEllipticCurves = [[NSArray alloc] initWithArray:tlsEllipticCurves copyItems:YES];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceServer):\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
+ _urlStrings,
+ _username,
+ _credential,
+ [self stringForTlsCertPolicy:_tlsCertPolicy],
+ _hostname,
+ _tlsAlpnProtocols,
+ _tlsEllipticCurves];
+}
+
+#pragma mark - Private
+
+- (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
+ switch (tlsCertPolicy) {
+ case RTCTlsCertPolicySecure:
+ return @"RTCTlsCertPolicySecure";
+ case RTCTlsCertPolicyInsecureNoCheck:
+ return @"RTCTlsCertPolicyInsecureNoCheck";
+ }
+}
+
+- (webrtc::PeerConnectionInterface::IceServer)nativeServer {
+ __block webrtc::PeerConnectionInterface::IceServer iceServer;
+
+ iceServer.username = [NSString stdStringForString:_username];
+ iceServer.password = [NSString stdStringForString:_credential];
+ iceServer.hostname = [NSString stdStringForString:_hostname];
+
+ [_tlsAlpnProtocols enumerateObjectsUsingBlock:^(NSString *proto, NSUInteger idx, BOOL *stop) {
+ iceServer.tls_alpn_protocols.push_back(proto.stdString);
+ }];
+
+ [_tlsEllipticCurves enumerateObjectsUsingBlock:^(NSString *curve, NSUInteger idx, BOOL *stop) {
+ iceServer.tls_elliptic_curves.push_back(curve.stdString);
+ }];
+
+ [_urlStrings enumerateObjectsUsingBlock:^(NSString *url,
+ NSUInteger idx,
+ BOOL *stop) {
+ iceServer.urls.push_back(url.stdString);
+ }];
+
+ switch (_tlsCertPolicy) {
+ case RTCTlsCertPolicySecure:
+ iceServer.tls_cert_policy =
+ webrtc::PeerConnectionInterface::kTlsCertPolicySecure;
+ break;
+ case RTCTlsCertPolicyInsecureNoCheck:
+ iceServer.tls_cert_policy =
+ webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
+ break;
+ }
+ return iceServer;
+}
+
+- (instancetype)initWithNativeServer:
+ (webrtc::PeerConnectionInterface::IceServer)nativeServer {
+ NSMutableArray *urls =
+ [NSMutableArray arrayWithCapacity:nativeServer.urls.size()];
+ for (auto const &url : nativeServer.urls) {
+ [urls addObject:[NSString stringForStdString:url]];
+ }
+ NSString *username = [NSString stringForStdString:nativeServer.username];
+ NSString *credential = [NSString stringForStdString:nativeServer.password];
+ NSString *hostname = [NSString stringForStdString:nativeServer.hostname];
+ NSMutableArray *tlsAlpnProtocols =
+ [NSMutableArray arrayWithCapacity:nativeServer.tls_alpn_protocols.size()];
+ for (auto const &proto : nativeServer.tls_alpn_protocols) {
+ [tlsAlpnProtocols addObject:[NSString stringForStdString:proto]];
+ }
+ NSMutableArray *tlsEllipticCurves =
+ [NSMutableArray arrayWithCapacity:nativeServer.tls_elliptic_curves.size()];
+ for (auto const &curve : nativeServer.tls_elliptic_curves) {
+ [tlsEllipticCurves addObject:[NSString stringForStdString:curve]];
+ }
+ RTCTlsCertPolicy tlsCertPolicy;
+
+ switch (nativeServer.tls_cert_policy) {
+ case webrtc::PeerConnectionInterface::kTlsCertPolicySecure:
+ tlsCertPolicy = RTCTlsCertPolicySecure;
+ break;
+ case webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck:
+ tlsCertPolicy = RTCTlsCertPolicyInsecureNoCheck;
+ break;
+ }
+
+ self = [self initWithURLStrings:urls
+ username:username
+ credential:credential
+ tlsCertPolicy:tlsCertPolicy
+ hostname:hostname
+ tlsAlpnProtocols:tlsAlpnProtocols
+ tlsEllipticCurves:tlsEllipticCurves];
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h
new file mode 100644
index 0000000000..faa7962821
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCLegacyStatsReport.h"
+
+#include "api/stats_types.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCLegacyStatsReport)
+()
+
+ /** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
+ - (instancetype)initWithNativeReport : (const webrtc::StatsReport &)nativeReport;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h
new file mode 100644
index 0000000000..b3bd12c5d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** This does not currently conform to the spec. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCLegacyStatsReport) : NSObject
+
+/** Time since 1970-01-01T00:00:00Z in milliseconds. */
+@property(nonatomic, readonly) CFTimeInterval timestamp;
+
+/** The type of stats held by this object. */
+@property(nonatomic, readonly) NSString *type;
+
+/** The identifier for this object. */
+@property(nonatomic, readonly) NSString *reportId;
+
+/** A dictionary holding the actual stats. */
+@property(nonatomic, readonly) NSDictionary<NSString *, NSString *> *values;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
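
A sketch of dumping one of these reports once the legacy stats API has delivered it; the helper below relies only on the properties declared in this header:

    static void DumpLegacyStatsReport(RTC_OBJC_TYPE(RTCLegacyStatsReport) *report) {
      NSLog(@"%@ (%@) at %.0f ms", report.reportId, report.type, report.timestamp);
      [report.values enumerateKeysAndObjectsUsingBlock:^(
                         NSString *key, NSString *value, BOOL *stop) {
        NSLog(@"  %@ = %@", key, value);
      }];
    }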
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm
new file mode 100644
index 0000000000..bd7a1ad9c9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCLegacyStatsReport+Private.h"
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCLegacyStatsReport)
+
+@synthesize timestamp = _timestamp;
+@synthesize type = _type;
+@synthesize reportId = _reportId;
+@synthesize values = _values;
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCLegacyStatsReport):\n%@\n%@\n%f\n%@",
+ _reportId,
+ _type,
+ _timestamp,
+ _values];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
+ if (self = [super init]) {
+ _timestamp = nativeReport.timestamp();
+ _type = [NSString stringForStdString:nativeReport.TypeToString()];
+ _reportId = [NSString stringForStdString:
+ nativeReport.id()->ToString()];
+
+ NSUInteger capacity = nativeReport.values().size();
+ NSMutableDictionary *values =
+ [NSMutableDictionary dictionaryWithCapacity:capacity];
+ for (auto const &valuePair : nativeReport.values()) {
+ NSString *key = [NSString stringForStdString:
+ valuePair.second->display_name()];
+ NSString *value = [NSString stringForStdString:
+ valuePair.second->ToString()];
+
+ // Not expecting duplicate keys.
+ RTC_DCHECK(![values objectForKey:key]);
+ [values setObject:value forKey:key];
+ }
+ _values = values;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h
new file mode 100644
index 0000000000..97eee8307d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaConstraints.h"
+
+#include <memory>
+
+#include "sdk/media_constraints.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCMediaConstraints)
+()
+
+ /**
+ * A MediaConstraints representation of this RTCMediaConstraints object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+ - (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
+
+/** Returns a native Constraints object representing these constraints. */
++ (webrtc::MediaConstraints::Constraints)nativeConstraintsForConstraints:
+ (NSDictionary<NSString*, NSString*>*)constraints;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.h
new file mode 100644
index 0000000000..c5baf20c1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Constraint keys for media sources. */
+/** The value for this key should be a base64 encoded string containing
+ * the data from the serialized configuration proto.
+ */
+RTC_EXTERN NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig;
+
+/** Constraint keys for generating offers and answers. */
+RTC_EXTERN NSString *const kRTCMediaConstraintsIceRestart;
+RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveAudio;
+RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveVideo;
+RTC_EXTERN NSString *const kRTCMediaConstraintsVoiceActivityDetection;
+
+/** Constraint values for Boolean parameters. */
+RTC_EXTERN NSString *const kRTCMediaConstraintsValueTrue;
+RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse;
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMediaConstraints) : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Initialize with mandatory and/or optional constraints. */
+- (instancetype)
+ initWithMandatoryConstraints:(nullable NSDictionary<NSString *, NSString *> *)mandatory
+ optionalConstraints:(nullable NSDictionary<NSString *, NSString *> *)optional
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
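
A sketch of building constraints for offer creation from the keys declared above:

    RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
            initWithMandatoryConstraints:@{
              kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue,
              kRTCMediaConstraintsOfferToReceiveVideo : kRTCMediaConstraintsValueTrue
            }
                     optionalConstraints:nil];
    // Typically passed to
    // -[RTC_OBJC_TYPE(RTCPeerConnection) offerForConstraints:completionHandler:].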
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
new file mode 100644
index 0000000000..0f46e4b8fe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaConstraints+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+#include <memory>
+
+NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig =
+ @(webrtc::MediaConstraints::kAudioNetworkAdaptorConfig);
+
+NSString *const kRTCMediaConstraintsIceRestart = @(webrtc::MediaConstraints::kIceRestart);
+NSString *const kRTCMediaConstraintsOfferToReceiveAudio =
+ @(webrtc::MediaConstraints::kOfferToReceiveAudio);
+NSString *const kRTCMediaConstraintsOfferToReceiveVideo =
+ @(webrtc::MediaConstraints::kOfferToReceiveVideo);
+NSString *const kRTCMediaConstraintsVoiceActivityDetection =
+ @(webrtc::MediaConstraints::kVoiceActivityDetection);
+
+NSString *const kRTCMediaConstraintsValueTrue = @(webrtc::MediaConstraints::kValueTrue);
+NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kValueFalse);
+
+@implementation RTC_OBJC_TYPE (RTCMediaConstraints) {
+ NSDictionary<NSString *, NSString *> *_mandatory;
+ NSDictionary<NSString *, NSString *> *_optional;
+}
+
+- (instancetype)initWithMandatoryConstraints:
+ (NSDictionary<NSString *, NSString *> *)mandatory
+ optionalConstraints:
+ (NSDictionary<NSString *, NSString *> *)optional {
+ if (self = [super init]) {
+ _mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
+ copyItems:YES];
+ _optional = [[NSDictionary alloc] initWithDictionary:optional
+ copyItems:YES];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString
+ stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaConstraints):\n%@\n%@", _mandatory, _optional];
+}
+
+#pragma mark - Private
+
+- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints {
+ webrtc::MediaConstraints::Constraints mandatory =
+ [[self class] nativeConstraintsForConstraints:_mandatory];
+ webrtc::MediaConstraints::Constraints optional =
+ [[self class] nativeConstraintsForConstraints:_optional];
+
+ webrtc::MediaConstraints *nativeConstraints =
+ new webrtc::MediaConstraints(mandatory, optional);
+ return std::unique_ptr<webrtc::MediaConstraints>(nativeConstraints);
+}
+
++ (webrtc::MediaConstraints::Constraints)nativeConstraintsForConstraints:
+ (NSDictionary<NSString *, NSString *> *)constraints {
+ webrtc::MediaConstraints::Constraints nativeConstraints;
+ for (NSString *key in constraints) {
+ NSAssert([key isKindOfClass:[NSString class]],
+ @"%@ is not an NSString.", key);
+ NSString *value = [constraints objectForKey:key];
+ NSAssert([value isKindOfClass:[NSString class]],
+ @"%@ is not an NSString.", value);
+ if ([kRTCMediaConstraintsAudioNetworkAdaptorConfig isEqualToString:key]) {
+ // This value is base64 encoded.
+ NSData *charData = [[NSData alloc] initWithBase64EncodedString:value options:0];
+ std::string configValue =
+ std::string(reinterpret_cast<const char *>(charData.bytes), charData.length);
+ nativeConstraints.push_back(webrtc::MediaConstraints::Constraint(key.stdString, configValue));
+ } else {
+ nativeConstraints.push_back(
+ webrtc::MediaConstraints::Constraint(key.stdString, value.stdString));
+ }
+ }
+ return nativeConstraints;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
new file mode 100644
index 0000000000..edda892e50
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaSource.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
+ RTCMediaSourceTypeAudio,
+ RTCMediaSourceTypeVideo,
+};
+
+@interface RTC_OBJC_TYPE (RTCMediaSource)
+()
+
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:(RTCSourceState)state;
+
++ (RTCSourceState)sourceStateForNativeState:(webrtc::MediaSourceInterface::SourceState)nativeState;
+
++ (NSString *)stringForState:(RTCSourceState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.h
new file mode 100644
index 0000000000..ba19c2a352
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+typedef NS_ENUM(NSInteger, RTCSourceState) {
+ RTCSourceStateInitializing,
+ RTCSourceStateLive,
+ RTCSourceStateEnded,
+ RTCSourceStateMuted,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMediaSource) : NSObject
+
+/** The current state of the RTCMediaSource. */
+@property(nonatomic, readonly) RTCSourceState state;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.mm
new file mode 100644
index 0000000000..61472a782a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.mm
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaSource+Private.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCMediaSource) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ RTCMediaSourceType _type;
+}
+
+@synthesize nativeMediaSource = _nativeMediaSource;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type {
+ RTC_DCHECK(factory);
+ RTC_DCHECK(nativeMediaSource);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeMediaSource = nativeMediaSource;
+ _type = type;
+ }
+ return self;
+}
+
+- (RTCSourceState)state {
+ return [[self class] sourceStateForNativeState:_nativeMediaSource->state()];
+}
+
+#pragma mark - Private
+
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
+ (RTCSourceState)state {
+ switch (state) {
+ case RTCSourceStateInitializing:
+ return webrtc::MediaSourceInterface::kInitializing;
+ case RTCSourceStateLive:
+ return webrtc::MediaSourceInterface::kLive;
+ case RTCSourceStateEnded:
+ return webrtc::MediaSourceInterface::kEnded;
+ case RTCSourceStateMuted:
+ return webrtc::MediaSourceInterface::kMuted;
+ }
+}
+
++ (RTCSourceState)sourceStateForNativeState:
+ (webrtc::MediaSourceInterface::SourceState)nativeState {
+ switch (nativeState) {
+ case webrtc::MediaSourceInterface::kInitializing:
+ return RTCSourceStateInitializing;
+ case webrtc::MediaSourceInterface::kLive:
+ return RTCSourceStateLive;
+ case webrtc::MediaSourceInterface::kEnded:
+ return RTCSourceStateEnded;
+ case webrtc::MediaSourceInterface::kMuted:
+ return RTCSourceStateMuted;
+ }
+}
+
++ (NSString *)stringForState:(RTCSourceState)state {
+ switch (state) {
+ case RTCSourceStateInitializing:
+ return @"Initializing";
+ case RTCSourceStateLive:
+ return @"Live";
+ case RTCSourceStateEnded:
+ return @"Ended";
+ case RTCSourceStateMuted:
+ return @"Muted";
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream+Private.h
new file mode 100644
index 0000000000..6c8a602766
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream+Private.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStream.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCMediaStream)
+()
+
+ /**
+ * MediaStreamInterface representation of this RTCMediaStream object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
+
+/** Initialize an RTCMediaStream with an id. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ streamId:(NSString *)streamId;
+
+/** Initialize an RTCMediaStream from a native MediaStreamInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaStream:(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.h
new file mode 100644
index 0000000000..2d56f15c7d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCVideoTrack);
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMediaStream) : NSObject
+
+/** The audio tracks in this stream. */
+@property(nonatomic, strong, readonly) NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *audioTracks;
+
+/** The video tracks in this stream. */
+@property(nonatomic, strong, readonly) NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *videoTracks;
+
+/** An identifier for this media stream. */
+@property(nonatomic, readonly) NSString *streamId;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Adds the given audio track to this media stream. */
+- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack;
+
+/** Adds the given video track to this media stream. */
+- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack;
+
+/** Removes the given audio track from this media stream. */
+- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack;
+
+/** Removes the given video track to this media stream. */
+- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack;
+
+@end
+
+NS_ASSUME_NONNULL_END
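
A sketch of composing a local stream, assuming an RTC_OBJC_TYPE(RTCPeerConnectionFactory) instance named factory and its mediaStreamWithStreamId:/audioTrackWithTrackId: helpers:

    RTC_OBJC_TYPE(RTCMediaStream) *stream =
        [factory mediaStreamWithStreamId:@"stream0"];
    RTC_OBJC_TYPE(RTCAudioTrack) *audio =
        [factory audioTrackWithTrackId:@"audio0"];
    [stream addAudioTrack:audio];   // proxied to the signaling thread internally
    // ... later ...
    [stream removeAudioTrack:audio];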
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.mm
new file mode 100644
index 0000000000..beb4a7a91b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.mm
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStream+Private.h"
+
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoTrack+Private.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCMediaStream) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::Thread *_signalingThread;
+ NSMutableArray *_audioTracks /* accessed on _signalingThread */;
+ NSMutableArray *_videoTracks /* accessed on _signalingThread */;
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ streamId:(NSString *)streamId {
+ NSParameterAssert(factory);
+ NSParameterAssert(streamId.length);
+ std::string nativeId = [NSString stdStringForString:streamId];
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ factory.nativeFactory->CreateLocalMediaStream(nativeId);
+ return [self initWithFactory:factory nativeMediaStream:stream];
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *)audioTracks {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *>(
+ RTC_FROM_HERE, [self]() { return self.audioTracks; });
+ }
+ return [_audioTracks copy];
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *)videoTracks {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *>(
+ RTC_FROM_HERE, [self]() { return self.videoTracks; });
+ }
+ return [_videoTracks copy];
+}
+
+- (NSString *)streamId {
+ return [NSString stringForStdString:_nativeMediaStream->id()];
+}
+
+- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<void>(
+ RTC_FROM_HERE, [audioTrack, self]() { return [self addAudioTrack:audioTrack]; });
+ }
+ if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
+ [_audioTracks addObject:audioTrack];
+ }
+}
+
+- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<void>(
+ RTC_FROM_HERE, [videoTrack, self]() { return [self addVideoTrack:videoTrack]; });
+ }
+ if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
+ [_videoTracks addObject:videoTrack];
+ }
+}
+
+- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<void>(
+ RTC_FROM_HERE, [audioTrack, self]() { return [self removeAudioTrack:audioTrack]; });
+ }
+ NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
+ if (index == NSNotFound) {
+ RTC_LOG(LS_INFO) << "|removeAudioTrack| called on unexpected RTC_OBJC_TYPE(RTCAudioTrack)";
+ return;
+ }
+ if (_nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
+ [_audioTracks removeObjectAtIndex:index];
+ }
+}
+
+- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<void>(
+ RTC_FROM_HERE, [videoTrack, self]() { return [self removeVideoTrack:videoTrack]; });
+ }
+ NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
+ if (index == NSNotFound) {
+ RTC_LOG(LS_INFO) << "|removeVideoTrack| called on unexpected RTC_OBJC_TYPE(RTCVideoTrack)";
+ return;
+ }
+
+ if (_nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
+ [_videoTracks removeObjectAtIndex:index];
+ }
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStream):\n%@\nA=%lu\nV=%lu",
+ self.streamId,
+ (unsigned long)self.audioTracks.count,
+ (unsigned long)self.videoTracks.count];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
+ return _nativeMediaStream;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaStream:
+ (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
+ NSParameterAssert(nativeMediaStream);
+ if (self = [super init]) {
+ _factory = factory;
+ _signalingThread = factory.signalingThread;
+
+ webrtc::AudioTrackVector audioTracks = nativeMediaStream->GetAudioTracks();
+ webrtc::VideoTrackVector videoTracks = nativeMediaStream->GetVideoTracks();
+
+ _audioTracks = [NSMutableArray arrayWithCapacity:audioTracks.size()];
+ _videoTracks = [NSMutableArray arrayWithCapacity:videoTracks.size()];
+ _nativeMediaStream = nativeMediaStream;
+
+ for (auto &track : audioTracks) {
+ RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
+ RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack =
+ [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:_factory
+ nativeTrack:track
+ type:type];
+ [_audioTracks addObject:audioTrack];
+ }
+
+ for (auto &track : videoTracks) {
+ RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
+ RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack =
+ [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:_factory
+ nativeTrack:track
+ type:type];
+ [_videoTracks addObject:videoTrack];
+ }
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h
new file mode 100644
index 0000000000..ee51e27b2d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+#include "api/media_stream_interface.h"
+
+typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
+ RTCMediaStreamTrackTypeAudio,
+ RTCMediaStreamTrackTypeVideo,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface RTC_OBJC_TYPE (RTCMediaStreamTrack)
+()
+
+ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) *
+ factory;
+
+/**
+ * The native MediaStreamTrackInterface passed in or created during
+ * construction.
+ */
+@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack;
+
+/**
+ * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
+ */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
+
+- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track;
+
++ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
+ (RTCMediaStreamTrackState)state;
+
++ (RTCMediaStreamTrackState)trackStateForNativeState:
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeState;
+
++ (NSString *)stringForState:(RTCMediaStreamTrackState)state;
+
++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)
+ mediaTrackForNativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h
new file mode 100644
index 0000000000..2200122ccd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+/**
+ * Represents the state of the track. This exposes the same states as the C++ layer.
+ */
+typedef NS_ENUM(NSInteger, RTCMediaStreamTrackState) {
+ RTCMediaStreamTrackStateLive,
+ RTCMediaStreamTrackStateEnded
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXTERN NSString *const kRTCMediaStreamTrackKindAudio;
+RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo;
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMediaStreamTrack) : NSObject
+
+/**
+ * The kind of track. For example, "audio" if this track represents an audio
+ * track and "video" if this track represents a video track.
+ */
+@property(nonatomic, readonly) NSString *kind;
+
+/** An identifier string. */
+@property(nonatomic, readonly) NSString *trackId;
+
+/** The enabled state of the track. */
+@property(nonatomic, assign) BOOL isEnabled;
+
+/** The state of the track. */
+@property(nonatomic, readonly) RTCMediaStreamTrackState readyState;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
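
Muting media locally is usually done by toggling isEnabled rather than removing the track from its stream; a minimal sketch assuming an existing track object:

    // `track` can be any RTC_OBJC_TYPE(RTCMediaStreamTrack) subclass instance.
    track.isEnabled = NO;  // pauses media flow without ending the track
    if (track.readyState == RTCMediaStreamTrackStateLive) {
      track.isEnabled = YES;
    }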
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm
new file mode 100644
index 0000000000..f1e128ca60
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCVideoTrack+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+NSString * const kRTCMediaStreamTrackKindAudio =
+ @(webrtc::MediaStreamTrackInterface::kAudioKind);
+NSString * const kRTCMediaStreamTrackKindVideo =
+ @(webrtc::MediaStreamTrackInterface::kVideoKind);
+
+@implementation RTC_OBJC_TYPE (RTCMediaStreamTrack) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
+ RTCMediaStreamTrackType _type;
+}
+
+- (NSString *)kind {
+ return [NSString stringForStdString:_nativeTrack->kind()];
+}
+
+- (NSString *)trackId {
+ return [NSString stringForStdString:_nativeTrack->id()];
+}
+
+- (BOOL)isEnabled {
+ return _nativeTrack->enabled();
+}
+
+- (void)setIsEnabled:(BOOL)isEnabled {
+ _nativeTrack->set_enabled(isEnabled);
+}
+
+- (RTCMediaStreamTrackState)readyState {
+ return [[self class] trackStateForNativeState:_nativeTrack->state()];
+}
+
+- (NSString *)description {
+ NSString *readyState = [[self class] stringForState:self.readyState];
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStreamTrack):\n%@\n%@\n%@\n%@",
+ self.kind,
+ self.trackId,
+ self.isEnabled ? @"enabled" : @"disabled",
+ readyState];
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ return [self isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)object];
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeTrack.get();
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+ return _nativeTrack;
+}
+
+@synthesize factory = _factory;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ type:(RTCMediaStreamTrackType)type {
+ NSParameterAssert(nativeTrack);
+ NSParameterAssert(factory);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeTrack = nativeTrack;
+ _type = type;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+ NSParameterAssert(nativeTrack);
+ if (nativeTrack->kind() ==
+ std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) {
+ return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeAudio];
+ }
+ if (nativeTrack->kind() ==
+ std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) {
+ return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeVideo];
+ }
+ return nil;
+}
+
+- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ if (!track) {
+ return NO;
+ }
+ return _nativeTrack == track.nativeTrack;
+}
+
++ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
+ (RTCMediaStreamTrackState)state {
+ switch (state) {
+ case RTCMediaStreamTrackStateLive:
+ return webrtc::MediaStreamTrackInterface::kLive;
+ case RTCMediaStreamTrackStateEnded:
+ return webrtc::MediaStreamTrackInterface::kEnded;
+ }
+}
+
++ (RTCMediaStreamTrackState)trackStateForNativeState:
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeState {
+ switch (nativeState) {
+ case webrtc::MediaStreamTrackInterface::kLive:
+ return RTCMediaStreamTrackStateLive;
+ case webrtc::MediaStreamTrackInterface::kEnded:
+ return RTCMediaStreamTrackStateEnded;
+ }
+}
+
++ (NSString *)stringForState:(RTCMediaStreamTrackState)state {
+ switch (state) {
+ case RTCMediaStreamTrackStateLive:
+ return @"Live";
+ case RTCMediaStreamTrackStateEnded:
+ return @"Ended";
+ }
+}
+
++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)
+ mediaTrackForNativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory {
+ NSParameterAssert(nativeTrack);
+ NSParameterAssert(factory);
+ if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) {
+ return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:factory
+ nativeTrack:nativeTrack
+ type:RTCMediaStreamTrackTypeAudio];
+ } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+ return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:factory
+ nativeTrack:nativeTrack
+ type:RTCMediaStreamTrackTypeVideo];
+ } else {
+ return [[RTC_OBJC_TYPE(RTCMediaStreamTrack) alloc] initWithFactory:factory
+ nativeTrack:nativeTrack];
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.h
new file mode 100644
index 0000000000..fddbb27c90
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMetricsSampleInfo.h"
+
+/**
+ * Enables gathering of metrics (which can be fetched with
+ * RTCGetAndResetMetrics). Must be called before any other call into WebRTC.
+ */
+RTC_EXTERN void RTCEnableMetrics(void);
+
+/** Gets and clears native histograms. */
+RTC_EXTERN NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *>* RTCGetAndResetMetrics(void);
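A minimal usage sketch of the pair above, assuming the call site runs before any
other WebRTC API is used (local names are illustrative):

  // Enable collection up front, then drain the histograms later, e.g. at
  // call teardown. RTCGetAndResetMetrics() also clears the native counters.
  RTCEnableMetrics();
  // ... run a session ...
  NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *> *infos = RTCGetAndResetMetrics();
  for (RTC_OBJC_TYPE(RTCMetricsSampleInfo) *info in infos) {
    NSLog(@"%@ (min=%d max=%d buckets=%d)", info.name, info.min, info.max, info.bucketCount);
  }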
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.mm
new file mode 100644
index 0000000000..87eb8c0210
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.mm
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMetrics.h"
+
+#import "RTCMetricsSampleInfo+Private.h"
+
+#include "rtc_base/string_utils.h"
+
+void RTCEnableMetrics(void) {
+ webrtc::metrics::Enable();
+}
+
+NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *> *RTCGetAndResetMetrics(void) {
+ std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>, rtc::AbslStringViewCmp>
+ histograms;
+ webrtc::metrics::GetAndReset(&histograms);
+
+ NSMutableArray *metrics =
+ [NSMutableArray arrayWithCapacity:histograms.size()];
+ for (auto const &histogram : histograms) {
+ RTC_OBJC_TYPE(RTCMetricsSampleInfo) *metric =
+ [[RTC_OBJC_TYPE(RTCMetricsSampleInfo) alloc] initWithNativeSampleInfo:*histogram.second];
+ [metrics addObject:metric];
+ }
+ return metrics;
+}
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h
new file mode 100644
index 0000000000..e4aa41f6c7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMetricsSampleInfo.h"
+
+#include "system_wrappers/include/metrics.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo)
+()
+
+ /** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */
+ - (instancetype)initWithNativeSampleInfo : (const webrtc::metrics::SampleInfo &)info;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h
new file mode 100644
index 0000000000..47a877b6fb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo) : NSObject
+
+/**
+ * Example of RTCMetricsSampleInfo:
+ * name: "WebRTC.Video.InputFramesPerSecond"
+ * min: 1
+ * max: 100
+ * bucketCount: 50
+ * samples: [29]:2 [30]:1
+ */
+
+/** The name of the histogram. */
+@property(nonatomic, readonly) NSString *name;
+
+/** The minimum bucket value. */
+@property(nonatomic, readonly) int min;
+
+/** The maximum bucket value. */
+@property(nonatomic, readonly) int max;
+
+/** The number of buckets. */
+@property(nonatomic, readonly) int bucketCount;
+
+/** A dictionary holding the samples <value, # of events>. */
+@property(nonatomic, readonly) NSDictionary<NSNumber *, NSNumber *> *samples;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
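Since `samples` maps bucket value to event count, dumping a histogram like the
WebRTC.Video.InputFramesPerSecond example above is one line per bucket; a short
sketch, where `info` is assumed to be an RTC_OBJC_TYPE(RTCMetricsSampleInfo)
obtained from RTCGetAndResetMetrics:

  // Prints one "[value]:count" pair per bucket, mirroring the header's
  // "samples: [29]:2 [30]:1" notation.
  [info.samples enumerateKeysAndObjectsUsingBlock:^(NSNumber *value, NSNumber *count, BOOL *stop) {
    NSLog(@"[%@]:%@", value, count);
  }];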
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm
new file mode 100644
index 0000000000..e4be94e90a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMetricsSampleInfo+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCMetricsSampleInfo)
+
+@synthesize name = _name;
+@synthesize min = _min;
+@synthesize max = _max;
+@synthesize bucketCount = _bucketCount;
+@synthesize samples = _samples;
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeSampleInfo:
+ (const webrtc::metrics::SampleInfo &)info {
+ if (self = [super init]) {
+ _name = [NSString stringForStdString:info.name];
+ _min = info.min;
+ _max = info.max;
+ _bucketCount = info.bucket_count;
+
+ NSMutableDictionary *samples =
+ [NSMutableDictionary dictionaryWithCapacity:info.samples.size()];
+ for (auto const &sample : info.samples) {
+ [samples setObject:@(sample.second) forKey:@(sample.first)];
+ }
+ _samples = samples;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
new file mode 100644
index 0000000000..cb75f061d8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "RTCDataChannel+Private.h"
+#import "RTCDataChannelConfiguration+Private.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnection)
+(DataChannel)
+
+ - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel
+ : (NSString *)label configuration
+ : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration {
+ std::string labelString = [NSString stdStringForString:label];
+ const webrtc::DataChannelInit nativeInit =
+ configuration.nativeDataChannelInit;
+ auto result = self.nativePeerConnection->CreateDataChannelOrError(labelString, &nativeInit);
+ if (!result.ok()) {
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:self.factory
+ nativeDataChannel:result.MoveValue()];
+}
+
+@end
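Because dataChannelForLabel:configuration: surfaces failure as nil rather than
an NSError, callers should nil-check the result. A usage sketch, with
`peerConnection` assumed to be an existing RTC_OBJC_TYPE(RTCPeerConnection) and
the configuration left at its defaults:

  RTC_OBJC_TYPE(RTCDataChannelConfiguration) *config =
      [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
  RTC_OBJC_TYPE(RTCDataChannel) *channel =
      [peerConnection dataChannelForLabel:@"chat" configuration:config];
  if (channel == nil) {
    // CreateDataChannelOrError failed, e.g. the connection is already closed.
  }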
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h
new file mode 100644
index 0000000000..00f2ef7834
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+namespace webrtc {
+
+/**
+ * These objects are created by RTCPeerConnectionFactory to wrap an
+ * id<RTCPeerConnectionDelegate> and call methods on that interface.
+ */
+class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
+ public:
+ PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection);
+ ~PeerConnectionDelegateAdapter() override;
+
+ void OnSignalingChange(PeerConnectionInterface::SignalingState new_state) override;
+
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+
+ void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+
+ void OnTrack(rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
+
+ void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> data_channel) override;
+
+ void OnRenegotiationNeeded() override;
+
+ void OnIceConnectionChange(PeerConnectionInterface::IceConnectionState new_state) override;
+
+ void OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+
+ void OnConnectionChange(PeerConnectionInterface::PeerConnectionState new_state) override;
+
+ void OnIceGatheringChange(PeerConnectionInterface::IceGatheringState new_state) override;
+
+ void OnIceCandidate(const IceCandidateInterface *candidate) override;
+
+ void OnIceCandidateError(const std::string &address,
+ int port,
+ const std::string &url,
+ int error_code,
+ const std::string &error_text) override;
+
+ void OnIceCandidatesRemoved(const std::vector<cricket::Candidate> &candidates) override;
+
+ void OnIceSelectedCandidatePairChanged(const cricket::CandidatePairChangeEvent &event) override;
+
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>> &streams) override;
+
+ void OnRemoveTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
+
+ private:
+ __weak RTC_OBJC_TYPE(RTCPeerConnection) * peer_connection_;
+};
+
+} // namespace webrtc
+@protocol RTC_OBJC_TYPE
+(RTCSSLCertificateVerifier);
+
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+()
+
+ /** The factory used to create this RTCPeerConnection */
+ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) *
+ factory;
+
+/** The native PeerConnectionInterface created during construction. */
+@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::PeerConnectionInterface>
+ nativePeerConnection;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints, and
+ * delegate.
+ */
+- (nullable instancetype)
+ initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ certificateVerifier:(nullable id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints,
+ * delegate and PeerConnectionDependencies.
+ */
+- (nullable instancetype)
+ initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate
+ NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
+ (RTCSignalingState)state;
+
++ (RTCSignalingState)signalingStateForNativeState:
+ (webrtc::PeerConnectionInterface::SignalingState)nativeState;
+
++ (NSString *)stringForSignalingState:(RTCSignalingState)state;
+
++ (webrtc::PeerConnectionInterface::IceConnectionState)nativeIceConnectionStateForState:
+ (RTCIceConnectionState)state;
+
++ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeConnectionStateForState:
+ (RTCPeerConnectionState)state;
+
++ (RTCIceConnectionState)iceConnectionStateForNativeState:
+ (webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
+
++ (RTCPeerConnectionState)connectionStateForNativeState:
+ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState;
+
++ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state;
+
++ (NSString *)stringForConnectionState:(RTCPeerConnectionState)state;
+
++ (webrtc::PeerConnectionInterface::IceGatheringState)nativeIceGatheringStateForState:
+ (RTCIceGatheringState)state;
+
++ (RTCIceGatheringState)iceGatheringStateForNativeState:
+ (webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
+
++ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state;
+
++ (webrtc::PeerConnectionInterface::StatsOutputLevel)nativeStatsOutputLevelForLevel:
+ (RTCStatsOutputLevel)level;
+
+@end
+
+NS_ASSUME_NONNULL_END
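Each state-conversion class method declared above follows the same
exhaustive-switch shape as the track-state converters shown earlier; a
representative sketch of one direction (consistent with, but not copied from,
the implementation in RTCPeerConnection.mm):

  + (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
      (RTCSignalingState)state {
    // Cover every enum value with no default, so the compiler warns when a
    // new state is added on either side of the mapping.
    switch (state) {
      case RTCSignalingStateStable:
        return webrtc::PeerConnectionInterface::kStable;
      case RTCSignalingStateHaveLocalOffer:
        return webrtc::PeerConnectionInterface::kHaveLocalOffer;
      case RTCSignalingStateHaveLocalPrAnswer:
        return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer;
      case RTCSignalingStateHaveRemoteOffer:
        return webrtc::PeerConnectionInterface::kHaveRemoteOffer;
      case RTCSignalingStateHaveRemotePrAnswer:
        return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer;
      case RTCSignalingStateClosed:
        return webrtc::PeerConnectionInterface::kClosed;
    }
  }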
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
new file mode 100644
index 0000000000..f8d38143f3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "RTCLegacyStatsReport+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpReceiver+Private.h"
+#import "RTCRtpSender+Private.h"
+#import "RTCStatisticsReport+Private.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+class StatsCollectorCallbackAdapter : public RTCStatsCollectorCallback {
+ public:
+ StatsCollectorCallbackAdapter(RTCStatisticsCompletionHandler completion_handler)
+ : completion_handler_(completion_handler) {}
+
+ void OnStatsDelivered(const rtc::scoped_refptr<const RTCStatsReport> &report) override {
+ RTC_DCHECK(completion_handler_);
+ RTC_OBJC_TYPE(RTCStatisticsReport) *statisticsReport =
+ [[RTC_OBJC_TYPE(RTCStatisticsReport) alloc] initWithReport:*report];
+ completion_handler_(statisticsReport);
+ completion_handler_ = nil;
+ }
+
+ private:
+ RTCStatisticsCompletionHandler completion_handler_;
+};
+
+class StatsObserverAdapter : public StatsObserver {
+ public:
+ StatsObserverAdapter(
+ void (^completionHandler)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats)) {
+ completion_handler_ = completionHandler;
+ }
+
+ ~StatsObserverAdapter() override { completion_handler_ = nil; }
+
+ void OnComplete(const StatsReports& reports) override {
+ RTC_DCHECK(completion_handler_);
+ NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()];
+ for (const auto* report : reports) {
+ RTC_OBJC_TYPE(RTCLegacyStatsReport) *statsReport =
+ [[RTC_OBJC_TYPE(RTCLegacyStatsReport) alloc] initWithNativeReport:*report];
+ [stats addObject:statsReport];
+ }
+ completion_handler_(stats);
+ completion_handler_ = nil;
+ }
+
+ private:
+ void (^completion_handler_)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats);
+};
+} // namespace webrtc
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnection)
+(Stats)
+
+ - (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler
+ : (RTCStatisticsCompletionHandler)completionHandler {
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
+ self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector);
+}
+
+- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
+ completionHandler:(RTCStatisticsCompletionHandler)completionHandler {
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
+ self.nativePeerConnection->GetStats(receiver.nativeRtpReceiver, collector);
+}
+
+- (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler {
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
+ self.nativePeerConnection->GetStats(collector.get());
+}
+
+- (void)statsForTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack
+ statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
+ completionHandler:
+ (void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler {
+ rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::StatsObserverAdapter>(completionHandler);
+ webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
+ [[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
+ self.nativePeerConnection->GetStats(
+ observer.get(), mediaStreamTrack.nativeTrack.get(), nativeOutputLevel);
+}
+
+@end
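Note that both adapter classes above fire their completion handler exactly once
and then drop it, so every GetStats call needs a freshly constructed adapter. A
minimal caller-side sketch of the v2 API (`peerConnection` assumed to exist):

  [peerConnection statisticsWithCompletionHandler:^(
                      RTC_OBJC_TYPE(RTCStatisticsReport) *report) {
    // Invoked once the native stats collection delivers its report.
    NSLog(@"statistics: %@", report);
  }];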
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.h
new file mode 100644
index 0000000000..55af6868fd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.h
@@ -0,0 +1,398 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCConfiguration);
+@class RTC_OBJC_TYPE(RTCDataChannel);
+@class RTC_OBJC_TYPE(RTCDataChannelConfiguration);
+@class RTC_OBJC_TYPE(RTCIceCandidate);
+@class RTC_OBJC_TYPE(RTCIceCandidateErrorEvent);
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCMediaStream);
+@class RTC_OBJC_TYPE(RTCMediaStreamTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCRtpReceiver);
+@class RTC_OBJC_TYPE(RTCRtpSender);
+@class RTC_OBJC_TYPE(RTCRtpTransceiver);
+@class RTC_OBJC_TYPE(RTCRtpTransceiverInit);
+@class RTC_OBJC_TYPE(RTCSessionDescription);
+@class RTC_OBJC_TYPE(RTCStatisticsReport);
+@class RTC_OBJC_TYPE(RTCLegacyStatsReport);
+
+typedef NS_ENUM(NSInteger, RTCRtpMediaType);
+
+NS_ASSUME_NONNULL_BEGIN
+
+extern NSString *const kRTCPeerConnectionErrorDomain;
+extern int const kRTCSessionDescriptionErrorCode;
+
+/** Represents the signaling state of the peer connection. */
+typedef NS_ENUM(NSInteger, RTCSignalingState) {
+ RTCSignalingStateStable,
+ RTCSignalingStateHaveLocalOffer,
+ RTCSignalingStateHaveLocalPrAnswer,
+ RTCSignalingStateHaveRemoteOffer,
+ RTCSignalingStateHaveRemotePrAnswer,
+  RTCSignalingStateClosed,
+};
+
+/** Represents the ice connection state of the peer connection. */
+typedef NS_ENUM(NSInteger, RTCIceConnectionState) {
+ RTCIceConnectionStateNew,
+ RTCIceConnectionStateChecking,
+ RTCIceConnectionStateConnected,
+ RTCIceConnectionStateCompleted,
+ RTCIceConnectionStateFailed,
+ RTCIceConnectionStateDisconnected,
+ RTCIceConnectionStateClosed,
+  // Not an actual state, represents the total number of states.
+  RTCIceConnectionStateCount,
+};
+
+/** Represents the combined ice+dtls connection state of the peer connection. */
+typedef NS_ENUM(NSInteger, RTCPeerConnectionState) {
+ RTCPeerConnectionStateNew,
+ RTCPeerConnectionStateConnecting,
+ RTCPeerConnectionStateConnected,
+ RTCPeerConnectionStateDisconnected,
+ RTCPeerConnectionStateFailed,
+ RTCPeerConnectionStateClosed,
+};
+
+/** Represents the ice gathering state of the peer connection. */
+typedef NS_ENUM(NSInteger, RTCIceGatheringState) {
+ RTCIceGatheringStateNew,
+ RTCIceGatheringStateGathering,
+ RTCIceGatheringStateComplete,
+};
+
+/** Represents the stats output level. */
+typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) {
+ RTCStatsOutputLevelStandard,
+ RTCStatsOutputLevelDebug,
+};
+
+typedef void (^RTCCreateSessionDescriptionCompletionHandler)(RTC_OBJC_TYPE(RTCSessionDescription) *
+ _Nullable sdp,
+ NSError *_Nullable error);
+
+typedef void (^RTCSetSessionDescriptionCompletionHandler)(NSError *_Nullable error);
+
+@class RTC_OBJC_TYPE(RTCPeerConnection);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCPeerConnectionDelegate)<NSObject>
+
+ /** Called when the SignalingState changed. */
+ - (void)peerConnection
+ : (RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeSignalingState
+ : (RTCSignalingState)stateChanged;
+
+/** Called when media is received on a new stream from remote peer. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
+
+/** Called when a remote peer closes a stream.
+ * This is not called when RTCSdpSemanticsUnifiedPlan is specified.
+ */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
+
+/** Called when negotiation is needed, for example ICE has restarted. */
+- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection;
+
+/** Called any time the IceConnectionState changes. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeIceConnectionState:(RTCIceConnectionState)newState;
+
+/** Called any time the IceGatheringState changes. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeIceGatheringState:(RTCIceGatheringState)newState;
+
+/** New ice candidate has been found. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate;
+
+/** Called when a group of local Ice candidates have been removed. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
+
+/** New data channel has been opened. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel;
+
+@optional
+/** Called any time the IceConnectionState changes following a standardized
+ * transition. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didChangeStandardizedIceConnectionState:(RTCIceConnectionState)newState;
+
+/** Called any time the PeerConnectionState changes. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didChangeConnectionState:(RTCPeerConnectionState)newState;
+
+/** Called when signaling indicates a transceiver will be receiving media from
+ * the remote endpoint.
+ * This is only called with RTCSdpSemanticsUnifiedPlan specified.
+ */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver;
+
+/** Called when a receiver and its track are created. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didAddReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver
+ streams:(NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *)mediaStreams;
+
+/** Called when the receiver and its track are removed. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver;
+
+/** Called when the selected ICE candidate pair is changed. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local
+ remoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote
+ lastReceivedMs:(int)lastDataReceivedMs
+ changeReason:(NSString *)reason;
+
+/** Called when gathering of an ICE candidate failed. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didFailToGatherIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event;
+
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCPeerConnection) : NSObject
+
+/** The object that will be notified about events such as state changes and
+ * streams being added or removed.
+ */
+@property(nonatomic, weak, nullable) id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)> delegate;
+/** This property is not available with RTCSdpSemanticsUnifiedPlan. Please use
+ * `senders` instead.
+ */
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *localStreams;
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * localDescription;
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * remoteDescription;
+@property(nonatomic, readonly) RTCSignalingState signalingState;
+@property(nonatomic, readonly) RTCIceConnectionState iceConnectionState;
+@property(nonatomic, readonly) RTCPeerConnectionState connectionState;
+@property(nonatomic, readonly) RTCIceGatheringState iceGatheringState;
+@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCConfiguration) * configuration;
+
+/** Gets all RTCRtpSenders associated with this peer connection.
+ * Note: reading this property returns different instances of RTCRtpSender.
+ * Use isEqual: instead of == to compare RTCRtpSender instances.
+ */
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpSender) *> *senders;
+
+/** Gets all RTCRtpReceivers associated with this peer connection.
+ * Note: reading this property returns different instances of RTCRtpReceiver.
+ * Use isEqual: instead of == to compare RTCRtpReceiver instances.
+ */
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers;
+
+/** Gets all RTCRtpTransceivers associated with this peer connection.
+ * Note: reading this property returns different instances of
+ * RTCRtpTransceiver. Use isEqual: instead of == to compare
+ * RTCRtpTransceiver instances. This is only available with
+ * RTCSdpSemanticsUnifiedPlan specified.
+ */
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpTransceiver) *> *transceivers;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Sets the PeerConnection's global configuration to `configuration`.
+ * Any changes to STUN/TURN servers or ICE candidate policy will affect the
+ * next gathering phase, and cause the next call to createOffer to generate
+ * new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies
+ * cannot be changed with this method.
+ */
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration;
+
+/** Terminate all media and close the transport. */
+- (void)close;
+
+/** Provide a remote candidate to the ICE Agent. */
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate
+ DEPRECATED_MSG_ATTRIBUTE("Please use addIceCandidate:completionHandler: instead");
+
+/** Provide a remote candidate to the ICE Agent. */
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate
+ completionHandler:(void (^)(NSError *_Nullable error))completionHandler;
+
+/** Remove a group of remote candidates from the ICE Agent. */
+- (void)removeIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
+
+/** Add a new media stream to be sent on this peer connection.
+ * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
+ * addTrack instead.
+ */
+- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
+
+/** Remove the given media stream from this peer connection.
+ * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
+ * removeTrack instead.
+ */
+- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
+
+/** Add a new media stream track to be sent on this peer connection, and return
+ * the newly created RTCRtpSender. The RTCRtpSender will be
+ * associated with the streams specified in the `streamIds` list.
+ *
+ * Errors: If an error occurs, returns nil. An error can occur if:
+ * - A sender already exists for the track.
+ * - The peer connection is closed.
+ */
+- (nullable RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+ streamIds:(NSArray<NSString *> *)streamIds;
+
+/** With PlanB semantics, removes an RTCRtpSender from this peer connection.
+ *
+ * With UnifiedPlan semantics, sets sender's track to null and removes the
+ * send component from the associated RTCRtpTransceiver's direction.
+ *
+ * Returns YES on success.
+ */
+- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender;
+
+/** addTransceiver creates a new RTCRtpTransceiver and adds it to the set of
+ * transceivers. Adding a transceiver will cause future calls to CreateOffer
+ * to add a media description for the corresponding transceiver.
+ *
+ * The initial value of `mid` in the returned transceiver is nil. Setting a
+ * new session description may change it to a non-nil value.
+ *
+ * https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver
+ *
+ * Optionally, an RtpTransceiverInit structure can be specified to configure
+ * the transceiver from construction. If not specified, the transceiver will
+ * default to having a direction of kSendRecv and not be part of any streams.
+ *
+ * These methods are only available when Unified Plan is enabled (see
+ * RTCConfiguration).
+ */
+
+/** Adds a transceiver with a sender set to transmit the given track. The kind
+ * of the transceiver (and sender/receiver) will be derived from the kind of
+ * the track.
+ */
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack:
+ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track;
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+ addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+ init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init;
+
+/** Adds a transceiver with the given kind. Can either be RTCRtpMediaTypeAudio
+ * or RTCRtpMediaTypeVideo.
+ */
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType;
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+ addTransceiverOfType:(RTCRtpMediaType)mediaType
+ init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init;
+
+/** Tells the PeerConnection that ICE should be restarted. This triggers a need
+ * for negotiation, and the subsequent offerForConstraints:completionHandler: call will act as if
+ * RTCOfferAnswerOptions::ice_restart is true.
+ */
+- (void)restartIce;
+
+/** Generate an SDP offer. */
+- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler;
+
+/** Generate an SDP answer. */
+- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler;
+
+/** Apply the supplied RTCSessionDescription as the local description. */
+- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler;
+
+/** Creates an offer or answer (depending on current signaling state) and sets
+ * it as the local session description. */
+- (void)setLocalDescriptionWithCompletionHandler:
+ (RTCSetSessionDescriptionCompletionHandler)completionHandler;
+
+/** Apply the supplied RTCSessionDescription as the remote description. */
+- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler;
+
+/** Limits the bandwidth allocated for all RTP streams sent by this
+ * PeerConnection. Nil parameters will be unchanged. Setting
+ * `currentBitrateBps` will force the available bitrate estimate to the given
+ * value. Returns YES if the parameters were successfully updated.
+ */
+- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps
+ currentBitrateBps:(nullable NSNumber *)currentBitrateBps
+ maxBitrateBps:(nullable NSNumber *)maxBitrateBps;
+
+/** Start or stop recording an Rtc EventLog. */
+- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes;
+- (void)stopRtcEventLog;
+
+@end
+
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(Media)
+
+ /** Create an RTCRtpSender with the specified kind and media stream ID.
+ * See RTCMediaStreamTrack.h for available kinds.
+ * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
+ * addTransceiver instead.
+ */
+ - (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind : (NSString *)kind streamId
+ : (NSString *)streamId;
+
+@end
+
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(DataChannel)
+
+ /** Create a new data channel with the given label and configuration. */
+ - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel
+ : (NSString *)label configuration : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration;
+
+@end
+
+typedef void (^RTCStatisticsCompletionHandler)(RTC_OBJC_TYPE(RTCStatisticsReport) *);
+
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(Stats)
+
+ /** Gather stats for the given RTCMediaStreamTrack. If `mediaStreamTrack` is nil,
+ * statistics are gathered for all tracks.
+ */
+ - (void)statsForTrack
+ : (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel
+ : (RTCStatsOutputLevel)statsOutputLevel completionHandler
+ : (nullable void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler;
+
+/** Gather statistics through the v2 statistics API. */
+- (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler;
+
+/** Spec-compliant getStats() performing the stats selection algorithm with the
+ * sender.
+ */
+- (void)statisticsForSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender
+ completionHandler:(RTCStatisticsCompletionHandler)completionHandler;
+
+/** Spec-compliant getStats() performing the stats selection algorithm with the
+ * receiver.
+ */
+- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
+ completionHandler:(RTCStatisticsCompletionHandler)completionHandler;
+
+@end
+
+NS_ASSUME_NONNULL_END
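A condensed caller-side sketch of the canonical offer flow defined by this
header; `factory`, `constraints`, and `delegate` are assumed to exist, and
peerConnectionWithConfiguration:constraints:delegate: comes from
RTCPeerConnectionFactory.h, which is not part of this file:

  RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
  RTC_OBJC_TYPE(RTCPeerConnection) *pc =
      [factory peerConnectionWithConfiguration:config constraints:constraints delegate:delegate];
  [pc offerForConstraints:constraints
        completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *sdp, NSError *error) {
          if (error != nil) {
            NSLog(@"createOffer failed: %@", error);
            return;
          }
          // Apply the offer locally, then forward `sdp` over the app's
          // signaling channel to the remote peer.
          [pc setLocalDescription:sdp
                completionHandler:^(NSError *setError){
                }];
        }];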
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm
new file mode 100644
index 0000000000..f4db472380
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm
@@ -0,0 +1,939 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "RTCConfiguration+Private.h"
+#import "RTCDataChannel+Private.h"
+#import "RTCIceCandidate+Private.h"
+#import "RTCIceCandidateErrorEvent+Private.h"
+#import "RTCLegacyStatsReport+Private.h"
+#import "RTCMediaConstraints+Private.h"
+#import "RTCMediaStream+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCRtpReceiver+Private.h"
+#import "RTCRtpSender+Private.h"
+#import "RTCRtpTransceiver+Private.h"
+#import "RTCSessionDescription+Private.h"
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include <memory>
+
+#include "api/jsep_ice_candidate.h"
+#include "api/rtc_event_log_output_file.h"
+#include "api/set_local_description_observer_interface.h"
+#include "api/set_remote_description_observer_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "sdk/objc/native/api/ssl_certificate_verifier.h"
+
+NSString *const kRTCPeerConnectionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)";
+int const kRTCPeerConnnectionSessionDescriptionError = -1;
+
+namespace {
+
+class SetSessionDescriptionObserver : public webrtc::SetLocalDescriptionObserverInterface,
+ public webrtc::SetRemoteDescriptionObserverInterface {
+ public:
+ SetSessionDescriptionObserver(RTCSetSessionDescriptionCompletionHandler completionHandler) {
+ completion_handler_ = completionHandler;
+ }
+
+  virtual void OnSetLocalDescriptionComplete(webrtc::RTCError error) override {
+    OnComplete(error);
+  }
+
+  virtual void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override {
+    OnComplete(error);
+  }
+
+ private:
+  void OnComplete(webrtc::RTCError error) {
+ RTC_DCHECK(completion_handler_ != nil);
+ if (error.ok()) {
+ completion_handler_(nil);
+ } else {
+ // TODO(hta): Add handling of error.type()
+ NSString *str = [NSString stringForStdString:error.message()];
+ NSError *err = [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
+ code:kRTCPeerConnnectionSessionDescriptionError
+ userInfo:@{NSLocalizedDescriptionKey : str}];
+ completion_handler_(err);
+ }
+ completion_handler_ = nil;
+ }
+ RTCSetSessionDescriptionCompletionHandler completion_handler_;
+};
+
+} // anonymous namespace
+
+namespace webrtc {
+
+class CreateSessionDescriptionObserverAdapter
+ : public CreateSessionDescriptionObserver {
+ public:
+ CreateSessionDescriptionObserverAdapter(void (^completionHandler)(
+ RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, NSError *error)) {
+ completion_handler_ = completionHandler;
+ }
+
+ ~CreateSessionDescriptionObserverAdapter() override { completion_handler_ = nil; }
+
+ void OnSuccess(SessionDescriptionInterface *desc) override {
+ RTC_DCHECK(completion_handler_);
+ std::unique_ptr<webrtc::SessionDescriptionInterface> description =
+ std::unique_ptr<webrtc::SessionDescriptionInterface>(desc);
+ RTC_OBJC_TYPE(RTCSessionDescription) *session =
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description.get()];
+ completion_handler_(session, nil);
+ completion_handler_ = nil;
+ }
+
+ void OnFailure(RTCError error) override {
+ RTC_DCHECK(completion_handler_);
+ // TODO(hta): Add handling of error.type()
+ NSString *str = [NSString stringForStdString:error.message()];
+ NSError* err =
+ [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
+ code:kRTCPeerConnnectionSessionDescriptionError
+ userInfo:@{ NSLocalizedDescriptionKey : str }];
+ completion_handler_(nil, err);
+ completion_handler_ = nil;
+ }
+
+ private:
+ void (^completion_handler_)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription,
+ NSError *error);
+};
+
+PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) *
+ peerConnection) {
+ peer_connection_ = peerConnection;
+}
+
+PeerConnectionDelegateAdapter::~PeerConnectionDelegateAdapter() {
+ peer_connection_ = nil;
+}
+
+void PeerConnectionDelegateAdapter::OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) {
+ RTCSignalingState state =
+ [[RTC_OBJC_TYPE(RTCPeerConnection) class] signalingStateForNativeState:new_state];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnection:peer_connection
+ didChangeSignalingState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnAddStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+ [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+ nativeMediaStream:stream];
+ [peer_connection.delegate peerConnection:peer_connection
+ didAddStream:mediaStream];
+}
+
+void PeerConnectionDelegateAdapter::OnRemoveStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+ [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+ nativeMediaStream:stream];
+
+ [peer_connection.delegate peerConnection:peer_connection
+ didRemoveStream:mediaStream];
+}
+
+void PeerConnectionDelegateAdapter::OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> nativeTransceiver) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver =
+ [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:peer_connection.factory
+ nativeRtpTransceiver:nativeTransceiver];
+ if ([peer_connection.delegate
+ respondsToSelector:@selector(peerConnection:didStartReceivingOnTransceiver:)]) {
+ [peer_connection.delegate peerConnection:peer_connection
+ didStartReceivingOnTransceiver:transceiver];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> data_channel) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCDataChannel) *dataChannel =
+ [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:peer_connection.factory
+ nativeDataChannel:data_channel];
+ [peer_connection.delegate peerConnection:peer_connection
+ didOpenDataChannel:dataChannel];
+}
+
+void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnectionShouldNegotiate:peer_connection];
+}
+
+void PeerConnectionDelegateAdapter::OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ RTCIceConnectionState state =
+ [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state];
+ [peer_connection_.delegate peerConnection:peer_connection_ didChangeIceConnectionState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ if ([peer_connection_.delegate
+ respondsToSelector:@selector(peerConnection:didChangeStandardizedIceConnectionState:)]) {
+ RTCIceConnectionState state =
+ [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state];
+ [peer_connection_.delegate peerConnection:peer_connection_
+ didChangeStandardizedIceConnectionState:state];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnConnectionChange(
+ PeerConnectionInterface::PeerConnectionState new_state) {
+ if ([peer_connection_.delegate
+ respondsToSelector:@selector(peerConnection:didChangeConnectionState:)]) {
+ RTCPeerConnectionState state =
+ [RTC_OBJC_TYPE(RTCPeerConnection) connectionStateForNativeState:new_state];
+ [peer_connection_.delegate peerConnection:peer_connection_ didChangeConnectionState:state];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {
+ RTCIceGatheringState state =
+ [[RTC_OBJC_TYPE(RTCPeerConnection) class] iceGatheringStateForNativeState:new_state];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnection:peer_connection
+ didChangeIceGatheringState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnIceCandidate(
+ const IceCandidateInterface *candidate) {
+ RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate =
+ [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnection:peer_connection
+ didGenerateIceCandidate:iceCandidate];
+}
+
+void PeerConnectionDelegateAdapter::OnIceCandidateError(const std::string &address,
+ int port,
+ const std::string &url,
+ int error_code,
+ const std::string &error_text) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *event =
+ [[RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) alloc] initWithAddress:address
+ port:port
+ url:url
+ errorCode:error_code
+ errorText:error_text];
+ if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:
+ didFailToGatherIceCandidate:)]) {
+ [peer_connection.delegate peerConnection:peer_connection didFailToGatherIceCandidate:event];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) {
+ NSMutableArray* ice_candidates =
+ [NSMutableArray arrayWithCapacity:candidates.size()];
+ for (const auto& candidate : candidates) {
+ std::unique_ptr<JsepIceCandidate> candidate_wrapper(
+ new JsepIceCandidate(candidate.transport_name(), -1, candidate));
+ RTC_OBJC_TYPE(RTCIceCandidate) *ice_candidate =
+ [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate_wrapper.get()];
+ [ice_candidates addObject:ice_candidate];
+ }
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnection:peer_connection
+ didRemoveIceCandidates:ice_candidates];
+}
+
+void PeerConnectionDelegateAdapter::OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent &event) {
+ const auto &selected_pair = event.selected_candidate_pair;
+ auto local_candidate_wrapper = std::make_unique<JsepIceCandidate>(
+ selected_pair.local_candidate().transport_name(), -1, selected_pair.local_candidate());
+ RTC_OBJC_TYPE(RTCIceCandidate) *local_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc]
+ initWithNativeCandidate:local_candidate_wrapper.release()];
+ auto remote_candidate_wrapper = std::make_unique<JsepIceCandidate>(
+ selected_pair.remote_candidate().transport_name(), -1, selected_pair.remote_candidate());
+ RTC_OBJC_TYPE(RTCIceCandidate) *remote_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc]
+ initWithNativeCandidate:remote_candidate_wrapper.release()];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ NSString *nsstr_reason = [NSString stringForStdString:event.reason];
+ if ([peer_connection.delegate
+ respondsToSelector:@selector
+ (peerConnection:didChangeLocalCandidate:remoteCandidate:lastReceivedMs:changeReason:)]) {
+ [peer_connection.delegate peerConnection:peer_connection
+ didChangeLocalCandidate:local_candidate
+ remoteCandidate:remote_candidate
+ lastReceivedMs:event.last_data_received_ms
+ changeReason:nsstr_reason];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnAddTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>> &streams) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:
+ didAddReceiver:streams:)]) {
+ NSMutableArray *mediaStreams = [NSMutableArray arrayWithCapacity:streams.size()];
+ for (const auto &nativeStream : streams) {
+ RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+ [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+ nativeMediaStream:nativeStream];
+ [mediaStreams addObject:mediaStream];
+ }
+ RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver =
+ [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory
+ nativeRtpReceiver:receiver];
+
+ [peer_connection.delegate peerConnection:peer_connection
+ didAddReceiver:rtpReceiver
+ streams:mediaStreams];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didRemoveReceiver:)]) {
+ RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver =
+ [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory
+ nativeRtpReceiver:receiver];
+ [peer_connection.delegate peerConnection:peer_connection didRemoveReceiver:rtpReceiver];
+ }
+}
+
+} // namespace webrtc
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnection) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ NSMutableArray<RTC_OBJC_TYPE(RTCMediaStream) *> *_localStreams;
+ std::unique_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
+ std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
+ BOOL _hasStartedRtcEventLog;
+}
+
+@synthesize delegate = _delegate;
+@synthesize factory = _factory;
+
+- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ certificateVerifier:
+ (nullable id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier
+ delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ NSParameterAssert(factory);
+ std::unique_ptr<webrtc::PeerConnectionDependencies> dependencies =
+ std::make_unique<webrtc::PeerConnectionDependencies>(nullptr);
+ if (certificateVerifier != nil) {
+ dependencies->tls_cert_verifier = webrtc::ObjCToNativeCertificateVerifier(certificateVerifier);
+ }
+ return [self initWithDependencies:factory
+ configuration:configuration
+ constraints:constraints
+ dependencies:std::move(dependencies)
+ delegate:delegate];
+}
+
+- (nullable instancetype)
+ initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
+ delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ NSParameterAssert(factory);
+ NSParameterAssert(dependencies.get());
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
+ [configuration createNativeConfiguration]);
+ if (!config) {
+ return nil;
+ }
+ if (self = [super init]) {
+ _observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
+ _nativeConstraints = constraints.nativeConstraints;
+ CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(), config.get());
+
+    webrtc::PeerConnectionDependencies deps = std::move(*dependencies);
+ deps.observer = _observer.get();
+ auto result = factory.nativeFactory->CreatePeerConnectionOrError(*config, std::move(deps));
+
+ if (!result.ok()) {
+ return nil;
+ }
+ _peerConnection = result.MoveValue();
+ _factory = factory;
+ _localStreams = [[NSMutableArray alloc] init];
+ _delegate = delegate;
+ }
+ return self;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *)localStreams {
+ return [_localStreams copy];
+}
+
+- (RTC_OBJC_TYPE(RTCSessionDescription) *)localDescription {
+ // It's only safe to operate on SessionDescriptionInterface on the signaling thread.
+ return _peerConnection->signaling_thread()->Invoke<RTC_OBJC_TYPE(RTCSessionDescription) *>(
+ RTC_FROM_HERE, [self] {
+ const webrtc::SessionDescriptionInterface *description =
+ _peerConnection->local_description();
+ return description ?
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] :
+ nil;
+ });
+}
+
+- (RTC_OBJC_TYPE(RTCSessionDescription) *)remoteDescription {
+ // It's only safe to operate on SessionDescriptionInterface on the signaling thread.
+ return _peerConnection->signaling_thread()->Invoke<RTC_OBJC_TYPE(RTCSessionDescription) *>(
+ RTC_FROM_HERE, [self] {
+ const webrtc::SessionDescriptionInterface *description =
+ _peerConnection->remote_description();
+ return description ?
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] :
+ nil;
+ });
+}
+
+- (RTCSignalingState)signalingState {
+ return [[self class]
+ signalingStateForNativeState:_peerConnection->signaling_state()];
+}
+
+- (RTCIceConnectionState)iceConnectionState {
+ return [[self class] iceConnectionStateForNativeState:
+ _peerConnection->ice_connection_state()];
+}
+
+- (RTCPeerConnectionState)connectionState {
+ return [[self class] connectionStateForNativeState:_peerConnection->peer_connection_state()];
+}
+
+- (RTCIceGatheringState)iceGatheringState {
+ return [[self class] iceGatheringStateForNativeState:
+ _peerConnection->ice_gathering_state()];
+}
+
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration {
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
+ [configuration createNativeConfiguration]);
+ if (!config) {
+ return NO;
+ }
+ CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
+ config.get());
+ return _peerConnection->SetConfiguration(*config).ok();
+}
+
+- (RTC_OBJC_TYPE(RTCConfiguration) *)configuration {
+ webrtc::PeerConnectionInterface::RTCConfiguration config =
+ _peerConnection->GetConfiguration();
+ return [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:config];
+}
+
+- (void)close {
+ _peerConnection->Close();
+}
+
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
+ std::unique_ptr<const webrtc::IceCandidateInterface> iceCandidate(
+ candidate.nativeCandidate);
+ _peerConnection->AddIceCandidate(iceCandidate.get());
+}
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate
+ completionHandler:(void (^)(NSError *_Nullable error))completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ _peerConnection->AddIceCandidate(
+ candidate.nativeCandidate, [completionHandler](const auto &error) {
+ if (error.ok()) {
+ completionHandler(nil);
+ } else {
+ NSString *str = [NSString stringForStdString:error.message()];
+ NSError *err = [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
+ code:static_cast<NSInteger>(error.type())
+ userInfo:@{NSLocalizedDescriptionKey : str}];
+ completionHandler(err);
+ }
+ });
+}
+- (void)removeIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)iceCandidates {
+ std::vector<cricket::Candidate> candidates;
+ for (RTC_OBJC_TYPE(RTCIceCandidate) * iceCandidate in iceCandidates) {
+ std::unique_ptr<const webrtc::IceCandidateInterface> candidate(
+ iceCandidate.nativeCandidate);
+ if (candidate) {
+ candidates.push_back(candidate->candidate());
+ // Need to fill the transport name from the sdp_mid.
+ candidates.back().set_transport_name(candidate->sdp_mid());
+ }
+ }
+ if (!candidates.empty()) {
+ _peerConnection->RemoveIceCandidates(candidates);
+ }
+}
+
+- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
+ if (!_peerConnection->AddStream(stream.nativeMediaStream.get())) {
+ RTCLogError(@"Failed to add stream: %@", stream);
+ return;
+ }
+ [_localStreams addObject:stream];
+}
+
+- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
+ _peerConnection->RemoveStream(stream.nativeMediaStream.get());
+ [_localStreams removeObject:stream];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+ streamIds:(NSArray<NSString *> *)streamIds {
+ std::vector<std::string> nativeStreamIds;
+ for (NSString *streamId in streamIds) {
+ nativeStreamIds.push_back([streamId UTF8String]);
+ }
+ webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenderOrError =
+ _peerConnection->AddTrack(track.nativeTrack, nativeStreamIds);
+ if (!nativeSenderOrError.ok()) {
+ RTCLogError(@"Failed to add track %@: %s", track, nativeSenderOrError.error().message());
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+ nativeRtpSender:nativeSenderOrError.MoveValue()];
+}
+
+- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
+ bool result = _peerConnection->RemoveTrackOrError(sender.nativeRtpSender).ok();
+ if (!result) {
+ RTCLogError(@"Failed to remote track %@", sender);
+ }
+ return result;
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack:
+ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ return [self addTransceiverWithTrack:track
+ init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+ addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+ init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init {
+ webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
+ _peerConnection->AddTransceiver(track.nativeTrack, init.nativeInit);
+ if (!nativeTransceiverOrError.ok()) {
+ RTCLogError(
+ @"Failed to add transceiver %@: %s", track, nativeTransceiverOrError.error().message());
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc]
+ initWithFactory:self.factory
+ nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType {
+ return [self addTransceiverOfType:mediaType
+ init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+ addTransceiverOfType:(RTCRtpMediaType)mediaType
+ init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init {
+ webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
+ _peerConnection->AddTransceiver(
+ [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:mediaType], init.nativeInit);
+ if (!nativeTransceiverOrError.ok()) {
+ RTCLogError(@"Failed to add transceiver %@: %s",
+ [RTC_OBJC_TYPE(RTCRtpReceiver) stringForMediaType:mediaType],
+ nativeTransceiverOrError.error().message());
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc]
+ initWithFactory:self.factory
+ nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
+}
+
+- (void)restartIce {
+ _peerConnection->RestartIce();
+}
+
+- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
+ CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
+
+ _peerConnection->CreateOffer(observer.get(), options);
+}
+
+- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
+ CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
+
+ _peerConnection->CreateAnswer(observer.get(), options);
+}
+
+- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
+ _peerConnection->SetLocalDescription(sdp.nativeDescription, observer);
+}
+
+- (void)setLocalDescriptionWithCompletionHandler:
+ (RTCSetSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
+ _peerConnection->SetLocalDescription(observer);
+}
+
+- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
+ _peerConnection->SetRemoteDescription(sdp.nativeDescription, observer);
+}
+
+- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps
+ currentBitrateBps:(nullable NSNumber *)currentBitrateBps
+ maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
+ webrtc::BitrateSettings params;
+ if (minBitrateBps != nil) {
+ params.min_bitrate_bps = absl::optional<int>(minBitrateBps.intValue);
+ }
+ if (currentBitrateBps != nil) {
+ params.start_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
+ }
+ if (maxBitrateBps != nil) {
+ params.max_bitrate_bps = absl::optional<int>(maxBitrateBps.intValue);
+ }
+ return _peerConnection->SetBitrate(params).ok();
+}
+
+- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath
+ maxSizeInBytes:(int64_t)maxSizeInBytes {
+ RTC_DCHECK(filePath.length);
+ RTC_DCHECK_GT(maxSizeInBytes, 0);
+ RTC_DCHECK(!_hasStartedRtcEventLog);
+ if (_hasStartedRtcEventLog) {
+ RTCLogError(@"Event logging already started.");
+ return NO;
+ }
+ FILE *f = fopen(filePath.UTF8String, "wb");
+ if (!f) {
+ RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
+ return NO;
+ }
+ // TODO(eladalon): It would be better to not allow negative values into PC.
+ const size_t max_size = (maxSizeInBytes < 0) ? webrtc::RtcEventLog::kUnlimitedOutput :
+ rtc::saturated_cast<size_t>(maxSizeInBytes);
+
+ _hasStartedRtcEventLog = _peerConnection->StartRtcEventLog(
+ std::make_unique<webrtc::RtcEventLogOutputFile>(f, max_size));
+ return _hasStartedRtcEventLog;
+}
+
+- (void)stopRtcEventLog {
+ _peerConnection->StopRtcEventLog();
+ _hasStartedRtcEventLog = NO;
+}
+
+- (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId {
+ std::string nativeKind = [NSString stdStringForString:kind];
+ std::string nativeStreamId = [NSString stdStringForString:streamId];
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeSender(
+ _peerConnection->CreateSender(nativeKind, nativeStreamId));
+ return nativeSender ? [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+ nativeRtpSender:nativeSender] :
+ nil;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCRtpSender) *> *)senders {
+ std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
+ _peerConnection->GetSenders());
+ NSMutableArray *senders = [[NSMutableArray alloc] init];
+ for (const auto &nativeSender : nativeSenders) {
+ RTC_OBJC_TYPE(RTCRtpSender) *sender =
+ [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+ nativeRtpSender:nativeSender];
+ [senders addObject:sender];
+ }
+ return senders;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *)receivers {
+ std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> nativeReceivers(
+ _peerConnection->GetReceivers());
+ NSMutableArray *receivers = [[NSMutableArray alloc] init];
+ for (const auto &nativeReceiver : nativeReceivers) {
+ RTC_OBJC_TYPE(RTCRtpReceiver) *receiver =
+ [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:self.factory
+ nativeRtpReceiver:nativeReceiver];
+ [receivers addObject:receiver];
+ }
+ return receivers;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCRtpTransceiver) *> *)transceivers {
+ std::vector<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceivers(
+ _peerConnection->GetTransceivers());
+ NSMutableArray *transceivers = [[NSMutableArray alloc] init];
+ for (const auto &nativeTransceiver : nativeTransceivers) {
+ RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver =
+ [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:self.factory
+ nativeRtpTransceiver:nativeTransceiver];
+ [transceivers addObject:transceiver];
+ }
+ return transceivers;
+}
+
+#pragma mark - Private
+
++ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
+ (RTCSignalingState)state {
+ switch (state) {
+ case RTCSignalingStateStable:
+ return webrtc::PeerConnectionInterface::kStable;
+ case RTCSignalingStateHaveLocalOffer:
+ return webrtc::PeerConnectionInterface::kHaveLocalOffer;
+ case RTCSignalingStateHaveLocalPrAnswer:
+ return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer;
+ case RTCSignalingStateHaveRemoteOffer:
+ return webrtc::PeerConnectionInterface::kHaveRemoteOffer;
+ case RTCSignalingStateHaveRemotePrAnswer:
+ return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer;
+ case RTCSignalingStateClosed:
+ return webrtc::PeerConnectionInterface::kClosed;
+ }
+}
+
++ (RTCSignalingState)signalingStateForNativeState:
+ (webrtc::PeerConnectionInterface::SignalingState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kStable:
+ return RTCSignalingStateStable;
+ case webrtc::PeerConnectionInterface::kHaveLocalOffer:
+ return RTCSignalingStateHaveLocalOffer;
+ case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
+ return RTCSignalingStateHaveLocalPrAnswer;
+ case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
+ return RTCSignalingStateHaveRemoteOffer;
+ case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
+ return RTCSignalingStateHaveRemotePrAnswer;
+ case webrtc::PeerConnectionInterface::kClosed:
+ return RTCSignalingStateClosed;
+ }
+}
+
++ (NSString *)stringForSignalingState:(RTCSignalingState)state {
+ switch (state) {
+ case RTCSignalingStateStable:
+ return @"STABLE";
+ case RTCSignalingStateHaveLocalOffer:
+ return @"HAVE_LOCAL_OFFER";
+ case RTCSignalingStateHaveLocalPrAnswer:
+ return @"HAVE_LOCAL_PRANSWER";
+ case RTCSignalingStateHaveRemoteOffer:
+ return @"HAVE_REMOTE_OFFER";
+ case RTCSignalingStateHaveRemotePrAnswer:
+ return @"HAVE_REMOTE_PRANSWER";
+ case RTCSignalingStateClosed:
+ return @"CLOSED";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeConnectionStateForState:
+ (RTCPeerConnectionState)state {
+ switch (state) {
+ case RTCPeerConnectionStateNew:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kNew;
+ case RTCPeerConnectionStateConnecting:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting;
+ case RTCPeerConnectionStateConnected:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kConnected;
+ case RTCPeerConnectionStateFailed:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kFailed;
+ case RTCPeerConnectionStateDisconnected:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected;
+ case RTCPeerConnectionStateClosed:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kClosed;
+ }
+}
+
++ (RTCPeerConnectionState)connectionStateForNativeState:
+ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kNew:
+ return RTCPeerConnectionStateNew;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting:
+ return RTCPeerConnectionStateConnecting;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kConnected:
+ return RTCPeerConnectionStateConnected;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kFailed:
+ return RTCPeerConnectionStateFailed;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected:
+ return RTCPeerConnectionStateDisconnected;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kClosed:
+ return RTCPeerConnectionStateClosed;
+ }
+}
+
++ (NSString *)stringForConnectionState:(RTCPeerConnectionState)state {
+ switch (state) {
+ case RTCPeerConnectionStateNew:
+ return @"NEW";
+ case RTCPeerConnectionStateConnecting:
+ return @"CONNECTING";
+ case RTCPeerConnectionStateConnected:
+ return @"CONNECTED";
+ case RTCPeerConnectionStateFailed:
+ return @"FAILED";
+ case RTCPeerConnectionStateDisconnected:
+ return @"DISCONNECTED";
+ case RTCPeerConnectionStateClosed:
+ return @"CLOSED";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::IceConnectionState)
+ nativeIceConnectionStateForState:(RTCIceConnectionState)state {
+ switch (state) {
+ case RTCIceConnectionStateNew:
+ return webrtc::PeerConnectionInterface::kIceConnectionNew;
+ case RTCIceConnectionStateChecking:
+ return webrtc::PeerConnectionInterface::kIceConnectionChecking;
+ case RTCIceConnectionStateConnected:
+ return webrtc::PeerConnectionInterface::kIceConnectionConnected;
+ case RTCIceConnectionStateCompleted:
+ return webrtc::PeerConnectionInterface::kIceConnectionCompleted;
+ case RTCIceConnectionStateFailed:
+ return webrtc::PeerConnectionInterface::kIceConnectionFailed;
+ case RTCIceConnectionStateDisconnected:
+ return webrtc::PeerConnectionInterface::kIceConnectionDisconnected;
+ case RTCIceConnectionStateClosed:
+ return webrtc::PeerConnectionInterface::kIceConnectionClosed;
+ case RTCIceConnectionStateCount:
+ return webrtc::PeerConnectionInterface::kIceConnectionMax;
+ }
+}
+
++ (RTCIceConnectionState)iceConnectionStateForNativeState:
+ (webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kIceConnectionNew:
+ return RTCIceConnectionStateNew;
+ case webrtc::PeerConnectionInterface::kIceConnectionChecking:
+ return RTCIceConnectionStateChecking;
+ case webrtc::PeerConnectionInterface::kIceConnectionConnected:
+ return RTCIceConnectionStateConnected;
+ case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
+ return RTCIceConnectionStateCompleted;
+ case webrtc::PeerConnectionInterface::kIceConnectionFailed:
+ return RTCIceConnectionStateFailed;
+ case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
+ return RTCIceConnectionStateDisconnected;
+ case webrtc::PeerConnectionInterface::kIceConnectionClosed:
+ return RTCIceConnectionStateClosed;
+ case webrtc::PeerConnectionInterface::kIceConnectionMax:
+ return RTCIceConnectionStateCount;
+ }
+}
+
++ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state {
+ switch (state) {
+ case RTCIceConnectionStateNew:
+ return @"NEW";
+ case RTCIceConnectionStateChecking:
+ return @"CHECKING";
+ case RTCIceConnectionStateConnected:
+ return @"CONNECTED";
+ case RTCIceConnectionStateCompleted:
+ return @"COMPLETED";
+ case RTCIceConnectionStateFailed:
+ return @"FAILED";
+ case RTCIceConnectionStateDisconnected:
+ return @"DISCONNECTED";
+ case RTCIceConnectionStateClosed:
+ return @"CLOSED";
+ case RTCIceConnectionStateCount:
+ return @"COUNT";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::IceGatheringState)
+ nativeIceGatheringStateForState:(RTCIceGatheringState)state {
+ switch (state) {
+ case RTCIceGatheringStateNew:
+ return webrtc::PeerConnectionInterface::kIceGatheringNew;
+ case RTCIceGatheringStateGathering:
+ return webrtc::PeerConnectionInterface::kIceGatheringGathering;
+ case RTCIceGatheringStateComplete:
+ return webrtc::PeerConnectionInterface::kIceGatheringComplete;
+ }
+}
+
++ (RTCIceGatheringState)iceGatheringStateForNativeState:
+ (webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kIceGatheringNew:
+ return RTCIceGatheringStateNew;
+ case webrtc::PeerConnectionInterface::kIceGatheringGathering:
+ return RTCIceGatheringStateGathering;
+ case webrtc::PeerConnectionInterface::kIceGatheringComplete:
+ return RTCIceGatheringStateComplete;
+ }
+}
+
++ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state {
+ switch (state) {
+ case RTCIceGatheringStateNew:
+ return @"NEW";
+ case RTCIceGatheringStateGathering:
+ return @"GATHERING";
+ case RTCIceGatheringStateComplete:
+ return @"COMPLETE";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::StatsOutputLevel)
+ nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level {
+ switch (level) {
+ case RTCStatsOutputLevelStandard:
+ return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard;
+ case RTCStatsOutputLevelDebug:
+ return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug;
+ }
+}
+
+- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)nativePeerConnection {
+ return _peerConnection;
+}
+
+@end
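
For context, the completion-handler methods above chain into the standard offer/answer flow. A minimal sketch, assuming `pc` is an existing RTC_OBJC_TYPE(RTCPeerConnection), `constraints` an application-provided RTCMediaConstraints, and a signaling channel supplied by the application (error handling elided):

    [pc offerForConstraints:constraints
          completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *sdp,
                              NSError *error) {
      if (error != nil) {
        return;
      }
      [pc setLocalDescription:sdp
            completionHandler:^(NSError *sldError) {
        if (sldError == nil) {
          // Hand sdp off to the remote peer via the application's signaling.
        }
      }];
    }];
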
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
new file mode 100644
index 0000000000..f361b9f0ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory.h"
+
+#include "api/scoped_refptr.h"
+
+namespace webrtc {
+
+class AudioDeviceModule;
+class AudioEncoderFactory;
+class AudioDecoderFactory;
+class NetworkControllerFactoryInterface;
+class VideoEncoderFactory;
+class VideoDecoderFactory;
+class AudioProcessing;
+struct PeerConnectionDependencies;
+
+} // namespace webrtc
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * This class extension exposes methods that work directly with injectable C++ components.
+ */
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory)
+()
+
+ - (instancetype)initNative NS_DESIGNATED_INITIALIZER;
+
+/* Initializer used when WebRTC is compiled with no media support */
+- (instancetype)initWithNoMedia;
+
+/* Initialize object with injectable native audio/video encoder/decoder factories */
+- (instancetype)initWithNativeAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
+ nativeAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
+ nativeVideoEncoderFactory:
+ (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
+ nativeVideoDecoderFactory:
+ (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
+ audioDeviceModule:
+ (nullable webrtc::AudioDeviceModule *)audioDeviceModule
+ audioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule;
+
+- (instancetype)
+ initWithNativeAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
+ nativeAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
+ nativeVideoEncoderFactory:
+ (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
+ nativeVideoDecoderFactory:
+ (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
+ audioDeviceModule:(nullable webrtc::AudioDeviceModule *)audioDeviceModule
+ audioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
+ networkControllerFactory:(std::unique_ptr<webrtc::NetworkControllerFactoryInterface>)
+ networkControllerFactory;
+
+- (instancetype)
+ initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints, and
+ * dependencies.
+ */
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
+
+@end
+
+NS_ASSUME_NONNULL_END
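
A hedged sketch of using this extension: inject the built-in audio codec factories alongside custom native video factories. `myVideoEncoderFactory` and `myVideoDecoderFactory` are hypothetical std::unique_ptr values; the audio device module is nullable per the declaration above, and the implementation substitutes a default AudioProcessing when the processing module is null:

    RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
        [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
            initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
                    nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
                    nativeVideoEncoderFactory:std::move(myVideoEncoderFactory)
                    nativeVideoDecoderFactory:std::move(myVideoDecoderFactory)
                            audioDeviceModule:nullptr
                        audioProcessingModule:nullptr];
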
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h
new file mode 100644
index 0000000000..9613646270
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory.h"
+
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "rtc_base/thread.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory)
+()
+
+ /**
+ * PeerConnectionFactoryInterface created and held by this
+ * RTCPeerConnectionFactory object. This is needed for calls into the
+ * underlying C++ APIs.
+ */
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> nativeFactory;
+
+@property(nonatomic, readonly) rtc::Thread* signalingThread;
+@property(nonatomic, readonly) rtc::Thread* workerThread;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h
new file mode 100644
index 0000000000..88aac990f2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioSource);
+@class RTC_OBJC_TYPE(RTCAudioTrack);
+@class RTC_OBJC_TYPE(RTCConfiguration);
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCMediaStream);
+@class RTC_OBJC_TYPE(RTCPeerConnection);
+@class RTC_OBJC_TYPE(RTCVideoSource);
+@class RTC_OBJC_TYPE(RTCVideoTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions);
+@protocol RTC_OBJC_TYPE
+(RTCPeerConnectionDelegate);
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory);
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory);
+@protocol RTC_OBJC_TYPE
+(RTCSSLCertificateVerifier);
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject
+
+/* Initialize object with default H264 video encoder/decoder factories */
+- (instancetype)init;
+
+/* Initialize object with injectable video encoder/decoder factories */
+- (instancetype)
+ initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory;
+
+/** Initialize an RTCAudioSource with constraints. */
+- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints:
+ (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints;
+
+/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source
+ * with no constraints.
+ */
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId;
+
+/** Initialize an RTCAudioTrack with a source and an id. */
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+ trackId:(NSString *)trackId;
+
+/** Initialize a generic RTCVideoSource. The RTCVideoSource should be
+ * passed to an RTCVideoCapturer implementation, e.g.
+ * RTCCameraVideoCapturer, in order to produce frames.
+ */
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource;
+
+/** Initialize a generic RTCVideoSource with the possibility of marking
+ * it as usable for screen sharing. The RTCVideoSource should be
+ * passed to an RTCVideoCapturer implementation, e.g.
+ * RTCCameraVideoCapturer, in order to produce frames.
+ */
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSourceForScreenCast:(BOOL)forScreenCast;
+
+/** Initialize an RTCVideoTrack with a source and an id. */
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+ trackId:(NSString *)trackId;
+
+/** Initialize an RTCMediaStream with an id. */
+- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints, and
+ * delegate.
+ */
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
+
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ certificateVerifier:
+ (id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
+
+/** Set the options to be used for subsequently created RTCPeerConnections */
+- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options;
+
+/** Start an AecDump recording. This API call will likely change in the future. */
+- (BOOL)startAecDumpWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes;
+
+/* Stop an active AecDump recording */
+- (void)stopAecDump;
+
+@end
+
+NS_ASSUME_NONNULL_END
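
Taken together, a minimal sketch of the public surface above, assuming `config`, `constraints`, and `delegate` are an application-provided RTCConfiguration, RTCMediaConstraints, and RTCPeerConnectionDelegate:

    RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
        [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
    RTC_OBJC_TYPE(RTCVideoSource) *source = [factory videoSource];
    RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack =
        [factory videoTrackWithSource:source trackId:@"video0"];
    RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack =
        [factory audioTrackWithTrackId:@"audio0"];
    RTC_OBJC_TYPE(RTCPeerConnection) *pc =
        [factory peerConnectionWithConfiguration:config
                                     constraints:constraints
                                        delegate:delegate];
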
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
new file mode 100644
index 0000000000..84c5f020b5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
@@ -0,0 +1,322 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#import "RTCPeerConnectionFactory+Native.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCPeerConnectionFactoryOptions+Private.h"
+
+#import "RTCAudioSource+Private.h"
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaConstraints+Private.h"
+#import "RTCMediaStream+Private.h"
+#import "RTCPeerConnection+Private.h"
+#import "RTCVideoSource+Private.h"
+#import "RTCVideoTrack+Private.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoDecoderFactory.h"
+#import "base/RTCVideoEncoderFactory.h"
+#import "helpers/NSString+StdString.h"
+#include "rtc_base/checks.h"
+#include "sdk/objc/native/api/network_monitor_factory.h"
+#include "sdk/objc/native/api/ssl_certificate_verifier.h"
+#include "system_wrappers/include/field_trial.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/transport/field_trial_based_config.h"
+#import "components/video_codec/RTCVideoDecoderFactoryH264.h"
+#import "components/video_codec/RTCVideoEncoderFactoryH264.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+#include "sdk/objc/native/api/video_decoder_factory.h"
+#include "sdk/objc/native/api/video_encoder_factory.h"
+#include "sdk/objc/native/src/objc_video_decoder_factory.h"
+#include "sdk/objc/native/src/objc_video_encoder_factory.h"
+
+#if defined(WEBRTC_IOS)
+#import "sdk/objc/native/api/audio_device_module.h"
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) {
+ std::unique_ptr<rtc::Thread> _networkThread;
+ std::unique_ptr<rtc::Thread> _workerThread;
+ std::unique_ptr<rtc::Thread> _signalingThread;
+ BOOL _hasStartedAecDump;
+}
+
+@synthesize nativeFactory = _nativeFactory;
+
+- (rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule {
+#if defined(WEBRTC_IOS)
+ return webrtc::CreateAudioDeviceModule();
+#else
+ return nullptr;
+#endif
+}
+
+- (instancetype)init {
+ return [self
+ initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
+ nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
+ nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE(
+ RTCVideoEncoderFactoryH264) alloc] init])
+ nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(
+ RTCVideoDecoderFactoryH264) alloc] init])
+ audioDeviceModule:[self audioDeviceModule].get()
+ audioProcessingModule:nullptr];
+}
+
+- (instancetype)
+ initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory {
+ std::unique_ptr<webrtc::VideoEncoderFactory> native_encoder_factory;
+ std::unique_ptr<webrtc::VideoDecoderFactory> native_decoder_factory;
+ if (encoderFactory) {
+ native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory);
+ }
+ if (decoderFactory) {
+ native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory);
+ }
+ return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
+ nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
+ nativeVideoEncoderFactory:std::move(native_encoder_factory)
+ nativeVideoDecoderFactory:std::move(native_decoder_factory)
+ audioDeviceModule:[self audioDeviceModule].get()
+ audioProcessingModule:nullptr];
+}
+- (instancetype)initNative {
+ if (self = [super init]) {
+ _networkThread = rtc::Thread::CreateWithSocketServer();
+ _networkThread->SetName("network_thread", _networkThread.get());
+ BOOL result = _networkThread->Start();
+ RTC_DCHECK(result) << "Failed to start network thread.";
+
+ _workerThread = rtc::Thread::Create();
+ _workerThread->SetName("worker_thread", _workerThread.get());
+ result = _workerThread->Start();
+ RTC_DCHECK(result) << "Failed to start worker thread.";
+
+ _signalingThread = rtc::Thread::Create();
+ _signalingThread->SetName("signaling_thread", _signalingThread.get());
+ result = _signalingThread->Start();
+ RTC_DCHECK(result) << "Failed to start signaling thread.";
+ }
+ return self;
+}
+
+- (instancetype)initWithNoMedia {
+ if (self = [self initNative]) {
+ webrtc::PeerConnectionFactoryDependencies dependencies;
+ dependencies.network_thread = _networkThread.get();
+ dependencies.worker_thread = _workerThread.get();
+ dependencies.signaling_thread = _signalingThread.get();
+ if (webrtc::field_trial::IsEnabled("WebRTC-Network-UseNWPathMonitor")) {
+ dependencies.network_monitor_factory = webrtc::CreateNetworkMonitorFactory();
+ }
+ _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
+ NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
+ }
+ return self;
+}
+
+- (instancetype)initWithNativeAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
+ nativeAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
+ nativeVideoEncoderFactory:
+ (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
+ nativeVideoDecoderFactory:
+ (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
+ audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule
+ audioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule {
+ return [self initWithNativeAudioEncoderFactory:audioEncoderFactory
+ nativeAudioDecoderFactory:audioDecoderFactory
+ nativeVideoEncoderFactory:std::move(videoEncoderFactory)
+ nativeVideoDecoderFactory:std::move(videoDecoderFactory)
+ audioDeviceModule:audioDeviceModule
+ audioProcessingModule:audioProcessingModule
+ networkControllerFactory:nullptr];
+}
+- (instancetype)initWithNativeAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
+ nativeAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
+ nativeVideoEncoderFactory:
+ (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
+ nativeVideoDecoderFactory:
+ (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
+ audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule
+ audioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
+ networkControllerFactory:
+ (std::unique_ptr<webrtc::NetworkControllerFactoryInterface>)
+ networkControllerFactory {
+ if (self = [self initNative]) {
+ webrtc::PeerConnectionFactoryDependencies dependencies;
+ dependencies.network_thread = _networkThread.get();
+ dependencies.worker_thread = _workerThread.get();
+ dependencies.signaling_thread = _signalingThread.get();
+ if (webrtc::field_trial::IsEnabled("WebRTC-Network-UseNWPathMonitor")) {
+ dependencies.network_monitor_factory = webrtc::CreateNetworkMonitorFactory();
+ }
+ dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
+ dependencies.trials = std::make_unique<webrtc::FieldTrialBasedConfig>();
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.adm = std::move(audioDeviceModule);
+ media_deps.task_queue_factory = dependencies.task_queue_factory.get();
+ media_deps.audio_encoder_factory = std::move(audioEncoderFactory);
+ media_deps.audio_decoder_factory = std::move(audioDecoderFactory);
+ media_deps.video_encoder_factory = std::move(videoEncoderFactory);
+ media_deps.video_decoder_factory = std::move(videoDecoderFactory);
+ if (audioProcessingModule) {
+ media_deps.audio_processing = std::move(audioProcessingModule);
+ } else {
+ media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
+ }
+ media_deps.trials = dependencies.trials.get();
+ dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ dependencies.call_factory = webrtc::CreateCallFactory();
+ dependencies.event_log_factory =
+ std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
+ dependencies.network_controller_factory = std::move(networkControllerFactory);
+ _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
+ NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
+ }
+ return self;
+}
+
+- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints:
+ (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints {
+ std::unique_ptr<webrtc::MediaConstraints> nativeConstraints;
+ if (constraints) {
+ nativeConstraints = constraints.nativeConstraints;
+ }
+ cricket::AudioOptions options;
+ CopyConstraintsIntoAudioOptions(nativeConstraints.get(), &options);
+
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ _nativeFactory->CreateAudioSource(options);
+ return [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self nativeAudioSource:source];
+}
+
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId {
+ RTC_OBJC_TYPE(RTCAudioSource) *audioSource = [self audioSourceWithConstraints:nil];
+ return [self audioTrackWithSource:audioSource trackId:trackId];
+}
+
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+ trackId:(NSString *)trackId {
+ return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:self source:source trackId:trackId];
+}
+
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource {
+ return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self
+ signalingThread:_signalingThread.get()
+ workerThread:_workerThread.get()];
+}
+
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSourceForScreenCast:(BOOL)forScreenCast {
+ return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self
+ signalingThread:_signalingThread.get()
+ workerThread:_workerThread.get()
+ isScreenCast:forScreenCast];
+}
+
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+ trackId:(NSString *)trackId {
+ return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:self source:source trackId:trackId];
+}
+
+- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId {
+ return [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:self streamId:streamId];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ delegate:
+ (nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self
+ configuration:configuration
+ constraints:constraints
+ certificateVerifier:nil
+ delegate:delegate];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ certificateVerifier:
+ (id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier
+ delegate:
+ (nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self
+ configuration:configuration
+ constraints:constraints
+ certificateVerifier:certificateVerifier
+ delegate:delegate];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
+ delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithDependencies:self
+ configuration:configuration
+ constraints:constraints
+ dependencies:std::move(dependencies)
+ delegate:delegate];
+}
+
+- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options {
+ RTC_DCHECK(options != nil);
+ _nativeFactory->SetOptions(options.nativeOptions);
+}
+
+- (BOOL)startAecDumpWithFilePath:(NSString *)filePath
+ maxSizeInBytes:(int64_t)maxSizeInBytes {
+ RTC_DCHECK(filePath.length);
+ RTC_DCHECK_GT(maxSizeInBytes, 0);
+
+ if (_hasStartedAecDump) {
+ RTCLogError(@"Aec dump already started.");
+ return NO;
+ }
+ FILE *f = fopen(filePath.UTF8String, "wb");
+ if (!f) {
+ RTCLogError(@"Error opening file: %@. Error: %s", filePath, strerror(errno));
+ return NO;
+ }
+ _hasStartedAecDump = _nativeFactory->StartAecDump(f, maxSizeInBytes);
+ return _hasStartedAecDump;
+}
+
+- (void)stopAecDump {
+ _nativeFactory->StopAecDump();
+ _hasStartedAecDump = NO;
+}
+
+- (rtc::Thread *)signalingThread {
+ return _signalingThread.get();
+}
+
+- (rtc::Thread *)workerThread {
+ return _workerThread.get();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h
new file mode 100644
index 0000000000..070a0e74a5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactoryBuilder.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCPeerConnectionFactoryBuilder (DefaultComponents)
+
++ (RTCPeerConnectionFactoryBuilder *)defaultBuilder;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
new file mode 100644
index 0000000000..522e520e12
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory+Native.h"
+#import "RTCPeerConnectionFactoryBuilder+DefaultComponents.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#import "components/video_codec/RTCVideoDecoderFactoryH264.h"
+#import "components/video_codec/RTCVideoEncoderFactoryH264.h"
+#include "sdk/objc/native/api/video_decoder_factory.h"
+#include "sdk/objc/native/api/video_encoder_factory.h"
+
+#if defined(WEBRTC_IOS)
+#import "sdk/objc/native/api/audio_device_module.h"
+#endif
+
+@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents)
+
++ (RTCPeerConnectionFactoryBuilder *)defaultBuilder {
+ RTCPeerConnectionFactoryBuilder *builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
+ auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory();
+ [builder setAudioEncoderFactory:audioEncoderFactory];
+
+ auto audioDecoderFactory = webrtc::CreateBuiltinAudioDecoderFactory();
+ [builder setAudioDecoderFactory:audioDecoderFactory];
+
+ auto videoEncoderFactory = webrtc::ObjCToNativeVideoEncoderFactory(
+ [[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]);
+ [builder setVideoEncoderFactory:std::move(videoEncoderFactory)];
+
+ auto videoDecoderFactory = webrtc::ObjCToNativeVideoDecoderFactory(
+ [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]);
+ [builder setVideoDecoderFactory:std::move(videoDecoderFactory)];
+
+#if defined(WEBRTC_IOS)
+ [builder setAudioDeviceModule:webrtc::CreateAudioDeviceModule()];
+#endif
+ return builder;
+}
+
+@end
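
Usage, as a sketch: the category above wires in the stock components, so a caller only overrides what it needs. `myDecoderFactory` is a hypothetical std::unique_ptr<webrtc::VideoDecoderFactory>:

    RTCPeerConnectionFactoryBuilder *builder =
        [RTCPeerConnectionFactoryBuilder defaultBuilder];
    // Optionally swap one component before building.
    [builder setVideoDecoderFactory:std::move(myDecoderFactory)];
    RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
        [builder createPeerConnectionFactory];
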
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h
new file mode 100644
index 0000000000..f0b0de156a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory.h"
+
+#include "api/scoped_refptr.h"
+
+namespace webrtc {
+
+class AudioDeviceModule;
+class AudioEncoderFactory;
+class AudioDecoderFactory;
+class VideoEncoderFactory;
+class VideoDecoderFactory;
+class AudioProcessing;
+
+} // namespace webrtc
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCPeerConnectionFactoryBuilder : NSObject
+
++ (RTCPeerConnectionFactoryBuilder *)builder;
+
+- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory;
+
+- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory;
+
+- (void)setVideoDecoderFactory:(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory;
+
+- (void)setAudioEncoderFactory:(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory;
+
+- (void)setAudioDecoderFactory:(rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory;
+
+- (void)setAudioDeviceModule:(rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule;
+
+- (void)setAudioProcessingModule:(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
new file mode 100644
index 0000000000..627909a0e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactoryBuilder.h"
+#import "RTCPeerConnectionFactory+Native.h"
+
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+@implementation RTCPeerConnectionFactoryBuilder {
+ std::unique_ptr<webrtc::VideoEncoderFactory> _videoEncoderFactory;
+ std::unique_ptr<webrtc::VideoDecoderFactory> _videoDecoderFactory;
+ rtc::scoped_refptr<webrtc::AudioEncoderFactory> _audioEncoderFactory;
+ rtc::scoped_refptr<webrtc::AudioDecoderFactory> _audioDecoderFactory;
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
+ rtc::scoped_refptr<webrtc::AudioProcessing> _audioProcessingModule;
+}
+
++ (RTCPeerConnectionFactoryBuilder *)builder {
+ return [[RTCPeerConnectionFactoryBuilder alloc] init];
+}
+
+- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc];
+ return [factory initWithNativeAudioEncoderFactory:_audioEncoderFactory
+ nativeAudioDecoderFactory:_audioDecoderFactory
+ nativeVideoEncoderFactory:std::move(_videoEncoderFactory)
+ nativeVideoDecoderFactory:std::move(_videoDecoderFactory)
+ audioDeviceModule:_audioDeviceModule.get()
+ audioProcessingModule:_audioProcessingModule];
+}
+
+- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory {
+ _videoEncoderFactory = std::move(videoEncoderFactory);
+}
+
+- (void)setVideoDecoderFactory:(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory {
+ _videoDecoderFactory = std::move(videoDecoderFactory);
+}
+
+- (void)setAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory {
+ _audioEncoderFactory = audioEncoderFactory;
+}
+
+- (void)setAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory {
+ _audioDecoderFactory = audioDecoderFactory;
+}
+
+- (void)setAudioDeviceModule:(rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule {
+ _audioDeviceModule = audioDeviceModule;
+}
+
+- (void)setAudioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule {
+ _audioProcessingModule = audioProcessingModule;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h
new file mode 100644
index 0000000000..8832b23695
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactoryOptions.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions)
+()
+
+ /** Returns the equivalent native PeerConnectionFactoryInterface::Options
+ * structure. */
+ @property(nonatomic, readonly) webrtc::PeerConnectionFactoryInterface::Options nativeOptions;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h
new file mode 100644
index 0000000000..bfc54a5d7b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) : NSObject
+
+@property(nonatomic, assign) BOOL disableEncryption;
+
+@property(nonatomic, assign) BOOL disableNetworkMonitor;
+
+@property(nonatomic, assign) BOOL ignoreLoopbackNetworkAdapter;
+
+@property(nonatomic, assign) BOOL ignoreVPNNetworkAdapter;
+
+@property(nonatomic, assign) BOOL ignoreCellularNetworkAdapter;
+
+@property(nonatomic, assign) BOOL ignoreWiFiNetworkAdapter;
+
+@property(nonatomic, assign) BOOL ignoreEthernetNetworkAdapter;
+
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
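
A short sketch of configuring these flags, assuming `factory` is an existing RTCPeerConnectionFactory (setOptions: is declared on the factory):

    RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *options =
        [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) alloc] init];
    options.ignoreCellularNetworkAdapter = YES;
    options.ignoreVPNNetworkAdapter = YES;
    [factory setOptions:options];
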
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm
new file mode 100644
index 0000000000..5467bd5fc9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactoryOptions+Private.h"
+
+#include "rtc_base/network_constants.h"
+
+namespace {
+
+void setNetworkBit(webrtc::PeerConnectionFactoryInterface::Options* options,
+ rtc::AdapterType type,
+ bool ignore) {
+ if (ignore) {
+ options->network_ignore_mask |= type;
+ } else {
+ options->network_ignore_mask &= ~type;
+ }
+}
+} // namespace
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions)
+
+@synthesize disableEncryption = _disableEncryption;
+@synthesize disableNetworkMonitor = _disableNetworkMonitor;
+@synthesize ignoreLoopbackNetworkAdapter = _ignoreLoopbackNetworkAdapter;
+@synthesize ignoreVPNNetworkAdapter = _ignoreVPNNetworkAdapter;
+@synthesize ignoreCellularNetworkAdapter = _ignoreCellularNetworkAdapter;
+@synthesize ignoreWiFiNetworkAdapter = _ignoreWiFiNetworkAdapter;
+@synthesize ignoreEthernetNetworkAdapter = _ignoreEthernetNetworkAdapter;
+
+- (instancetype)init {
+ return [super init];
+}
+
+- (webrtc::PeerConnectionFactoryInterface::Options)nativeOptions {
+ webrtc::PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = self.disableEncryption;
+ options.disable_network_monitor = self.disableNetworkMonitor;
+
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_LOOPBACK, self.ignoreLoopbackNetworkAdapter);
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_VPN, self.ignoreVPNNetworkAdapter);
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_CELLULAR, self.ignoreCellularNetworkAdapter);
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_WIFI, self.ignoreWiFiNetworkAdapter);
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_ETHERNET, self.ignoreEthernetNetworkAdapter);
+
+ return options;
+}
+
+@end
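
Because each rtc::AdapterType value is a distinct bit, setNetworkBit composes the ignore flags into a single mask. With the two flags from the previous sketch set:

    // nativeOptions.network_ignore_mask ==
    //     (rtc::ADAPTER_TYPE_CELLULAR | rtc::ADAPTER_TYPE_VPN)
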
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h
new file mode 100644
index 0000000000..c4d196cf79
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtcpParameters.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtcpParameters)
+()
+
+ /** Returns the equivalent native RtcpParameters structure. */
+ @property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters;
+
+/** Initialize the object with a native RtcpParameters structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.h
new file mode 100644
index 0000000000..2f7aad3aef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtcpParameters) : NSObject
+
+/** The Canonical Name used by RTCP. */
+@property(nonatomic, readonly, copy) NSString *cname;
+
+/** Whether reduced-size RTCP is configured, as opposed to compound RTCP. */
+@property(nonatomic, assign) BOOL isReducedSize;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.mm
new file mode 100644
index 0000000000..e92ee4b3e7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.mm
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtcpParameters+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtcpParameters)
+
+@synthesize cname = _cname;
+@synthesize isReducedSize = _isReducedSize;
+
+- (instancetype)init {
+ webrtc::RtcpParameters nativeParameters;
+ return [self initWithNativeParameters:nativeParameters];
+}
+
+- (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters {
+ if (self = [super init]) {
+ _cname = [NSString stringForStdString:nativeParameters.cname];
+ _isReducedSize = nativeParameters.reduced_size;
+ }
+ return self;
+}
+
+- (webrtc::RtcpParameters)nativeParameters {
+ webrtc::RtcpParameters parameters;
+ parameters.cname = [NSString stdStringForString:_cname];
+ parameters.reduced_size = _isReducedSize;
+ return parameters;
+}
+
+@end
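
Every +Private category in this series follows the same conversion pattern, so a single round-trip sketch covers them all. A minimal Objective-C++ example, assuming a .mm translation unit that can import the private header (RTC_OBJC_TYPE typically expands to the unprefixed class name):

    #import "RTCRtcpParameters+Private.h"

    webrtc::RtcpParameters native;
    native.cname = "example-cname";  // hypothetical value
    native.reduced_size = true;
    RTC_OBJC_TYPE(RTCRtcpParameters) *rtcp =
        [[RTC_OBJC_TYPE(RTCRtcpParameters) alloc] initWithNativeParameters:native];
    // nativeParameters re-materializes an equivalent C++ struct from the ObjC state.
    webrtc::RtcpParameters roundTripped = rtcp.nativeParameters;
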
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h
new file mode 100644
index 0000000000..ff23cfd642
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpCodecParameters.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtpCodecParameters)
+()
+
+ /** Returns the equivalent native RtpCodecParameters structure. */
+ @property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
+
+/** Initialize the object with a native RtpCodecParameters structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtpCodecParameters &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
new file mode 100644
index 0000000000..6135223720
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXTERN const NSString *const kRTCRtxCodecName;
+RTC_EXTERN const NSString *const kRTCRedCodecName;
+RTC_EXTERN const NSString *const kRTCUlpfecCodecName;
+RTC_EXTERN const NSString *const kRTCFlexfecCodecName;
+RTC_EXTERN const NSString *const kRTCOpusCodecName;
+RTC_EXTERN const NSString *const kRTCIsacCodecName;
+RTC_EXTERN const NSString *const kRTCL16CodecName;
+RTC_EXTERN const NSString *const kRTCG722CodecName;
+RTC_EXTERN const NSString *const kRTCIlbcCodecName;
+RTC_EXTERN const NSString *const kRTCPcmuCodecName;
+RTC_EXTERN const NSString *const kRTCPcmaCodecName;
+RTC_EXTERN const NSString *const kRTCDtmfCodecName;
+RTC_EXTERN const NSString *const kRTCComfortNoiseCodecName;
+RTC_EXTERN const NSString *const kRTCVp8CodecName;
+RTC_EXTERN const NSString *const kRTCVp9CodecName;
+RTC_EXTERN const NSString *const kRTCH264CodecName;
+
+/** Defined in https://www.w3.org/TR/webrtc/#idl-def-rtcrtpcodecparameters */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpCodecParameters) : NSObject
+
+/** The RTP payload type. */
+@property(nonatomic, assign) int payloadType;
+
+/**
+ * The codec MIME subtype. Valid types are listed in:
+ * http://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml#rtp-parameters-2
+ *
+ * Several supported types are represented by the constants above.
+ */
+@property(nonatomic, readonly, nonnull) NSString *name;
+
+/**
+ * The media type of this codec. Equivalent to MIME top-level type.
+ *
+ * Valid values are kRTCMediaStreamTrackKindAudio and
+ * kRTCMediaStreamTrackKindVideo.
+ */
+@property(nonatomic, readonly, nonnull) NSString *kind;
+
+/** The codec clock rate expressed in Hertz. */
+@property(nonatomic, readonly, nullable) NSNumber *clockRate;
+
+/**
+ * The number of channels (mono=1, stereo=2).
+ * Set to nil for video codecs.
+ */
+@property(nonatomic, readonly, nullable) NSNumber *numChannels;
+
+/** The "format specific parameters" field from the "a=fmtp" line in the SDP */
+@property(nonatomic, readonly, nonnull) NSDictionary *parameters;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm
new file mode 100644
index 0000000000..753667b635
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpCodecParameters+Private.h"
+
+#import "RTCMediaStreamTrack.h"
+#import "helpers/NSString+StdString.h"
+
+#include "media/base/media_constants.h"
+#include "rtc_base/checks.h"
+
+const NSString * const kRTCRtxCodecName = @(cricket::kRtxCodecName);
+const NSString * const kRTCRedCodecName = @(cricket::kRedCodecName);
+const NSString * const kRTCUlpfecCodecName = @(cricket::kUlpfecCodecName);
+const NSString * const kRTCFlexfecCodecName = @(cricket::kFlexfecCodecName);
+const NSString * const kRTCOpusCodecName = @(cricket::kOpusCodecName);
+const NSString * const kRTCIsacCodecName = @(cricket::kIsacCodecName);
+const NSString * const kRTCL16CodecName = @(cricket::kL16CodecName);
+const NSString * const kRTCG722CodecName = @(cricket::kG722CodecName);
+const NSString * const kRTCIlbcCodecName = @(cricket::kIlbcCodecName);
+const NSString * const kRTCPcmuCodecName = @(cricket::kPcmuCodecName);
+const NSString * const kRTCPcmaCodecName = @(cricket::kPcmaCodecName);
+const NSString * const kRTCDtmfCodecName = @(cricket::kDtmfCodecName);
+const NSString * const kRTCComfortNoiseCodecName =
+ @(cricket::kComfortNoiseCodecName);
+const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName);
+const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName);
+const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
+
+@implementation RTC_OBJC_TYPE (RTCRtpCodecParameters)
+
+@synthesize payloadType = _payloadType;
+@synthesize name = _name;
+@synthesize kind = _kind;
+@synthesize clockRate = _clockRate;
+@synthesize numChannels = _numChannels;
+@synthesize parameters = _parameters;
+
+- (instancetype)init {
+ webrtc::RtpCodecParameters nativeParameters;
+ return [self initWithNativeParameters:nativeParameters];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpCodecParameters &)nativeParameters {
+ if (self = [super init]) {
+ _payloadType = nativeParameters.payload_type;
+ _name = [NSString stringForStdString:nativeParameters.name];
+ switch (nativeParameters.kind) {
+ case cricket::MEDIA_TYPE_AUDIO:
+ _kind = kRTCMediaStreamTrackKindAudio;
+ break;
+ case cricket::MEDIA_TYPE_VIDEO:
+ _kind = kRTCMediaStreamTrackKindVideo;
+ break;
+ case cricket::MEDIA_TYPE_DATA:
+ RTC_DCHECK_NOTREACHED();
+ break;
+ case cricket::MEDIA_TYPE_UNSUPPORTED:
+ RTC_DCHECK_NOTREACHED();
+ break;
+ }
+ if (nativeParameters.clock_rate) {
+ _clockRate = [NSNumber numberWithInt:*nativeParameters.clock_rate];
+ }
+ if (nativeParameters.num_channels) {
+ _numChannels = [NSNumber numberWithInt:*nativeParameters.num_channels];
+ }
+ NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
+ for (const auto &parameter : nativeParameters.parameters) {
+ [parameters setObject:[NSString stringForStdString:parameter.second]
+ forKey:[NSString stringForStdString:parameter.first]];
+ }
+ _parameters = parameters;
+ }
+ return self;
+}
+
+- (webrtc::RtpCodecParameters)nativeParameters {
+ webrtc::RtpCodecParameters parameters;
+ parameters.payload_type = _payloadType;
+ parameters.name = [NSString stdStringForString:_name];
+ // NSString pointer comparison is safe here since "kind" is readonly and only
+ // populated above.
+ if (_kind == kRTCMediaStreamTrackKindAudio) {
+ parameters.kind = cricket::MEDIA_TYPE_AUDIO;
+ } else if (_kind == kRTCMediaStreamTrackKindVideo) {
+ parameters.kind = cricket::MEDIA_TYPE_VIDEO;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ }
+ if (_clockRate != nil) {
+ parameters.clock_rate = absl::optional<int>(_clockRate.intValue);
+ }
+ if (_numChannels != nil) {
+ parameters.num_channels = absl::optional<int>(_numChannels.intValue);
+ }
+ for (NSString *paramKey in _parameters.allKeys) {
+ std::string key = [NSString stdStringForString:paramKey];
+ std::string value = [NSString stdStringForString:_parameters[paramKey]];
+ parameters.parameters[key] = value;
+ }
+ return parameters;
+}
+
+@end
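
A short usage sketch for the codec wrapper above; `params` is assumed to be an RTCRtpParameters instance obtained elsewhere, and the constants are the ones declared in the header in this diff:

    RTC_OBJC_TYPE(RTCRtpCodecParameters) *codec = params.codecs.firstObject;
    if ([codec.kind isEqualToString:kRTCMediaStreamTrackKindAudio] &&
        [codec.name isEqualToString:(NSString *)kRTCOpusCodecName]) {
      // clockRate and numChannels are NSNumber-boxed optionals; nil means unset.
      NSLog(@"Opus: pt=%d clock=%@ channels=%@",
            codec.payloadType, codec.clockRate, codec.numChannels);
    }
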
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h
new file mode 100644
index 0000000000..d12ca624e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpEncodingParameters.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters)
+()
+
+ /** Returns the equivalent native RtpEncodingParameters structure. */
+ @property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
+
+/** Initialize the object with a native RtpEncodingParameters structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtpEncodingParameters &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h
new file mode 100644
index 0000000000..07f6b7a39c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Corresponds to webrtc::Priority. */
+typedef NS_ENUM(NSInteger, RTCPriority) {
+ RTCPriorityVeryLow,
+ RTCPriorityLow,
+ RTCPriorityMedium,
+ RTCPriorityHigh
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters) : NSObject
+
+/** The identifier for the encoding layer. This is used in simulcast. */
+@property(nonatomic, copy, nullable) NSString *rid;
+
+/** Controls whether the encoding is currently transmitted. */
+@property(nonatomic, assign) BOOL isActive;
+
+/** The maximum bitrate to use for the encoding, or nil if there is no
+ * limit.
+ */
+@property(nonatomic, copy, nullable) NSNumber *maxBitrateBps;
+
+/** The minimum bitrate to use for the encoding, or nil if there is no
+ * limit.
+ */
+@property(nonatomic, copy, nullable) NSNumber *minBitrateBps;
+
+/** The maximum framerate to use for the encoding, or nil if there is no
+ * limit.
+ */
+@property(nonatomic, copy, nullable) NSNumber *maxFramerate;
+
+/** The requested number of temporal layers to use for the encoding, or nil
+ * if the default should be used.
+ */
+@property(nonatomic, copy, nullable) NSNumber *numTemporalLayers;
+
+/** Scale the width and height down by this factor for video. If nil, the
+ * implementation's default scaling factor will be used.
+ */
+@property(nonatomic, copy, nullable) NSNumber *scaleResolutionDownBy;
+
+/** The SSRC being used by this encoding. */
+@property(nonatomic, readonly, nullable) NSNumber *ssrc;
+
+/** The relative bitrate priority. */
+@property(nonatomic, assign) double bitratePriority;
+
+/** The relative DiffServ Code Point priority. */
+@property(nonatomic, assign) RTCPriority networkPriority;
+
+/** Allow dynamic frame length changes for audio:
+ https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime */
+@property(nonatomic, assign) BOOL adaptiveAudioPacketTime;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm
new file mode 100644
index 0000000000..d6087dafb0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpEncodingParameters+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters)
+
+@synthesize rid = _rid;
+@synthesize isActive = _isActive;
+@synthesize maxBitrateBps = _maxBitrateBps;
+@synthesize minBitrateBps = _minBitrateBps;
+@synthesize maxFramerate = _maxFramerate;
+@synthesize numTemporalLayers = _numTemporalLayers;
+@synthesize scaleResolutionDownBy = _scaleResolutionDownBy;
+@synthesize ssrc = _ssrc;
+@synthesize bitratePriority = _bitratePriority;
+@synthesize networkPriority = _networkPriority;
+@synthesize adaptiveAudioPacketTime = _adaptiveAudioPacketTime;
+
+- (instancetype)init {
+ webrtc::RtpEncodingParameters nativeParameters;
+ return [self initWithNativeParameters:nativeParameters];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpEncodingParameters &)nativeParameters {
+ if (self = [super init]) {
+ if (!nativeParameters.rid.empty()) {
+ _rid = [NSString stringForStdString:nativeParameters.rid];
+ }
+ _isActive = nativeParameters.active;
+ if (nativeParameters.max_bitrate_bps) {
+ _maxBitrateBps =
+ [NSNumber numberWithInt:*nativeParameters.max_bitrate_bps];
+ }
+ if (nativeParameters.min_bitrate_bps) {
+ _minBitrateBps =
+ [NSNumber numberWithInt:*nativeParameters.min_bitrate_bps];
+ }
+ if (nativeParameters.max_framerate) {
+ _maxFramerate = [NSNumber numberWithInt:*nativeParameters.max_framerate];
+ }
+ if (nativeParameters.num_temporal_layers) {
+ _numTemporalLayers = [NSNumber numberWithInt:*nativeParameters.num_temporal_layers];
+ }
+ if (nativeParameters.scale_resolution_down_by) {
+ _scaleResolutionDownBy =
+ [NSNumber numberWithDouble:*nativeParameters.scale_resolution_down_by];
+ }
+ if (nativeParameters.ssrc) {
+ _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc];
+ }
+ _bitratePriority = nativeParameters.bitrate_priority;
+ _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters)
+ priorityFromNativePriority:nativeParameters.network_priority];
+ _adaptiveAudioPacketTime = nativeParameters.adaptive_ptime;
+ }
+ return self;
+}
+
+- (webrtc::RtpEncodingParameters)nativeParameters {
+ webrtc::RtpEncodingParameters parameters;
+ if (_rid != nil) {
+ parameters.rid = [NSString stdStringForString:_rid];
+ }
+ parameters.active = _isActive;
+ if (_maxBitrateBps != nil) {
+ parameters.max_bitrate_bps = absl::optional<int>(_maxBitrateBps.intValue);
+ }
+ if (_minBitrateBps != nil) {
+ parameters.min_bitrate_bps = absl::optional<int>(_minBitrateBps.intValue);
+ }
+ if (_maxFramerate != nil) {
+ parameters.max_framerate = absl::optional<int>(_maxFramerate.intValue);
+ }
+ if (_numTemporalLayers != nil) {
+ parameters.num_temporal_layers = absl::optional<int>(_numTemporalLayers.intValue);
+ }
+ if (_scaleResolutionDownBy != nil) {
+ parameters.scale_resolution_down_by =
+ absl::optional<double>(_scaleResolutionDownBy.doubleValue);
+ }
+ if (_ssrc != nil) {
+ parameters.ssrc = absl::optional<uint32_t>(_ssrc.unsignedLongValue);
+ }
+ parameters.bitrate_priority = _bitratePriority;
+ parameters.network_priority =
+ [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority];
+ parameters.adaptive_ptime = _adaptiveAudioPacketTime;
+ return parameters;
+}
+
++ (webrtc::Priority)nativePriorityFromPriority:(RTCPriority)networkPriority {
+ switch (networkPriority) {
+ case RTCPriorityVeryLow:
+ return webrtc::Priority::kVeryLow;
+ case RTCPriorityLow:
+ return webrtc::Priority::kLow;
+ case RTCPriorityMedium:
+ return webrtc::Priority::kMedium;
+ case RTCPriorityHigh:
+ return webrtc::Priority::kHigh;
+ }
+}
+
++ (RTCPriority)priorityFromNativePriority:(webrtc::Priority)nativePriority {
+ switch (nativePriority) {
+ case webrtc::Priority::kVeryLow:
+ return RTCPriorityVeryLow;
+ case webrtc::Priority::kLow:
+ return RTCPriorityLow;
+ case webrtc::Priority::kMedium:
+ return RTCPriorityMedium;
+ case webrtc::Priority::kHigh:
+ return RTCPriorityHigh;
+ }
+}
+
+@end
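
A hedged sketch of configuring a single encoding layer, e.g. for the sendEncodings of an RTCRtpTransceiverInit later in this diff (all values are illustrative):

    RTC_OBJC_TYPE(RTCRtpEncodingParameters) *encoding =
        [[RTC_OBJC_TYPE(RTCRtpEncodingParameters) alloc] init];
    encoding.rid = @"half";              // simulcast layer identifier
    encoding.isActive = YES;
    encoding.maxBitrateBps = @(500000);  // nil means no limit
    encoding.scaleResolutionDownBy = @(2.0);
    encoding.networkPriority = RTCPriorityLow;
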
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h
new file mode 100644
index 0000000000..0e0fbba5ac
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpHeaderExtension.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension)
+()
+
+ /** Returns the equivalent native RtpExtension structure. */
+ @property(nonatomic, readonly) webrtc::RtpExtension nativeParameters;
+
+/** Initialize the object with a native RtpExtension structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h
new file mode 100644
index 0000000000..4000bf5372
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension) : NSObject
+
+/** The URI of the RTP header extension, as defined in RFC5285. */
+@property(nonatomic, readonly, copy) NSString *uri;
+
+/** The value put in the RTP packet to identify the header extension. */
+@property(nonatomic, readonly) int id;
+
+/** Whether the header extension is encrypted or not. */
+@property(nonatomic, readonly, getter=isEncrypted) BOOL encrypted;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm
new file mode 100644
index 0000000000..68093e92ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpHeaderExtension+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtpHeaderExtension)
+
+@synthesize uri = _uri;
+@synthesize id = _id;
+@synthesize encrypted = _encrypted;
+
+- (instancetype)init {
+ webrtc::RtpExtension nativeExtension;
+ return [self initWithNativeParameters:nativeExtension];
+}
+
+- (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters {
+ if (self = [super init]) {
+ _uri = [NSString stringForStdString:nativeParameters.uri];
+ _id = nativeParameters.id;
+ _encrypted = nativeParameters.encrypt;
+ }
+ return self;
+}
+
+- (webrtc::RtpExtension)nativeParameters {
+ webrtc::RtpExtension extension;
+ extension.uri = [NSString stdStringForString:_uri];
+ extension.id = _id;
+ extension.encrypt = _encrypted;
+ return extension;
+}
+
+@end
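
Reading the negotiated extensions is a matter of iterating RTCRtpParameters.headerExtensions (shown with an assumed `params` instance):

    for (RTC_OBJC_TYPE(RTCRtpHeaderExtension) *ext in params.headerExtensions) {
      NSLog(@"uri=%@ id=%d encrypted=%d", ext.uri, ext.id, ext.encrypted);
    }
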
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h
new file mode 100644
index 0000000000..139617f727
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpParameters.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtpParameters)
+()
+
+ /** Returns the equivalent native RtpParameters structure. */
+ @property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
+
+/** Initialize the object with a native RtpParameters structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtpParameters &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.h
new file mode 100644
index 0000000000..3d71c55ab9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCRtcpParameters.h"
+#import "RTCRtpCodecParameters.h"
+#import "RTCRtpEncodingParameters.h"
+#import "RTCRtpHeaderExtension.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Corresponds to webrtc::DegradationPreference. */
+typedef NS_ENUM(NSInteger, RTCDegradationPreference) {
+ RTCDegradationPreferenceDisabled,
+ RTCDegradationPreferenceMaintainFramerate,
+ RTCDegradationPreferenceMaintainResolution,
+ RTCDegradationPreferenceBalanced
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpParameters) : NSObject
+
+/** A unique identifier for the last set of parameters applied. */
+@property(nonatomic, copy) NSString *transactionId;
+
+/** Parameters used for RTCP. */
+@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCRtcpParameters) * rtcp;
+
+/** An array containing parameters for RTP header extensions. */
+@property(nonatomic, readonly, copy)
+ NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtension) *> *headerExtensions;
+
+/** The currently active encodings in the order of preference. */
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCRtpEncodingParameters) *> *encodings;
+
+/** The negotiated set of send codecs in order of preference. */
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCRtpCodecParameters) *> *codecs;
+
+/**
+ * Degradation preference in case of CPU adaptation or constrained bandwidth.
+ * If nil, the implementation's default degradation preference will be used.
+ */
+@property(nonatomic, copy, nullable) NSNumber *degradationPreference;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.mm
new file mode 100644
index 0000000000..2baf0ecd80
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.mm
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpParameters+Private.h"
+
+#import "RTCRtcpParameters+Private.h"
+#import "RTCRtpCodecParameters+Private.h"
+#import "RTCRtpEncodingParameters+Private.h"
+#import "RTCRtpHeaderExtension+Private.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtpParameters)
+
+@synthesize transactionId = _transactionId;
+@synthesize rtcp = _rtcp;
+@synthesize headerExtensions = _headerExtensions;
+@synthesize encodings = _encodings;
+@synthesize codecs = _codecs;
+@synthesize degradationPreference = _degradationPreference;
+
+- (instancetype)init {
+ webrtc::RtpParameters nativeParameters;
+ return [self initWithNativeParameters:nativeParameters];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpParameters &)nativeParameters {
+ if (self = [super init]) {
+ _transactionId = [NSString stringForStdString:nativeParameters.transaction_id];
+ _rtcp =
+ [[RTC_OBJC_TYPE(RTCRtcpParameters) alloc] initWithNativeParameters:nativeParameters.rtcp];
+
+ NSMutableArray *headerExtensions = [[NSMutableArray alloc] init];
+ for (const auto &headerExtension : nativeParameters.header_extensions) {
+ [headerExtensions addObject:[[RTC_OBJC_TYPE(RTCRtpHeaderExtension) alloc]
+ initWithNativeParameters:headerExtension]];
+ }
+ _headerExtensions = headerExtensions;
+
+ NSMutableArray *encodings = [[NSMutableArray alloc] init];
+ for (const auto &encoding : nativeParameters.encodings) {
+ [encodings addObject:[[RTC_OBJC_TYPE(RTCRtpEncodingParameters) alloc]
+ initWithNativeParameters:encoding]];
+ }
+ _encodings = encodings;
+
+ NSMutableArray *codecs = [[NSMutableArray alloc] init];
+ for (const auto &codec : nativeParameters.codecs) {
+ [codecs
+ addObject:[[RTC_OBJC_TYPE(RTCRtpCodecParameters) alloc] initWithNativeParameters:codec]];
+ }
+ _codecs = codecs;
+
+ _degradationPreference = [RTC_OBJC_TYPE(RTCRtpParameters)
+ degradationPreferenceFromNativeDegradationPreference:nativeParameters
+ .degradation_preference];
+ }
+ return self;
+}
+
+- (webrtc::RtpParameters)nativeParameters {
+ webrtc::RtpParameters parameters;
+ parameters.transaction_id = [NSString stdStringForString:_transactionId];
+ parameters.rtcp = [_rtcp nativeParameters];
+ for (RTC_OBJC_TYPE(RTCRtpHeaderExtension) * headerExtension in _headerExtensions) {
+ parameters.header_extensions.push_back(headerExtension.nativeParameters);
+ }
+ for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in _encodings) {
+ parameters.encodings.push_back(encoding.nativeParameters);
+ }
+ for (RTC_OBJC_TYPE(RTCRtpCodecParameters) * codec in _codecs) {
+ parameters.codecs.push_back(codec.nativeParameters);
+ }
+ if (_degradationPreference) {
+ parameters.degradation_preference = [RTC_OBJC_TYPE(RTCRtpParameters)
+ nativeDegradationPreferenceFromDegradationPreference:(RTCDegradationPreference)
+ _degradationPreference.intValue];
+ }
+ return parameters;
+}
+
++ (webrtc::DegradationPreference)nativeDegradationPreferenceFromDegradationPreference:
+ (RTCDegradationPreference)degradationPreference {
+ switch (degradationPreference) {
+ case RTCDegradationPreferenceDisabled:
+ return webrtc::DegradationPreference::DISABLED;
+ case RTCDegradationPreferenceMaintainFramerate:
+ return webrtc::DegradationPreference::MAINTAIN_FRAMERATE;
+ case RTCDegradationPreferenceMaintainResolution:
+ return webrtc::DegradationPreference::MAINTAIN_RESOLUTION;
+ case RTCDegradationPreferenceBalanced:
+ return webrtc::DegradationPreference::BALANCED;
+ }
+}
+
++ (NSNumber *)degradationPreferenceFromNativeDegradationPreference:
+ (absl::optional<webrtc::DegradationPreference>)nativeDegradationPreference {
+ if (!nativeDegradationPreference.has_value()) {
+ return nil;
+ }
+
+ switch (*nativeDegradationPreference) {
+ case webrtc::DegradationPreference::DISABLED:
+ return @(RTCDegradationPreferenceDisabled);
+ case webrtc::DegradationPreference::MAINTAIN_FRAMERATE:
+ return @(RTCDegradationPreferenceMaintainFramerate);
+ case webrtc::DegradationPreference::MAINTAIN_RESOLUTION:
+ return @(RTCDegradationPreferenceMaintainResolution);
+ case webrtc::DegradationPreference::BALANCED:
+ return @(RTCDegradationPreferenceBalanced);
+ }
+}
+
+@end
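
Because RTCRtpSender.parameters (later in this diff) round-trips through the native SetParameters(), the intended usage is read-modify-write. A sketch, assuming `sender` is an RTCRtpSender from the same SDK:

    RTC_OBJC_TYPE(RTCRtpParameters) *params = sender.parameters;
    for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) *encoding in params.encodings) {
      encoding.maxBitrateBps = @(1200000);
    }
    params.degradationPreference = @(RTCDegradationPreferenceMaintainFramerate);
    sender.parameters = params;  // applies SetParameters() on the native sender
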
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h
new file mode 100644
index 0000000000..c15ce70079
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpReceiver.h"
+
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/scoped_refptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * This class extension exposes methods that work directly with injectable C++ components.
+ */
+@interface RTC_OBJC_TYPE (RTCRtpReceiver)
+()
+
+ /** Sets a user defined frame decryptor that will decrypt the entire frame.
+ * This will decrypt the entire frame using the user provided decryption
+ * mechanism regardless of whether SRTP is enabled or not.
+ */
+ - (void)setFrameDecryptor : (rtc::scoped_refptr<webrtc::FrameDecryptorInterface>)frameDecryptor;
+
+@end
+
+NS_ASSUME_NONNULL_END
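
A minimal Objective-C++ sketch of injecting a decryptor through this category; `receiver` is an assumed RTCRtpReceiver, and MyFrameDecryptor is a hypothetical webrtc::FrameDecryptorInterface implementation:

    #import "RTCRtpReceiver+Native.h"

    rtc::scoped_refptr<webrtc::FrameDecryptorInterface> decryptor =
        rtc::make_ref_counted<MyFrameDecryptor>();  // assumes api/make_ref_counted.h
    [receiver setFrameDecryptor:decryptor];
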
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h
new file mode 100644
index 0000000000..6aed0b4bc5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpReceiver.h"
+
+#include "api/rtp_receiver_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+namespace webrtc {
+
+class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface {
+ public:
+ RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver);
+
+ void OnFirstPacketReceived(cricket::MediaType media_type) override;
+
+ private:
+ __weak RTC_OBJC_TYPE(RTCRtpReceiver) * receiver_;
+};
+
+} // namespace webrtc
+
+@interface RTC_OBJC_TYPE (RTCRtpReceiver)
+()
+
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;
+
+/** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpReceiver:(rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver
+ NS_DESIGNATED_INITIALIZER;
+
++ (RTCRtpMediaType)mediaTypeForNativeMediaType:(cricket::MediaType)nativeMediaType;
+
++ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType;
+
++ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.h
new file mode 100644
index 0000000000..1e407fd71b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMediaStreamTrack.h"
+#import "RTCRtpParameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Represents the media type of the RtpReceiver. */
+typedef NS_ENUM(NSInteger, RTCRtpMediaType) {
+ RTCRtpMediaTypeAudio,
+ RTCRtpMediaTypeVideo,
+ RTCRtpMediaTypeData,
+ RTCRtpMediaTypeUnsupported,
+};
+
+@class RTC_OBJC_TYPE(RTCRtpReceiver);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCRtpReceiverDelegate)<NSObject>
+
+ /** Called when the first RTP packet is received.
+ *
+ * Note: Currently if there are multiple RtpReceivers of the same media type,
+ * they will all call OnFirstPacketReceived at once.
+ *
+ * For example, if we create three audio receivers, A/B/C, they all listen to
+ * the same signal from the underlying network layer. When the first audio
+ * packet is received, that signal fires: all three receivers are notified and
+ * each receiver's delegate callback is invoked.
+ *
+ * The process is the same for video receivers.
+ */
+ - (void)rtpReceiver
+ : (RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver didReceiveFirstPacketForMediaType
+ : (RTCRtpMediaType)mediaType;
+
+@end
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCRtpReceiver)<NSObject>
+
+ /** A unique identifier for this receiver. */
+ @property(nonatomic, readonly) NSString *receiverId;
+
+/** The currently active RTCRtpParameters, as defined in
+ * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
+ *
+ * The WebRTC specification only defines RTCRtpParameters in terms of senders,
+ * but this API also applies them to receivers, similar to ORTC:
+ * http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*.
+ */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpParameters) * parameters;
+
+/** The RTCMediaStreamTrack associated with the receiver.
+ * Note: reading this property returns a new instance of
+ * RTCMediaStreamTrack. Use isEqual: instead of == to compare
+ * RTCMediaStreamTrack instances.
+ */
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track;
+
+/** The delegate for this RtpReceiver. */
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCRtpReceiverDelegate)> delegate;
+
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpReceiver) : NSObject <RTC_OBJC_TYPE(RTCRtpReceiver)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
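
A sketch of adopting the delegate; the macro-mangled protocol declaration above resolves to the selector rtpReceiver:didReceiveFirstPacketForMediaType::

    @interface FirstPacketLogger : NSObject <RTC_OBJC_TYPE(RTCRtpReceiverDelegate)>
    @end

    @implementation FirstPacketLogger
    - (void)rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver
        didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType {
      NSLog(@"first packet for media type %ld", (long)mediaType);
    }
    @end

Note that the delegate property is weak, so the caller must keep its own strong reference to the logger object.
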
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.mm
new file mode 100644
index 0000000000..60af86ac1b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.mm
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpReceiver+Private.h"
+
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpParameters+Private.h"
+#import "RTCRtpReceiver+Native.h"
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "api/media_stream_interface.h"
+
+namespace webrtc {
+
+RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver) {
+ RTC_CHECK(receiver);
+ receiver_ = receiver;
+}
+
+void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
+ cricket::MediaType media_type) {
+ RTCRtpMediaType packet_media_type =
+ [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:media_type];
+ RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = receiver_;
+ [receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type];
+}
+
+} // namespace webrtc
+
+@implementation RTC_OBJC_TYPE (RTCRtpReceiver) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::RtpReceiverInterface> _nativeRtpReceiver;
+ std::unique_ptr<webrtc::RtpReceiverDelegateAdapter> _observer;
+}
+
+@synthesize delegate = _delegate;
+
+- (NSString *)receiverId {
+ return [NSString stringForStdString:_nativeRtpReceiver->id()];
+}
+
+- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
+ return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc]
+ initWithNativeParameters:_nativeRtpReceiver->GetParameters()];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
+ _nativeRtpReceiver->track());
+ if (nativeTrack) {
+ return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack
+ factory:_factory];
+ }
+ return nil;
+}
+
+- (NSString *)description {
+ return [NSString
+ stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpReceiver) {\n receiverId: %@\n}", self.receiverId];
+}
+
+- (void)dealloc {
+ if (_nativeRtpReceiver) {
+ _nativeRtpReceiver->SetObserver(nullptr);
+ }
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (object == nil) {
+ return NO;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = (RTC_OBJC_TYPE(RTCRtpReceiver) *)object;
+ return _nativeRtpReceiver == receiver.nativeRtpReceiver;
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeRtpReceiver.get();
+}
+
+#pragma mark - Native
+
+- (void)setFrameDecryptor:(rtc::scoped_refptr<webrtc::FrameDecryptorInterface>)frameDecryptor {
+ _nativeRtpReceiver->SetFrameDecryptor(frameDecryptor);
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
+ return _nativeRtpReceiver;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpReceiver:
+ (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeRtpReceiver = nativeRtpReceiver;
+ RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpReceiver)(%p): created receiver: %@", self, self.description);
+ _observer.reset(new webrtc::RtpReceiverDelegateAdapter(self));
+ _nativeRtpReceiver->SetObserver(_observer.get());
+ }
+ return self;
+}
+
++ (RTCRtpMediaType)mediaTypeForNativeMediaType:
+ (cricket::MediaType)nativeMediaType {
+ switch (nativeMediaType) {
+ case cricket::MEDIA_TYPE_AUDIO:
+ return RTCRtpMediaTypeAudio;
+ case cricket::MEDIA_TYPE_VIDEO:
+ return RTCRtpMediaTypeVideo;
+ case cricket::MEDIA_TYPE_DATA:
+ return RTCRtpMediaTypeData;
+ case cricket::MEDIA_TYPE_UNSUPPORTED:
+ return RTCRtpMediaTypeUnsupported;
+ }
+}
+
++ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType {
+ switch (mediaType) {
+ case RTCRtpMediaTypeAudio:
+ return cricket::MEDIA_TYPE_AUDIO;
+ case RTCRtpMediaTypeVideo:
+ return cricket::MEDIA_TYPE_VIDEO;
+ case RTCRtpMediaTypeData:
+ return cricket::MEDIA_TYPE_DATA;
+ case RTCRtpMediaTypeUnsupported:
+ return cricket::MEDIA_TYPE_UNSUPPORTED;
+ }
+}
+
++ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType {
+ switch (mediaType) {
+ case RTCRtpMediaTypeAudio:
+ return @"AUDIO";
+ case RTCRtpMediaTypeVideo:
+ return @"VIDEO";
+ case RTCRtpMediaTypeData:
+ return @"DATA";
+ case RTCRtpMediaTypeUnsupported:
+ return @"UNSUPPORTED";
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Native.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Native.h
new file mode 100644
index 0000000000..249d5c5e09
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Native.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpSender.h"
+
+#include "api/crypto/frame_encryptor_interface.h"
+#include "api/scoped_refptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * This class extension exposes methods that work directly with injectable C++ components.
+ */
+@interface RTC_OBJC_TYPE (RTCRtpSender)
+()
+
+ /** Sets a user defined frame encryptor that will encrypt the entire frame
+ * before it is sent across the network. This will encrypt the entire frame
+ * using the user provided encryption mechanism regardless of whether SRTP is
+ * enabled or not.
+ */
+ - (void)setFrameEncryptor : (rtc::scoped_refptr<webrtc::FrameEncryptorInterface>)frameEncryptor;
+
+@end
+
+NS_ASSUME_NONNULL_END
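
The sender-side mirror of the decryptor hook above; `sender` is an assumed RTCRtpSender, and MyFrameEncryptor is again a hypothetical webrtc::FrameEncryptorInterface implementation:

    rtc::scoped_refptr<webrtc::FrameEncryptorInterface> encryptor =
        rtc::make_ref_counted<MyFrameEncryptor>();
    [sender setFrameEncryptor:encryptor];
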
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Private.h
new file mode 100644
index 0000000000..6fdb42bb22
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Private.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpSender.h"
+
+#include "api/rtp_sender_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface RTC_OBJC_TYPE (RTCRtpSender)
+()
+
+ @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;
+
+/** Initialize an RTCRtpSender with a native RtpSenderInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.h
new file mode 100644
index 0000000000..fcdf199869
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCDtmfSender.h"
+#import "RTCMacros.h"
+#import "RTCMediaStreamTrack.h"
+#import "RTCRtpParameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCRtpSender)<NSObject>
+
+/** A unique identifier for this sender. */
+@property(nonatomic, readonly) NSString *senderId;
+
+/** The currently active RTCRtpParameters, as defined in
+ * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
+ */
+@property(nonatomic, copy) RTC_OBJC_TYPE(RTCRtpParameters) * parameters;
+
+/** The RTCMediaStreamTrack associated with the sender.
+ * Note: reading this property returns a new instance of
+ * RTCMediaStreamTrack. Use isEqual: instead of == to compare
+ * RTCMediaStreamTrack instances.
+ */
+@property(nonatomic, copy, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track;
+
+/** IDs of streams associated with the RTP sender */
+@property(nonatomic, copy) NSArray<NSString *> *streamIds;
+
+/** The RTCDtmfSender associated with the RTP sender. */
+@property(nonatomic, readonly, nullable) id<RTC_OBJC_TYPE(RTCDtmfSender)> dtmfSender;
+
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpSender) : NSObject <RTC_OBJC_TYPE(RTCRtpSender)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.mm
new file mode 100644
index 0000000000..4fadb30f49
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.mm
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpSender+Private.h"
+
+#import "RTCDtmfSender+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpParameters+Private.h"
+#import "RTCRtpSender+Native.h"
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "api/media_stream_interface.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtpSender) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
+}
+
+@synthesize dtmfSender = _dtmfSender;
+
+- (NSString *)senderId {
+ return [NSString stringForStdString:_nativeRtpSender->id()];
+}
+
+- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
+ return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc]
+ initWithNativeParameters:_nativeRtpSender->GetParameters()];
+}
+
+- (void)setParameters:(RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
+ if (!_nativeRtpSender->SetParameters(parameters.nativeParameters).ok()) {
+ RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set parameters: %@", self, parameters);
+ }
+}
+
+- (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
+ _nativeRtpSender->track());
+ if (nativeTrack) {
+ return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack
+ factory:_factory];
+ }
+ return nil;
+}
+
+- (void)setTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ if (!_nativeRtpSender->SetTrack(track.nativeTrack.get())) {
+ RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set track %@", self, track);
+ }
+}
+
+- (NSArray<NSString *> *)streamIds {
+ std::vector<std::string> nativeStreamIds = _nativeRtpSender->stream_ids();
+ NSMutableArray *streamIds = [NSMutableArray arrayWithCapacity:nativeStreamIds.size()];
+ for (const auto &s : nativeStreamIds) {
+ [streamIds addObject:[NSString stringForStdString:s]];
+ }
+ return streamIds;
+}
+
+- (void)setStreamIds:(NSArray<NSString *> *)streamIds {
+ std::vector<std::string> nativeStreamIds;
+ for (NSString *streamId in streamIds) {
+ nativeStreamIds.push_back([streamId UTF8String]);
+ }
+ _nativeRtpSender->SetStreams(nativeStreamIds);
+}
+
+- (NSString *)description {
+ return [NSString
+ stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpSender) {\n senderId: %@\n}", self.senderId];
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (object == nil) {
+ return NO;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ RTC_OBJC_TYPE(RTCRtpSender) *sender = (RTC_OBJC_TYPE(RTCRtpSender) *)object;
+ return _nativeRtpSender == sender.nativeRtpSender;
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeRtpSender.get();
+}
+
+#pragma mark - Native
+
+- (void)setFrameEncryptor:(rtc::scoped_refptr<webrtc::FrameEncryptorInterface>)frameEncryptor {
+ _nativeRtpSender->SetFrameEncryptor(frameEncryptor);
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
+ return _nativeRtpSender;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
+ NSParameterAssert(factory);
+ NSParameterAssert(nativeRtpSender);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeRtpSender = nativeRtpSender;
+ if (_nativeRtpSender->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+ rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender(
+ _nativeRtpSender->GetDtmfSender());
+ if (nativeDtmfSender) {
+ _dtmfSender =
+ [[RTC_OBJC_TYPE(RTCDtmfSender) alloc] initWithNativeDtmfSender:nativeDtmfSender];
+ }
+ }
+ RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): created sender: %@", self, self.description);
+ }
+ return self;
+}
+
+@end
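
A sketch of swapping the outgoing track and sending DTMF; `audioTrack` is assumed to come from the SDK's media factory, and insertDtmf:duration:interToneGap: from the imported RTCDtmfSender header (not shown in this diff):

    sender.track = audioTrack;  // failures from SetTrack() are only logged
    if (sender.dtmfSender != nil) {
      [sender.dtmfSender insertDtmf:@"123#" duration:0.1 interToneGap:0.05];
    }
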
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h
new file mode 100644
index 0000000000..65d45fb88e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpTransceiver.h"
+
+#include "api/rtp_transceiver_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit)
+()
+
+ @property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit;
+
+@end
+
+@interface RTC_OBJC_TYPE (RTCRtpTransceiver)
+()
+
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::RtpTransceiverInterface> nativeRtpTransceiver;
+
+/** Initialize an RTCRtpTransceiver with a native RtpTransceiverInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpTransceiver:
+ (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver
+ NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection:
+ (RTCRtpTransceiverDirection)direction;
+
++ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection:
+ (webrtc::RtpTransceiverDirection)nativeDirection;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.h
new file mode 100644
index 0000000000..fd59013639
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCRtpReceiver.h"
+#import "RTCRtpSender.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+extern NSString *const kRTCRtpTransceiverErrorDomain;
+
+/** https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection */
+typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection) {
+ RTCRtpTransceiverDirectionSendRecv,
+ RTCRtpTransceiverDirectionSendOnly,
+ RTCRtpTransceiverDirectionRecvOnly,
+ RTCRtpTransceiverDirectionInactive,
+ RTCRtpTransceiverDirectionStopped
+};
+
+/** Structure for initializing an RTCRtpTransceiver in a call to
+ * RTCPeerConnection.addTransceiver.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) : NSObject
+
+/** Direction of the RTCRtpTransceiver. See RTCRtpTransceiver.direction. */
+@property(nonatomic) RTCRtpTransceiverDirection direction;
+
+/** The added RTCRtpTransceiver will be added to these streams. */
+@property(nonatomic) NSArray<NSString *> *streamIds;
+
+/** TODO(bugs.webrtc.org/7600): Not implemented. */
+@property(nonatomic) NSArray<RTC_OBJC_TYPE(RTCRtpEncodingParameters) *> *sendEncodings;
+
+@end
+
+@class RTC_OBJC_TYPE(RTCRtpTransceiver);
+
+/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the
+ * WebRTC specification. A transceiver represents a combination of an RTCRtpSender
+ * and an RTCRtpReceiver that share a common mid. As defined in JSEP, an
+ * RTCRtpTransceiver is said to be associated with a media description if its
+ * mid property is non-nil; otherwise, it is said to be disassociated.
+ * JSEP: https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24
+ *
+ * Note that RTCRtpTransceivers are only supported when using
+ * RTCPeerConnection with Unified Plan SDP.
+ *
+ * WebRTC specification for RTCRtpTransceiver, the JavaScript analog:
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCRtpTransceiver)<NSObject>
+
+ /** Media type of the transceiver. The sender and receiver will also have this
+ * type.
+ */
+ @property(nonatomic, readonly) RTCRtpMediaType mediaType;
+
+/** The mid attribute is the mid negotiated and present in the local and
+ * remote descriptions. Before negotiation is complete, the mid value may be
+ * nil. After rollbacks, the value may change from a non-nil value to nil.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-mid
+ */
+@property(nonatomic, readonly) NSString *mid;
+
+/** The sender attribute exposes the RTCRtpSender corresponding to the RTP
+ * media that may be sent with the transceiver's mid. The sender is always
+ * present, regardless of the direction of media.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender
+ */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpSender) * sender;
+
+/** The receiver attribute exposes the RTCRtpReceiver corresponding to the RTP
+ * media that may be received with the transceiver's mid. The receiver is
+ * always present, regardless of the direction of media.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver
+ */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpReceiver) * receiver;
+
+/** The isStopped attribute indicates that the sender of this transceiver will
+ * no longer send, and that the receiver will no longer receive. It is true if
+ * either stop has been called or if setting the local or remote description
+ * has caused the RTCRtpTransceiver to be stopped.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stopped
+ */
+@property(nonatomic, readonly) BOOL isStopped;
+
+/** The direction attribute indicates the preferred direction of this
+ * transceiver, which will be used in calls to createOffer and createAnswer.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
+ */
+@property(nonatomic, readonly) RTCRtpTransceiverDirection direction;
+
+/** The currentDirection attribute indicates the current direction negotiated
+ * for this transceiver. If this transceiver has never been represented in an
+ * offer/answer exchange, or if the transceiver is stopped, the value is not
+ * present and this method returns NO.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection
+ */
+- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut;
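+
+// Illustrative call pattern (example only; `transceiver` is assumed): the out
+// parameter lets a caller distinguish "never negotiated or stopped" (NO) from
+// a valid negotiated direction (YES).
+//
+//   RTCRtpTransceiverDirection current;
+//   if ([transceiver currentDirection:&current]) {
+//     // `current` holds the negotiated direction.
+//   }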
+
+/** The stop method irreversibly stops the RTCRtpTransceiver. The sender of
+ * this transceiver will no longer send, the receiver will no longer receive.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop
+ */
+- (void)stopInternal;
+
+/** An update of directionality does not take effect immediately. Instead,
+ * future calls to createOffer and createAnswer mark the corresponding media
+ * descriptions as sendrecv, sendonly, recvonly, or inactive.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
+ */
+- (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)error;
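+
+// Illustrative call pattern (example only; `transceiver` is assumed):
+//
+//   NSError *error = nil;
+//   [transceiver setDirection:RTCRtpTransceiverDirectionRecvOnly error:&error];
+//   if (error) {
+//     NSLog(@"setDirection failed: %@", error);
+//   }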
+
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpTransceiver) : NSObject <RTC_OBJC_TYPE(RTCRtpTransceiver)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm
new file mode 100644
index 0000000000..ae1cf79864
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpTransceiver+Private.h"
+
+#import "RTCRtpEncodingParameters+Private.h"
+#import "RTCRtpParameters+Private.h"
+#import "RTCRtpReceiver+Private.h"
+#import "RTCRtpSender+Private.h"
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+NSString *const kRTCRtpTransceiverErrorDomain = @"org.webrtc.RTCRtpTranceiver";
+
+@implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit)
+
+@synthesize direction = _direction;
+@synthesize streamIds = _streamIds;
+@synthesize sendEncodings = _sendEncodings;
+
+- (instancetype)init {
+ if (self = [super init]) {
+ _direction = RTCRtpTransceiverDirectionSendRecv;
+ }
+ return self;
+}
+
+- (webrtc::RtpTransceiverInit)nativeInit {
+ webrtc::RtpTransceiverInit init;
+ init.direction =
+ [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:_direction];
+ for (NSString *streamId in _streamIds) {
+ init.stream_ids.push_back([streamId UTF8String]);
+ }
+ for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * sendEncoding in _sendEncodings) {
+ init.send_encodings.push_back(sendEncoding.nativeParameters);
+ }
+ return init;
+}
+
+@end
+
+@implementation RTC_OBJC_TYPE (RTCRtpTransceiver) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> _nativeRtpTransceiver;
+}
+
+- (RTCRtpMediaType)mediaType {
+ return [RTC_OBJC_TYPE(RTCRtpReceiver)
+ mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()];
+}
+
+- (NSString *)mid {
+ if (_nativeRtpTransceiver->mid()) {
+ return [NSString stringForStdString:*_nativeRtpTransceiver->mid()];
+ } else {
+ return nil;
+ }
+}
+
+@synthesize sender = _sender;
+@synthesize receiver = _receiver;
+
+- (BOOL)isStopped {
+ return _nativeRtpTransceiver->stopped();
+}
+
+- (RTCRtpTransceiverDirection)direction {
+ return [RTC_OBJC_TYPE(RTCRtpTransceiver)
+ rtpTransceiverDirectionFromNativeDirection:_nativeRtpTransceiver->direction()];
+}
+
+- (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)error {
+ webrtc::RTCError nativeError = _nativeRtpTransceiver->SetDirectionWithError(
+ [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:direction]);
+
+ if (!nativeError.ok() && error) {
+ *error = [NSError errorWithDomain:kRTCRtpTransceiverErrorDomain
+ code:static_cast<int>(nativeError.type())
+ userInfo:@{
+ @"message" : [NSString stringWithCString:nativeError.message()
+ encoding:NSUTF8StringEncoding]
+ }];
+ }
+}
+
+- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut {
+ if (_nativeRtpTransceiver->current_direction()) {
+ *currentDirectionOut = [RTC_OBJC_TYPE(RTCRtpTransceiver)
+ rtpTransceiverDirectionFromNativeDirection:*_nativeRtpTransceiver->current_direction()];
+ return YES;
+ } else {
+ return NO;
+ }
+}
+
+- (void)stopInternal {
+ _nativeRtpTransceiver->StopInternal();
+}
+
+- (NSString *)description {
+ return [NSString
+ stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpTransceiver) {\n sender: %@\n receiver: %@\n}",
+ _sender,
+ _receiver];
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (object == nil) {
+ return NO;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = (RTC_OBJC_TYPE(RTCRtpTransceiver) *)object;
+ return _nativeRtpTransceiver == transceiver.nativeRtpTransceiver;
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeRtpTransceiver.get();
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver {
+ return _nativeRtpTransceiver;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpTransceiver:
+ (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver {
+ NSParameterAssert(factory);
+ NSParameterAssert(nativeRtpTransceiver);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeRtpTransceiver = nativeRtpTransceiver;
+ _sender = [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:_factory
+ nativeRtpSender:nativeRtpTransceiver->sender()];
+ _receiver =
+ [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:_factory
+ nativeRtpReceiver:nativeRtpTransceiver->receiver()];
+ RTCLogInfo(
+ @"RTC_OBJC_TYPE(RTCRtpTransceiver)(%p): created transceiver: %@", self, self.description);
+ }
+ return self;
+}
+
++ (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection:
+ (RTCRtpTransceiverDirection)direction {
+ switch (direction) {
+ case RTCRtpTransceiverDirectionSendRecv:
+ return webrtc::RtpTransceiverDirection::kSendRecv;
+ case RTCRtpTransceiverDirectionSendOnly:
+ return webrtc::RtpTransceiverDirection::kSendOnly;
+ case RTCRtpTransceiverDirectionRecvOnly:
+ return webrtc::RtpTransceiverDirection::kRecvOnly;
+ case RTCRtpTransceiverDirectionInactive:
+ return webrtc::RtpTransceiverDirection::kInactive;
+ case RTCRtpTransceiverDirectionStopped:
+ return webrtc::RtpTransceiverDirection::kStopped;
+ }
+}
+
++ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection:
+ (webrtc::RtpTransceiverDirection)nativeDirection {
+ switch (nativeDirection) {
+ case webrtc::RtpTransceiverDirection::kSendRecv:
+ return RTCRtpTransceiverDirectionSendRecv;
+ case webrtc::RtpTransceiverDirection::kSendOnly:
+ return RTCRtpTransceiverDirectionSendOnly;
+ case webrtc::RtpTransceiverDirection::kRecvOnly:
+ return RTCRtpTransceiverDirectionRecvOnly;
+ case webrtc::RtpTransceiverDirection::kInactive:
+ return RTCRtpTransceiverDirectionInactive;
+ case webrtc::RtpTransceiverDirection::kStopped:
+ return RTCRtpTransceiverDirectionStopped;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.h
new file mode 100644
index 0000000000..f68bc5e9e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+/**
+ * Initialize and clean up the SSL library. Failure is fatal. These call the
+ * corresponding functions in rtc_base/ssl_adapter.h.
+ */
+RTC_EXTERN BOOL RTCInitializeSSL(void);
+RTC_EXTERN BOOL RTCCleanupSSL(void);
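+
+// A minimal lifecycle sketch (illustrative only): initialize once before any
+// WebRTC networking is used, and clean up at shutdown.
+//
+//   NSAssert(RTCInitializeSSL(), @"Failed to initialize SSL");
+//   // ... create and use peer connections ...
+//   RTCCleanupSSL();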
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.mm
new file mode 100644
index 0000000000..430249577b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.mm
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSSLAdapter.h"
+
+#include "rtc_base/checks.h"
+#include "rtc_base/ssl_adapter.h"
+
+BOOL RTCInitializeSSL(void) {
+ BOOL initialized = rtc::InitializeSSL();
+ RTC_DCHECK(initialized);
+ return initialized;
+}
+
+BOOL RTCCleanupSSL(void) {
+ BOOL cleanedUp = rtc::CleanupSSL();
+ RTC_DCHECK(cleanedUp);
+ return cleanedUp;
+}
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h
new file mode 100644
index 0000000000..aa087e557f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription.h"
+
+#include "api/jsep.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCSessionDescription)
+()
+
+ /**
+ * The native SessionDescriptionInterface representation of this
+ * RTCSessionDescription object. This is needed to pass to the underlying C++
+ * APIs.
+ */
+ @property(nonatomic,
+ readonly) std::unique_ptr<webrtc::SessionDescriptionInterface> nativeDescription;
+
+/**
+ * Initialize an RTCSessionDescription from a native
+ * SessionDescriptionInterface. No ownership is taken of the native session
+ * description.
+ */
+- (instancetype)initWithNativeDescription:
+ (const webrtc::SessionDescriptionInterface *)nativeDescription;
+
++ (std::string)stdStringForType:(RTCSdpType)type;
+
++ (RTCSdpType)typeForStdString:(const std::string &)string;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.h
new file mode 100644
index 0000000000..8a9479d5cf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+/**
+ * Represents the session description type. This exposes the same types that are
+ * in C++, including the rollback type defined in the W3C spec.
+ */
+typedef NS_ENUM(NSInteger, RTCSdpType) {
+ RTCSdpTypeOffer,
+ RTCSdpTypePrAnswer,
+ RTCSdpTypeAnswer,
+ RTCSdpTypeRollback,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCSessionDescription) : NSObject
+
+/** The type of session description. */
+@property(nonatomic, readonly) RTCSdpType type;
+
+/** The SDP string representation of this session description. */
+@property(nonatomic, readonly) NSString *sdp;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Initialize a session description with a type and SDP string. */
+- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp NS_DESIGNATED_INITIALIZER;
+
++ (NSString *)stringForType:(RTCSdpType)type;
+
++ (RTCSdpType)typeForString:(NSString *)string;
+
+@end
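+
+// A minimal usage sketch (illustrative only; `sdpString` is an assumed
+// variable holding SDP received from a remote peer):
+//
+//   RTC_OBJC_TYPE(RTCSessionDescription) *answer =
+//       [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer
+//                                                               sdp:sdpString];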
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.mm
new file mode 100644
index 0000000000..539c90b14c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.mm
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription+Private.h"
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCSessionDescription)
+
+@synthesize type = _type;
+@synthesize sdp = _sdp;
+
++ (NSString *)stringForType:(RTCSdpType)type {
+ std::string string = [[self class] stdStringForType:type];
+ return [NSString stringForStdString:string];
+}
+
++ (RTCSdpType)typeForString:(NSString *)string {
+ std::string typeString = string.stdString;
+ return [[self class] typeForStdString:typeString];
+}
+
+- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp {
+ if (self = [super init]) {
+ _type = type;
+ _sdp = [sdp copy];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCSessionDescription):\n%@\n%@",
+ [[self class] stringForType:_type],
+ _sdp];
+}
+
+#pragma mark - Private
+
+- (std::unique_ptr<webrtc::SessionDescriptionInterface>)nativeDescription {
+ webrtc::SdpParseError error;
+
+ std::unique_ptr<webrtc::SessionDescriptionInterface> description(webrtc::CreateSessionDescription(
+ [[self class] stdStringForType:_type], _sdp.stdString, &error));
+
+ if (!description) {
+ RTCLogError(@"Failed to create session description: %s\nline: %s",
+ error.description.c_str(),
+ error.line.c_str());
+ }
+
+ return description;
+}
+
+- (instancetype)initWithNativeDescription:
+ (const webrtc::SessionDescriptionInterface *)nativeDescription {
+ NSParameterAssert(nativeDescription);
+ std::string sdp;
+ nativeDescription->ToString(&sdp);
+ RTCSdpType type = [[self class] typeForStdString:nativeDescription->type()];
+
+ return [self initWithType:type
+ sdp:[NSString stringForStdString:sdp]];
+}
+
++ (std::string)stdStringForType:(RTCSdpType)type {
+ switch (type) {
+ case RTCSdpTypeOffer:
+ return webrtc::SessionDescriptionInterface::kOffer;
+ case RTCSdpTypePrAnswer:
+ return webrtc::SessionDescriptionInterface::kPrAnswer;
+ case RTCSdpTypeAnswer:
+ return webrtc::SessionDescriptionInterface::kAnswer;
+ case RTCSdpTypeRollback:
+ return webrtc::SessionDescriptionInterface::kRollback;
+ }
+}
+
++ (RTCSdpType)typeForStdString:(const std::string &)string {
+ if (string == webrtc::SessionDescriptionInterface::kOffer) {
+ return RTCSdpTypeOffer;
+ } else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) {
+ return RTCSdpTypePrAnswer;
+ } else if (string == webrtc::SessionDescriptionInterface::kAnswer) {
+ return RTCSdpTypeAnswer;
+ } else if (string == webrtc::SessionDescriptionInterface::kRollback) {
+ return RTCSdpTypeRollback;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ return RTCSdpTypeOffer;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h
new file mode 100644
index 0000000000..47c5241d51
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCStatisticsReport.h"
+
+#include "api/stats/rtc_stats_report.h"
+
+@interface RTC_OBJC_TYPE (RTCStatisticsReport) (Private)
+
+- (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.h
new file mode 100644
index 0000000000..06dbf48d88
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCStatistics);
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** A statistics report. Encapsulates a number of RTCStatistics objects. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCStatisticsReport) : NSObject
+
+/** The timestamp of the report in microseconds since 1970-01-01T00:00:00Z. */
+@property(nonatomic, readonly) CFTimeInterval timestamp_us;
+
+/** RTCStatistics objects by id. */
+@property(nonatomic, readonly) NSDictionary<NSString *, RTC_OBJC_TYPE(RTCStatistics) *> *statistics;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/** A part of a report (a subreport) covering a certain area. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCStatistics) : NSObject
+
+/** The id of this subreport, e.g. "RTCMediaStreamTrack_receiver_2". */
+@property(nonatomic, readonly) NSString *id;
+
+/** The timestamp of the subreport in microseconds since 1970-01-01T00:00:00Z. */
+@property(nonatomic, readonly) CFTimeInterval timestamp_us;
+
+/** The type of the subreport, e.g. "track", "codec". */
+@property(nonatomic, readonly) NSString *type;
+
+/** The keys and values of the subreport, e.g. "totalFramesDuration = 5.551".
+ The values are NSNumbers or NSStrings, NSArrays of NSNumbers or NSStrings,
+ or NSDictionaries mapping NSString keys to NSNumber values. */
+@property(nonatomic, readonly) NSDictionary<NSString *, NSObject *> *values;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
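+
+// Illustrative traversal (example only; `report` is an assumed
+// RTCStatisticsReport obtained from a statistics callback):
+//
+//   [report.statistics
+//       enumerateKeysAndObjectsUsingBlock:^(NSString *statsId,
+//                                           RTC_OBJC_TYPE(RTCStatistics) *stats,
+//                                           BOOL *stop) {
+//         NSLog(@"%@ (%@): %@", statsId, stats.type, stats.values);
+//       }];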
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
new file mode 100644
index 0000000000..28ef326b99
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
@@ -0,0 +1,193 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCStatisticsReport+Private.h"
+
+#include "helpers/NSString+StdString.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+/** Converts a single stats member to a suitable NSNumber or NSString, an NSArray of
+    NSNumbers or NSStrings, or an NSDictionary mapping NSString keys to NSNumber values. */
+NSObject *ValueFromStatsMember(const RTCStatsMemberInterface *member) {
+ if (member->is_defined()) {
+ switch (member->type()) {
+ case RTCStatsMemberInterface::kBool:
+ return [NSNumber numberWithBool:*member->cast_to<RTCStatsMember<bool>>()];
+ case RTCStatsMemberInterface::kInt32:
+ return [NSNumber numberWithInt:*member->cast_to<RTCStatsMember<int32_t>>()];
+ case RTCStatsMemberInterface::kUint32:
+ return [NSNumber numberWithUnsignedInt:*member->cast_to<RTCStatsMember<uint32_t>>()];
+ case RTCStatsMemberInterface::kInt64:
+ return [NSNumber numberWithLong:*member->cast_to<RTCStatsMember<int64_t>>()];
+ case RTCStatsMemberInterface::kUint64:
+ return [NSNumber numberWithUnsignedLong:*member->cast_to<RTCStatsMember<uint64_t>>()];
+ case RTCStatsMemberInterface::kDouble:
+ return [NSNumber numberWithDouble:*member->cast_to<RTCStatsMember<double>>()];
+ case RTCStatsMemberInterface::kString:
+ return [NSString stringForStdString:*member->cast_to<RTCStatsMember<std::string>>()];
+ case RTCStatsMemberInterface::kSequenceBool: {
+ std::vector<bool> sequence = *member->cast_to<RTCStatsMember<std::vector<bool>>>();
+ NSMutableArray *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+ for (auto item : sequence) {
+ [array addObject:[NSNumber numberWithBool:item]];
+ }
+ return [array copy];
+ }
+ case RTCStatsMemberInterface::kSequenceInt32: {
+ std::vector<int32_t> sequence = *member->cast_to<RTCStatsMember<std::vector<int32_t>>>();
+ NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+ for (const auto &item : sequence) {
+ [array addObject:[NSNumber numberWithInt:item]];
+ }
+ return [array copy];
+ }
+ case RTCStatsMemberInterface::kSequenceUint32: {
+ std::vector<uint32_t> sequence = *member->cast_to<RTCStatsMember<std::vector<uint32_t>>>();
+ NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+ for (const auto &item : sequence) {
+ [array addObject:[NSNumber numberWithUnsignedInt:item]];
+ }
+ return [array copy];
+ }
+ case RTCStatsMemberInterface::kSequenceInt64: {
+ std::vector<int64_t> sequence = *member->cast_to<RTCStatsMember<std::vector<int64_t>>>();
+ NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+ for (const auto &item : sequence) {
+ [array addObject:[NSNumber numberWithLong:item]];
+ }
+ return [array copy];
+ }
+ case RTCStatsMemberInterface::kSequenceUint64: {
+ std::vector<uint64_t> sequence = *member->cast_to<RTCStatsMember<std::vector<uint64_t>>>();
+ NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+ for (const auto &item : sequence) {
+ [array addObject:[NSNumber numberWithUnsignedLong:item]];
+ }
+ return [array copy];
+ }
+ case RTCStatsMemberInterface::kSequenceDouble: {
+ std::vector<double> sequence = *member->cast_to<RTCStatsMember<std::vector<double>>>();
+ NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+ for (const auto &item : sequence) {
+ [array addObject:[NSNumber numberWithDouble:item]];
+ }
+ return [array copy];
+ }
+ case RTCStatsMemberInterface::kSequenceString: {
+ std::vector<std::string> sequence =
+ *member->cast_to<RTCStatsMember<std::vector<std::string>>>();
+ NSMutableArray<NSString *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+ for (const auto &item : sequence) {
+ [array addObject:[NSString stringForStdString:item]];
+ }
+ return [array copy];
+ }
+ case RTCStatsMemberInterface::kMapStringUint64: {
+ std::map<std::string, uint64_t> map =
+ *member->cast_to<RTCStatsMember<std::map<std::string, uint64_t>>>();
+ NSMutableDictionary<NSString *, NSNumber *> *dictionary =
+ [NSMutableDictionary dictionaryWithCapacity:map.size()];
+ for (const auto &item : map) {
+ dictionary[[NSString stringForStdString:item.first]] = @(item.second);
+ }
+ return [dictionary copy];
+ }
+ case RTCStatsMemberInterface::kMapStringDouble: {
+ std::map<std::string, double> map =
+ *member->cast_to<RTCStatsMember<std::map<std::string, double>>>();
+ NSMutableDictionary<NSString *, NSNumber *> *dictionary =
+ [NSMutableDictionary dictionaryWithCapacity:map.size()];
+ for (const auto &item : map) {
+ dictionary[[NSString stringForStdString:item.first]] = @(item.second);
+ }
+ return [dictionary copy];
+ }
+ default:
+ RTC_DCHECK_NOTREACHED();
+ }
+ }
+
+ return nil;
+}
+} // namespace webrtc
+
+@implementation RTC_OBJC_TYPE (RTCStatistics)
+
+@synthesize id = _id;
+@synthesize timestamp_us = _timestamp_us;
+@synthesize type = _type;
+@synthesize values = _values;
+
+- (instancetype)initWithStatistics:(const webrtc::RTCStats &)statistics {
+ if (self = [super init]) {
+ _id = [NSString stringForStdString:statistics.id()];
+ _timestamp_us = statistics.timestamp_us();
+ _type = [NSString stringWithCString:statistics.type() encoding:NSUTF8StringEncoding];
+
+ NSMutableDictionary<NSString *, NSObject *> *values = [NSMutableDictionary dictionary];
+ for (const webrtc::RTCStatsMemberInterface *member : statistics.Members()) {
+ NSObject *value = ValueFromStatsMember(member);
+ if (value) {
+ NSString *name = [NSString stringWithCString:member->name() encoding:NSUTF8StringEncoding];
+ RTC_DCHECK(name.length > 0);
+ RTC_DCHECK(!values[name]);
+ values[name] = value;
+ }
+ }
+ _values = [values copy];
+ }
+
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"id = %@, type = %@, timestamp = %.0f, values = %@",
+ self.id,
+ self.type,
+ self.timestamp_us,
+ self.values];
+}
+
+@end
+
+@implementation RTC_OBJC_TYPE (RTCStatisticsReport)
+
+@synthesize timestamp_us = _timestamp_us;
+@synthesize statistics = _statistics;
+
+- (NSString *)description {
+ return [NSString
+ stringWithFormat:@"timestamp = %.0f, statistics = %@", self.timestamp_us, self.statistics];
+}
+
+@end
+
+@implementation RTC_OBJC_TYPE (RTCStatisticsReport) (Private)
+
+- (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report {
+ if (self = [super init]) {
+ _timestamp_us = report.timestamp_us();
+
+ NSMutableDictionary *statisticsById =
+ [NSMutableDictionary dictionaryWithCapacity:report.size()];
+ for (const auto &stat : report) {
+ RTC_OBJC_TYPE(RTCStatistics) *statistics =
+ [[RTC_OBJC_TYPE(RTCStatistics) alloc] initWithStatistics:stat];
+ statisticsById[statistics.id] = statistics;
+ }
+ _statistics = [statisticsById copy];
+ }
+
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.h
new file mode 100644
index 0000000000..5c66e5a63a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+RTC_EXTERN void RTCSetupInternalTracer(void);
+/** Starts capture to the specified file, which must be a valid writable path.
+ * Returns YES if capture starts.
+ */
+RTC_EXTERN BOOL RTCStartInternalCapture(NSString* filePath);
+RTC_EXTERN void RTCStopInternalCapture(void);
+RTC_EXTERN void RTCShutdownInternalTracer(void);
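+
+// A minimal lifecycle sketch (illustrative only; the file path is a
+// placeholder):
+//
+//   RTCSetupInternalTracer();
+//   if (RTCStartInternalCapture(@"/tmp/webrtc-trace.json")) {
+//     // ... exercise the code paths to be traced ...
+//     RTCStopInternalCapture();
+//   }
+//   RTCShutdownInternalTracer();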
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.mm
new file mode 100644
index 0000000000..72f9f4da13
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.mm
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCTracing.h"
+
+#include "rtc_base/event_tracer.h"
+
+void RTCSetupInternalTracer(void) {
+ rtc::tracing::SetupInternalTracer();
+}
+
+BOOL RTCStartInternalCapture(NSString *filePath) {
+ return rtc::tracing::StartInternalCapture(filePath.UTF8String);
+}
+
+void RTCStopInternalCapture(void) {
+ rtc::tracing::StopInternalCapture();
+}
+
+void RTCShutdownInternalTracer(void) {
+ rtc::tracing::ShutdownInternalTracer();
+}
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h
new file mode 100644
index 0000000000..5eff996c4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "base/RTCVideoCodecInfo.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/* Interface for converting to/from internal C++ formats. */
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(Private)
+
+ - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format;
+- (webrtc::SdpVideoFormat)nativeSdpVideoFormat;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm
new file mode 100644
index 0000000000..2eb8d366d2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoCodecInfo+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(Private)
+
+ - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format {
+ NSMutableDictionary *params = [NSMutableDictionary dictionary];
+ for (auto it = format.parameters.begin(); it != format.parameters.end(); ++it) {
+ [params setObject:[NSString stringForStdString:it->second]
+ forKey:[NSString stringForStdString:it->first]];
+ }
+ return [self initWithName:[NSString stringForStdString:format.name] parameters:params];
+}
+
+- (webrtc::SdpVideoFormat)nativeSdpVideoFormat {
+ std::map<std::string, std::string> parameters;
+ for (NSString *paramKey in self.parameters.allKeys) {
+ std::string key = [NSString stdStringForString:paramKey];
+ std::string value = [NSString stdStringForString:self.parameters[paramKey]];
+ parameters[key] = value;
+ }
+
+ return webrtc::SdpVideoFormat([NSString stdStringForString:self.name], parameters);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h
new file mode 100644
index 0000000000..8323b18dc1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "base/RTCVideoEncoderSettings.h"
+
+#include "modules/video_coding/include/video_codec_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/* Interfaces for converting to/from internal C++ formats. */
+@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings)
+(Private)
+
+ - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *__nullable)videoCodec;
+- (webrtc::VideoCodec)nativeVideoCodec;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm
new file mode 100644
index 0000000000..dec3a61090
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoEncoderSettings+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings)
+(Private)
+
+ - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *)videoCodec {
+ if (self = [super init]) {
+ if (videoCodec) {
+ const char *codecName = CodecTypeToPayloadString(videoCodec->codecType);
+ self.name = [NSString stringWithUTF8String:codecName];
+
+ self.width = videoCodec->width;
+ self.height = videoCodec->height;
+ self.startBitrate = videoCodec->startBitrate;
+ self.maxBitrate = videoCodec->maxBitrate;
+ self.minBitrate = videoCodec->minBitrate;
+ self.maxFramerate = videoCodec->maxFramerate;
+ self.qpMax = videoCodec->qpMax;
+ self.mode = (RTCVideoCodecMode)videoCodec->mode;
+ }
+ }
+
+ return self;
+}
+
+- (webrtc::VideoCodec)nativeVideoCodec {
+ webrtc::VideoCodec videoCodec;
+ videoCodec.width = self.width;
+ videoCodec.height = self.height;
+ videoCodec.startBitrate = self.startBitrate;
+ videoCodec.maxBitrate = self.maxBitrate;
+ videoCodec.minBitrate = self.minBitrate;
+ videoCodec.maxFramerate = self.maxFramerate;
+ videoCodec.qpMax = self.qpMax;
+ videoCodec.mode = (webrtc::VideoCodecMode)self.mode;
+
+ return videoCodec;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource+Private.h
new file mode 100644
index 0000000000..8e475dd21e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource+Private.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoSource.h"
+
+#import "RTCMediaSource+Private.h"
+
+#include "api/media_stream_interface.h"
+#include "rtc_base/thread.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCVideoSource)
+()
+
+ /**
+ * The VideoTrackSourceInterface object passed to this RTCVideoSource during
+ * construction.
+ */
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeVideoSource;
+
+/** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeVideoSource:
+ (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource
+ NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type NS_UNAVAILABLE;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ signalingThread:(rtc::Thread *)signalingThread
+ workerThread:(rtc::Thread *)workerThread;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ signalingThread:(rtc::Thread *)signalingThread
+ workerThread:(rtc::Thread *)workerThread
+ isScreenCast:(BOOL)isScreenCast;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.h
new file mode 100644
index 0000000000..cdef8b89a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMediaSource.h"
+#import "RTCVideoCapturer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+
+@interface RTC_OBJC_TYPE (RTCVideoSource) : RTC_OBJC_TYPE(RTCMediaSource) <RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Calling this function will cause frames to be scaled down to the
+ * requested resolution. Also, frames will be cropped to match the
+ * requested aspect ratio, and frames will be dropped to match the
+ * requested fps. The requested aspect ratio is orientation agnostic and
+ * will be adjusted to maintain the input orientation, so it doesn't
+ * matter if e.g. 1280x720 or 720x1280 is requested.
+ */
+- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps;
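+
+// Illustrative call (example only; `videoSource` is assumed): request 720p at
+// 30 fps. Because the requested aspect ratio is orientation agnostic, both
+// 1280x720 and 720x1280 input satisfy the same request.
+//
+//   [videoSource adaptOutputFormatToWidth:1280 height:720 fps:30];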
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.mm
new file mode 100644
index 0000000000..486ca93771
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.mm
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoSource+Private.h"
+
+#include "pc/video_track_source_proxy.h"
+#include "rtc_base/checks.h"
+#include "sdk/objc/native/src/objc_video_track_source.h"
+
+static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
+ const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
+ webrtc::VideoTrackSourceProxy *proxy_source =
+ static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
+ return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
+}
+
+// TODO(magjed): Refactor this class and target ObjCVideoTrackSource only once
+// RTCAVFoundationVideoSource is gone. See https://crbug.com/webrtc/7177 for more
+// info.
+@implementation RTC_OBJC_TYPE (RTCVideoSource) {
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeVideoSource:
+ (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
+ RTC_DCHECK(factory);
+ RTC_DCHECK(nativeVideoSource);
+ if (self = [super initWithFactory:factory
+ nativeMediaSource:nativeVideoSource
+ type:RTCMediaSourceTypeVideo]) {
+ _nativeVideoSource = nativeVideoSource;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ signalingThread:(rtc::Thread *)signalingThread
+ workerThread:(rtc::Thread *)workerThread {
+ return [self initWithFactory:factory
+ signalingThread:signalingThread
+ workerThread:workerThread
+ isScreenCast:NO];
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ signalingThread:(rtc::Thread *)signalingThread
+ workerThread:(rtc::Thread *)workerThread
+ isScreenCast:(BOOL)isScreenCast {
+ rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource =
+ rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(isScreenCast);
+
+ return [self initWithFactory:factory
+ nativeVideoSource:webrtc::VideoTrackSourceProxy::Create(
+ signalingThread, workerThread, objCVideoTrackSource)];
+}
+
+- (NSString *)description {
+ NSString *stateString = [[self class] stringForState:self.state];
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCVideoSource)( %p ): %@", self, stateString];
+}
+
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+ didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
+}
+
+- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps {
+ getObjCVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps);
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
+ return _nativeVideoSource;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h
new file mode 100644
index 0000000000..f1a8d7e4ed
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoTrack.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCVideoTrack)
+()
+
+ /** VideoTrackInterface created or passed in at construction. */
+ @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
+
+/** Initialize an RTCVideoTrack with its source and an id. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ source:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+ trackId:(NSString *)trackId;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.h
new file mode 100644
index 0000000000..5382b7169f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCVideoSource);
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack)
+
+/** The video source for this video track. */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Register a renderer that will render all frames received on this track. */
+- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer;
+
+/** Deregister a renderer. */
+- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.mm
new file mode 100644
index 0000000000..fb015c6207
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.mm
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoTrack+Private.h"
+
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoSource+Private.h"
+#import "api/RTCVideoRendererAdapter+Private.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoTrack) {
+ rtc::Thread *_workerThread;
+ NSMutableArray *_adapters /* accessed on _workerThread */;
+}
+
+@synthesize source = _source;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ source:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+ trackId:(NSString *)trackId {
+ NSParameterAssert(factory);
+ NSParameterAssert(source);
+ NSParameterAssert(trackId.length);
+ std::string nativeId = [NSString stdStringForString:trackId];
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+ factory.nativeFactory->CreateVideoTrack(nativeId, source.nativeVideoSource.get());
+ if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
+ _source = source;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeMediaTrack
+ type:(RTCMediaStreamTrackType)type {
+ NSParameterAssert(factory);
+ NSParameterAssert(nativeMediaTrack);
+ NSParameterAssert(type == RTCMediaStreamTrackTypeVideo);
+ if (self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]) {
+ _adapters = [NSMutableArray array];
+ _workerThread = factory.workerThread;
+ }
+ return self;
+}
+
+- (void)dealloc {
+ for (RTCVideoRendererAdapter *adapter in _adapters) {
+ self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer);
+ }
+}
+
+- (RTC_OBJC_TYPE(RTCVideoSource) *)source {
+ if (!_source) {
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source(
+ self.nativeVideoTrack->GetSource());
+ if (source) {
+ _source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory
+ nativeVideoSource:source];
+ }
+ }
+ return _source;
+}
+
+- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {
+ if (!_workerThread->IsCurrent()) {
+ _workerThread->Invoke<void>(RTC_FROM_HERE, [renderer, self] { [self addRenderer:renderer]; });
+ return;
+ }
+
+ // Make sure we don't have this renderer yet.
+ for (RTCVideoRendererAdapter *adapter in _adapters) {
+ if (adapter.videoRenderer == renderer) {
+ RTC_LOG(LS_INFO) << "|renderer| is already attached to this track";
+ return;
+ }
+ }
+ // Create a wrapper that provides a native pointer for us.
+ RTCVideoRendererAdapter* adapter =
+ [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer];
+ [_adapters addObject:adapter];
+ self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer,
+ rtc::VideoSinkWants());
+}
+
+- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {
+ if (!_workerThread->IsCurrent()) {
+ _workerThread->Invoke<void>(RTC_FROM_HERE,
+ [renderer, self] { [self removeRenderer:renderer]; });
+ return;
+ }
+ __block NSUInteger indexToRemove = NSNotFound;
+ [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter,
+ NSUInteger idx,
+ BOOL *stop) {
+ if (adapter.videoRenderer == renderer) {
+ indexToRemove = idx;
+ *stop = YES;
+ }
+ }];
+ if (indexToRemove == NSNotFound) {
+ RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added";
+ return;
+ }
+ RTCVideoRendererAdapter *adapterToRemove =
+ [_adapters objectAtIndex:indexToRemove];
+ self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer);
+ [_adapters removeObjectAtIndex:indexToRemove];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
+ return rtc::scoped_refptr<webrtc::VideoTrackInterface>(
+ static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get()));
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.h
new file mode 100644
index 0000000000..8b17a75aef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+RTC_EXTERN NSString* const kRTCVideoCodecVp8Name;
+RTC_EXTERN NSString* const kRTCVideoCodecVp9Name;
+RTC_EXTERN NSString* const kRTCVideoCodecAv1Name;
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm
new file mode 100644
index 0000000000..1ab236a2c2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "RTCVideoCodecConstants.h"
+
+#include "media/base/media_constants.h"
+
+NSString *const kRTCVideoCodecVp8Name = @(cricket::kVp8CodecName);
+NSString *const kRTCVideoCodecVp9Name = @(cricket::kVp9CodecName);
+NSString *const kRTCVideoCodecAv1Name = @(cricket::kAv1CodecName);
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.h
new file mode 100644
index 0000000000..3f6a689564
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderAV1) : NSObject
+
+/* This returns an AV1 decoder that can be returned from an RTCVideoDecoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it cannot be
+ * used independently of the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)av1Decoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.mm
new file mode 100644
index 0000000000..81f5f93eec
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.mm
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderAV1.h"
+#import "RTCWrappedNativeVideoDecoder.h"
+
+#include "modules/video_coding/codecs/av1/dav1d_decoder.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderAV1)
+
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)av1Decoder {
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) alloc]
+ initWithNativeDecoder:std::unique_ptr<webrtc::VideoDecoder>(webrtc::CreateDav1dDecoder())];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h
new file mode 100644
index 0000000000..a118b25ed7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderVP8) : NSObject
+
+/* This returns a VP8 decoder that can be returned from an RTCVideoDecoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it cannot be
+ * used independently of the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp8Decoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm
new file mode 100644
index 0000000000..c150cf6d3a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderVP8.h"
+#import "RTCWrappedNativeVideoDecoder.h"
+
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP8)
+
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp8Decoder {
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) alloc]
+ initWithNativeDecoder:std::unique_ptr<webrtc::VideoDecoder>(webrtc::VP8Decoder::Create())];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h
new file mode 100644
index 0000000000..de7e62012b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderVP9) : NSObject
+
+/* This returns a VP9 decoder that can be returned from an RTCVideoDecoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it cannot be
+ * used independently of the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp9Decoder;
+
++ (bool)isSupported;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm
new file mode 100644
index 0000000000..05446d436d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderVP9.h"
+#import "RTCWrappedNativeVideoDecoder.h"
+
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP9)
+
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp9Decoder {
+ std::unique_ptr<webrtc::VideoDecoder> nativeDecoder(webrtc::VP9Decoder::Create());
+ if (nativeDecoder == nullptr) {
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) alloc]
+ initWithNativeDecoder:std::move(nativeDecoder)];
+}
+
++ (bool)isSupported {
+#if defined(RTC_ENABLE_VP9)
+ return true;
+#else
+ return false;
+#endif
+}
+
+@end
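
Because VP9 support is a compile-time decision (RTC_ENABLE_VP9), callers should gate on +isSupported before advertising the codec; a small sketch:

    if ([RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) {
      // vp9Decoder can still return nil if the native decoder fails to create.
      id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder = [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder];
    }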
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h
new file mode 100644
index 0000000000..8aa55e4bfa
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderAV1) : NSObject
+
+/* This returns an AV1 encoder that can be returned from an RTCVideoEncoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it cannot be
+ * used independently of the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)av1Encoder;
+
++ (bool)isSupported;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm
new file mode 100644
index 0000000000..d2fe65293b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderAV1.h"
+#import "RTCWrappedNativeVideoEncoder.h"
+#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderAV1)
+
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)av1Encoder {
+ std::unique_ptr<webrtc::VideoEncoder> nativeEncoder(webrtc::CreateLibaomAv1Encoder());
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc]
+ initWithNativeEncoder:std::move(nativeEncoder)];
+}
+
++ (bool)isSupported {
+ return true;
+}
+
+@end
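
+isSupported is unconditionally YES here because the libaom encoder is always linked in, but gating on it keeps call sites uniform with the VP9 classes; sketch:

    if ([RTC_OBJC_TYPE(RTCVideoEncoderAV1) isSupported]) {
      id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder = [RTC_OBJC_TYPE(RTCVideoEncoderAV1) av1Encoder];
      // Return `encoder` from an RTCVideoEncoderFactory's createEncoder: implementation.
    }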
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h
new file mode 100644
index 0000000000..e136a5bda8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderVP8) : NSObject
+
+/* This returns a VP8 encoder that can be returned from an RTCVideoEncoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it cannot be
+ * used independently of the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp8Encoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm
new file mode 100644
index 0000000000..d72f705813
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderVP8.h"
+#import "RTCWrappedNativeVideoEncoder.h"
+
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP8)
+
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp8Encoder {
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc]
+ initWithNativeEncoder:std::unique_ptr<webrtc::VideoEncoder>(webrtc::VP8Encoder::Create())];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h
new file mode 100644
index 0000000000..f7dac6117d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderVP9) : NSObject
+
+/* This returns a VP9 encoder that can be returned from an RTCVideoEncoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it cannot be
+ * used independently of the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp9Encoder;
+
++ (bool)isSupported;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm
new file mode 100644
index 0000000000..18a9353f7e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderVP9.h"
+#import "RTCWrappedNativeVideoEncoder.h"
+
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP9)
+
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp9Encoder {
+ std::unique_ptr<webrtc::VideoEncoder> nativeEncoder(webrtc::VP9Encoder::Create());
+ if (nativeEncoder == nullptr) {
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc]
+ initWithNativeEncoder:std::move(nativeEncoder)];
+}
+
++ (bool)isSupported {
+#if defined(RTC_ENABLE_VP9)
+ return true;
+#else
+ return false;
+#endif
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h
new file mode 100644
index 0000000000..3a9b39e959
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoDecoder.h"
+
+#include "api/video_codecs/video_decoder.h"
+#include "media/base/codec.h"
+
+@interface RTC_OBJC_TYPE (RTCWrappedNativeVideoDecoder) : NSObject <RTC_OBJC_TYPE (RTCVideoDecoder)>
+
+- (instancetype)initWithNativeDecoder:(std::unique_ptr<webrtc::VideoDecoder>)decoder;
+
+/* This moves the ownership of the wrapped decoder to the caller. */
+- (std::unique_ptr<webrtc::VideoDecoder>)releaseWrappedDecoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm
new file mode 100644
index 0000000000..261874d20b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCWrappedNativeVideoDecoder.h"
+#import "base/RTCMacros.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCWrappedNativeVideoDecoder) {
+ std::unique_ptr<webrtc::VideoDecoder> _wrappedDecoder;
+}
+
+- (instancetype)initWithNativeDecoder:(std::unique_ptr<webrtc::VideoDecoder>)decoder {
+ if (self = [super init]) {
+ _wrappedDecoder = std::move(decoder);
+ }
+
+ return self;
+}
+
+- (std::unique_ptr<webrtc::VideoDecoder>)releaseWrappedDecoder {
+ return std::move(_wrappedDecoder);
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoDecoder)
+
+- (void)setCallback:(RTCVideoDecoderCallback)callback {
+ RTC_DCHECK_NOTREACHED();
+}
+
+- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSInteger)releaseDecoder {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage
+ missingFrames:(BOOL)missingFrames
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ renderTimeMs:(int64_t)renderTimeMs {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSString *)implementationName {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+@end
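
Every protocol method above is RTC_DCHECK_NOTREACHED() because the wrapper is only a hand-off vehicle: native factory code is expected to detect it and pull the C++ decoder back out rather than call through Objective-C. A hedged Objective-C++ sketch of that unwrapping (function name hypothetical):

    std::unique_ptr<webrtc::VideoDecoder> UnwrapDecoder(
        id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder) {
      if ([decoder isKindOfClass:[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) class]]) {
        return [(RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) *)decoder releaseWrappedDecoder];
      }
      return nullptr;  // A plain ObjC decoder would be wrapped in a C++ adapter instead.
    }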
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h
new file mode 100644
index 0000000000..8df9ceec35
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoEncoder.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder.h"
+#include "media/base/codec.h"
+
+@interface RTC_OBJC_TYPE (RTCWrappedNativeVideoEncoder) : NSObject <RTC_OBJC_TYPE (RTCVideoEncoder)>
+
+- (instancetype)initWithNativeEncoder:(std::unique_ptr<webrtc::VideoEncoder>)encoder;
+
+/* This moves the ownership of the wrapped encoder to the caller. */
+- (std::unique_ptr<webrtc::VideoEncoder>)releaseWrappedEncoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
new file mode 100644
index 0000000000..4160572814
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCWrappedNativeVideoEncoder.h"
+#import "base/RTCMacros.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCWrappedNativeVideoEncoder) {
+ std::unique_ptr<webrtc::VideoEncoder> _wrappedEncoder;
+}
+
+- (instancetype)initWithNativeEncoder:(std::unique_ptr<webrtc::VideoEncoder>)encoder {
+ if (self = [super init]) {
+ _wrappedEncoder = std::move(encoder);
+ }
+
+ return self;
+}
+
+- (std::unique_ptr<webrtc::VideoEncoder>)releaseWrappedEncoder {
+ return std::move(_wrappedEncoder);
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoEncoder)
+
+- (void)setCallback:(RTCVideoEncoderCallback)callback {
+ RTC_DCHECK_NOTREACHED();
+}
+
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
+ numberOfCores:(int)numberOfCores {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSInteger)releaseEncoder {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ frameTypes:(NSArray<NSNumber *> *)frameTypes {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSString *)implementationName {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+- (NSInteger)resolutionAlignment {
+ RTC_DCHECK_NOTREACHED();
+ return 1;
+}
+
+- (BOOL)applyAlignmentToAllSimulcastLayers {
+ RTC_DCHECK_NOTREACHED();
+ return NO;
+}
+
+- (BOOL)supportsNativeHandle {
+ RTC_DCHECK_NOTREACHED();
+ return NO;
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h
new file mode 100644
index 0000000000..20dc807991
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNativeI420Buffer.h"
+
+#include "api/video/i420_buffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCI420Buffer)
+() {
+ @protected
+ rtc::scoped_refptr<webrtc::I420BufferInterface> _i420Buffer;
+}
+
+/** Initialize an RTCI420Buffer with its backing I420BufferInterface. */
+- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer;
+- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h
new file mode 100644
index 0000000000..3afe2090a2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCI420Buffer.h"
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** RTCI420Buffer implements the RTCI420Buffer protocol */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCI420Buffer) : NSObject<RTC_OBJC_TYPE(RTCI420Buffer)>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm
new file mode 100644
index 0000000000..d38d72cfd2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNativeI420Buffer+Private.h"
+
+#include "api/video/i420_buffer.h"
+
+#if !defined(NDEBUG) && defined(WEBRTC_IOS)
+#import <UIKit/UIKit.h>
+#include "libyuv/include/libyuv.h"
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCI420Buffer)
+
+- (instancetype)initWithWidth:(int)width height:(int)height {
+ if (self = [super init]) {
+ _i420Buffer = webrtc::I420Buffer::Create(width, height);
+ }
+
+ return self;
+}
+
+- (instancetype)initWithWidth:(int)width
+ height:(int)height
+ dataY:(const uint8_t *)dataY
+ dataU:(const uint8_t *)dataU
+ dataV:(const uint8_t *)dataV {
+ if (self = [super init]) {
+ _i420Buffer = webrtc::I420Buffer::Copy(
+ width, height, dataY, width, dataU, (width + 1) / 2, dataV, (width + 1) / 2);
+ }
+ return self;
+}
+
+- (instancetype)initWithWidth:(int)width
+ height:(int)height
+ strideY:(int)strideY
+ strideU:(int)strideU
+ strideV:(int)strideV {
+ if (self = [super init]) {
+ _i420Buffer = webrtc::I420Buffer::Create(width, height, strideY, strideU, strideV);
+ }
+
+ return self;
+}
+
+- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer {
+ if (self = [super init]) {
+ _i420Buffer = i420Buffer;
+ }
+
+ return self;
+}
+
+- (int)width {
+ return _i420Buffer->width();
+}
+
+- (int)height {
+ return _i420Buffer->height();
+}
+
+- (int)strideY {
+ return _i420Buffer->StrideY();
+}
+
+- (int)strideU {
+ return _i420Buffer->StrideU();
+}
+
+- (int)strideV {
+ return _i420Buffer->StrideV();
+}
+
+- (int)chromaWidth {
+ return _i420Buffer->ChromaWidth();
+}
+
+- (int)chromaHeight {
+ return _i420Buffer->ChromaHeight();
+}
+
+- (const uint8_t *)dataY {
+ return _i420Buffer->DataY();
+}
+
+- (const uint8_t *)dataU {
+ return _i420Buffer->DataU();
+}
+
+- (const uint8_t *)dataV {
+ return _i420Buffer->DataV();
+}
+
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420 {
+ return self;
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer {
+ return _i420Buffer;
+}
+
+#pragma mark - Debugging
+
+#if !defined(NDEBUG) && defined(WEBRTC_IOS)
+- (id)debugQuickLookObject {
+ UIGraphicsBeginImageContext(CGSizeMake(_i420Buffer->width(), _i420Buffer->height()));
+ CGContextRef c = UIGraphicsGetCurrentContext();
+ uint8_t *ctxData = (uint8_t *)CGBitmapContextGetData(c);
+
+ libyuv::I420ToARGB(_i420Buffer->DataY(),
+ _i420Buffer->StrideY(),
+ _i420Buffer->DataU(),
+ _i420Buffer->StrideU(),
+ _i420Buffer->DataV(),
+ _i420Buffer->StrideV(),
+ ctxData,
+ CGBitmapContextGetBytesPerRow(c),
+ CGBitmapContextGetWidth(c),
+ CGBitmapContextGetHeight(c));
+
+ UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
+ UIGraphicsEndImageContext();
+
+ return image;
+}
+#endif
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h
new file mode 100644
index 0000000000..053a10a304
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMutableI420Buffer.h"
+#import "RTCNativeI420Buffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Mutable version of RTCI420Buffer */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMutableI420Buffer) : RTC_OBJC_TYPE(RTCI420Buffer)<RTC_OBJC_TYPE(RTCMutableI420Buffer)>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm
new file mode 100644
index 0000000000..1e669bcb9c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNativeMutableI420Buffer.h"
+
+#import "RTCNativeI420Buffer+Private.h"
+
+#include "api/video/i420_buffer.h"
+
+@implementation RTC_OBJC_TYPE (RTCMutableI420Buffer)
+
+- (uint8_t *)mutableDataY {
+ return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataY();
+}
+
+- (uint8_t *)mutableDataU {
+ return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataU();
+}
+
+- (uint8_t *)mutableDataV {
+ return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataV();
+}
+
+@end
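
A short sketch of allocating and filling a mutable buffer; the stride and chroma accessors come from the base class above, and the memset fill (a solid black frame) is purely illustrative:

    RTC_OBJC_TYPE(RTCMutableI420Buffer) *buffer =
        [[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:640 height:480];
    memset(buffer.mutableDataY, 16, buffer.strideY * buffer.height);         // luma plane
    memset(buffer.mutableDataU, 128, buffer.strideU * buffer.chromaHeight);  // chroma U
    memset(buffer.mutableDataV, 128, buffer.strideV * buffer.chromaHeight);  // chroma V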
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCCodecSpecificInfo.h b/third_party/libwebrtc/sdk/objc/base/RTCCodecSpecificInfo.h
new file mode 100644
index 0000000000..5e7800e524
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCCodecSpecificInfo.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Implement this protocol to pass codec specific info from the encoder.
+ * Corresponds to webrtc::CodecSpecificInfo.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCCodecSpecificInfo)<NSObject> @end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.h b/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.h
new file mode 100644
index 0000000000..28529e5906
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Represents an encoded frame's type. */
+typedef NS_ENUM(NSUInteger, RTCFrameType) {
+ RTCFrameTypeEmptyFrame = 0,
+ RTCFrameTypeAudioFrameSpeech = 1,
+ RTCFrameTypeAudioFrameCN = 2,
+ RTCFrameTypeVideoFrameKey = 3,
+ RTCFrameTypeVideoFrameDelta = 4,
+};
+
+typedef NS_ENUM(NSUInteger, RTCVideoContentType) {
+ RTCVideoContentTypeUnspecified,
+ RTCVideoContentTypeScreenshare,
+};
+
+/** Represents an encoded frame. Corresponds to webrtc::EncodedImage. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCEncodedImage) : NSObject
+
+@property(nonatomic, strong) NSData *buffer;
+@property(nonatomic, assign) int32_t encodedWidth;
+@property(nonatomic, assign) int32_t encodedHeight;
+@property(nonatomic, assign) uint32_t timeStamp;
+@property(nonatomic, assign) int64_t captureTimeMs;
+@property(nonatomic, assign) int64_t ntpTimeMs;
+@property(nonatomic, assign) uint8_t flags;
+@property(nonatomic, assign) int64_t encodeStartMs;
+@property(nonatomic, assign) int64_t encodeFinishMs;
+@property(nonatomic, assign) RTCFrameType frameType;
+@property(nonatomic, assign) RTCVideoRotation rotation;
+@property(nonatomic, strong) NSNumber *qp;
+@property(nonatomic, assign) RTCVideoContentType contentType;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m b/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m
new file mode 100644
index 0000000000..ad8441aabd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCEncodedImage.h"
+
+@implementation RTC_OBJC_TYPE (RTCEncodedImage)
+
+@synthesize buffer = _buffer;
+@synthesize encodedWidth = _encodedWidth;
+@synthesize encodedHeight = _encodedHeight;
+@synthesize timeStamp = _timeStamp;
+@synthesize captureTimeMs = _captureTimeMs;
+@synthesize ntpTimeMs = _ntpTimeMs;
+@synthesize flags = _flags;
+@synthesize encodeStartMs = _encodeStartMs;
+@synthesize encodeFinishMs = _encodeFinishMs;
+@synthesize frameType = _frameType;
+@synthesize rotation = _rotation;
+@synthesize qp = _qp;
+@synthesize contentType = _contentType;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCI420Buffer.h b/third_party/libwebrtc/sdk/objc/base/RTCI420Buffer.h
new file mode 100644
index 0000000000..b97f05a5ba
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCI420Buffer.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCYUVPlanarBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Protocol for RTCYUVPlanarBuffers containing I420 data */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCI420Buffer)<RTC_OBJC_TYPE(RTCYUVPlanarBuffer)> @end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCLogging.h b/third_party/libwebrtc/sdk/objc/base/RTCLogging.h
new file mode 100644
index 0000000000..754945c8f2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCLogging.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+// Subset of rtc::LoggingSeverity.
+typedef NS_ENUM(NSInteger, RTCLoggingSeverity) {
+ RTCLoggingSeverityVerbose,
+ RTCLoggingSeverityInfo,
+ RTCLoggingSeverityWarning,
+ RTCLoggingSeverityError,
+ RTCLoggingSeverityNone,
+};
+
+// Wrapper for C++ RTC_LOG(sev) macros.
+// Logs the log string to the WebRTC log stream for the given severity.
+RTC_EXTERN void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string);
+
+// Wrapper for rtc::LogMessage::LogToDebug.
+// Sets the minimum severity to be logged to console.
+RTC_EXTERN void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
+
+// Returns the filename with the path prefix removed.
+RTC_EXTERN NSString* RTCFileName(const char* filePath);
+
+// Some convenience macros.
+
+#define RTCLogString(format, ...) \
+ [NSString stringWithFormat:@"(%@:%d %s): " format, RTCFileName(__FILE__), \
+ __LINE__, __FUNCTION__, ##__VA_ARGS__]
+
+#define RTCLogFormat(severity, format, ...) \
+ do { \
+ NSString* log_string = RTCLogString(format, ##__VA_ARGS__); \
+ RTCLogEx(severity, log_string); \
+ } while (false)
+
+#define RTCLogVerbose(format, ...) \
+ RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__)
+
+#define RTCLogInfo(format, ...) \
+ RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__)
+
+#define RTCLogWarning(format, ...) \
+ RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__)
+
+#define RTCLogError(format, ...) \
+ RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__)
+
+#if !defined(NDEBUG)
+#define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__)
+#else
+#define RTCLogDebug(format, ...) \
+ do { \
+ } while (false)
+#endif
+
+#define RTCLog(format, ...) RTCLogInfo(format, ##__VA_ARGS__)
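
Typical usage, assuming hypothetical `serverName` and `errorCode` variables: set a minimum console severity once, then log through the convenience macros.

    RTCSetMinDebugLogLevel(RTCLoggingSeverityInfo);
    RTCLog(@"Connected to %@", serverName);                       // alias for RTCLogInfo
    RTCLogError(@"Connect failed with code %d", (int)errorCode);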
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCLogging.mm b/third_party/libwebrtc/sdk/objc/base/RTCLogging.mm
new file mode 100644
index 0000000000..e8dae02efb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCLogging.mm
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCLogging.h"
+
+#include "rtc_base/logging.h"
+
+rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) {
+ switch (severity) {
+ case RTCLoggingSeverityVerbose:
+ return rtc::LS_VERBOSE;
+ case RTCLoggingSeverityInfo:
+ return rtc::LS_INFO;
+ case RTCLoggingSeverityWarning:
+ return rtc::LS_WARNING;
+ case RTCLoggingSeverityError:
+ return rtc::LS_ERROR;
+ case RTCLoggingSeverityNone:
+ return rtc::LS_NONE;
+ }
+}
+
+void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) {
+ if (log_string.length) {
+ const char* utf8_string = log_string.UTF8String;
+ RTC_LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string;
+ }
+}
+
+void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) {
+ rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity));
+}
+
+NSString* RTCFileName(const char* file_path) {
+ NSString* ns_file_path =
+ [[NSString alloc] initWithBytesNoCopy:const_cast<char*>(file_path)
+ length:strlen(file_path)
+ encoding:NSUTF8StringEncoding
+ freeWhenDone:NO];
+ return ns_file_path.lastPathComponent;
+}
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCMacros.h b/third_party/libwebrtc/sdk/objc/base/RTCMacros.h
new file mode 100644
index 0000000000..469e3c93bd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCMacros.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_BASE_RTCMACROS_H_
+#define SDK_OBJC_BASE_RTCMACROS_H_
+
+#ifdef WEBRTC_ENABLE_OBJC_SYMBOL_EXPORT
+
+#if defined(WEBRTC_LIBRARY_IMPL)
+#define RTC_OBJC_EXPORT __attribute__((visibility("default")))
+#endif
+
+#endif // WEBRTC_ENABLE_OBJC_SYMBOL_EXPORT
+
+#ifndef RTC_OBJC_EXPORT
+#define RTC_OBJC_EXPORT
+#endif
+
+// Internal macros used to correctly concatenate symbols.
+#define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
+#define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)
+
+// RTC_OBJC_TYPE_PREFIX
+//
+// Macro used to prepend a prefix to the API types that are exported with
+// RTC_OBJC_EXPORT.
+//
+// Clients can patch the definition of this macro locally and build
+// WebRTC.framework with their own prefix in case symbol clashing is a
+// problem.
+//
+// This macro must only be defined here, and not via a compiler flag, to
+// ensure it has a unique value.
+#define RTC_OBJC_TYPE_PREFIX
+
+// RTC_OBJC_TYPE
+//
+// Macro used internally to declare API types. Declaring an API type without
+// using this macro will not include the declared type in the set of types
+// that will be affected by the configurable RTC_OBJC_TYPE_PREFIX.
+#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)
+
+#if defined(__cplusplus)
+#define RTC_EXTERN extern "C" RTC_OBJC_EXPORT
+#else
+#define RTC_EXTERN extern RTC_OBJC_EXPORT
+#endif
+
+#ifdef __OBJC__
+#define RTC_FWD_DECL_OBJC_CLASS(classname) @class classname
+#else
+#define RTC_FWD_DECL_OBJC_CLASS(classname) typedef struct objc_object classname
+#endif
+
+#endif // SDK_OBJC_BASE_RTCMACROS_H_
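
For example, a client worried about symbol clashes could patch the prefix locally before building the framework (hypothetical value shown); every RTC_OBJC_TYPE(...) expansion then picks it up:

    // In a local patch to this file:
    #define RTC_OBJC_TYPE_PREFIX MyApp
    // RTC_OBJC_TYPE(RTCVideoFrame) now expands to MyAppRTCVideoFrame, so the
    // exported class no longer collides with another library's RTCVideoFrame.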
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCMutableI420Buffer.h b/third_party/libwebrtc/sdk/objc/base/RTCMutableI420Buffer.h
new file mode 100644
index 0000000000..cde721980b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCMutableI420Buffer.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCI420Buffer.h"
+#import "RTCMutableYUVPlanarBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Extension of the I420 buffer with mutable data access */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCMutableI420Buffer)<RTC_OBJC_TYPE(RTCI420Buffer), RTC_OBJC_TYPE(RTCMutableYUVPlanarBuffer)> @end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCMutableYUVPlanarBuffer.h b/third_party/libwebrtc/sdk/objc/base/RTCMutableYUVPlanarBuffer.h
new file mode 100644
index 0000000000..bd14e3bca3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCMutableYUVPlanarBuffer.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCYUVPlanarBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Extension of the YUV planar data buffer with mutable data access */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCMutableYUVPlanarBuffer)<RTC_OBJC_TYPE(RTCYUVPlanarBuffer)>
+
+ @property(nonatomic, readonly) uint8_t *mutableDataY;
+@property(nonatomic, readonly) uint8_t *mutableDataU;
+@property(nonatomic, readonly) uint8_t *mutableDataV;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCSSLCertificateVerifier.h b/third_party/libwebrtc/sdk/objc/base/RTCSSLCertificateVerifier.h
new file mode 100644
index 0000000000..53da0cceff
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCSSLCertificateVerifier.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
+(RTCSSLCertificateVerifier)<NSObject>
+
+ /** Verifies the given DER-encoded certificate. Returns YES if it is trusted. */
+ - (BOOL)verify : (NSData *)derCertificate;
+
+@end
+
+NS_ASSUME_NONNULL_END
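
A sketch of a conforming verifier (test-only; it accepts any non-empty input). A production implementation would parse the DER bytes and validate them against a trust root:

    @interface TestCertificateVerifier : NSObject <RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>
    @end

    @implementation TestCertificateVerifier
    - (BOOL)verify:(NSData *)derCertificate {
      // Real code would decode the certificate and check the chain here.
      return derCertificate.length > 0;
    }
    @end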
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.h
new file mode 100644
index 0000000000..a1ffdcf38e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoFrame.h"
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCVideoCapturer);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoCapturerDelegate)<NSObject> -
+ (void)capturer : (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame
+ : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoCapturer) : NSObject
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)> delegate;
+
+- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m b/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m
new file mode 100644
index 0000000000..ca31a731f0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoCapturer.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoCapturer)
+
+@synthesize delegate = _delegate;
+
+- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
+ if (self = [super init]) {
+ _delegate = delegate;
+ }
+ return self;
+}
+
+@end
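
Custom capture sources subclass RTCVideoCapturer and forward frames through the delegate; a minimal sketch with a hypothetical FrameSource class:

    @interface FrameSource : RTC_OBJC_TYPE(RTCVideoCapturer)
    - (void)pushFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
    @end

    @implementation FrameSource
    - (void)pushFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
      [self.delegate capturer:self didCaptureVideoFrame:frame];
    }
    @end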
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.h
new file mode 100644
index 0000000000..fa28958f25
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Holds information to identify a codec. Corresponds to webrtc::SdpVideoFormat. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) : NSObject <NSCoding>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+- (instancetype)initWithName:(NSString *)name;
+
+- (instancetype)initWithName:(NSString *)name
+ parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters
+ NS_DESIGNATED_INITIALIZER;
+
+- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+
+@property(nonatomic, readonly) NSString *name;
+@property(nonatomic, readonly) NSDictionary<NSString *, NSString *> *parameters;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m b/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m
new file mode 100644
index 0000000000..ce26ae1de3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoCodecInfo.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
+
+@synthesize name = _name;
+@synthesize parameters = _parameters;
+
+- (instancetype)initWithName:(NSString *)name {
+ return [self initWithName:name parameters:nil];
+}
+
+- (instancetype)initWithName:(NSString *)name
+ parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters {
+ if (self = [super init]) {
+ _name = name;
+ _parameters = (parameters ? parameters : @{});
+ }
+
+ return self;
+}
+
+- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ if (!info ||
+ ![self.name isEqualToString:info.name] ||
+ ![self.parameters isEqualToDictionary:info.parameters]) {
+ return NO;
+ }
+ return YES;
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object)
+ return YES;
+ if (![object isKindOfClass:[self class]])
+ return NO;
+ return [self isEqualToCodecInfo:object];
+}
+
+- (NSUInteger)hash {
+ return [self.name hash] ^ [self.parameters hash];
+}
+
+#pragma mark - NSCoding
+
+- (instancetype)initWithCoder:(NSCoder *)decoder {
+ return [self initWithName:[decoder decodeObjectForKey:@"name"]
+ parameters:[decoder decodeObjectForKey:@"parameters"]];
+}
+
+- (void)encodeWithCoder:(NSCoder *)encoder {
+ [encoder encodeObject:_name forKey:@"name"];
+ [encoder encodeObject:_parameters forKey:@"parameters"];
+}
+
+@end
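
Equality is by name plus parameters, which matters when matching SDP formats; a sketch using H264 fmtp-style parameters (values illustrative):

    RTC_OBJC_TYPE(RTCVideoCodecInfo) *a =
        [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
            initWithName:@"H264"
              parameters:@{@"profile-level-id" : @"42e01f"}];
    RTC_OBJC_TYPE(RTCVideoCodecInfo) *b =
        [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
            initWithName:@"H264"
              parameters:@{@"profile-level-id" : @"640c1f"}];
    BOOL same = [a isEqualToCodecInfo:b];  // NO: names match but parameters differ.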
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoder.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoder.h
new file mode 100644
index 0000000000..ccddd42d42
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoder.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCodecSpecificInfo.h"
+#import "RTCEncodedImage.h"
+#import "RTCMacros.h"
+#import "RTCVideoEncoderSettings.h"
+#import "RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Callback block for decoder. */
+typedef void (^RTCVideoDecoderCallback)(RTC_OBJC_TYPE(RTCVideoFrame) * frame);
+
+/** Protocol for decoder implementations. */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoder)<NSObject>
+
+ - (void)setCallback : (RTCVideoDecoderCallback)callback;
+- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores;
+- (NSInteger)releaseDecoder;
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage
+ missingFrames:(BOOL)missingFrames
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ renderTimeMs:(int64_t)renderTimeMs;
+- (NSString *)implementationName;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoderFactory.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoderFactory.h
new file mode 100644
index 0000000000..8d90138521
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoderFactory.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCodecInfo.h"
+#import "RTCVideoDecoder.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory)<NSObject>
+
+ - (nullable id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder
+ : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)
+ supportedCodecs; // TODO(andersc): "supportedFormats" instead?
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoder.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoder.h
new file mode 100644
index 0000000000..2445d432d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoder.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCodecSpecificInfo.h"
+#import "RTCEncodedImage.h"
+#import "RTCMacros.h"
+#import "RTCVideoEncoderQpThresholds.h"
+#import "RTCVideoEncoderSettings.h"
+#import "RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Callback block for encoder. */
+typedef BOOL (^RTCVideoEncoderCallback)(RTC_OBJC_TYPE(RTCEncodedImage) * frame,
+ id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> info);
+
+/** Protocol for encoder implementations. */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoder)<NSObject>
+
+- (void)setCallback:(nullable RTCVideoEncoderCallback)callback;
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
+ numberOfCores:(int)numberOfCores;
+- (NSInteger)releaseEncoder;
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ frameTypes:(NSArray<NSNumber *> *)frameTypes;
+- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate;
+- (NSString *)implementationName;
+
+/** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to
+ * keep the QP from the encoded images within the given range. Returning nil from this function
+ * disables quality scaling. */
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings;
+
+/** Resolutions should be aligned to this value. */
+@property(nonatomic, readonly) NSInteger resolutionAlignment;
+
+/** If enabled, resolution alignment is applied to all simulcast layers simultaneously so that when
+ scaled, all resolutions comply with 'resolutionAlignment'. */
+@property(nonatomic, readonly) BOOL applyAlignmentToAllSimulcastLayers;
+
+/** If YES, the receiver is expected to resample/scale the source texture to the expected output
+ size. */
+@property(nonatomic, readonly) BOOL supportsNativeHandle;
+
+@end
+
+NS_ASSUME_NONNULL_END
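
For example, an encoder implementation that wants quality scaling can return thresholds from scalingSettings (the numbers below are illustrative, not WebRTC defaults):

    - (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
      // Scale resolution down while QP stays above `high`, back up when below `low`.
      return [[RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) alloc] initWithThresholdsLow:29
                                                                                  high:56];
    }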
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderFactory.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderFactory.h
new file mode 100644
index 0000000000..a73cd77990
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderFactory.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCodecInfo.h"
+#import "RTCVideoEncoder.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** RTCVideoEncoderSelector is an Objective-C version of
+ webrtc::VideoEncoderFactory::VideoEncoderSelector.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderSelector)<NSObject>
+
+ - (void)registerCurrentEncoderInfo : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate:(NSInteger)bitrate;
+- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBrokenEncoder;
+
+@optional
+- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForResolutionChangeBySize:(CGSize)size;
+
+@end
+
+/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory)<NSObject>
+
+ - (nullable id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder
+ : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)
+ supportedCodecs; // TODO(andersc): "supportedFormats" instead?
+
+@optional
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)implementations;
+- (nullable id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)>)encoderSelector;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.h
new file mode 100644
index 0000000000..1a6e9e88ab
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) : NSObject
+
+- (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high;
+
+@property(nonatomic, readonly) NSInteger low;
+@property(nonatomic, readonly) NSInteger high;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m
new file mode 100644
index 0000000000..fb7012f44f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoEncoderQpThresholds.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds)
+
+@synthesize low = _low;
+@synthesize high = _high;
+
+- (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high {
+ if (self = [super init]) {
+ _low = low;
+ _high = high;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.h
new file mode 100644
index 0000000000..ae792eab71
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef NS_ENUM(NSUInteger, RTCVideoCodecMode) {
+ RTCVideoCodecModeRealtimeVideo,
+ RTCVideoCodecModeScreensharing,
+};
+
+/** Settings for encoder. Corresponds to webrtc::VideoCodec. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings) : NSObject
+
+@property(nonatomic, strong) NSString *name;
+
+@property(nonatomic, assign) unsigned short width;
+@property(nonatomic, assign) unsigned short height;
+
+@property(nonatomic, assign) unsigned int startBitrate; // kilobits/sec.
+@property(nonatomic, assign) unsigned int maxBitrate;
+@property(nonatomic, assign) unsigned int minBitrate;
+
+@property(nonatomic, assign) uint32_t maxFramerate;
+
+@property(nonatomic, assign) unsigned int qpMax;
+@property(nonatomic, assign) RTCVideoCodecMode mode;
+
+@end
+
+NS_ASSUME_NONNULL_END
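
Since this is a plain value object, constructing one by hand is straightforward. In normal operation the SDK populates it from webrtc::VideoCodec and passes it to the encoder, so manual construction like the sketch below (with illustrative values) is mostly useful in tests.

    RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings =
        [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] init];
    settings.name = @"VP8";
    settings.width = 1280;
    settings.height = 720;
    settings.startBitrate = 1000;  // kilobits/sec, per the header comment.
    settings.maxBitrate = 2500;
    settings.minBitrate = 300;
    settings.maxFramerate = 30;
    settings.qpMax = 56;
    settings.mode = RTCVideoCodecModeRealtimeVideo;
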
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m
new file mode 100644
index 0000000000..f66cd2cf77
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoEncoderSettings.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings)
+
+@synthesize name = _name;
+@synthesize width = _width;
+@synthesize height = _height;
+@synthesize startBitrate = _startBitrate;
+@synthesize maxBitrate = _maxBitrate;
+@synthesize minBitrate = _minBitrate;
+@synthesize maxFramerate = _maxFramerate;
+@synthesize qpMax = _qpMax;
+@synthesize mode = _mode;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.h
new file mode 100644
index 0000000000..f5638d27cf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef NS_ENUM(NSInteger, RTCVideoRotation) {
+ RTCVideoRotation_0 = 0,
+ RTCVideoRotation_90 = 90,
+ RTCVideoRotation_180 = 180,
+ RTCVideoRotation_270 = 270,
+};
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer);
+
+// RTCVideoFrame is an Objective-C version of webrtc::VideoFrame.
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoFrame) : NSObject
+
+/** Width without rotation applied. */
+@property(nonatomic, readonly) int width;
+
+/** Height without rotation applied. */
+@property(nonatomic, readonly) int height;
+@property(nonatomic, readonly) RTCVideoRotation rotation;
+
+/** Timestamp in nanoseconds. */
+@property(nonatomic, readonly) int64_t timeStampNs;
+
+/** Timestamp in 90 kHz clock units. */
+@property(nonatomic, assign) int32_t timeStamp;
+
+@property(nonatomic, readonly) id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> buffer;
+
+- (instancetype)init NS_UNAVAILABLE;
+- (instancetype) new NS_UNAVAILABLE;
+
+/** Initialize an RTCVideoFrame from a pixel buffer, rotation, and timestamp.
+ * Deprecated: initialize with an RTCCVPixelBuffer instead.
+ */
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs
+ DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead");
+
+/** Initialize an RTCVideoFrame from a pixel buffer combined with cropping and
+ * scaling. Cropping will be applied first on the pixel buffer, followed by
+ * scaling to the final resolution of scaledWidth x scaledHeight.
+ */
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ scaledWidth:(int)scaledWidth
+ scaledHeight:(int)scaledHeight
+ cropWidth:(int)cropWidth
+ cropHeight:(int)cropHeight
+ cropX:(int)cropX
+ cropY:(int)cropY
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs
+ DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead");
+
+/** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp.
+ */
+- (instancetype)initWithBuffer:(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)frameBuffer
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs;
+
+/** Return a frame that is guaranteed to be I420, i.e. it is possible to access
+ * the YUV data on it.
+ */
+- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame;
+
+@end
+
+NS_ASSUME_NONNULL_END
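
A sketch of wrapping a camera pixel buffer in a frame. Here `pixelBuffer` is assumed to be a valid CVPixelBufferRef, and RTC_OBJC_TYPE(RTCCVPixelBuffer) is the CVPixelBuffer-backed RTCVideoFrameBuffer implementation declared elsewhere in this SDK.

    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcBuffer =
        [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
    int64_t timeStampNs = 0;  // Replace with the capture time in nanoseconds.
    RTC_OBJC_TYPE(RTCVideoFrame) *frame =
        [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcBuffer
                                                     rotation:RTCVideoRotation_0
                                                  timeStampNs:timeStampNs];
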
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm
new file mode 100644
index 0000000000..e162238d73
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoFrame.h"
+
+#import "RTCI420Buffer.h"
+#import "RTCVideoFrameBuffer.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoFrame) {
+ RTCVideoRotation _rotation;
+ int64_t _timeStampNs;
+}
+
+@synthesize buffer = _buffer;
+@synthesize timeStamp;
+
+- (int)width {
+ return _buffer.width;
+}
+
+- (int)height {
+ return _buffer.height;
+}
+
+- (RTCVideoRotation)rotation {
+ return _rotation;
+}
+
+- (int64_t)timeStampNs {
+ return _timeStampNs;
+}
+
+- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame {
+ return [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:[_buffer toI420]
+ rotation:_rotation
+ timeStampNs:_timeStampNs];
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs {
+  // Deprecated. Use initWithBuffer: with an RTCCVPixelBuffer instead.
+ return nil;
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ scaledWidth:(int)scaledWidth
+ scaledHeight:(int)scaledHeight
+ cropWidth:(int)cropWidth
+ cropHeight:(int)cropHeight
+ cropX:(int)cropX
+ cropY:(int)cropY
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs {
+  // Deprecated. Use initWithBuffer: with an RTCCVPixelBuffer instead.
+ return nil;
+}
+
+- (instancetype)initWithBuffer:(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)buffer
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs {
+ if (self = [super init]) {
+ _buffer = buffer;
+ _rotation = rotation;
+ _timeStampNs = timeStampNs;
+ }
+
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoFrameBuffer.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrameBuffer.h
new file mode 100644
index 0000000000..82d057eea0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrameBuffer.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol RTC_OBJC_TYPE
+(RTCI420Buffer);
+
+// RTCVideoFrameBuffer is an Objective-C version of webrtc::VideoFrameBuffer.
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer)<NSObject>
+
+@property(nonatomic, readonly) int width;
+@property(nonatomic, readonly) int height;
+
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoRenderer.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoRenderer.h
new file mode 100644
index 0000000000..0f763295ad
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoRenderer.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#endif
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCVideoFrame);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer)<NSObject>
+
+/** The size of the frame. */
+- (void)setSize:(CGSize)size;
+
+/** The frame to be displayed. */
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+@end
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoViewDelegate)
+
+- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView
+    didChangeVideoSize:(CGSize)size;
+
+@end
+
+NS_ASSUME_NONNULL_END
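
For orientation, a minimal renderer that only logs incoming frames looks like the following sketch; it illustrates the two required methods and nothing else.

    #import "RTCVideoFrame.h"
    #import "RTCVideoRenderer.h"

    @interface FrameLogger : NSObject <RTC_OBJC_TYPE(RTCVideoRenderer)>
    @end

    @implementation FrameLogger

    - (void)setSize:(CGSize)size {
      NSLog(@"Frame size changed to %.0fx%.0f", size.width, size.height);
    }

    - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
      if (!frame) {
        return;  // The protocol allows a nil frame.
      }
      NSLog(@"Frame %dx%d, rotation %ld, t=%lld ns",
            frame.width, frame.height, (long)frame.rotation, frame.timeStampNs);
    }

    @end
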
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCYUVPlanarBuffer.h b/third_party/libwebrtc/sdk/objc/base/RTCYUVPlanarBuffer.h
new file mode 100644
index 0000000000..be01b915f5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCYUVPlanarBuffer.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrameBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Protocol for RTCVideoFrameBuffers containing YUV planar data. */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCYUVPlanarBuffer)<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>
+
+@property(nonatomic, readonly) int chromaWidth;
+@property(nonatomic, readonly) int chromaHeight;
+@property(nonatomic, readonly) const uint8_t *dataY;
+@property(nonatomic, readonly) const uint8_t *dataU;
+@property(nonatomic, readonly) const uint8_t *dataV;
+@property(nonatomic, readonly) int strideY;
+@property(nonatomic, readonly) int strideU;
+@property(nonatomic, readonly) int strideV;
+
+- (instancetype)initWithWidth:(int)width
+ height:(int)height
+ dataY:(const uint8_t *)dataY
+ dataU:(const uint8_t *)dataU
+ dataV:(const uint8_t *)dataV;
+- (instancetype)initWithWidth:(int)width height:(int)height;
+- (instancetype)initWithWidth:(int)width
+ height:(int)height
+ strideY:(int)strideY
+ strideU:(int)strideU
+ strideV:(int)strideV;
+
+@end
+
+NS_ASSUME_NONNULL_END
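
Building on the RTCVideoFrame sketch earlier, CPU access to pixel data might look like this: any RTCVideoFrameBuffer can be converted with toI420, and the resulting RTCI420Buffer (declared elsewhere in this SDK) conforms to RTCYUVPlanarBuffer, so its planes and strides are directly readable.

    id<RTC_OBJC_TYPE(RTCI420Buffer)> i420 = [frame.buffer toI420];
    const uint8_t *dataY = i420.dataY;
    for (int row = 0; row < i420.height; ++row) {
      const uint8_t *rowStart = dataY + row * i420.strideY;
      // Each row holds i420.width luma bytes; the chroma planes are
      // i420.chromaWidth x i420.chromaHeight with their own strides.
      (void)rowStart;
    }
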
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Configuration.mm
new file mode 100644
index 0000000000..449f31e9dd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Configuration.mm
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSession+Private.h"
+#import "RTCAudioSessionConfiguration.h"
+
+#import "base/RTCLogging.h"
+
+@implementation RTC_OBJC_TYPE (RTCAudioSession)
+(Configuration)
+
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
+                   error:(NSError **)outError {
+ return [self setConfiguration:configuration
+ active:NO
+ shouldSetActive:NO
+ error:outError];
+}
+
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
+ active:(BOOL)active
+ error:(NSError **)outError {
+ return [self setConfiguration:configuration
+ active:active
+ shouldSetActive:YES
+ error:outError];
+}
+
+#pragma mark - Private
+
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
+ active:(BOOL)active
+ shouldSetActive:(BOOL)shouldSetActive
+ error:(NSError **)outError {
+ NSParameterAssert(configuration);
+ if (outError) {
+ *outError = nil;
+ }
+
+ // Provide an error even if there isn't one so we can log it. We will not
+ // return immediately on error in this function and instead try to set
+ // everything we can.
+ NSError *error = nil;
+
+ if (self.category != configuration.category ||
+ self.categoryOptions != configuration.categoryOptions) {
+ NSError *categoryError = nil;
+ if (![self setCategory:configuration.category
+ withOptions:configuration.categoryOptions
+ error:&categoryError]) {
+ RTCLogError(@"Failed to set category: %@",
+ categoryError.localizedDescription);
+ error = categoryError;
+ } else {
+ RTCLog(@"Set category to: %@", configuration.category);
+ }
+ }
+
+ if (self.mode != configuration.mode) {
+ NSError *modeError = nil;
+ if (![self setMode:configuration.mode error:&modeError]) {
+ RTCLogError(@"Failed to set mode: %@",
+ modeError.localizedDescription);
+ error = modeError;
+ } else {
+ RTCLog(@"Set mode to: %@", configuration.mode);
+ }
+ }
+
+ // Sometimes category options don't stick after setting mode.
+ if (self.categoryOptions != configuration.categoryOptions) {
+ NSError *categoryError = nil;
+ if (![self setCategory:configuration.category
+ withOptions:configuration.categoryOptions
+ error:&categoryError]) {
+ RTCLogError(@"Failed to set category options: %@",
+ categoryError.localizedDescription);
+ error = categoryError;
+ } else {
+ RTCLog(@"Set category options to: %ld",
+ (long)configuration.categoryOptions);
+ }
+ }
+
+ if (self.preferredSampleRate != configuration.sampleRate) {
+ NSError *sampleRateError = nil;
+ if (![self setPreferredSampleRate:configuration.sampleRate
+ error:&sampleRateError]) {
+ RTCLogError(@"Failed to set preferred sample rate: %@",
+ sampleRateError.localizedDescription);
+ if (!self.ignoresPreferredAttributeConfigurationErrors) {
+ error = sampleRateError;
+ }
+ } else {
+ RTCLog(@"Set preferred sample rate to: %.2f",
+ configuration.sampleRate);
+ }
+ }
+
+ if (self.preferredIOBufferDuration != configuration.ioBufferDuration) {
+ NSError *bufferDurationError = nil;
+ if (![self setPreferredIOBufferDuration:configuration.ioBufferDuration
+ error:&bufferDurationError]) {
+ RTCLogError(@"Failed to set preferred IO buffer duration: %@",
+ bufferDurationError.localizedDescription);
+ if (!self.ignoresPreferredAttributeConfigurationErrors) {
+ error = bufferDurationError;
+ }
+ } else {
+ RTCLog(@"Set preferred IO buffer duration to: %f",
+ configuration.ioBufferDuration);
+ }
+ }
+
+ if (shouldSetActive) {
+ NSError *activeError = nil;
+ if (![self setActive:active error:&activeError]) {
+ RTCLogError(@"Failed to setActive to %d: %@",
+ active, activeError.localizedDescription);
+ error = activeError;
+ }
+ }
+
+ if (self.isActive &&
+ // TODO(tkchin): Figure out which category/mode numChannels is valid for.
+ [self.mode isEqualToString:AVAudioSessionModeVoiceChat]) {
+ // Try to set the preferred number of hardware audio channels. These calls
+ // must be done after setting the audio session’s category and mode and
+ // activating the session.
+ NSInteger inputNumberOfChannels = configuration.inputNumberOfChannels;
+ if (self.inputNumberOfChannels != inputNumberOfChannels) {
+ NSError *inputChannelsError = nil;
+ if (![self setPreferredInputNumberOfChannels:inputNumberOfChannels
+ error:&inputChannelsError]) {
+ RTCLogError(@"Failed to set preferred input number of channels: %@",
+ inputChannelsError.localizedDescription);
+ if (!self.ignoresPreferredAttributeConfigurationErrors) {
+ error = inputChannelsError;
+ }
+ } else {
+ RTCLog(@"Set input number of channels to: %ld",
+ (long)inputNumberOfChannels);
+ }
+ }
+ NSInteger outputNumberOfChannels = configuration.outputNumberOfChannels;
+ if (self.outputNumberOfChannels != outputNumberOfChannels) {
+ NSError *outputChannelsError = nil;
+ if (![self setPreferredOutputNumberOfChannels:outputNumberOfChannels
+ error:&outputChannelsError]) {
+ RTCLogError(@"Failed to set preferred output number of channels: %@",
+ outputChannelsError.localizedDescription);
+ if (!self.ignoresPreferredAttributeConfigurationErrors) {
+ error = outputChannelsError;
+ }
+ } else {
+ RTCLog(@"Set output number of channels to: %ld",
+ (long)outputNumberOfChannels);
+ }
+ }
+ }
+
+ if (outError) {
+ *outError = error;
+ }
+
+ return error == nil;
+}
+
+@end
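
A usage sketch for this category, assuming RTCAudioSessionConfiguration exposes settable category/mode properties (its header is not part of this hunk); error handling is reduced to logging.

    RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
        [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
    config.category = AVAudioSessionCategoryPlayAndRecord;
    config.mode = AVAudioSessionModeVoiceChat;

    RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
    [session lockForConfiguration];
    NSError *error = nil;
    if (![session setConfiguration:config active:YES error:&error]) {
      RTCLogError(@"Failed to apply configuration: %@", error.localizedDescription);
    }
    [session unlockForConfiguration];
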
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Private.h b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Private.h
new file mode 100644
index 0000000000..2be1b9fb3d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Private.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSession.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration);
+
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+()
+
+/** Number of times setActive:YES has succeeded without a balanced call to
+ * setActive:NO.
+ */
+@property(nonatomic, readonly) int activationCount;
+
+/** The number of times `beginWebRTCSession` was called without a balanced call
+ * to `endWebRTCSession`.
+ */
+@property(nonatomic, readonly) int webRTCSessionCount;
+
+/** Convenience BOOL that checks useManualAudio and isAudioEnabled. */
+@property(readonly) BOOL canPlayOrRecord;
+
+/** Tracks whether we have been sent an interruption event that hasn't been matched by either an
+ * interruption end event or a foreground event.
+ */
+@property(nonatomic, assign) BOOL isInterrupted;
+
+/** Adds the delegate to the list of delegates, and places it at the front of
+ * the list. This delegate will be notified before other delegates of
+ * audio events.
+ */
+- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
+
+/** Signals RTCAudioSession that a WebRTC session is about to begin and
+ * audio configuration is needed. Will configure the audio session for WebRTC
+ * if not already configured and if configuration is not delayed.
+ * Successful calls must be balanced by a call to endWebRTCSession.
+ */
+- (BOOL)beginWebRTCSession:(NSError **)outError;
+
+/** Signals RTCAudioSession that a WebRTC session is about to end and audio
+ * unconfiguration is needed. Will unconfigure the audio session for WebRTC
+ * if this is the last unmatched call and if configuration is not delayed.
+ */
+- (BOOL)endWebRTCSession:(NSError **)outError;
+
+/** Configure the audio session for WebRTC. This call will fail if the session
+ * is already configured. On other failures, we will attempt to restore the
+ * previously used audio session configuration.
+ * `lockForConfiguration` must be called first.
+ * Successful calls to configureWebRTCSession must be matched by calls to
+ * `unconfigureWebRTCSession`.
+ */
+- (BOOL)configureWebRTCSession:(NSError **)outError;
+
+/** Unconfigures the session for WebRTC. This will attempt to restore the
+ * audio session to the settings used before `configureWebRTCSession` was
+ * called.
+ * `lockForConfiguration` must be called first.
+ */
+- (BOOL)unconfigureWebRTCSession:(NSError **)outError;
+
+/** Returns a configuration error with the given description. */
+- (NSError *)configurationErrorWithDescription:(NSString *)description;
+
+/** Notifies the receiver that a playout glitch was detected. */
+- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
+
+/** Notifies the receiver that there was an error when starting an audio unit. */
+- (void)notifyAudioUnitStartFailedWithError:(OSStatus)error;
+
+// Properties and methods for tests.
+- (void)notifyDidBeginInterruption;
+- (void)notifyDidEndInterruptionWithShouldResumeSession:(BOOL)shouldResumeSession;
+- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
+- (void)notifyMediaServicesWereLost;
+- (void)notifyMediaServicesWereReset;
+- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
+- (void)notifyDidStartPlayOrRecord;
+- (void)notifyDidStopPlayOrRecord;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.h b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.h
new file mode 100644
index 0000000000..3b83b27ba5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.h
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+extern NSString *const kRTCAudioSessionErrorDomain;
+/** Method that requires lock was called without lock. */
+extern NSInteger const kRTCAudioSessionErrorLockRequired;
+/** Unknown configuration error occurred. */
+extern NSInteger const kRTCAudioSessionErrorConfiguration;
+
+@class RTC_OBJC_TYPE(RTCAudioSession);
+@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration);
+
+// Surfaces AVAudioSession events. WebRTC will listen directly for notifications
+// from AVAudioSession and handle them before calling these delegate methods,
+// at which point applications can perform additional processing if required.
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCAudioSessionDelegate)<NSObject>
+
+@optional
+/** Called on a system notification thread when AVAudioSession starts an
+ * interruption event.
+ */
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+/** Called on a system notification thread when AVAudioSession ends an
+ * interruption event.
+ */
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ shouldResumeSession:(BOOL)shouldResumeSession;
+
+/** Called on a system notification thread when AVAudioSession changes the
+ * route.
+ */
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ reason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
+
+/** Called on a system notification thread when AVAudioSession media server
+ * terminates.
+ */
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+/** Called on a system notification thread when AVAudioSession media server
+ * restarts.
+ */
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+// TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification.
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
+
+/** Called on a WebRTC thread when the audio device is notified to begin
+ * playback or recording.
+ */
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+/** Called on a WebRTC thread when the audio device is notified to stop
+ * playback or recording.
+ */
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+/** Called when the AVAudioSession output volume value changes. */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didChangeOutputVolume:(float)outputVolume;
+
+/** Called when the audio device detects a playout glitch. The argument is the
+ * number of glitches detected so far in the current audio playout session.
+ */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
+
+/** Called when the audio session is about to change the active state.
+ */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession willSetActive:(BOOL)active;
+
+/** Called after the audio session successfully changed the active state.
+ */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didSetActive:(BOOL)active;
+
+/** Called after the audio session failed to change the active state.
+ */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ failedToSetActive:(BOOL)active
+ error:(NSError *)error;
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ audioUnitStartFailedWithError:(NSError *)error;
+
+@end
+
+/** This is a protocol used to inform RTCAudioSession when the audio session's
+ * activation state has changed outside of RTCAudioSession. The currently known
+ * use case is CallKit activating the audio session on behalf of the application.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCAudioSessionActivationDelegate)<NSObject>
+
+/** Called when the audio session is activated outside of the app by iOS. */
+- (void)audioSessionDidActivate:(AVAudioSession *)session;
+
+/** Called when the audio session is deactivated outside of the app by iOS. */
+- (void)audioSessionDidDeactivate:(AVAudioSession *)session;
+
+@end
+
+/** Proxy class for AVAudioSession that adds a locking mechanism similar to
+ * AVCaptureDevice. This is used so that interleaved configuration between
+ * WebRTC and the application layer is avoided.
+ *
+ * RTCAudioSession also coordinates activation so that the audio session is
+ * activated only once. See `setActive:error:`.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioSession) : NSObject <RTC_OBJC_TYPE(RTCAudioSessionActivationDelegate)>
+
+/** Convenience property to access the AVAudioSession singleton. Callers should
+ * not call setters on AVAudioSession directly, but other method invocations
+ * are fine.
+ */
+@property(nonatomic, readonly) AVAudioSession *session;
+
+/** Our best guess at whether the session is active based on results of calls to
+ * AVAudioSession.
+ */
+@property(nonatomic, readonly) BOOL isActive;
+
+/** If YES, WebRTC will not initialize the audio unit automatically when an
+ * audio track is ready for playout or recording. Instead, applications should
+ * call setIsAudioEnabled. If NO, WebRTC will initialize the audio unit
+ * as soon as an audio track is ready for playout or recording.
+ */
+@property(nonatomic, assign) BOOL useManualAudio;
+
+/** This property is only effective if useManualAudio is YES.
+ * Represents permission for WebRTC to initialize the VoIP audio unit.
+ * When set to NO, if the VoIP audio unit used by WebRTC is active, it will be
+ * stopped and uninitialized. This will stop incoming and outgoing audio.
+ * When set to YES, WebRTC will initialize and start the audio unit when it is
+ * needed (e.g. due to establishing an audio connection).
+ * This property was introduced to work around an issue where if an AVPlayer is
+ * playing audio while the VoIP audio unit is initialized, its audio would be
+ * either cut off completely or played at a reduced volume. By preventing
+ * the audio unit from being initialized until after the audio has completed,
+ * we are able to prevent the abrupt cutoff.
+ */
+@property(nonatomic, assign) BOOL isAudioEnabled;
+
+// Proxy properties.
+@property(readonly) NSString *category;
+@property(readonly) AVAudioSessionCategoryOptions categoryOptions;
+@property(readonly) NSString *mode;
+@property(readonly) BOOL secondaryAudioShouldBeSilencedHint;
+@property(readonly) AVAudioSessionRouteDescription *currentRoute;
+@property(readonly) NSInteger maximumInputNumberOfChannels;
+@property(readonly) NSInteger maximumOutputNumberOfChannels;
+@property(readonly) float inputGain;
+@property(readonly) BOOL inputGainSettable;
+@property(readonly) BOOL inputAvailable;
+@property(readonly, nullable) NSArray<AVAudioSessionDataSourceDescription *> *inputDataSources;
+@property(readonly, nullable) AVAudioSessionDataSourceDescription *inputDataSource;
+@property(readonly, nullable) NSArray<AVAudioSessionDataSourceDescription *> *outputDataSources;
+@property(readonly, nullable) AVAudioSessionDataSourceDescription *outputDataSource;
+@property(readonly) double sampleRate;
+@property(readonly) double preferredSampleRate;
+@property(readonly) NSInteger inputNumberOfChannels;
+@property(readonly) NSInteger outputNumberOfChannels;
+@property(readonly) float outputVolume;
+@property(readonly) NSTimeInterval inputLatency;
+@property(readonly) NSTimeInterval outputLatency;
+@property(readonly) NSTimeInterval IOBufferDuration;
+@property(readonly) NSTimeInterval preferredIOBufferDuration;
+
+/**
+ When YES, calls to -setConfiguration:error: and -setConfiguration:active:error: ignore errors in
+ configuring the audio session's "preferred" attributes (e.g. preferredInputNumberOfChannels).
+ Typically, configurations to preferred attributes are optimizations, and ignoring this type of
+ configuration error allows code flow to continue along the happy path when these optimizations are
+ not available. The default value of this property is NO.
+ */
+@property(nonatomic) BOOL ignoresPreferredAttributeConfigurationErrors;
+
+/** Returns the shared RTCAudioSession instance. */
++ (instancetype)sharedInstance;
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Adds a delegate, which is held weakly. */
+- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
+/** Removes an added delegate. */
+- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
+
+/** Request exclusive access to the audio session for configuration. This call
+ * will block if the lock is held by another object.
+ */
+- (void)lockForConfiguration;
+/** Relinquishes exclusive access to the audio session. */
+- (void)unlockForConfiguration;
+
+/** If `active`, activates the audio session if it isn't already active.
+ * Successful calls must be balanced with a setActive:NO when activation is no
+ * longer required. If not `active`, deactivates the audio session if one is
+ * active and this is the last balanced call. When deactivating, the
+ * AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation option is passed to
+ * AVAudioSession.
+ */
+- (BOOL)setActive:(BOOL)active error:(NSError **)outError;
+
+// The following methods are proxies for the associated methods on
+// AVAudioSession. `lockForConfiguration` must be called before using them;
+// otherwise they will fail with kRTCAudioSessionErrorLockRequired.
+
+- (BOOL)setCategory:(NSString *)category
+ withOptions:(AVAudioSessionCategoryOptions)options
+ error:(NSError **)outError;
+- (BOOL)setMode:(NSString *)mode error:(NSError **)outError;
+- (BOOL)setInputGain:(float)gain error:(NSError **)outError;
+- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError;
+- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration error:(NSError **)outError;
+- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count error:(NSError **)outError;
+- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count error:(NSError **)outError;
+- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride error:(NSError **)outError;
+- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort error:(NSError **)outError;
+- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
+ error:(NSError **)outError;
+- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
+ error:(NSError **)outError;
+@end
+
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+(Configuration)
+
+/** Applies the configuration to the current session. Attempts to set all
+ * properties even if previous ones fail. Only the last error will be
+ * returned.
+ * `lockForConfiguration` must be called first.
+ */
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
+                   error:(NSError **)outError;
+
+/** Convenience method that calls both setConfiguration and setActive.
+ * `lockForConfiguration` must be called first.
+ */
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
+ active:(BOOL)active
+ error:(NSError **)outError;
+
+@end
+
+NS_ASSUME_NONNULL_END
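
A sketch of the manual-audio flow this header describes, e.g. for a CallKit app where the system activates the session. The audioSessionDidActivate: call forwards CallKit's activation through the RTCAudioSessionActivationDelegate conformance declared above.

    RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
    session.useManualAudio = YES;  // WebRTC will not start the audio unit on its own.
    session.isAudioEnabled = NO;

    // Later, from CXProviderDelegate's provider:didActivateAudioSession:,
    // forward the activation and then let WebRTC start the audio unit:
    [session audioSessionDidActivate:[AVAudioSession sharedInstance]];
    session.isAudioEnabled = YES;
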
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.mm b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.mm
new file mode 100644
index 0000000000..550a426d36
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.mm
@@ -0,0 +1,1000 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSession+Private.h"
+
+#import <UIKit/UIKit.h>
+
+#include <atomic>
+#include <vector>
+
+#include "absl/base/attributes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/synchronization/mutex.h"
+
+#import "RTCAudioSessionConfiguration.h"
+#import "base/RTCLogging.h"
+
+#if !defined(ABSL_HAVE_THREAD_LOCAL)
+#error ABSL_HAVE_THREAD_LOCAL should be defined for macOS / iOS targets.
+#endif
+
+NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)";
+NSInteger const kRTCAudioSessionErrorLockRequired = -1;
+NSInteger const kRTCAudioSessionErrorConfiguration = -2;
+NSString *const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
+
+namespace {
+// Since webrtc::Mutex is not a reentrant lock and cannot check if the mutex is locked,
+// we need a separate variable to check that the mutex is locked in the RTCAudioSession.
+ABSL_CONST_INIT thread_local bool mutex_locked = false;
+} // namespace
+
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+() @property(nonatomic, readonly)
+    std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
+@end
+
+// This class needs to be thread-safe because it is accessed from many threads.
+// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
+// lock contention so coarse locks should be fine for now.
+@implementation RTC_OBJC_TYPE (RTCAudioSession) {
+ webrtc::Mutex _mutex;
+ AVAudioSession *_session;
+ std::atomic<int> _activationCount;
+ std::atomic<int> _webRTCSessionCount;
+ BOOL _isActive;
+ BOOL _useManualAudio;
+ BOOL _isAudioEnabled;
+ BOOL _canPlayOrRecord;
+ BOOL _isInterrupted;
+}
+
+@synthesize session = _session;
+@synthesize delegates = _delegates;
+@synthesize ignoresPreferredAttributeConfigurationErrors =
+ _ignoresPreferredAttributeConfigurationErrors;
+
++ (instancetype)sharedInstance {
+ static dispatch_once_t onceToken;
+ static RTC_OBJC_TYPE(RTCAudioSession) *sharedInstance = nil;
+ dispatch_once(&onceToken, ^{
+ sharedInstance = [[self alloc] init];
+ });
+ return sharedInstance;
+}
+
+- (instancetype)init {
+ return [self initWithAudioSession:[AVAudioSession sharedInstance]];
+}
+
+/** This initializer provides a way for unit tests to inject a fake/mock audio session. */
+- (instancetype)initWithAudioSession:(id)audioSession {
+ if (self = [super init]) {
+ _session = audioSession;
+
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+ [center addObserver:self
+ selector:@selector(handleInterruptionNotification:)
+ name:AVAudioSessionInterruptionNotification
+ object:nil];
+ [center addObserver:self
+ selector:@selector(handleRouteChangeNotification:)
+ name:AVAudioSessionRouteChangeNotification
+ object:nil];
+ [center addObserver:self
+ selector:@selector(handleMediaServicesWereLost:)
+ name:AVAudioSessionMediaServicesWereLostNotification
+ object:nil];
+ [center addObserver:self
+ selector:@selector(handleMediaServicesWereReset:)
+ name:AVAudioSessionMediaServicesWereResetNotification
+ object:nil];
+ // Posted on the main thread when the primary audio from other applications
+ // starts and stops. Foreground applications may use this notification as a
+ // hint to enable or disable audio that is secondary.
+ [center addObserver:self
+ selector:@selector(handleSilenceSecondaryAudioHintNotification:)
+ name:AVAudioSessionSilenceSecondaryAudioHintNotification
+ object:nil];
+    // Also track the foreground event in order to deal with the interruption-ended case.
+ [center addObserver:self
+ selector:@selector(handleApplicationDidBecomeActive:)
+ name:UIApplicationDidBecomeActiveNotification
+ object:nil];
+ [_session addObserver:self
+ forKeyPath:kRTCAudioSessionOutputVolumeSelector
+ options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
+ context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
+
+ RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self);
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+ [_session removeObserver:self
+ forKeyPath:kRTCAudioSessionOutputVolumeSelector
+ context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
+ RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self);
+}
+
+- (NSString *)description {
+ NSString *format = @"RTC_OBJC_TYPE(RTCAudioSession): {\n"
+ " category: %@\n"
+ " categoryOptions: %ld\n"
+ " mode: %@\n"
+ " isActive: %d\n"
+ " sampleRate: %.2f\n"
+ " IOBufferDuration: %f\n"
+ " outputNumberOfChannels: %ld\n"
+ " inputNumberOfChannels: %ld\n"
+ " outputLatency: %f\n"
+ " inputLatency: %f\n"
+ " outputVolume: %f\n"
+ "}";
+ NSString *description = [NSString stringWithFormat:format,
+ self.category, (long)self.categoryOptions, self.mode,
+ self.isActive, self.sampleRate, self.IOBufferDuration,
+ self.outputNumberOfChannels, self.inputNumberOfChannels,
+ self.outputLatency, self.inputLatency, self.outputVolume];
+ return description;
+}
+
+- (void)setIsActive:(BOOL)isActive {
+ @synchronized(self) {
+ _isActive = isActive;
+ }
+}
+
+- (BOOL)isActive {
+ @synchronized(self) {
+ return _isActive;
+ }
+}
+
+- (void)setUseManualAudio:(BOOL)useManualAudio {
+ @synchronized(self) {
+ if (_useManualAudio == useManualAudio) {
+ return;
+ }
+ _useManualAudio = useManualAudio;
+ }
+ [self updateCanPlayOrRecord];
+}
+
+- (BOOL)useManualAudio {
+ @synchronized(self) {
+ return _useManualAudio;
+ }
+}
+
+- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
+ @synchronized(self) {
+ if (_isAudioEnabled == isAudioEnabled) {
+ return;
+ }
+ _isAudioEnabled = isAudioEnabled;
+ }
+ [self updateCanPlayOrRecord];
+}
+
+- (BOOL)isAudioEnabled {
+ @synchronized(self) {
+ return _isAudioEnabled;
+ }
+}
+
+- (void)setIgnoresPreferredAttributeConfigurationErrors:
+ (BOOL)ignoresPreferredAttributeConfigurationErrors {
+ @synchronized(self) {
+ if (_ignoresPreferredAttributeConfigurationErrors ==
+ ignoresPreferredAttributeConfigurationErrors) {
+ return;
+ }
+ _ignoresPreferredAttributeConfigurationErrors = ignoresPreferredAttributeConfigurationErrors;
+ }
+}
+
+- (BOOL)ignoresPreferredAttributeConfigurationErrors {
+ @synchronized(self) {
+ return _ignoresPreferredAttributeConfigurationErrors;
+ }
+}
+
+// TODO(tkchin): Check for duplicates.
+- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
+ RTCLog(@"Adding delegate: (%p)", delegate);
+ if (!delegate) {
+ return;
+ }
+ @synchronized(self) {
+ _delegates.push_back(delegate);
+ [self removeZeroedDelegates];
+ }
+}
+
+- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
+ RTCLog(@"Removing delegate: (%p)", delegate);
+ if (!delegate) {
+ return;
+ }
+ @synchronized(self) {
+ _delegates.erase(std::remove(_delegates.begin(),
+ _delegates.end(),
+ delegate),
+ _delegates.end());
+ [self removeZeroedDelegates];
+ }
+}
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wthread-safety-analysis"
+
+- (void)lockForConfiguration {
+ RTC_CHECK(!mutex_locked);
+ _mutex.Lock();
+ mutex_locked = true;
+}
+
+- (void)unlockForConfiguration {
+ mutex_locked = false;
+ _mutex.Unlock();
+}
+
+#pragma clang diagnostic pop
+
+#pragma mark - AVAudioSession proxy methods
+
+- (NSString *)category {
+ return self.session.category;
+}
+
+- (AVAudioSessionCategoryOptions)categoryOptions {
+ return self.session.categoryOptions;
+}
+
+- (NSString *)mode {
+ return self.session.mode;
+}
+
+- (BOOL)secondaryAudioShouldBeSilencedHint {
+ return self.session.secondaryAudioShouldBeSilencedHint;
+}
+
+- (AVAudioSessionRouteDescription *)currentRoute {
+ return self.session.currentRoute;
+}
+
+- (NSInteger)maximumInputNumberOfChannels {
+ return self.session.maximumInputNumberOfChannels;
+}
+
+- (NSInteger)maximumOutputNumberOfChannels {
+ return self.session.maximumOutputNumberOfChannels;
+}
+
+- (float)inputGain {
+ return self.session.inputGain;
+}
+
+- (BOOL)inputGainSettable {
+ return self.session.inputGainSettable;
+}
+
+- (BOOL)inputAvailable {
+ return self.session.inputAvailable;
+}
+
+- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
+ return self.session.inputDataSources;
+}
+
+- (AVAudioSessionDataSourceDescription *)inputDataSource {
+ return self.session.inputDataSource;
+}
+
+- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
+ return self.session.outputDataSources;
+}
+
+- (AVAudioSessionDataSourceDescription *)outputDataSource {
+ return self.session.outputDataSource;
+}
+
+- (double)sampleRate {
+ return self.session.sampleRate;
+}
+
+- (double)preferredSampleRate {
+ return self.session.preferredSampleRate;
+}
+
+- (NSInteger)inputNumberOfChannels {
+ return self.session.inputNumberOfChannels;
+}
+
+- (NSInteger)outputNumberOfChannels {
+ return self.session.outputNumberOfChannels;
+}
+
+- (float)outputVolume {
+ return self.session.outputVolume;
+}
+
+- (NSTimeInterval)inputLatency {
+ return self.session.inputLatency;
+}
+
+- (NSTimeInterval)outputLatency {
+ return self.session.outputLatency;
+}
+
+- (NSTimeInterval)IOBufferDuration {
+ return self.session.IOBufferDuration;
+}
+
+- (NSTimeInterval)preferredIOBufferDuration {
+ return self.session.preferredIOBufferDuration;
+}
+
+- (BOOL)setActive:(BOOL)active
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ int activationCount = _activationCount.load();
+ if (!active && activationCount == 0) {
+ RTCLogWarning(@"Attempting to deactivate without prior activation.");
+ }
+ [self notifyWillSetActive:active];
+ BOOL success = YES;
+ BOOL isActive = self.isActive;
+ // Keep a local error so we can log it.
+ NSError *error = nil;
+ BOOL shouldSetActive =
+ (active && !isActive) || (!active && isActive && activationCount == 1);
+ // Attempt to activate if we're not active.
+ // Attempt to deactivate if we're active and it's the last unbalanced call.
+ if (shouldSetActive) {
+ AVAudioSession *session = self.session;
+ // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to ensure
+ // that other audio sessions that were interrupted by our session can return
+ // to their active state. It is recommended for VoIP apps to use this
+ // option.
+ AVAudioSessionSetActiveOptions options =
+ active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
+ success = [session setActive:active
+ withOptions:options
+ error:&error];
+ if (outError) {
+ *outError = error;
+ }
+ }
+ if (success) {
+ if (active) {
+ if (shouldSetActive) {
+ self.isActive = active;
+ if (self.isInterrupted) {
+ self.isInterrupted = NO;
+ [self notifyDidEndInterruptionWithShouldResumeSession:YES];
+ }
+ }
+ [self incrementActivationCount];
+ [self notifyDidSetActive:active];
+ }
+ } else {
+ RTCLogError(@"Failed to setActive:%d. Error: %@",
+ active, error.localizedDescription);
+ [self notifyFailedToSetActive:active error:error];
+ }
+ // Set isActive and decrement activation count on deactivation
+ // whether or not it succeeded.
+ if (!active) {
+ self.isActive = active;
+ [self notifyDidSetActive:active];
+ [self decrementActivationCount];
+ }
+ RTCLog(@"Number of current activations: %d", _activationCount.load());
+ return success;
+}
+
+- (BOOL)setCategory:(NSString *)category
+ withOptions:(AVAudioSessionCategoryOptions)options
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setCategory:category withOptions:options error:outError];
+}
+
+- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setMode:mode error:outError];
+}
+
+- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setInputGain:gain error:outError];
+}
+
+- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setPreferredSampleRate:sampleRate error:outError];
+}
+
+- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setPreferredIOBufferDuration:duration error:outError];
+}
+
+- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setPreferredInputNumberOfChannels:count error:outError];
+}
+
+- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setPreferredOutputNumberOfChannels:count error:outError];
+}
+
+- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session overrideOutputAudioPort:portOverride error:outError];
+}
+
+- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setPreferredInput:inPort error:outError];
+}
+
+- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setInputDataSource:dataSource error:outError];
+}
+
+- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
+ error:(NSError **)outError {
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ return [self.session setOutputDataSource:dataSource error:outError];
+}
+
+#pragma mark - Notifications
+
+- (void)handleInterruptionNotification:(NSNotification *)notification {
+  NSNumber *typeNumber =
+ notification.userInfo[AVAudioSessionInterruptionTypeKey];
+ AVAudioSessionInterruptionType type =
+ (AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
+ switch (type) {
+ case AVAudioSessionInterruptionTypeBegan:
+ RTCLog(@"Audio session interruption began.");
+ self.isActive = NO;
+ self.isInterrupted = YES;
+ [self notifyDidBeginInterruption];
+ break;
+ case AVAudioSessionInterruptionTypeEnded: {
+ RTCLog(@"Audio session interruption ended.");
+ self.isInterrupted = NO;
+ [self updateAudioSessionAfterEvent];
+ NSNumber *optionsNumber =
+ notification.userInfo[AVAudioSessionInterruptionOptionKey];
+ AVAudioSessionInterruptionOptions options =
+ optionsNumber.unsignedIntegerValue;
+ BOOL shouldResume =
+ options & AVAudioSessionInterruptionOptionShouldResume;
+ [self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
+ break;
+ }
+ }
+}
+
+- (void)handleRouteChangeNotification:(NSNotification *)notification {
+ // Get reason for current route change.
+  NSNumber *reasonNumber =
+ notification.userInfo[AVAudioSessionRouteChangeReasonKey];
+ AVAudioSessionRouteChangeReason reason =
+ (AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
+ RTCLog(@"Audio route changed:");
+ switch (reason) {
+ case AVAudioSessionRouteChangeReasonUnknown:
+ RTCLog(@"Audio route changed: ReasonUnknown");
+ break;
+ case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
+ RTCLog(@"Audio route changed: NewDeviceAvailable");
+ break;
+ case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
+ RTCLog(@"Audio route changed: OldDeviceUnavailable");
+ break;
+ case AVAudioSessionRouteChangeReasonCategoryChange:
+ RTCLog(@"Audio route changed: CategoryChange to :%@",
+ self.session.category);
+ break;
+ case AVAudioSessionRouteChangeReasonOverride:
+ RTCLog(@"Audio route changed: Override");
+ break;
+ case AVAudioSessionRouteChangeReasonWakeFromSleep:
+ RTCLog(@"Audio route changed: WakeFromSleep");
+ break;
+ case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
+ RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
+ break;
+ case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
+ RTCLog(@"Audio route changed: RouteConfigurationChange");
+ break;
+ }
+  AVAudioSessionRouteDescription *previousRoute =
+ notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
+ // Log previous route configuration.
+ RTCLog(@"Previous route: %@\nCurrent route:%@",
+ previousRoute, self.session.currentRoute);
+ [self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
+}
+
+- (void)handleMediaServicesWereLost:(NSNotification *)notification {
+ RTCLog(@"Media services were lost.");
+ [self updateAudioSessionAfterEvent];
+ [self notifyMediaServicesWereLost];
+}
+
+- (void)handleMediaServicesWereReset:(NSNotification *)notification {
+ RTCLog(@"Media services were reset.");
+ [self updateAudioSessionAfterEvent];
+ [self notifyMediaServicesWereReset];
+}
+
+- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
+  // TODO(henrika): just adding logs here for now until we know if we will
+  // ever see this notification and might be affected by it, or if further
+  // actions are required.
+ NSNumber *typeNumber =
+ notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
+ AVAudioSessionSilenceSecondaryAudioHintType type =
+ (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
+ switch (type) {
+ case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
+ RTCLog(@"Another application's primary audio has started.");
+ break;
+ case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
+ RTCLog(@"Another application's primary audio has stopped.");
+ break;
+ }
+}
+
+- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
+ BOOL isInterrupted = self.isInterrupted;
+ RTCLog(@"Application became active after an interruption. Treating as interruption "
+ "end. isInterrupted changed from %d to 0.",
+ isInterrupted);
+ if (isInterrupted) {
+ self.isInterrupted = NO;
+ [self updateAudioSessionAfterEvent];
+ }
+ // Always treat application becoming active as an interruption end event.
+ [self notifyDidEndInterruptionWithShouldResumeSession:YES];
+}
+
+#pragma mark - Private
+
++ (NSError *)lockError {
+ NSDictionary *userInfo =
+ @{NSLocalizedDescriptionKey : @"Must call lockForConfiguration before calling this method."};
+ NSError *error = [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
+ code:kRTCAudioSessionErrorLockRequired
+ userInfo:userInfo];
+ return error;
+}
+
+- (std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> >)delegates {
+ @synchronized(self) {
+ // Note: this returns a copy.
+ return _delegates;
+ }
+}
+
+// TODO(tkchin): check for duplicates.
+- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
+ @synchronized(self) {
+ _delegates.insert(_delegates.begin(), delegate);
+ }
+}
+
+- (void)removeZeroedDelegates {
+ @synchronized(self) {
+ _delegates.erase(
+ std::remove_if(_delegates.begin(),
+ _delegates.end(),
+ [](id delegate) -> bool { return delegate == nil; }),
+ _delegates.end());
+ }
+}
+
+- (int)activationCount {
+ return _activationCount.load();
+}
+
+- (int)incrementActivationCount {
+ RTCLog(@"Incrementing activation count.");
+ return _activationCount.fetch_add(1) + 1;
+}
+
+- (NSInteger)decrementActivationCount {
+ RTCLog(@"Decrementing activation count.");
+ return _activationCount.fetch_sub(1) - 1;
+}
+
+- (int)webRTCSessionCount {
+ return _webRTCSessionCount.load();
+}
+
+- (BOOL)canPlayOrRecord {
+ return !self.useManualAudio || self.isAudioEnabled;
+}
+
+- (BOOL)isInterrupted {
+ @synchronized(self) {
+ return _isInterrupted;
+ }
+}
+
+- (void)setIsInterrupted:(BOOL)isInterrupted {
+ @synchronized(self) {
+ if (_isInterrupted == isInterrupted) {
+ return;
+ }
+ _isInterrupted = isInterrupted;
+ }
+}
+
+- (BOOL)checkLock:(NSError **)outError {
+ if (!mutex_locked) {
+ if (outError) {
+ *outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError];
+ }
+ return NO;
+ }
+ return YES;
+}
+
+- (BOOL)beginWebRTCSession:(NSError **)outError {
+ if (outError) {
+ *outError = nil;
+ }
+ _webRTCSessionCount.fetch_add(1);
+ [self notifyDidStartPlayOrRecord];
+ return YES;
+}
+
+- (BOOL)endWebRTCSession:(NSError **)outError {
+ if (outError) {
+ *outError = nil;
+ }
+ _webRTCSessionCount.fetch_sub(1);
+ [self notifyDidStopPlayOrRecord];
+ return YES;
+}
+
+- (BOOL)configureWebRTCSession:(NSError **)outError {
+ if (outError) {
+ *outError = nil;
+ }
+ RTCLog(@"Configuring audio session for WebRTC.");
+
+ // Configure the AVAudioSession and activate it.
+ // Provide an error even if there isn't one so we can log it.
+ NSError *error = nil;
+ RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
+ [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
+ if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
+ RTCLogError(@"Failed to set WebRTC audio configuration: %@",
+ error.localizedDescription);
+ // Do not call setActive:NO if setActive:YES failed.
+ if (outError) {
+ *outError = error;
+ }
+ return NO;
+ }
+
+ // Ensure that the device currently supports audio input.
+ // TODO(tkchin): Figure out if this is really necessary.
+ if (!self.inputAvailable) {
+ RTCLogError(@"No audio input path is available!");
+ [self unconfigureWebRTCSession:nil];
+ if (outError) {
+ *outError = [self configurationErrorWithDescription:@"No input path."];
+ }
+ return NO;
+ }
+
+ // It can happen (e.g. in combination with BT devices) that the attempt to set
+ // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
+ // configuration attempt using the sample rate that worked using the active
+ // audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
+ // combination with BT headsets. Using this "trick" seems to avoid a state
+ // where Core Audio asks for a different number of audio frames than what the
+ // session's I/O buffer duration corresponds to.
+ // TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
+ // tested on a limited set of iOS devices and BT devices.
+ double sessionSampleRate = self.sampleRate;
+ double preferredSampleRate = webRTCConfig.sampleRate;
+ if (sessionSampleRate != preferredSampleRate) {
+ RTCLogWarning(
+ @"Current sample rate (%.2f) is not the preferred rate (%.2f)",
+ sessionSampleRate, preferredSampleRate);
+ if (![self setPreferredSampleRate:sessionSampleRate
+ error:&error]) {
+ RTCLogError(@"Failed to set preferred sample rate: %@",
+ error.localizedDescription);
+ if (outError) {
+ *outError = error;
+ }
+ }
+ }
+
+ return YES;
+}
+
+- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
+ if (outError) {
+ *outError = nil;
+ }
+ RTCLog(@"Unconfiguring audio session for WebRTC.");
+ [self setActive:NO error:outError];
+
+ return YES;
+}
+
+- (NSError *)configurationErrorWithDescription:(NSString *)description {
+ NSDictionary *userInfo = @{
+ NSLocalizedDescriptionKey: description,
+ };
+ return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
+ code:kRTCAudioSessionErrorConfiguration
+ userInfo:userInfo];
+}
+
+- (void)updateAudioSessionAfterEvent {
+ BOOL shouldActivate = self.activationCount > 0;
+ AVAudioSessionSetActiveOptions options = shouldActivate ?
+ 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
+ NSError *error = nil;
+ if ([self.session setActive:shouldActivate
+ withOptions:options
+ error:&error]) {
+ self.isActive = shouldActivate;
+ } else {
+ RTCLogError(@"Failed to set session active to %d. Error:%@",
+ shouldActivate, error.localizedDescription);
+ }
+}
+
+- (void)updateCanPlayOrRecord {
+ BOOL canPlayOrRecord = NO;
+ BOOL shouldNotify = NO;
+ @synchronized(self) {
+ canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
+ if (_canPlayOrRecord == canPlayOrRecord) {
+ return;
+ }
+ _canPlayOrRecord = canPlayOrRecord;
+ shouldNotify = YES;
+ }
+ if (shouldNotify) {
+ [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
+ }
+}
+
+- (void)audioSessionDidActivate:(AVAudioSession *)session {
+ if (_session != session) {
+ RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
+ }
+ RTCLog(@"Audio session was externally activated.");
+ [self incrementActivationCount];
+ self.isActive = YES;
+ // When a CallKit call begins, it's possible that we receive an interruption
+ // begin without a corresponding end. Since we know that we have an activated
+ // audio session at this point, just clear any saved interruption flag since
+ // the app may never be foregrounded during the duration of the call.
+ if (self.isInterrupted) {
+ RTCLog(@"Clearing interrupted state due to external activation.");
+ self.isInterrupted = NO;
+ }
+ // Treat external audio session activation as an end interruption event.
+ [self notifyDidEndInterruptionWithShouldResumeSession:YES];
+}
+
+- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
+ if (_session != session) {
+ RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
+ }
+ RTCLog(@"Audio session was externally deactivated.");
+ self.isActive = NO;
+ [self decrementActivationCount];
+}
+
+- (void)observeValueForKeyPath:(NSString *)keyPath
+ ofObject:(id)object
+ change:(NSDictionary *)change
+ context:(void *)context {
+ if (context == (__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class) {
+ if (object == _session) {
+ NSNumber *newVolume = change[NSKeyValueChangeNewKey];
+ RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
+ [self notifyDidChangeOutputVolume:newVolume.floatValue];
+ }
+ } else {
+ [super observeValueForKeyPath:keyPath
+ ofObject:object
+ change:change
+ context:context];
+ }
+}
+
+- (void)notifyAudioUnitStartFailedWithError:(OSStatus)error {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSession:audioUnitStartFailedWithError:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSession:self
+ audioUnitStartFailedWithError:[NSError errorWithDomain:kRTCAudioSessionErrorDomain
+ code:error
+ userInfo:nil]];
+ }
+ }
+}
+
+- (void)notifyDidBeginInterruption {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSessionDidBeginInterruption:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSessionDidBeginInterruption:self];
+ }
+ }
+}
+
+- (void)notifyDidEndInterruptionWithShouldResumeSession:
+ (BOOL)shouldResumeSession {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSessionDidEndInterruption:self
+ shouldResumeSession:shouldResumeSession];
+ }
+ }
+}
+
+- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSessionDidChangeRoute:self
+ reason:reason
+ previousRoute:previousRoute];
+ }
+ }
+}
+
+- (void)notifyMediaServicesWereLost {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSessionMediaServerTerminated:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSessionMediaServerTerminated:self];
+ }
+ }
+}
+
+- (void)notifyMediaServicesWereReset {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSessionMediaServerReset:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSessionMediaServerReset:self];
+ }
+ }
+}
+
+- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
+ }
+ }
+}
+
+- (void)notifyDidStartPlayOrRecord {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSessionDidStartPlayOrRecord:self];
+ }
+ }
+}
+
+- (void)notifyDidStopPlayOrRecord {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSessionDidStopPlayOrRecord:self];
+ }
+ }
+}
+
+- (void)notifyDidChangeOutputVolume:(float)volume {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSession:didChangeOutputVolume:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSession:self didChangeOutputVolume:volume];
+ }
+ }
+}
+
+- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
+ for (auto delegate : self.delegates) {
+ SEL sel = @selector(audioSession:didDetectPlayoutGlitch:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSession:self didDetectPlayoutGlitch:totalNumberOfGlitches];
+ }
+ }
+}
+
+- (void)notifyWillSetActive:(BOOL)active {
+ for (id delegate : self.delegates) {
+ SEL sel = @selector(audioSession:willSetActive:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSession:self willSetActive:active];
+ }
+ }
+}
+
+- (void)notifyDidSetActive:(BOOL)active {
+ for (id delegate : self.delegates) {
+ SEL sel = @selector(audioSession:didSetActive:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSession:self didSetActive:active];
+ }
+ }
+}
+
+- (void)notifyFailedToSetActive:(BOOL)active error:(NSError *)error {
+ for (id delegate : self.delegates) {
+ SEL sel = @selector(audioSession:failedToSetActive:error:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate audioSession:self failedToSetActive:active error:error];
+ }
+ }
+}
+
+@end
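
For apps that integrate with CallKit, the manual-audio flow above (`useManualAudio`, `isAudioEnabled`, `audioSessionDidActivate:` / `audioSessionDidDeactivate:`) is the intended entry point. A minimal sketch of that wiring, assuming a hypothetical `CallAudioController` class acting as the CXProviderDelegate; only the RTCAudioSession calls are taken from the code above:

    #import <CallKit/CallKit.h>

    #import "RTCAudioSession.h"

    // Hypothetical delegate class, not part of the patch.
    @interface CallAudioController : NSObject <CXProviderDelegate>
    @end

    @implementation CallAudioController

    - (instancetype)init {
      if (self = [super init]) {
        RTC_OBJC_TYPE(RTCAudioSession) *session =
            [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
        // Let CallKit, not WebRTC, activate and deactivate AVAudioSession.
        session.useManualAudio = YES;
        session.isAudioEnabled = NO;
      }
      return self;
    }

    - (void)providerDidReset:(CXProvider *)provider {
      // Required by CXProviderDelegate; end all calls and tear down state here.
    }

    - (void)provider:(CXProvider *)provider
        didActivateAudioSession:(AVAudioSession *)audioSession {
      RTC_OBJC_TYPE(RTCAudioSession) *session =
          [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
      [session audioSessionDidActivate:audioSession];
      // Only now does -canPlayOrRecord return YES.
      session.isAudioEnabled = YES;
    }

    - (void)provider:(CXProvider *)provider
        didDeactivateAudioSession:(AVAudioSession *)audioSession {
      RTC_OBJC_TYPE(RTCAudioSession) *session =
          [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
      session.isAudioEnabled = NO;
      [session audioSessionDidDeactivate:audioSession];
    }

    @end

The ordering matters: audio is enabled only after CallKit reports activation, and disabled before deactivation is forwarded, so WebRTC never touches AVAudioSession itself.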
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.h b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
new file mode 100644
index 0000000000..4582b80557
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXTERN const int kRTCAudioSessionPreferredNumberOfChannels;
+RTC_EXTERN const double kRTCAudioSessionHighPerformanceSampleRate;
+RTC_EXTERN const double kRTCAudioSessionLowComplexitySampleRate;
+RTC_EXTERN const double kRTCAudioSessionHighPerformanceIOBufferDuration;
+RTC_EXTERN const double kRTCAudioSessionLowComplexityIOBufferDuration;
+
+// Class that holds the audio session configuration values.
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioSessionConfiguration) : NSObject
+
+@property(nonatomic, strong) NSString *category;
+@property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions;
+@property(nonatomic, strong) NSString *mode;
+@property(nonatomic, assign) double sampleRate;
+@property(nonatomic, assign) NSTimeInterval ioBufferDuration;
+@property(nonatomic, assign) NSInteger inputNumberOfChannels;
+@property(nonatomic, assign) NSInteger outputNumberOfChannels;
+
+/** Initializes configuration to defaults. */
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+/** Returns the current configuration of the audio session. */
++ (instancetype)currentConfiguration;
+/** Returns the configuration that WebRTC needs. */
++ (instancetype)webRTCConfiguration;
+/** Provide a way to override the default configuration. */
++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
new file mode 100644
index 0000000000..39e9ac13ec
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSessionConfiguration.h"
+#import "RTCAudioSession.h"
+
+#import "helpers/RTCDispatcher.h"
+#import "helpers/UIDevice+RTCDevice.h"
+
+// Try to use mono to save resources. Also avoids channel format conversion
+// in the I/O audio unit. Initial tests have shown that it is possible to use
+// mono natively for built-in microphones and for BT headsets but not for
+// wired headsets. Wired headsets only support stereo as their native channel
+// format, but converting to mono in the audio unit is a low-cost operation.
+// Hence, we will not hit an RTC_CHECK in
+// VerifyAudioParametersForActiveAudioSession() for a mismatch between the
+// preferred number of channels and the actual number of channels.
+const int kRTCAudioSessionPreferredNumberOfChannels = 1;
+
+// Preferred hardware sample rate (in Hertz). The client sample rate will be
+// set to this value as well to avoid resampling in the audio unit's format
+// converter. Note that some devices, e.g. BT headsets, only support 8000 Hz
+// as their native sample rate.
+const double kRTCAudioSessionHighPerformanceSampleRate = 48000.0;
+
+// A lower sample rate will be used for devices with only one core
+// (e.g. iPhone 4). The goal is to reduce the CPU load of the application.
+const double kRTCAudioSessionLowComplexitySampleRate = 16000.0;
+
+// Use a hardware I/O buffer size (in seconds) that matches the 10 ms size
+// used by WebRTC. The actual size will differ between devices. Example:
+// using 48 kHz on iPhone 6 results in a native buffer size of ~10.6667 ms,
+// or 512 audio frames per buffer. The FineAudioBuffer instance will take
+// care of any buffering required to convert between native buffers and
+// buffers used by WebRTC. Performance benefits when the native size is as
+// close to an even multiple of 10 ms as possible, since that results in a
+// "clean" callback sequence without bursts of callbacks back to back.
+const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02;
+
+// Use a larger buffer size on devices with only one core (e.g. iPhone 4).
+// It will result in a lower CPU consumption at the cost of a larger latency.
+// The size of 60ms is based on instrumentation that shows a significant
+// reduction in CPU load compared with 10ms on low-end devices.
+// TODO(henrika): monitor this size and determine if it should be modified.
+const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
+
+static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
+
+@implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration)
+
+@synthesize category = _category;
+@synthesize categoryOptions = _categoryOptions;
+@synthesize mode = _mode;
+@synthesize sampleRate = _sampleRate;
+@synthesize ioBufferDuration = _ioBufferDuration;
+@synthesize inputNumberOfChannels = _inputNumberOfChannels;
+@synthesize outputNumberOfChannels = _outputNumberOfChannels;
+
+- (instancetype)init {
+ if (self = [super init]) {
+ // Use a category which supports simultaneous recording and playback.
+ // By default, using this category implies that our app’s audio is
+ // nonmixable, hence activating the session will interrupt any other
+ // audio sessions which are also nonmixable.
+ _category = AVAudioSessionCategoryPlayAndRecord;
+ _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth;
+
+ // Specify mode for two-way voice communication (e.g. VoIP).
+ _mode = AVAudioSessionModeVoiceChat;
+
+ // Set the session's preferred sample rate. It is essential that we use
+ // the same sample rate as the stream format to ensure that the I/O unit
+ // does not have to do sample rate conversion.
+ // Set the preferred audio I/O buffer duration, in seconds.
+ NSUInteger processorCount = [NSProcessInfo processInfo].processorCount;
+ // Use best sample rate and buffer duration if the CPU has more than one
+ // core.
+ if (processorCount > 1 && [UIDevice deviceType] != RTCDeviceTypeIPhone4S) {
+ _sampleRate = kRTCAudioSessionHighPerformanceSampleRate;
+ _ioBufferDuration = kRTCAudioSessionHighPerformanceIOBufferDuration;
+ } else {
+ _sampleRate = kRTCAudioSessionLowComplexitySampleRate;
+ _ioBufferDuration = kRTCAudioSessionLowComplexityIOBufferDuration;
+ }
+
+ // We try to use mono in both directions to save resources and format
+ // conversions in the audio unit. Some devices only support stereo, e.g. a
+ // wired headset on iPhone 6.
+ // TODO(henrika): add support for stereo if needed.
+ _inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
+ _outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
+ }
+ return self;
+}
+
++ (void)initialize {
+ gWebRTCConfiguration = [[self alloc] init];
+}
+
++ (instancetype)currentConfiguration {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
+ [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
+ config.category = session.category;
+ config.categoryOptions = session.categoryOptions;
+ config.mode = session.mode;
+ config.sampleRate = session.sampleRate;
+ config.ioBufferDuration = session.IOBufferDuration;
+ config.inputNumberOfChannels = session.inputNumberOfChannels;
+ config.outputNumberOfChannels = session.outputNumberOfChannels;
+ return config;
+}
+
++ (instancetype)webRTCConfiguration {
+ @synchronized(self) {
+ return (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)gWebRTCConfiguration;
+ }
+}
+
++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration {
+ @synchronized(self) {
+ gWebRTCConfiguration = configuration;
+ }
+}
+
+@end
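
Since `-configureWebRTCSession:` fetches `+webRTCConfiguration` each time it runs, an app can override the defaults above before starting a call. A sketch under that assumption; the helper name is illustrative:

    #import "RTCAudioSessionConfiguration.h"

    // Hypothetical helper: switch the defaults above to a video-chat tuned mode.
    static void ApplyVideoChatAudioConfiguration(void) {
      // -init starts from the defaults constructed above.
      RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
          [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
      config.mode = AVAudioSessionModeVideoChat;
      config.categoryOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker;
      // Every later call to +webRTCConfiguration (e.g. from
      // -configureWebRTCSession:) now returns this instance.
      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:config];
    }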
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
new file mode 100644
index 0000000000..6a75f01479
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSession.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+namespace webrtc {
+class AudioSessionObserver;
+}
+
+/** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
+ * methods on the AudioSessionObserver.
+ */
+@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** `observer` is a raw pointer and should be kept alive
+ * for this object's lifetime.
+ */
+- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
new file mode 100644
index 0000000000..daddf314a4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNativeAudioSessionDelegateAdapter.h"
+
+#include "sdk/objc/native/src/audio/audio_session_observer.h"
+
+#import "base/RTCLogging.h"
+
+@implementation RTCNativeAudioSessionDelegateAdapter {
+ webrtc::AudioSessionObserver *_observer;
+}
+
+- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer {
+ RTC_DCHECK(observer);
+ if (self = [super init]) {
+ _observer = observer;
+ }
+ return self;
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
+
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+ _observer->OnInterruptionBegin();
+}
+
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ shouldResumeSession:(BOOL)shouldResumeSession {
+ _observer->OnInterruptionEnd();
+}
+
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ reason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
+ switch (reason) {
+ case AVAudioSessionRouteChangeReasonUnknown:
+ case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
+ case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
+ case AVAudioSessionRouteChangeReasonCategoryChange:
+ // It turns out that we see a category change (at least in iOS 9.2)
+ // when making a switch from a BT device to e.g. Speaker using the
+ // iOS Control Center, so we must check whether the sample rate has
+ // changed. If it has, restart the audio unit.
+ case AVAudioSessionRouteChangeReasonOverride:
+ case AVAudioSessionRouteChangeReasonWakeFromSleep:
+ case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
+ _observer->OnValidRouteChange();
+ break;
+ case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
+ // The set of input and output ports has not changed, but their
+ // configuration has, e.g., a port’s selected data source has
+ // changed. Ignore this type of route change since we are focusing
+ // on detecting headset changes.
+ RTCLog(@"Ignoring RouteConfigurationChange");
+ break;
+ }
+}
+
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
+ _observer->OnCanPlayOrRecordChange(canPlayOrRecord);
+}
+
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didChangeOutputVolume:(float)outputVolume {
+ _observer->OnChangedOutputVolume();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.h
new file mode 100644
index 0000000000..370bfa70f0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCapturer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+// Camera capturer that implements RTCVideoCapturer. Delivers frames to an
+// RTCVideoCapturerDelegate (usually RTCVideoSource).
+NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.")
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
+
+// Capture session that is used for capturing. Valid from initialization to dealloc.
+@property(readonly, nonatomic) AVCaptureSession *captureSession;
+
+// Returns list of available capture devices that support video capture.
++ (NSArray<AVCaptureDevice *> *)captureDevices;
+// Returns list of formats that are supported by this class for this device.
++ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;
+
+// Returns the most efficient supported output pixel format for this capturer.
+- (FourCharCode)preferredOutputPixelFormat;
+
+// Starts the capture session asynchronously and notifies callback on completion.
+// The device will capture video in the format given in the `format` parameter. If the pixel format
+// in `format` is supported by the WebRTC pipeline, the same pixel format will be used for the
+// output. Otherwise, the format returned by `preferredOutputPixelFormat` will be used.
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+ format:(AVCaptureDeviceFormat *)format
+ fps:(NSInteger)fps
+ completionHandler:(nullable void (^)(NSError *_Nullable))completionHandler;
+// Stops the capture session asynchronously and notifies callback on completion.
+- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler;
+
+// Starts the capture session asynchronously.
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+ format:(AVCaptureDeviceFormat *)format
+ fps:(NSInteger)fps;
+// Stops the capture session asynchronously.
+- (void)stopCapture;
+
+@end
+
+NS_ASSUME_NONNULL_END
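
A typical start-up sequence for this interface, sketched under the assumption that `videoSource` conforms to `RTCVideoCapturerDelegate` (usually an RTCVideoSource); the helper name is illustrative:

    #import "RTCCameraVideoCapturer.h"

    // Hypothetical helper: start the front camera at 30 fps. The caller must
    // retain the returned capturer and call -stopCapture before releasing it
    // (dealloc asserts that capture has been stopped).
    static RTC_OBJC_TYPE(RTCCameraVideoCapturer) *StartFrontCamera(
        id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)> videoSource) {
      RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer =
          [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:videoSource];

      AVCaptureDevice *frontCamera = nil;
      for (AVCaptureDevice *device in
           [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
        if (device.position == AVCaptureDevicePositionFront) {
          frontCamera = device;
          break;
        }
      }
      if (!frontCamera) {
        return nil;
      }

      // Any supported format works; the capturer converts to a WebRTC-compatible
      // pixel format if needed.
      AVCaptureDeviceFormat *format =
          [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:frontCamera]
              .firstObject;
      [capturer startCaptureWithDevice:frontCamera
                                format:format
                                   fps:30
                     completionHandler:^(NSError *_Nullable error) {
                       if (error) {
                         NSLog(@"Capture failed to start: %@", error);
                       }
                     }];
      return capturer;
    }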
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m
new file mode 100644
index 0000000000..98d3cf9f45
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m
@@ -0,0 +1,535 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCameraVideoCapturer.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#if TARGET_OS_IPHONE
+#import "helpers/UIDevice+RTCDevice.h"
+#endif
+
+#import "helpers/AVCaptureSession+DevicePosition.h"
+#import "helpers/RTCDispatcher+Private.h"
+#include "rtc_base/system/gcd_helpers.h"
+
+const int64_t kNanosecondsPerSecond = 1000000000;
+
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) ()
+    <AVCaptureVideoDataOutputSampleBufferDelegate>
+@property(nonatomic, readonly) dispatch_queue_t frameQueue;
+@property(nonatomic, strong) AVCaptureDevice *currentDevice;
+@property(nonatomic, assign) BOOL hasRetriedOnFatalError;
+@property(nonatomic, assign) BOOL isRunning;
+// Will the session be running once all asynchronous operations have been completed?
+@property(nonatomic, assign) BOOL willBeRunning;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) {
+ AVCaptureVideoDataOutput *_videoDataOutput;
+ AVCaptureSession *_captureSession;
+ FourCharCode _preferredOutputPixelFormat;
+ FourCharCode _outputPixelFormat;
+ RTCVideoRotation _rotation;
+#if TARGET_OS_IPHONE
+ UIDeviceOrientation _orientation;
+ BOOL _generatingOrientationNotifications;
+#endif
+}
+
+@synthesize frameQueue = _frameQueue;
+@synthesize captureSession = _captureSession;
+@synthesize currentDevice = _currentDevice;
+@synthesize hasRetriedOnFatalError = _hasRetriedOnFatalError;
+@synthesize isRunning = _isRunning;
+@synthesize willBeRunning = _willBeRunning;
+
+- (instancetype)init {
+ return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
+}
+
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
+ return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
+}
+
+// This initializer is used for testing.
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
+ captureSession:(AVCaptureSession *)captureSession {
+ if (self = [super initWithDelegate:delegate]) {
+ // Create the capture session and all relevant inputs and outputs. We need
+ // to do this in init because the application may want the capture session
+ // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
+ // created here are retained until dealloc and never recreated.
+ if (![self setupCaptureSession:captureSession]) {
+ return nil;
+ }
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+#if TARGET_OS_IPHONE
+ _orientation = UIDeviceOrientationPortrait;
+ _rotation = RTCVideoRotation_90;
+ [center addObserver:self
+ selector:@selector(deviceOrientationDidChange:)
+ name:UIDeviceOrientationDidChangeNotification
+ object:nil];
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionInterruption:)
+ name:AVCaptureSessionWasInterruptedNotification
+ object:_captureSession];
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionInterruptionEnded:)
+ name:AVCaptureSessionInterruptionEndedNotification
+ object:_captureSession];
+ [center addObserver:self
+ selector:@selector(handleApplicationDidBecomeActive:)
+ name:UIApplicationDidBecomeActiveNotification
+ object:[UIApplication sharedApplication]];
+#endif
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionRuntimeError:)
+ name:AVCaptureSessionRuntimeErrorNotification
+ object:_captureSession];
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionDidStartRunning:)
+ name:AVCaptureSessionDidStartRunningNotification
+ object:_captureSession];
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionDidStopRunning:)
+ name:AVCaptureSessionDidStopRunningNotification
+ object:_captureSession];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ NSAssert(!_willBeRunning,
+ @"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
+ @"call stopCapture?");
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
++ (NSArray<AVCaptureDevice *> *)captureDevices {
+#if defined(WEBRTC_IOS) && defined(__IPHONE_10_0) && \
+ __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0
+ AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
+ discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
+ mediaType:AVMediaTypeVideo
+ position:AVCaptureDevicePositionUnspecified];
+ return session.devices;
+#else
+ return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+#endif
+}
+
++ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
+ // Support opening the device in any format. We make sure it's converted to a format we
+ // can handle, if needed, in the method `-setupVideoDataOutput`.
+ return device.formats;
+}
+
+- (FourCharCode)preferredOutputPixelFormat {
+ return _preferredOutputPixelFormat;
+}
+
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+ format:(AVCaptureDeviceFormat *)format
+ fps:(NSInteger)fps {
+ [self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
+}
+
+- (void)stopCapture {
+ [self stopCaptureWithCompletionHandler:nil];
+}
+
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+ format:(AVCaptureDeviceFormat *)format
+ fps:(NSInteger)fps
+ completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler {
+ _willBeRunning = YES;
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
+
+#if TARGET_OS_IPHONE
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (!self->_generatingOrientationNotifications) {
+ [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+ self->_generatingOrientationNotifications = YES;
+ }
+ });
+#endif
+
+ self.currentDevice = device;
+
+ NSError *error = nil;
+ if (![self.currentDevice lockForConfiguration:&error]) {
+ RTCLogError(@"Failed to lock device %@. Error: %@",
+ self.currentDevice,
+ error.userInfo);
+ if (completionHandler) {
+ completionHandler(error);
+ }
+ self.willBeRunning = NO;
+ return;
+ }
+ [self reconfigureCaptureSessionInput];
+ [self updateOrientation];
+ [self updateDeviceCaptureFormat:format fps:fps];
+ [self updateVideoDataOutputPixelFormat:format];
+ [self.captureSession startRunning];
+ [self.currentDevice unlockForConfiguration];
+ self.isRunning = YES;
+ if (completionHandler) {
+ completionHandler(nil);
+ }
+ }];
+}
+
+- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
+ _willBeRunning = NO;
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ RTCLogInfo("Stop");
+ self.currentDevice = nil;
+ for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) {
+ [self.captureSession removeInput:oldInput];
+ }
+ [self.captureSession stopRunning];
+
+#if TARGET_OS_IPHONE
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (self->_generatingOrientationNotifications) {
+ [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+ self->_generatingOrientationNotifications = NO;
+ }
+ });
+#endif
+ self.isRunning = NO;
+ if (completionHandler) {
+ completionHandler();
+ }
+ }];
+}
+
+#pragma mark iOS notifications
+
+#if TARGET_OS_IPHONE
+- (void)deviceOrientationDidChange:(NSNotification *)notification {
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ [self updateOrientation];
+ }];
+}
+#endif
+
+#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection {
+ NSParameterAssert(captureOutput == _videoDataOutput);
+
+ if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+ !CMSampleBufferDataIsReady(sampleBuffer)) {
+ return;
+ }
+
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (pixelBuffer == nil) {
+ return;
+ }
+
+#if TARGET_OS_IPHONE
+ // Default to portrait orientation on iPhone.
+ BOOL usingFrontCamera = NO;
+ // Check the image's EXIF for the camera the image came from, since the image
+ // could have been delayed because alwaysDiscardsLateVideoFrames is set to NO.
+ AVCaptureDevicePosition cameraPosition =
+ [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
+ if (cameraPosition != AVCaptureDevicePositionUnspecified) {
+ usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
+ } else {
+ AVCaptureDeviceInput *deviceInput =
+ (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
+ usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
+ }
+ switch (_orientation) {
+ case UIDeviceOrientationPortrait:
+ _rotation = RTCVideoRotation_90;
+ break;
+ case UIDeviceOrientationPortraitUpsideDown:
+ _rotation = RTCVideoRotation_270;
+ break;
+ case UIDeviceOrientationLandscapeLeft:
+ _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
+ break;
+ case UIDeviceOrientationLandscapeRight:
+ _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
+ break;
+ case UIDeviceOrientationFaceUp:
+ case UIDeviceOrientationFaceDown:
+ case UIDeviceOrientationUnknown:
+ // Ignore.
+ break;
+ }
+#else
+ // No rotation on Mac.
+ _rotation = RTCVideoRotation_0;
+#endif
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
+ int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
+ kNanosecondsPerSecond;
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+ rotation:_rotation
+ timeStampNs:timeStampNs];
+ [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection {
+#if TARGET_OS_IPHONE
+ CFStringRef droppedReason =
+ CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
+#else
+ // DroppedFrameReason unavailable on macOS.
+ CFStringRef droppedReason = nil;
+#endif
+ RTCLogError(@"Dropped sample buffer. Reason: %@", (__bridge NSString *)droppedReason);
+}
+
+#pragma mark - AVCaptureSession notifications
+
+- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
+ NSString *reasonString = nil;
+#if TARGET_OS_IPHONE
+ NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
+ if (reason) {
+ switch (reason.intValue) {
+ case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
+ reasonString = @"VideoDeviceNotAvailableInBackground";
+ break;
+ case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
+ reasonString = @"AudioDeviceInUseByAnotherClient";
+ break;
+ case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
+ reasonString = @"VideoDeviceInUseByAnotherClient";
+ break;
+ case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
+ reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
+ break;
+ }
+ }
+#endif
+ RTCLog(@"Capture session interrupted: %@", reasonString);
+}
+
+- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
+ RTCLog(@"Capture session interruption ended.");
+}
+
+- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
+ NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
+ RTCLogError(@"Capture session runtime error: %@", error);
+
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+#if TARGET_OS_IPHONE
+ if (error.code == AVErrorMediaServicesWereReset) {
+ [self handleNonFatalError];
+ } else {
+ [self handleFatalError];
+ }
+#else
+ [self handleFatalError];
+#endif
+ }];
+}
+
+- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
+ RTCLog(@"Capture session started.");
+
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ // If we successfully restarted after an unknown
+ // error, allow future retries on fatal errors.
+ self.hasRetriedOnFatalError = NO;
+ }];
+}
+
+- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
+ RTCLog(@"Capture session stopped.");
+}
+
+- (void)handleFatalError {
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ if (!self.hasRetriedOnFatalError) {
+ RTCLogWarning(@"Attempting to recover from fatal capture error.");
+ [self handleNonFatalError];
+ self.hasRetriedOnFatalError = YES;
+ } else {
+ RTCLogError(@"Previous fatal error recovery failed.");
+ }
+ }];
+}
+
+- (void)handleNonFatalError {
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ RTCLog(@"Restarting capture session after error.");
+ if (self.isRunning) {
+ [self.captureSession startRunning];
+ }
+ }];
+}
+
+#if TARGET_OS_IPHONE
+
+#pragma mark - UIApplication notifications
+
+- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ if (self.isRunning && !self.captureSession.isRunning) {
+ RTCLog(@"Restarting capture session on active.");
+ [self.captureSession startRunning];
+ }
+ }];
+}
+
+#endif // TARGET_OS_IPHONE
+
+#pragma mark - Private
+
+- (dispatch_queue_t)frameQueue {
+ if (!_frameQueue) {
+ _frameQueue = RTCDispatchQueueCreateWithTarget(
+ "org.webrtc.cameravideocapturer.video",
+ DISPATCH_QUEUE_SERIAL,
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
+ }
+ return _frameQueue;
+}
+
+- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
+ NSAssert(_captureSession == nil, @"Setup capture session called twice.");
+ _captureSession = captureSession;
+#if defined(WEBRTC_IOS)
+ _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
+ _captureSession.usesApplicationAudioSession = NO;
+#endif
+ [self setupVideoDataOutput];
+ // Add the output.
+ if (![_captureSession canAddOutput:_videoDataOutput]) {
+ RTCLogError(@"Video data output unsupported.");
+ return NO;
+ }
+ [_captureSession addOutput:_videoDataOutput];
+
+ return YES;
+}
+
+- (void)setupVideoDataOutput {
+ NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
+ AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+
+ // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
+ // device with the most efficient output format first. Find the first format that we support.
+ NSSet<NSNumber *> *supportedPixelFormats =
+ [RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats];
+ NSMutableOrderedSet *availablePixelFormats =
+ [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
+ [availablePixelFormats intersectSet:supportedPixelFormats];
+ NSNumber *pixelFormat = availablePixelFormats.firstObject;
+ NSAssert(pixelFormat, @"Output device has no supported formats.");
+
+ _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
+ _outputPixelFormat = _preferredOutputPixelFormat;
+ videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
+ videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
+ [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
+ _videoDataOutput = videoDataOutput;
+}
+
+- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
+ FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+ if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats] containsObject:@(mediaSubType)]) {
+ mediaSubType = _preferredOutputPixelFormat;
+ }
+
+ if (mediaSubType != _outputPixelFormat) {
+ _outputPixelFormat = mediaSubType;
+ }
+
+ // Update videoSettings with dimensions, as some virtual cameras, e.g. Snap Camera, may not work
+ // otherwise.
+ CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ _videoDataOutput.videoSettings = @{
+ (id)kCVPixelBufferWidthKey : @(dimensions.width),
+ (id)kCVPixelBufferHeightKey : @(dimensions.height),
+ (id)kCVPixelBufferPixelFormatTypeKey : @(_outputPixelFormat),
+ };
+}
+
+#pragma mark - Private, called inside capture queue
+
+- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
+ NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+ @"updateDeviceCaptureFormat must be called on the capture queue.");
+ @try {
+ _currentDevice.activeFormat = format;
+ _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
+ } @catch (NSException *exception) {
+ RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
+ return;
+ }
+}
+
+- (void)reconfigureCaptureSessionInput {
+ NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+ @"reconfigureCaptureSessionInput must be called on the capture queue.");
+ NSError *error = nil;
+ AVCaptureDeviceInput *input =
+ [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
+ if (!input) {
+ RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
+ return;
+ }
+ [_captureSession beginConfiguration];
+ for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
+ [_captureSession removeInput:oldInput];
+ }
+ if ([_captureSession canAddInput:input]) {
+ [_captureSession addInput:input];
+ } else {
+ RTCLogError(@"Cannot add camera as an input to the session.");
+ }
+ [_captureSession commitConfiguration];
+}
+
+- (void)updateOrientation {
+ NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+ @"updateOrientation must be called on the capture queue.");
+#if TARGET_OS_IPHONE
+ _orientation = [UIDevice currentDevice].orientation;
+#endif
+}
+
+@end
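
Because `+supportedFormatsForDevice:` deliberately returns every format the device offers, callers normally select one themselves before calling `-startCaptureWithDevice:format:fps:`. One plausible selection strategy (the helper name and scoring are illustrative, not part of the patch):

    #include <stdint.h>
    #include <stdlib.h>

    #import <AVFoundation/AVFoundation.h>

    #import "RTCCameraVideoCapturer.h"

    // Hypothetical helper: pick the supported format whose dimensions are
    // closest to the requested size.
    static AVCaptureDeviceFormat *CaptureFormatClosestTo(AVCaptureDevice *device,
                                                         int32_t targetWidth,
                                                         int32_t targetHeight) {
      AVCaptureDeviceFormat *bestFormat = nil;
      int32_t bestDiff = INT32_MAX;
      for (AVCaptureDeviceFormat *format in
           [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) {
        CMVideoDimensions dim =
            CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        int32_t diff =
            abs(targetWidth - dim.width) + abs(targetHeight - dim.height);
        if (diff < bestDiff) {
          bestFormat = format;
          bestDiff = diff;
        }
      }
      return bestFormat;
    }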
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.h b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.h
new file mode 100644
index 0000000000..19262c64cf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCVideoCapturer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Error passing block.
+ */
+typedef void (^RTCFileVideoCapturerErrorBlock)(NSError *error);
+
+/**
+ * Captures buffers from a bundled video file.
+ *
+ * See @c RTCVideoCapturer for more info on capturers.
+ */
+RTC_OBJC_EXPORT
+
+NS_CLASS_AVAILABLE_IOS(10)
+@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
+
+/**
+ * Starts asynchronous capture of frames from a video file.
+ *
+ * Capturing is not started if an error occurs. The underlying error is
+ * relayed via the errorBlock if one is provided.
+ * Successfully captured video frames are passed to the delegate.
+ *
+ * @param nameOfFile The name of the bundled video file to be read.
+ * @param errorBlock Block to be executed upon error.
+ */
+- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
+ onError:(__nullable RTCFileVideoCapturerErrorBlock)errorBlock;
+
+/**
+ * Immediately stops capture.
+ */
+- (void)stopCapture;
+@end
+
+NS_ASSUME_NONNULL_END
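
Typical use, assuming a video file named `sample.mp4` is bundled with the app and `videoSource` conforms to `RTCVideoCapturerDelegate`; both names are illustrative. Note that the implementation that follows restarts the reader when it reaches the end of the file, so playback loops until `-stopCapture`:

    #import "RTCFileVideoCapturer.h"

    // Hypothetical helper, not part of the patch.
    static RTC_OBJC_TYPE(RTCFileVideoCapturer) *StartFileCapture(
        id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)> videoSource) {
      RTC_OBJC_TYPE(RTCFileVideoCapturer) *capturer =
          [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:videoSource];
      [capturer startCapturingFromFileNamed:@"sample.mp4"
                                    onError:^(NSError *error) {
                                      NSLog(@"File capture failed: %@", error);
                                    }];
      return capturer;  // Caller retains; -stopCapture (or dealloc) stops it.
    }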
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m
new file mode 100644
index 0000000000..bcf1506259
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m
@@ -0,0 +1,215 @@
+/**
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCFileVideoCapturer.h"
+
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#include "rtc_base/system/gcd_helpers.h"
+
+NSString *const kRTCFileVideoCapturerErrorDomain =
+ @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)";
+
+typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
+ RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
+ RTCFileVideoCapturerErrorCode_FileNotFound
+};
+
+typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
+ RTCFileVideoCapturerStatusNotInitialized,
+ RTCFileVideoCapturerStatusStarted,
+ RTCFileVideoCapturerStatusStopped
+};
+
+@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) ()
+@property(nonatomic, assign) CMTime lastPresentationTime;
+@property(nonatomic, strong) NSURL *fileURL;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) {
+ AVAssetReader *_reader;
+ AVAssetReaderTrackOutput *_outTrack;
+ RTCFileVideoCapturerStatus _status;
+ dispatch_queue_t _frameQueue;
+}
+
+@synthesize lastPresentationTime = _lastPresentationTime;
+@synthesize fileURL = _fileURL;
+
+- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
+ onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
+ if (_status == RTCFileVideoCapturerStatusStarted) {
+ NSError *error =
+ [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
+ code:RTCFileVideoCapturerErrorCode_CapturerRunning
+ userInfo:@{NSUnderlyingErrorKey : @"Capturer has been started."}];
+
+ errorBlock(error);
+ return;
+ } else {
+ _status = RTCFileVideoCapturerStatusStarted;
+ }
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ NSString *pathForFile = [self pathForFileName:nameOfFile];
+ if (!pathForFile) {
+ NSString *errorString =
+ [NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile];
+ NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
+ code:RTCFileVideoCapturerErrorCode_FileNotFound
+ userInfo:@{NSLocalizedDescriptionKey : errorString}];
+ errorBlock(error);
+ return;
+ }
+
+ self.lastPresentationTime = kCMTimeZero;  // CMTimeMake(0, 0) would be an invalid CMTime.
+
+ self.fileURL = [NSURL fileURLWithPath:pathForFile];
+ [self setupReaderOnError:errorBlock];
+ });
+}
+
+- (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock {
+ AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_fileURL options:nil];
+
+ NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
+ NSError *error = nil;
+
+ _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
+ if (error) {
+ errorBlock(error);
+ return;
+ }
+
+ NSDictionary *options = @{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+ };
+ _outTrack =
+ [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject outputSettings:options];
+ [_reader addOutput:_outTrack];
+
+ [_reader startReading];
+ RTCLog(@"File capturer started reading");
+ [self readNextBuffer];
+}
+
+- (void)stopCapture {
+ _status = RTCFileVideoCapturerStatusStopped;
+ RTCLog(@"File capturer stopped.");
+}
+
+#pragma mark - Private
+
+- (nullable NSString *)pathForFileName:(NSString *)fileName {
+ NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
+ if (nameComponents.count != 2) {
+ return nil;
+ }
+
+ NSString *path =
+ [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
+ return path;
+}
+
+- (dispatch_queue_t)frameQueue {
+ if (!_frameQueue) {
+ _frameQueue = RTCDispatchQueueCreateWithTarget(
+ "org.webrtc.filecapturer.video",
+ DISPATCH_QUEUE_SERIAL,
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
+ }
+ return _frameQueue;
+}
+
+- (void)readNextBuffer {
+ if (_status == RTCFileVideoCapturerStatusStopped) {
+ [_reader cancelReading];
+ _reader = nil;
+ return;
+ }
+
+ if (_reader.status == AVAssetReaderStatusCompleted) {
+ [_reader cancelReading];
+ _reader = nil;
+ [self setupReaderOnError:nil];
+ return;
+ }
+
+ CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
+ if (!sampleBuffer) {
+ [self readNextBuffer];
+ return;
+ }
+ if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+ !CMSampleBufferDataIsReady(sampleBuffer)) {
+ CFRelease(sampleBuffer);
+ [self readNextBuffer];
+ return;
+ }
+
+ [self publishSampleBuffer:sampleBuffer];
+}
+
+- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+ CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+ Float64 presentationDifference =
+ CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
+ _lastPresentationTime = presentationTime;
+ int64_t presentationDifferenceRound = lroundf(presentationDifference * NSEC_PER_SEC);
+
+ __block dispatch_source_t timer = [self createStrictTimer];
+ // Strict timer that will fire `presentationDifferenceRound` ns from now and never again.
+ dispatch_source_set_timer(timer,
+ dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
+ DISPATCH_TIME_FOREVER,
+ 0);
+ dispatch_source_set_event_handler(timer, ^{
+ dispatch_source_cancel(timer);
+ timer = nil;
+
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (!pixelBuffer) {
+ CFRelease(sampleBuffer);
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ [self readNextBuffer];
+ });
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
+ NSTimeInterval timeStampSeconds = CACurrentMediaTime();
+ int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+ rotation:0
+ timeStampNs:timeStampNs];
+ CFRelease(sampleBuffer);
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ [self readNextBuffer];
+ });
+
+ [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+ });
+ dispatch_activate(timer);
+}
+
+- (dispatch_source_t)createStrictTimer {
+ dispatch_source_t timer = dispatch_source_create(
+ DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
+ return timer;
+}
+
+- (void)dealloc {
+ [self stopCapture];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor+Private.h b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor+Private.h
new file mode 100644
index 0000000000..b5c786be18
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNetworkMonitor.h"
+
+#include "sdk/objc/native/src/network_monitor_observer.h"
+
+@interface RTCNetworkMonitor ()
+
+/** `observer` is a raw pointer and should be kept alive
+ * for this object's lifetime.
+ */
+- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer
+ NS_DESIGNATED_INITIALIZER;
+
+/** Stops the receiver from posting updates to `observer`. */
+- (void)stop;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.h b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.h
new file mode 100644
index 0000000000..21d22f5463
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Listens for NWPathMonitor updates and forwards the results to a C++
+ * observer.
+ */
+@interface RTCNetworkMonitor : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.mm b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.mm
new file mode 100644
index 0000000000..7e75b2b4c0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.mm
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNetworkMonitor+Private.h"
+
+#import <Network/Network.h>
+
+#import "base/RTCLogging.h"
+#import "helpers/RTCDispatcher+Private.h"
+
+#include "rtc_base/string_utils.h"
+
+namespace {
+
+rtc::AdapterType AdapterTypeFromInterfaceType(nw_interface_type_t interfaceType) {
+ rtc::AdapterType adapterType = rtc::ADAPTER_TYPE_UNKNOWN;
+ switch (interfaceType) {
+ case nw_interface_type_other:
+ adapterType = rtc::ADAPTER_TYPE_UNKNOWN;
+ break;
+ case nw_interface_type_wifi:
+ adapterType = rtc::ADAPTER_TYPE_WIFI;
+ break;
+ case nw_interface_type_cellular:
+ adapterType = rtc::ADAPTER_TYPE_CELLULAR;
+ break;
+ case nw_interface_type_wired:
+ adapterType = rtc::ADAPTER_TYPE_ETHERNET;
+ break;
+ case nw_interface_type_loopback:
+ adapterType = rtc::ADAPTER_TYPE_LOOPBACK;
+ break;
+ default:
+ adapterType = rtc::ADAPTER_TYPE_UNKNOWN;
+ break;
+ }
+ return adapterType;
+}
+
+} // namespace
+
+@implementation RTCNetworkMonitor {
+ webrtc::NetworkMonitorObserver *_observer;
+ nw_path_monitor_t _pathMonitor;
+ dispatch_queue_t _monitorQueue;
+}
+
+- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer {
+ RTC_DCHECK(observer);
+ if (self = [super init]) {
+ _observer = observer;
+ if (@available(iOS 12, *)) {
+ _pathMonitor = nw_path_monitor_create();
+ if (_pathMonitor == nil) {
+ RTCLog(@"nw_path_monitor_create failed.");
+ return nil;
+ }
+ RTCLog(@"NW path monitor created.");
+ __weak RTCNetworkMonitor *weakSelf = self;
+ nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) {
+ if (weakSelf == nil) {
+ return;
+ }
+ RTCNetworkMonitor *strongSelf = weakSelf;
+ RTCLog(@"NW path monitor: updated.");
+ nw_path_status_t status = nw_path_get_status(path);
+ if (status == nw_path_status_invalid) {
+ RTCLog(@"NW path monitor status: invalid.");
+ } else if (status == nw_path_status_unsatisfied) {
+ RTCLog(@"NW path monitor status: unsatisfied.");
+ } else if (status == nw_path_status_satisfied) {
+ RTCLog(@"NW path monitor status: satisfied.");
+ } else if (status == nw_path_status_satisfiable) {
+ RTCLog(@"NW path monitor status: satisfiable.");
+ }
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp> *map =
+ new std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>();
+ nw_path_enumerate_interfaces(
+ path, (nw_path_enumerate_interfaces_block_t) ^ (nw_interface_t interface) {
+ const char *name = nw_interface_get_name(interface);
+ nw_interface_type_t interfaceType = nw_interface_get_type(interface);
+ RTCLog(@"NW path monitor available interface: %s", name);
+ rtc::AdapterType adapterType = AdapterTypeFromInterfaceType(interfaceType);
+ map->insert(std::pair<std::string, rtc::AdapterType>(name, adapterType));
+ });
+ @synchronized(strongSelf) {
+ webrtc::NetworkMonitorObserver *observer = strongSelf->_observer;
+ if (observer) {
+ observer->OnPathUpdate(std::move(*map));
+ }
+ }
+ delete map;
+ });
+ nw_path_monitor_set_queue(
+ _pathMonitor,
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchQueueForType:RTCDispatcherTypeNetworkMonitor]);
+ nw_path_monitor_start(_pathMonitor);
+ }
+ }
+ return self;
+}
+
+- (void)cancel {
+ if (@available(iOS 12, *)) {
+ nw_path_monitor_cancel(_pathMonitor);
+ }
+}
+
+- (void)stop {
+ [self cancel];
+ @synchronized(self) {
+ _observer = nil;
+ }
+}
+
+- (void)dealloc {
+ [self cancel];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h
new file mode 100644
index 0000000000..e5987fe22a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMTLRenderer.h"
+
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLI420Renderer : RTCMTLRenderer
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
new file mode 100644
index 0000000000..f4c76fa313
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLI420Renderer.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "base/RTCI420Buffer.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+
+#import "RTCMTLRenderer+Private.h"
+
+static NSString *const shaderSource = MTL_STRINGIFY(
+ using namespace metal;
+
+ typedef struct {
+ packed_float2 position;
+ packed_float2 texcoord;
+ } Vertex;
+
+ typedef struct {
+ float4 position[[position]];
+ float2 texcoord;
+ } Varyings;
+
+    vertex Varyings vertexPassthrough(constant Vertex *vertices[[buffer(0)]],
+                                      unsigned int vid[[vertex_id]]) {
+      Varyings out;
+      constant Vertex &v = vertices[vid];
+ out.position = float4(float2(v.position), 0.0, 1.0);
+ out.texcoord = v.texcoord;
+
+ return out;
+ }
+
+ fragment half4 fragmentColorConversion(
+ Varyings in[[stage_in]],
+ texture2d<float, access::sample> textureY[[texture(0)]],
+ texture2d<float, access::sample> textureU[[texture(1)]],
+ texture2d<float, access::sample> textureV[[texture(2)]]) {
+ constexpr sampler s(address::clamp_to_edge, filter::linear);
+ float y;
+ float u;
+ float v;
+ float r;
+ float g;
+ float b;
+ // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
+ y = textureY.sample(s, in.texcoord).r;
+ u = textureU.sample(s, in.texcoord).r;
+ v = textureV.sample(s, in.texcoord).r;
+ u = u - 0.5;
+ v = v - 0.5;
+ r = y + 1.403 * v;
+ g = y - 0.344 * u - 0.714 * v;
+ b = y + 1.770 * u;
+
+ float4 out = float4(r, g, b, 1.0);
+
+ return half4(out);
+ });
+
+@implementation RTCMTLI420Renderer {
+ // Textures.
+ id<MTLTexture> _yTexture;
+ id<MTLTexture> _uTexture;
+ id<MTLTexture> _vTexture;
+
+ MTLTextureDescriptor *_descriptor;
+ MTLTextureDescriptor *_chromaDescriptor;
+
+ int _width;
+ int _height;
+ int _chromaWidth;
+ int _chromaHeight;
+}
+
+#pragma mark - Virtual
+
+- (NSString *)shaderSource {
+ return shaderSource;
+}
+
+- (void)getWidth:(nonnull int *)width
+ height:(nonnull int *)height
+ cropWidth:(nonnull int *)cropWidth
+ cropHeight:(nonnull int *)cropHeight
+ cropX:(nonnull int *)cropX
+ cropY:(nonnull int *)cropY
+ ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ *width = frame.width;
+ *height = frame.height;
+ *cropWidth = frame.width;
+ *cropHeight = frame.height;
+ *cropX = 0;
+ *cropY = 0;
+}
+
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ if (![super setupTexturesForFrame:frame]) {
+ return NO;
+ }
+
+ id<MTLDevice> device = [self currentMetalDevice];
+ if (!device) {
+ return NO;
+ }
+
+ id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
+
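+  // Descriptors and textures are (re)created only when the frame size
+  // changes; otherwise the existing textures are simply refilled.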
+ // Luma (y) texture.
+ if (!_descriptor || _width != frame.width || _height != frame.height) {
+ _width = frame.width;
+ _height = frame.height;
+ _descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
+ width:_width
+ height:_height
+ mipmapped:NO];
+ _descriptor.usage = MTLTextureUsageShaderRead;
+ _yTexture = [device newTextureWithDescriptor:_descriptor];
+ }
+
+  [_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height)
+               mipmapLevel:0
+                 withBytes:buffer.dataY
+               bytesPerRow:buffer.strideY];
+
+  // Chroma (u,v) textures.
+  if (!_chromaDescriptor || _chromaWidth != frame.width / 2 || _chromaHeight != frame.height / 2) {
+ _chromaWidth = frame.width / 2;
+ _chromaHeight = frame.height / 2;
+ _chromaDescriptor =
+ [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
+ width:_chromaWidth
+ height:_chromaHeight
+ mipmapped:NO];
+ _chromaDescriptor.usage = MTLTextureUsageShaderRead;
+ _uTexture = [device newTextureWithDescriptor:_chromaDescriptor];
+ _vTexture = [device newTextureWithDescriptor:_chromaDescriptor];
+ }
+
+ [_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
+ mipmapLevel:0
+ withBytes:buffer.dataU
+ bytesPerRow:buffer.strideU];
+ [_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
+ mipmapLevel:0
+ withBytes:buffer.dataV
+ bytesPerRow:buffer.strideV];
+
+ return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
+}
+
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+ [renderEncoder setFragmentTexture:_yTexture atIndex:0];
+ [renderEncoder setFragmentTexture:_uTexture atIndex:1];
+ [renderEncoder setFragmentTexture:_vTexture atIndex:2];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
new file mode 100644
index 0000000000..f70e2ad5ee
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AppKit/AppKit.h>
+
+#import "RTCVideoRenderer.h"
+
+NS_AVAILABLE_MAC(10.11)
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView <RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+
++ (BOOL)isMetalAvailable;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m
new file mode 100644
index 0000000000..625fb1caa7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLNSVideoView.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "base/RTCVideoFrame.h"
+
+#import "RTCMTLI420Renderer.h"
+
+@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) () <MTKViewDelegate>
+@property(nonatomic) id<RTCMTLRenderer> renderer;
+@property(nonatomic, strong) MTKView *metalView;
+@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) {
+ id<RTCMTLRenderer> _renderer;
+}
+
+@synthesize delegate = _delegate;
+@synthesize renderer = _renderer;
+@synthesize metalView = _metalView;
+@synthesize videoFrame = _videoFrame;
+
+- (instancetype)initWithFrame:(CGRect)frameRect {
+ self = [super initWithFrame:frameRect];
+ if (self) {
+ [self configure];
+ }
+ return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aCoder {
+ self = [super initWithCoder:aCoder];
+ if (self) {
+ [self configure];
+ }
+ return self;
+}
+
+#pragma mark - Private
+
++ (BOOL)isMetalAvailable {
+ return [MTLCopyAllDevices() count] > 0;
+}
+
+- (void)configure {
+ if ([[self class] isMetalAvailable]) {
+ _metalView = [[MTKView alloc] initWithFrame:self.bounds];
+ [self addSubview:_metalView];
+ _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit;
+ _metalView.translatesAutoresizingMaskIntoConstraints = NO;
+ _metalView.framebufferOnly = YES;
+ _metalView.delegate = self;
+
+ _renderer = [[RTCMTLI420Renderer alloc] init];
+ if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) {
+ _renderer = nil;
+    }
+ }
+}
+
+- (void)updateConstraints {
+ NSDictionary *views = NSDictionaryOfVariableBindings(_metalView);
+
+ NSArray *constraintsHorizontal =
+ [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|"
+ options:0
+ metrics:nil
+ views:views];
+ [self addConstraints:constraintsHorizontal];
+
+ NSArray *constraintsVertical =
+ [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|"
+ options:0
+ metrics:nil
+ views:views];
+ [self addConstraints:constraintsVertical];
+ [super updateConstraints];
+}
+
+#pragma mark - MTKViewDelegate methods
+- (void)drawInMTKView:(nonnull MTKView *)view {
+ if (self.videoFrame == nil) {
+ return;
+ }
+ if (view == self.metalView) {
+ [_renderer drawFrame:self.videoFrame];
+ }
+}
+
+- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+- (void)setSize:(CGSize)size {
+ _metalView.drawableSize = size;
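+  // setSize: may be invoked off the main thread, so the delegate notification
+  // is dispatched to the main queue.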
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [self.delegate videoView:self didChangeVideoSize:size];
+ });
+ [_metalView draw];
+}
+
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ if (frame == nil) {
+ return;
+ }
+ self.videoFrame = [frame newI420VideoFrame];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h
new file mode 100644
index 0000000000..866b7ea17e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMTLRenderer.h"
+
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLNV12Renderer : RTCMTLRenderer
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm
new file mode 100644
index 0000000000..7b037c6dbc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLNV12Renderer.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "RTCMTLRenderer+Private.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#include "rtc_base/checks.h"
+
+static NSString *const shaderSource = MTL_STRINGIFY(
+ using namespace metal;
+
+ typedef struct {
+ packed_float2 position;
+ packed_float2 texcoord;
+ } Vertex;
+
+ typedef struct {
+ float4 position[[position]];
+ float2 texcoord;
+ } Varyings;
+
+    vertex Varyings vertexPassthrough(constant Vertex *vertices[[buffer(0)]],
+                                      unsigned int vid[[vertex_id]]) {
+      Varyings out;
+      constant Vertex &v = vertices[vid];
+ out.position = float4(float2(v.position), 0.0, 1.0);
+ out.texcoord = v.texcoord;
+ return out;
+ }
+
+ // Receiving YCrCb textures.
+ fragment half4 fragmentColorConversion(
+ Varyings in[[stage_in]],
+ texture2d<float, access::sample> textureY[[texture(0)]],
+ texture2d<float, access::sample> textureCbCr[[texture(1)]]) {
+ constexpr sampler s(address::clamp_to_edge, filter::linear);
+ float y;
+ float2 uv;
+ y = textureY.sample(s, in.texcoord).r;
+ uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5);
+
+ // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
+ float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
+
+ return half4(out);
+ });
+
+@implementation RTCMTLNV12Renderer {
+ // Textures.
+ CVMetalTextureCacheRef _textureCache;
+ id<MTLTexture> _yTexture;
+ id<MTLTexture> _CrCbTexture;
+}
+
+- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
+ if ([super addRenderingDestination:view]) {
+ return [self initializeTextureCache];
+ }
+ return NO;
+}
+
+- (BOOL)initializeTextureCache {
+ CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
+ nil, &_textureCache);
+ if (status != kCVReturnSuccess) {
+ RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
+ return NO;
+ }
+
+ return YES;
+}
+
+- (NSString *)shaderSource {
+ return shaderSource;
+}
+
+- (void)getWidth:(nonnull int *)width
+ height:(nonnull int *)height
+ cropWidth:(nonnull int *)cropWidth
+ cropHeight:(nonnull int *)cropHeight
+ cropX:(nonnull int *)cropX
+ cropY:(nonnull int *)cropY
+ ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
+ *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
+ *cropWidth = pixelBuffer.cropWidth;
+ *cropHeight = pixelBuffer.cropHeight;
+ *cropX = pixelBuffer.cropX;
+ *cropY = pixelBuffer.cropY;
+}
+
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
+ if (![super setupTexturesForFrame:frame]) {
+ return NO;
+ }
+ CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
+
+ id<MTLTexture> lumaTexture = nil;
+ id<MTLTexture> chromaTexture = nil;
+ CVMetalTextureRef outTexture = nullptr;
+
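+  // CVMetalTextureCacheCreateTextureFromImage wraps a pixel-buffer plane in an
+  // MTLTexture without copying the underlying pixel data.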
+ // Luma (y) texture.
+ int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
+ int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
+
+ int indexPlane = 0;
+ CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth,
+ lumaHeight, indexPlane, &outTexture);
+
+ if (result == kCVReturnSuccess) {
+ lumaTexture = CVMetalTextureGetTexture(outTexture);
+ }
+
+ // Same as CFRelease except it can be passed NULL without crashing.
+ CVBufferRelease(outTexture);
+ outTexture = nullptr;
+
+ // Chroma (CrCb) texture.
+ indexPlane = 1;
+ result = CVMetalTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2,
+ lumaHeight / 2, indexPlane, &outTexture);
+ if (result == kCVReturnSuccess) {
+ chromaTexture = CVMetalTextureGetTexture(outTexture);
+ }
+ CVBufferRelease(outTexture);
+
+ if (lumaTexture != nil && chromaTexture != nil) {
+ _yTexture = lumaTexture;
+ _CrCbTexture = chromaTexture;
+ return YES;
+ }
+ return NO;
+}
+
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+ [renderEncoder setFragmentTexture:_yTexture atIndex:0];
+ [renderEncoder setFragmentTexture:_CrCbTexture atIndex:1];
+}
+
+- (void)dealloc {
+ if (_textureCache) {
+ CFRelease(_textureCache);
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h
new file mode 100644
index 0000000000..9db422cd22
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMTLRenderer.h"
+
+/** @abstract RGB/BGR renderer.
+ * @discussion This renderer handles both kCVPixelFormatType_32BGRA and
+ * kCVPixelFormatType_32ARGB.
+ */
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLRGBRenderer : RTCMTLRenderer
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm
new file mode 100644
index 0000000000..e5dc4ef80a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLRGBRenderer.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "RTCMTLRenderer+Private.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#include "rtc_base/checks.h"
+
+static NSString *const shaderSource = MTL_STRINGIFY(
+ using namespace metal;
+
+ typedef struct {
+ packed_float2 position;
+ packed_float2 texcoord;
+ } Vertex;
+
+ typedef struct {
+ float4 position[[position]];
+ float2 texcoord;
+ } VertexIO;
+
+    vertex VertexIO vertexPassthrough(constant Vertex *vertices[[buffer(0)]],
+                                      uint vid[[vertex_id]]) {
+      VertexIO out;
+      constant Vertex &v = vertices[vid];
+ out.position = float4(float2(v.position), 0.0, 1.0);
+ out.texcoord = v.texcoord;
+ return out;
+ }
+
+ fragment half4 fragmentColorConversion(VertexIO in[[stage_in]],
+ texture2d<half, access::sample> texture[[texture(0)]],
+ constant bool &isARGB[[buffer(0)]]) {
+ constexpr sampler s(address::clamp_to_edge, filter::linear);
+
+ half4 out = texture.sample(s, in.texcoord);
+ if (isARGB) {
+ out = half4(out.g, out.b, out.a, out.r);
+ }
+
+ return out;
+ });
+
+@implementation RTCMTLRGBRenderer {
+ // Textures.
+ CVMetalTextureCacheRef _textureCache;
+ id<MTLTexture> _texture;
+
+ // Uniforms.
+ id<MTLBuffer> _uniformsBuffer;
+}
+
+- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
+ if ([super addRenderingDestination:view]) {
+ return [self initializeTextureCache];
+ }
+ return NO;
+}
+
+- (BOOL)initializeTextureCache {
+ CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
+ nil, &_textureCache);
+ if (status != kCVReturnSuccess) {
+ RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
+ return NO;
+ }
+
+ return YES;
+}
+
+- (NSString *)shaderSource {
+ return shaderSource;
+}
+
+- (void)getWidth:(nonnull int *)width
+ height:(nonnull int *)height
+ cropWidth:(nonnull int *)cropWidth
+ cropHeight:(nonnull int *)cropHeight
+ cropX:(nonnull int *)cropX
+ cropY:(nonnull int *)cropY
+ ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
+ *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
+ *cropWidth = pixelBuffer.cropWidth;
+ *cropHeight = pixelBuffer.cropHeight;
+ *cropX = pixelBuffer.cropX;
+ *cropY = pixelBuffer.cropY;
+}
+
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
+ if (![super setupTexturesForFrame:frame]) {
+ return NO;
+ }
+ CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
+
+ id<MTLTexture> gpuTexture = nil;
+ CVMetalTextureRef textureOut = nullptr;
+ bool isARGB;
+
+ int width = CVPixelBufferGetWidth(pixelBuffer);
+ int height = CVPixelBufferGetHeight(pixelBuffer);
+ OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+
+ MTLPixelFormat mtlPixelFormat;
+ if (pixelFormat == kCVPixelFormatType_32BGRA) {
+ mtlPixelFormat = MTLPixelFormatBGRA8Unorm;
+ isARGB = false;
+ } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
+ mtlPixelFormat = MTLPixelFormatRGBA8Unorm;
+ isARGB = true;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ return NO;
+ }
+
+ CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat,
+ width, height, 0, &textureOut);
+ if (result == kCVReturnSuccess) {
+ gpuTexture = CVMetalTextureGetTexture(textureOut);
+ }
+ CVBufferRelease(textureOut);
+
+ if (gpuTexture != nil) {
+ _texture = gpuTexture;
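+    // The isARGB flag is fed to the fragment shader through buffer(0); when
+    // set, the shader swizzles the sampled ARGB texels back into RGBA order.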
+ _uniformsBuffer =
+ [[self currentMetalDevice] newBufferWithBytes:&isARGB
+ length:sizeof(isARGB)
+ options:MTLResourceCPUCacheModeDefaultCache];
+ return YES;
+ }
+
+ return NO;
+}
+
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+ [renderEncoder setFragmentTexture:_texture atIndex:0];
+ [renderEncoder setFragmentBuffer:_uniformsBuffer offset:0 atIndex:0];
+}
+
+- (void)dealloc {
+ if (_textureCache) {
+ CFRelease(_textureCache);
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h
new file mode 100644
index 0000000000..916d4d4430
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Metal/Metal.h>
+
+#import "RTCMTLRenderer.h"
+
+#define MTL_STRINGIFY(s) @ #s
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMTLRenderer (Private)
+- (nullable id<MTLDevice>)currentMetalDevice;
+- (NSString *)shaderSource;
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;
+- (void)getWidth:(nonnull int *)width
+ height:(nonnull int *)height
+ cropWidth:(nonnull int *)cropWidth
+ cropHeight:(nonnull int *)cropHeight
+ cropX:(nonnull int *)cropX
+ cropY:(nonnull int *)cropY
+ ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h
new file mode 100644
index 0000000000..aa31545973
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#else
+#import <AppKit/AppKit.h>
+#endif
+
+#import "base/RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+/**
+ * Protocol defining the ability to render RTCVideoFrame in Metal-enabled views.
+ */
+@protocol RTCMTLRenderer <NSObject>
+
+/**
+ * Method to be implemented to perform actual rendering of the provided frame.
+ *
+ * @param frame The frame to be rendered.
+ */
+- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+/**
+ * Sets the provided view as rendering destination if possible.
+ *
+ * If not possible, the method returns NO and its callers are responsible for
+ * performing cleanup.
+ */
+
+#if TARGET_OS_IOS
+- (BOOL)addRenderingDestination:(__kindof UIView *)view;
+#else
+- (BOOL)addRenderingDestination:(__kindof NSView *)view;
+#endif
+
+@end
+
+/**
+ * Implementation of RTCMTLRenderer protocol.
+ */
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLRenderer : NSObject <RTCMTLRenderer>
+
+/** @abstract A wrapped RTCVideoRotation, or nil.
+ @discussion When not nil, the rotation of the actual frame is ignored when rendering.
+ */
+@property(atomic, nullable) NSValue *rotationOverride;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm
new file mode 100644
index 0000000000..410590a7b1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm
@@ -0,0 +1,328 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLRenderer+Private.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+
+#include "api/video/video_rotation.h"
+#include "rtc_base/checks.h"
+
+// As defined in shaderSource.
+static NSString *const vertexFunctionName = @"vertexPassthrough";
+static NSString *const fragmentFunctionName = @"fragmentColorConversion";
+
+static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
+static NSString *const commandBufferLabel = @"RTCCommandBuffer";
+static NSString *const renderEncoderLabel = @"RTCEncoder";
+static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
+
+// Computes the texture coordinates given rotation and cropping.
+static inline void getCubeVertexData(int cropX,
+ int cropY,
+ int cropWidth,
+ int cropHeight,
+ size_t frameWidth,
+ size_t frameHeight,
+ RTCVideoRotation rotation,
+ float *buffer) {
+ // The computed values are the adjusted texture coordinates, in [0..1].
+ // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the
+ // left/top edge.
+ // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the
+ // right/bottom edge (i.e. render up to 80% of the width/height).
+ float cropLeft = cropX / (float)frameWidth;
+ float cropRight = (cropX + cropWidth) / (float)frameWidth;
+ float cropTop = cropY / (float)frameHeight;
+ float cropBottom = (cropY + cropHeight) / (float)frameHeight;
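+  // Example: a 1280x720 frame cropped to its centered 640x360 region yields
+  // cropLeft = 0.25, cropRight = 0.75, cropTop = 0.25 and cropBottom = 0.75.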
+
+ // These arrays map the view coordinates to texture coordinates, taking cropping and rotation
+ // into account. The first two columns are view coordinates, the last two are texture coordinates.
+ switch (rotation) {
+ case RTCVideoRotation_0: {
+ float values[16] = {-1.0, -1.0, cropLeft, cropBottom,
+ 1.0, -1.0, cropRight, cropBottom,
+ -1.0, 1.0, cropLeft, cropTop,
+ 1.0, 1.0, cropRight, cropTop};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_90: {
+ float values[16] = {-1.0, -1.0, cropRight, cropBottom,
+ 1.0, -1.0, cropRight, cropTop,
+ -1.0, 1.0, cropLeft, cropBottom,
+ 1.0, 1.0, cropLeft, cropTop};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_180: {
+ float values[16] = {-1.0, -1.0, cropRight, cropTop,
+ 1.0, -1.0, cropLeft, cropTop,
+ -1.0, 1.0, cropRight, cropBottom,
+ 1.0, 1.0, cropLeft, cropBottom};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_270: {
+ float values[16] = {-1.0, -1.0, cropLeft, cropTop,
+ 1.0, -1.0, cropLeft, cropBottom,
+ -1.0, 1.0, cropRight, cropTop,
+ 1.0, 1.0, cropRight, cropBottom};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ }
+}
+
+// The max number of command buffers in flight (submitted to GPU).
+// For now it is set to 1.
+// In the future we might use triple buffering if it improves performance.
+static const NSInteger kMaxInflightBuffers = 1;
+
+@implementation RTCMTLRenderer {
+ __kindof MTKView *_view;
+
+ // Controller.
+ dispatch_semaphore_t _inflight_semaphore;
+
+ // Renderer.
+ id<MTLDevice> _device;
+ id<MTLCommandQueue> _commandQueue;
+ id<MTLLibrary> _defaultLibrary;
+ id<MTLRenderPipelineState> _pipelineState;
+
+ // Buffers.
+ id<MTLBuffer> _vertexBuffer;
+
+ // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
+ int _oldFrameWidth;
+ int _oldFrameHeight;
+ int _oldCropWidth;
+ int _oldCropHeight;
+ int _oldCropX;
+ int _oldCropY;
+ RTCVideoRotation _oldRotation;
+}
+
+@synthesize rotationOverride = _rotationOverride;
+
+- (instancetype)init {
+ if (self = [super init]) {
+ _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
+ }
+
+ return self;
+}
+
+- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
+ return [self setupWithView:view];
+}
+
+#pragma mark - Private
+
+- (BOOL)setupWithView:(__kindof MTKView *)view {
+ BOOL success = NO;
+ if ([self setupMetal]) {
+ _view = view;
+ view.device = _device;
+ view.preferredFramesPerSecond = 30;
+ view.autoResizeDrawable = NO;
+
+ [self loadAssets];
+
+ float vertexBufferArray[16] = {0};
+ _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
+ length:sizeof(vertexBufferArray)
+ options:MTLResourceCPUCacheModeWriteCombined];
+ success = YES;
+ }
+ return success;
+}
+#pragma mark - Inheritance
+
+- (id<MTLDevice>)currentMetalDevice {
+ return _device;
+}
+
+- (NSString *)shaderSource {
+ RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
+ return nil;
+}
+
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+ RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
+}
+
+- (void)getWidth:(int *)width
+ height:(int *)height
+ cropWidth:(int *)cropWidth
+ cropHeight:(int *)cropHeight
+ cropX:(int *)cropX
+ cropY:(int *)cropY
+ ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
+}
+
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ // Apply rotation override if set.
+ RTCVideoRotation rotation;
+ NSValue *rotationOverride = self.rotationOverride;
+ if (rotationOverride) {
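+    // -[NSValue getValue:size:] is only available from iOS 11, so older
+    // systems fall back to the unsized -getValue: variant.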
+#if defined(__IPHONE_11_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
+ (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 11, *)) {
+ [rotationOverride getValue:&rotation size:sizeof(rotation)];
+ } else
+#endif
+ {
+ [rotationOverride getValue:&rotation];
+ }
+ } else {
+ rotation = frame.rotation;
+ }
+
+ int frameWidth, frameHeight, cropWidth, cropHeight, cropX, cropY;
+ [self getWidth:&frameWidth
+ height:&frameHeight
+ cropWidth:&cropWidth
+ cropHeight:&cropHeight
+ cropX:&cropX
+ cropY:&cropY
+ ofFrame:frame];
+
+ // Recompute the texture cropping and recreate vertexBuffer if necessary.
+ if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
+ cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
+ frameHeight != _oldFrameHeight) {
+ getCubeVertexData(cropX,
+ cropY,
+ cropWidth,
+ cropHeight,
+ frameWidth,
+ frameHeight,
+ rotation,
+ (float *)_vertexBuffer.contents);
+ _oldCropX = cropX;
+ _oldCropY = cropY;
+ _oldCropWidth = cropWidth;
+ _oldCropHeight = cropHeight;
+ _oldRotation = rotation;
+ _oldFrameWidth = frameWidth;
+ _oldFrameHeight = frameHeight;
+ }
+
+ return YES;
+}
+
+#pragma mark - GPU methods
+
+- (BOOL)setupMetal {
+ // Set the view to use the default device.
+ _device = MTLCreateSystemDefaultDevice();
+ if (!_device) {
+ return NO;
+ }
+
+ // Create a new command queue.
+ _commandQueue = [_device newCommandQueue];
+
+ // Load metal library from source.
+ NSError *libraryError = nil;
+ NSString *shaderSource = [self shaderSource];
+
+ id<MTLLibrary> sourceLibrary =
+ [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];
+
+ if (libraryError) {
+ RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
+ return NO;
+ }
+
+ if (!sourceLibrary) {
+ RTCLogError(@"Metal: Failed to load library. %@", libraryError);
+ return NO;
+ }
+ _defaultLibrary = sourceLibrary;
+
+ return YES;
+}
+
+- (void)loadAssets {
+ id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
+ id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
+
+ MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
+ pipelineDescriptor.label = pipelineDescriptorLabel;
+ pipelineDescriptor.vertexFunction = vertexFunction;
+ pipelineDescriptor.fragmentFunction = fragmentFunction;
+ pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
+ pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
+ NSError *error = nil;
+ _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
+
+ if (!_pipelineState) {
+ RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
+ }
+}
+
+- (void)render {
+ id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
+ commandBuffer.label = commandBufferLabel;
+
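+  // The completion handler signals the in-flight semaphore, pairing with the
+  // wait in -drawFrame: so that at most kMaxInflightBuffers command buffers
+  // are submitted to the GPU at a time.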
+ __block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
+ [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
+ // GPU work completed.
+ dispatch_semaphore_signal(block_semaphore);
+ }];
+
+ MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
+ if (renderPassDescriptor) { // Valid drawable.
+ id<MTLRenderCommandEncoder> renderEncoder =
+ [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
+ renderEncoder.label = renderEncoderLabel;
+
+ // Set context state.
+ [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
+ [renderEncoder setRenderPipelineState:_pipelineState];
+ [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
+ [self uploadTexturesToRenderEncoder:renderEncoder];
+
+ [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
+ vertexStart:0
+ vertexCount:4
+ instanceCount:1];
+ [renderEncoder popDebugGroup];
+ [renderEncoder endEncoding];
+
+ [commandBuffer presentDrawable:_view.currentDrawable];
+ }
+
+ // CPU work is completed, GPU work can be started.
+ [commandBuffer commit];
+}
+
+#pragma mark - RTCMTLRenderer
+
+- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ @autoreleasepool {
+    // Wait until the inflight (currently sent to GPU) command buffer
+ // has completed the GPU work.
+ dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);
+
+ if ([self setupTexturesForFrame:frame]) {
+ [self render];
+ } else {
+ dispatch_semaphore_signal(_inflight_semaphore);
+ }
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
new file mode 100644
index 0000000000..3320d12076
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrame.h"
+#import "RTCVideoRenderer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * RTCMTLVideoView is a thin wrapper around MTKView.
+ *
+ * It implements the id<RTCVideoRenderer> protocol and renders video frames in
+ * the view's bounds using Metal.
+ */
+NS_CLASS_AVAILABLE_IOS(9)
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView<RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+
+@property(nonatomic) UIViewContentMode videoContentMode;
+
+/** @abstract Enables/disables rendering.
+ */
+@property(nonatomic, getter=isEnabled) BOOL enabled;
+
+/** @abstract Wrapped RTCVideoRotation, or nil.
+ */
+@property(nonatomic, nullable) NSValue* rotationOverride;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
new file mode 100644
index 0000000000..c5d9e4385f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLVideoView.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#import "RTCMTLI420Renderer.h"
+#import "RTCMTLNV12Renderer.h"
+#import "RTCMTLRGBRenderer.h"
+
+// To avoid unrecognized-symbol linker errors, we look these classes up through
+// the Objective-C runtime. Linking errors occur when compiling for
+// architectures that don't support Metal.
+#define MTKViewClass NSClassFromString(@"MTKView")
+#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
+#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
+#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
+
+@interface RTC_OBJC_TYPE (RTCMTLVideoView) () <MTKViewDelegate>
+@property(nonatomic) RTCMTLI420Renderer *rendererI420;
+@property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
+@property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
+@property(nonatomic) MTKView *metalView;
+@property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
+@property(nonatomic) CGSize videoFrameSize;
+@property(nonatomic) int64_t lastFrameTimeNs;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCMTLVideoView)
+
+@synthesize delegate = _delegate;
+@synthesize rendererI420 = _rendererI420;
+@synthesize rendererNV12 = _rendererNV12;
+@synthesize rendererRGB = _rendererRGB;
+@synthesize metalView = _metalView;
+@synthesize videoFrame = _videoFrame;
+@synthesize videoFrameSize = _videoFrameSize;
+@synthesize lastFrameTimeNs = _lastFrameTimeNs;
+@synthesize rotationOverride = _rotationOverride;
+
+- (instancetype)initWithFrame:(CGRect)frameRect {
+ self = [super initWithFrame:frameRect];
+ if (self) {
+ [self configure];
+ }
+ return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aCoder {
+ self = [super initWithCoder:aCoder];
+ if (self) {
+ [self configure];
+ }
+ return self;
+}
+
+- (BOOL)isEnabled {
+ return !self.metalView.paused;
+}
+
+- (void)setEnabled:(BOOL)enabled {
+ self.metalView.paused = !enabled;
+}
+
+- (UIViewContentMode)videoContentMode {
+ return self.metalView.contentMode;
+}
+
+- (void)setVideoContentMode:(UIViewContentMode)mode {
+ self.metalView.contentMode = mode;
+}
+
+#pragma mark - Private
+
++ (BOOL)isMetalAvailable {
+ return MTLCreateSystemDefaultDevice() != nil;
+}
+
++ (MTKView *)createMetalView:(CGRect)frame {
+ return [[MTKViewClass alloc] initWithFrame:frame];
+}
+
++ (RTCMTLNV12Renderer *)createNV12Renderer {
+ return [[RTCMTLNV12RendererClass alloc] init];
+}
+
++ (RTCMTLI420Renderer *)createI420Renderer {
+ return [[RTCMTLI420RendererClass alloc] init];
+}
+
++ (RTCMTLRGBRenderer *)createRGBRenderer {
+  return [[RTCMTLRGBRendererClass alloc] init];
+}
+
+- (void)configure {
+ NSAssert([RTC_OBJC_TYPE(RTCMTLVideoView) isMetalAvailable],
+ @"Metal not availiable on this device");
+
+ self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds];
+ self.metalView.delegate = self;
+ self.metalView.contentMode = UIViewContentModeScaleAspectFill;
+ [self addSubview:self.metalView];
+ self.videoFrameSize = CGSizeZero;
+}
+
+- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
+ [super setMultipleTouchEnabled:multipleTouchEnabled];
+ self.metalView.multipleTouchEnabled = multipleTouchEnabled;
+}
+
+- (void)layoutSubviews {
+ [super layoutSubviews];
+
+ CGRect bounds = self.bounds;
+ self.metalView.frame = bounds;
+ if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) {
+ self.metalView.drawableSize = [self drawableSize];
+ } else {
+ self.metalView.drawableSize = bounds.size;
+ }
+}
+
+#pragma mark - MTKViewDelegate methods
+
+- (void)drawInMTKView:(nonnull MTKView *)view {
+ NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame;
+ // Skip rendering if we've already rendered this frame.
+ if (!videoFrame || videoFrame.width <= 0 || videoFrame.height <= 0 ||
+ videoFrame.timeStampNs == self.lastFrameTimeNs) {
+ return;
+ }
+
+ if (CGRectIsEmpty(view.bounds)) {
+ return;
+ }
+
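+  // Pick (and lazily create) a renderer that matches the incoming buffer:
+  // RGB for 32BGRA/32ARGB pixel buffers, NV12 for other CVPixelBuffers, and
+  // the I420 renderer for everything else.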
+ RTCMTLRenderer *renderer;
+ if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
+ const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
+ if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
+ if (!self.rendererRGB) {
+ self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer];
+ if (![self.rendererRGB addRenderingDestination:self.metalView]) {
+ self.rendererRGB = nil;
+ RTCLogError(@"Failed to create RGB renderer");
+ return;
+ }
+ }
+ renderer = self.rendererRGB;
+ } else {
+ if (!self.rendererNV12) {
+ self.rendererNV12 = [RTC_OBJC_TYPE(RTCMTLVideoView) createNV12Renderer];
+ if (![self.rendererNV12 addRenderingDestination:self.metalView]) {
+ self.rendererNV12 = nil;
+ RTCLogError(@"Failed to create NV12 renderer");
+ return;
+ }
+ }
+ renderer = self.rendererNV12;
+ }
+ } else {
+ if (!self.rendererI420) {
+ self.rendererI420 = [RTC_OBJC_TYPE(RTCMTLVideoView) createI420Renderer];
+ if (![self.rendererI420 addRenderingDestination:self.metalView]) {
+ self.rendererI420 = nil;
+ RTCLogError(@"Failed to create I420 renderer");
+ return;
+ }
+ }
+ renderer = self.rendererI420;
+ }
+
+ renderer.rotationOverride = self.rotationOverride;
+
+ [renderer drawFrame:videoFrame];
+ self.lastFrameTimeNs = videoFrame.timeStampNs;
+}
+
+- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
+}
+
+#pragma mark -
+
+- (void)setRotationOverride:(NSValue *)rotationOverride {
+ _rotationOverride = rotationOverride;
+
+ self.metalView.drawableSize = [self drawableSize];
+ [self setNeedsLayout];
+}
+
+- (RTCVideoRotation)frameRotation {
+ if (self.rotationOverride) {
+ RTCVideoRotation rotation;
+ if (@available(iOS 11, *)) {
+ [self.rotationOverride getValue:&rotation size:sizeof(rotation)];
+ } else {
+ [self.rotationOverride getValue:&rotation];
+ }
+ return rotation;
+ }
+
+ return self.videoFrame.rotation;
+}
+
+- (CGSize)drawableSize {
+ // Flip width/height if the rotations are not the same.
+ CGSize videoFrameSize = self.videoFrameSize;
+ RTCVideoRotation frameRotation = [self frameRotation];
+
+ BOOL useLandscape =
+ (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
+ BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
+ (self.videoFrame.rotation == RTCVideoRotation_180);
+
+ if (useLandscape == sizeIsLandscape) {
+ return videoFrameSize;
+ } else {
+ return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
+ }
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+- (void)setSize:(CGSize)size {
+ __weak RTC_OBJC_TYPE(RTCMTLVideoView) *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTC_OBJC_TYPE(RTCMTLVideoView) *strongSelf = weakSelf;
+
+ strongSelf.videoFrameSize = size;
+ CGSize drawableSize = [strongSelf drawableSize];
+
+ strongSelf.metalView.drawableSize = drawableSize;
+ [strongSelf setNeedsLayout];
+    [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ if (!self.isEnabled) {
+ return;
+ }
+
+ if (frame == nil) {
+ RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
+ return;
+ }
+ self.videoFrame = frame;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h
new file mode 100644
index 0000000000..71a073ab21
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoViewShading.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView
+ * and RTCEAGLVideoView if no external shader is specified. This shader will render
+ * the video in a rectangle without any color or geometric transformations.
+ */
+@interface RTCDefaultShader : NSObject <RTC_OBJC_TYPE (RTCVideoViewShading)>
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm
new file mode 100644
index 0000000000..51dca3223d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDefaultShader.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+
+#import "RTCOpenGLDefines.h"
+#import "RTCShader.h"
+#import "base/RTCLogging.h"
+
+#include "absl/types/optional.h"
+
+static const int kYTextureUnit = 0;
+static const int kUTextureUnit = 1;
+static const int kVTextureUnit = 2;
+static const int kUvTextureUnit = 1;
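+// kUvTextureUnit intentionally aliases kUTextureUnit: the I420 and NV12
+// programs are never bound at the same time, so texture unit 1 carries either
+// the U plane or the interleaved UV plane.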
+
+// Fragment shader converts YUV values from input textures into a final RGB
+// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
+static const char kI420FragmentShaderSource[] =
+ SHADER_VERSION
+ "precision highp float;"
+ FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
+ "uniform lowp sampler2D s_textureY;\n"
+ "uniform lowp sampler2D s_textureU;\n"
+ "uniform lowp sampler2D s_textureV;\n"
+ FRAGMENT_SHADER_OUT
+ "void main() {\n"
+ " float y, u, v, r, g, b;\n"
+ " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
+ " u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
+ " v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
+ " u = u - 0.5;\n"
+ " v = v - 0.5;\n"
+ " r = y + 1.403 * v;\n"
+ " g = y - 0.344 * u - 0.714 * v;\n"
+ " b = y + 1.770 * u;\n"
+ " " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
+ " }\n";
+
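+// Fragment shader for NV12 frames. The interleaved UV plane is expected to be
+// bound as a two-channel (luminance/alpha style) texture, so U is read from
+// the .r component and V from the .a component.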
+static const char kNV12FragmentShaderSource[] =
+ SHADER_VERSION
+ "precision mediump float;"
+ FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
+ "uniform lowp sampler2D s_textureY;\n"
+ "uniform lowp sampler2D s_textureUV;\n"
+ FRAGMENT_SHADER_OUT
+ "void main() {\n"
+ " mediump float y;\n"
+ " mediump vec2 uv;\n"
+ " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
+ " uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
+ " vec2(0.5, 0.5);\n"
+ " " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
+ " y - 0.344 * uv.x - 0.714 * uv.y,\n"
+ " y + 1.770 * uv.x,\n"
+ " 1.0);\n"
+ " }\n";
+
+@implementation RTCDefaultShader {
+ GLuint _vertexBuffer;
+ GLuint _vertexArray;
+ // Store current rotation and only upload new vertex data when rotation changes.
+ absl::optional<RTCVideoRotation> _currentRotation;
+
+ GLuint _i420Program;
+ GLuint _nv12Program;
+}
+
+- (void)dealloc {
+ glDeleteProgram(_i420Program);
+ glDeleteProgram(_nv12Program);
+ glDeleteBuffers(1, &_vertexBuffer);
+ glDeleteVertexArrays(1, &_vertexArray);
+}
+
+- (BOOL)createAndSetupI420Program {
+ NSAssert(!_i420Program, @"I420 program already created");
+ _i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource);
+ if (!_i420Program) {
+ return NO;
+ }
+ GLint ySampler = glGetUniformLocation(_i420Program, "s_textureY");
+ GLint uSampler = glGetUniformLocation(_i420Program, "s_textureU");
+ GLint vSampler = glGetUniformLocation(_i420Program, "s_textureV");
+
+ if (ySampler < 0 || uSampler < 0 || vSampler < 0) {
+ RTCLog(@"Failed to get uniform variable locations in I420 shader");
+ glDeleteProgram(_i420Program);
+ _i420Program = 0;
+ return NO;
+ }
+
+ glUseProgram(_i420Program);
+ glUniform1i(ySampler, kYTextureUnit);
+ glUniform1i(uSampler, kUTextureUnit);
+ glUniform1i(vSampler, kVTextureUnit);
+
+ return YES;
+}
+
+- (BOOL)createAndSetupNV12Program {
+ NSAssert(!_nv12Program, @"NV12 program already created");
+ _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource);
+ if (!_nv12Program) {
+ return NO;
+ }
+ GLint ySampler = glGetUniformLocation(_nv12Program, "s_textureY");
+ GLint uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV");
+
+ if (ySampler < 0 || uvSampler < 0) {
+ RTCLog(@"Failed to get uniform variable locations in NV12 shader");
+ glDeleteProgram(_nv12Program);
+ _nv12Program = 0;
+ return NO;
+ }
+
+ glUseProgram(_nv12Program);
+ glUniform1i(ySampler, kYTextureUnit);
+ glUniform1i(uvSampler, kUvTextureUnit);
+
+ return YES;
+}
+
+- (BOOL)prepareVertexBufferWithRotation:(RTCVideoRotation)rotation {
+ if (!_vertexBuffer && !RTCCreateVertexBuffer(&_vertexBuffer, &_vertexArray)) {
+ RTCLog(@"Failed to setup vertex buffer");
+ return NO;
+ }
+#if !TARGET_OS_IPHONE
+ glBindVertexArray(_vertexArray);
+#endif
+ glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+ if (!_currentRotation || rotation != *_currentRotation) {
+ _currentRotation = absl::optional<RTCVideoRotation>(rotation);
+ RTCSetVertexData(*_currentRotation);
+ }
+ return YES;
+}
+
+- (void)applyShadingForFrameWithWidth:(int)width
+ height:(int)height
+ rotation:(RTCVideoRotation)rotation
+ yPlane:(GLuint)yPlane
+ uPlane:(GLuint)uPlane
+ vPlane:(GLuint)vPlane {
+ if (![self prepareVertexBufferWithRotation:rotation]) {
+ return;
+ }
+
+ if (!_i420Program && ![self createAndSetupI420Program]) {
+ RTCLog(@"Failed to setup I420 program");
+ return;
+ }
+
+ glUseProgram(_i420Program);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, yPlane);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, uPlane);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kVTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, vPlane);
+
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+}
+
+- (void)applyShadingForFrameWithWidth:(int)width
+ height:(int)height
+ rotation:(RTCVideoRotation)rotation
+ yPlane:(GLuint)yPlane
+ uvPlane:(GLuint)uvPlane {
+ if (![self prepareVertexBufferWithRotation:rotation]) {
+ return;
+ }
+
+ if (!_nv12Program && ![self createAndSetupNV12Program]) {
+ RTCLog(@"Failed to setup NV12 shader");
+ return;
+ }
+
+ glUseProgram(_nv12Program);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, yPlane);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUvTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, uvPlane);
+
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h
new file mode 100644
index 0000000000..b78501e9e6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
+// refreshes, which should be 30fps. We wrap the display link in order to avoid
+// a retain cycle since CADisplayLink takes a strong reference onto its target.
+// The timer is paused by default.
+@interface RTCDisplayLinkTimer : NSObject
+
+@property(nonatomic) BOOL isPaused;
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler;
+- (void)invalidate;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m
new file mode 100644
index 0000000000..906bb898d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDisplayLinkTimer.h"
+
+#import <UIKit/UIKit.h>
+
+@implementation RTCDisplayLinkTimer {
+ CADisplayLink *_displayLink;
+ void (^_timerHandler)(void);
+}
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
+ NSParameterAssert(timerHandler);
+ if (self = [super init]) {
+ _timerHandler = timerHandler;
+ _displayLink =
+ [CADisplayLink displayLinkWithTarget:self
+ selector:@selector(displayLinkDidFire:)];
+ _displayLink.paused = YES;
+#if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0
+ _displayLink.preferredFramesPerSecond = 30;
+#else
+ [_displayLink setFrameInterval:2];
+#endif
+ [_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
+ forMode:NSRunLoopCommonModes];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self invalidate];
+}
+
+- (BOOL)isPaused {
+ return _displayLink.paused;
+}
+
+- (void)setIsPaused:(BOOL)isPaused {
+ _displayLink.paused = isPaused;
+}
+
+- (void)invalidate {
+ [_displayLink invalidate];
+}
+
+- (void)displayLinkDidFire:(CADisplayLink *)displayLink {
+ _timerHandler();
+}
+
+@end
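
For context, a minimal sketch of how a client could drive per-tick work with
RTCDisplayLinkTimer; the weak capture mirrors the retain-cycle note in the
header, and MyRenderer/-drawNextFrame are hypothetical names:

    #import "RTCDisplayLinkTimer.h"

    @interface MyRenderer : NSObject
    - (void)startRendering;
    - (void)stopRendering;
    - (void)drawNextFrame;
    @end

    @implementation MyRenderer {
      RTCDisplayLinkTimer *_timer;
    }

    - (void)startRendering {
      // Capture self weakly: the CADisplayLink retains the timer object, so
      // a strong self capture here would keep the renderer alive forever.
      __weak MyRenderer *weakSelf = self;
      _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
        [weakSelf drawNextFrame];
      }];
      _timer.isPaused = NO;  // the timer starts paused by default
    }

    - (void)stopRendering {
      [_timer invalidate];
      _timer = nil;
    }

    - (void)drawNextFrame {
      // Per-tick rendering work would go here.
    }
    @end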
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h
new file mode 100644
index 0000000000..24b26cd602
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoRenderer.h"
+#import "RTCVideoViewShading.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCEAGLVideoView);
+
+/**
+ * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames
+ * in its bounds using OpenGLES 2.0 or OpenGLES 3.0.
+ */
+RTC_OBJC_EXPORT
+NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.")
+@interface RTC_OBJC_TYPE (RTCEAGLVideoView) : UIView <RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+
+- (instancetype)initWithFrame:(CGRect)frame
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+ NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+ NS_DESIGNATED_INITIALIZER;
+
+/** @abstract Wrapped RTCVideoRotation, or nil.
+ */
+@property(nonatomic, nullable) NSValue *rotationOverride;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m
new file mode 100644
index 0000000000..89e62d2ce7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m
@@ -0,0 +1,295 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCEAGLVideoView.h"
+
+#import <GLKit/GLKit.h>
+
+#import "RTCDefaultShader.h"
+#import "RTCDisplayLinkTimer.h"
+#import "RTCI420TextureCache.h"
+#import "RTCNV12TextureCache.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+// RTC_OBJC_TYPE(RTCEAGLVideoView) wraps a GLKView which is set up with
+// enableSetNeedsDisplay = NO so that we control exactly when to call
+// -[GLKView display]. This extra control is needed because -[GLKView display]
+// is the method that triggers binding of the underlying render buffer, and
+// binding it while the drawable size is empty fails with the error
+// GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. Because the standard behavior of
+// -[UIView setNeedsDisplay] is disabled for the reasons above, the
+// RTC_OBJC_TYPE(RTCEAGLVideoView) maintains its own `isDirty` flag.
+
+@interface RTC_OBJC_TYPE (RTCEAGLVideoView)
+()<GLKViewDelegate>
+ // `videoFrame` is set when we receive a frame from a worker thread and is read
+ // from the display link callback so atomicity is required.
+ @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
+@property(nonatomic, readonly) GLKView *glkView;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCEAGLVideoView) {
+ RTCDisplayLinkTimer *_timer;
+ EAGLContext *_glContext;
+ // This flag should only be set and read on the main thread (e.g. by
+ // setNeedsDisplay)
+ BOOL _isDirty;
+ id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
+ RTCNV12TextureCache *_nv12TextureCache;
+ RTCI420TextureCache *_i420TextureCache;
+  // Since timestamps should be unique between frames, we store only the last
+  // drawn frame's timestamp, instead of the whole frame, to reduce memory usage.
+ int64_t _lastDrawnFrameTimeStampNs;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize glkView = _glkView;
+@synthesize rotationOverride = _rotationOverride;
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]];
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder {
+ return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
+}
+
+- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
+ if (self = [super initWithFrame:frame]) {
+ _shader = shader;
+ if (![self configure]) {
+ return nil;
+ }
+ }
+ return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
+ if (self = [super initWithCoder:aDecoder]) {
+ _shader = shader;
+ if (![self configure]) {
+ return nil;
+ }
+ }
+ return self;
+}
+
+- (BOOL)configure {
+ EAGLContext *glContext =
+ [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
+ if (!glContext) {
+ glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ }
+ if (!glContext) {
+ RTCLogError(@"Failed to create EAGLContext");
+ return NO;
+ }
+ _glContext = glContext;
+
+ // GLKView manages a framebuffer for us.
+ _glkView = [[GLKView alloc] initWithFrame:CGRectZero
+ context:_glContext];
+ _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
+ _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
+ _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
+ _glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
+ _glkView.delegate = self;
+ _glkView.layer.masksToBounds = YES;
+ _glkView.enableSetNeedsDisplay = NO;
+ [self addSubview:_glkView];
+
+ // Listen to application state in order to clean up OpenGL before app goes
+ // away.
+ NSNotificationCenter *notificationCenter =
+ [NSNotificationCenter defaultCenter];
+ [notificationCenter addObserver:self
+ selector:@selector(willResignActive)
+ name:UIApplicationWillResignActiveNotification
+ object:nil];
+ [notificationCenter addObserver:self
+ selector:@selector(didBecomeActive)
+ name:UIApplicationDidBecomeActiveNotification
+ object:nil];
+
+  // Frames are received on a separate thread, so we poll for the current
+  // frame at a rate proportional to the screen refresh frequency. This
+  // polling occurs on the main thread.
+ __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
+ _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
+ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
+ [strongSelf displayLinkTimerDidFire];
+ }];
+ if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) {
+ [self setupGL];
+ }
+ return YES;
+}
+
+- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
+ [super setMultipleTouchEnabled:multipleTouchEnabled];
+ _glkView.multipleTouchEnabled = multipleTouchEnabled;
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+ UIApplicationState appState =
+ [UIApplication sharedApplication].applicationState;
+ if (appState == UIApplicationStateActive) {
+ [self teardownGL];
+ }
+ [_timer invalidate];
+ [self ensureGLContext];
+ _shader = nil;
+ if (_glContext && [EAGLContext currentContext] == _glContext) {
+ [EAGLContext setCurrentContext:nil];
+ }
+}
+
+#pragma mark - UIView
+
+- (void)setNeedsDisplay {
+ [super setNeedsDisplay];
+ _isDirty = YES;
+}
+
+- (void)setNeedsDisplayInRect:(CGRect)rect {
+ [super setNeedsDisplayInRect:rect];
+ _isDirty = YES;
+}
+
+- (void)layoutSubviews {
+ [super layoutSubviews];
+ _glkView.frame = self.bounds;
+}
+
+#pragma mark - GLKViewDelegate
+
+// This method is called when the GLKView's content is dirty and needs to be
+// redrawn. This occurs on main thread.
+- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
+ // The renderer will draw the frame to the framebuffer corresponding to the
+ // one used by `view`.
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
+ if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) {
+ return;
+ }
+ RTCVideoRotation rotation = frame.rotation;
+  if (_rotationOverride != nil) {
+    [_rotationOverride getValue:&rotation];
+ }
+ [self ensureGLContext];
+ glClear(GL_COLOR_BUFFER_BIT);
+ if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ if (!_nv12TextureCache) {
+ _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
+ }
+ if (_nv12TextureCache) {
+ [_nv12TextureCache uploadFrameToTextures:frame];
+ [_shader applyShadingForFrameWithWidth:frame.width
+ height:frame.height
+ rotation:rotation
+ yPlane:_nv12TextureCache.yTexture
+ uvPlane:_nv12TextureCache.uvTexture];
+ [_nv12TextureCache releaseTextures];
+
+ _lastDrawnFrameTimeStampNs = self.videoFrame.timeStampNs;
+ }
+ } else {
+ if (!_i420TextureCache) {
+ _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext];
+ }
+ [_i420TextureCache uploadFrameToTextures:frame];
+ [_shader applyShadingForFrameWithWidth:frame.width
+ height:frame.height
+ rotation:rotation
+ yPlane:_i420TextureCache.yTexture
+ uPlane:_i420TextureCache.uTexture
+ vPlane:_i420TextureCache.vTexture];
+
+ _lastDrawnFrameTimeStampNs = self.videoFrame.timeStampNs;
+ }
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+ __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
+ [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)displayLinkTimerDidFire {
+  // Don't render unless the video frame has changed or the view content
+  // has explicitly been marked dirty.
+ if (!_isDirty && _lastDrawnFrameTimeStampNs == self.videoFrame.timeStampNs) {
+ return;
+ }
+
+  // Always reset isDirty at this point, even if -[GLKView display]
+  // won't be called because the drawable size is empty.
+ _isDirty = NO;
+
+ // Only call -[GLKView display] if the drawable size is
+ // non-empty. Calling display will make the GLKView setup its
+ // render buffer if necessary, but that will fail with error
+ // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty.
+ if (self.bounds.size.width > 0 && self.bounds.size.height > 0) {
+ [_glkView display];
+ }
+}
+
+- (void)setupGL {
+ [self ensureGLContext];
+ glDisable(GL_DITHER);
+ _timer.isPaused = NO;
+}
+
+- (void)teardownGL {
+ self.videoFrame = nil;
+ _timer.isPaused = YES;
+ [_glkView deleteDrawable];
+ [self ensureGLContext];
+ _nv12TextureCache = nil;
+ _i420TextureCache = nil;
+}
+
+- (void)didBecomeActive {
+ [self setupGL];
+}
+
+- (void)willResignActive {
+ [self teardownGL];
+}
+
+- (void)ensureGLContext {
+ NSAssert(_glContext, @"context shouldn't be nil");
+ if ([EAGLContext currentContext] != _glContext) {
+ [EAGLContext setCurrentContext:_glContext];
+ }
+}
+
+@end
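
A hedged sketch of typical client usage: embed the view, attach it to a video
track, and optionally pin rendering to a fixed rotation. The view controller
and `remoteVideoTrack` are assumed context; -addRenderer: is the standard
RTCVideoTrack API for feeding an RTCVideoRenderer.

    #import "RTCEAGLVideoView.h"

    // Inside a hypothetical UIViewController that owns `remoteVideoTrack`.
    - (void)attachRemoteVideo {
      RTC_OBJC_TYPE(RTCEAGLVideoView) *videoView =
          [[RTC_OBJC_TYPE(RTCEAGLVideoView) alloc] initWithFrame:self.view.bounds];
      videoView.delegate = self;  // receives -videoView:didChangeVideoSize:
      [self.view addSubview:videoView];
      [self.remoteVideoTrack addRenderer:videoView];

      // Render everything landscape regardless of each frame's rotation tag.
      RTCVideoRotation rotation = RTCVideoRotation_90;
      videoView.rotationOverride =
          [NSValue valueWithBytes:&rotation objCType:@encode(RTCVideoRotation)];
    }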
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h
new file mode 100644
index 0000000000..9fdcc5a695
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCOpenGLDefines.h"
+#import "base/RTCVideoFrame.h"
+
+@interface RTCI420TextureCache : NSObject
+
+@property(nonatomic, readonly) GLuint yTexture;
+@property(nonatomic, readonly) GLuint uTexture;
+@property(nonatomic, readonly) GLuint vTexture;
+
+- (instancetype)init NS_UNAVAILABLE;
+- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
+
+- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm
new file mode 100644
index 0000000000..5dccd4bf6a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCI420TextureCache.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+
+#import "base/RTCI420Buffer.h"
+#import "base/RTCVideoFrameBuffer.h"
+
+#include <vector>
+
+// Two sets of 3 textures are used here, one for each of the Y, U and V planes. Having two sets
+// alleviates CPU blockage in the event that the GPU is asked to render to a texture that is already
+// in use.
+static const GLsizei kNumTextureSets = 2;
+static const GLsizei kNumTexturesPerSet = 3;
+static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
+
+@implementation RTCI420TextureCache {
+ BOOL _hasUnpackRowLength;
+ GLint _currentTextureSet;
+ // Handles for OpenGL constructs.
+ GLuint _textures[kNumTextures];
+ // Used to create a non-padded plane for GPU upload when we receive padded frames.
+ std::vector<uint8_t> _planeBuffer;
+}
+
+- (GLuint)yTexture {
+ return _textures[_currentTextureSet * kNumTexturesPerSet];
+}
+
+- (GLuint)uTexture {
+ return _textures[_currentTextureSet * kNumTexturesPerSet + 1];
+}
+
+- (GLuint)vTexture {
+ return _textures[_currentTextureSet * kNumTexturesPerSet + 2];
+}
+
+- (instancetype)initWithContext:(GlContextType *)context {
+ if (self = [super init]) {
+#if TARGET_OS_IPHONE
+ _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3);
+#else
+ _hasUnpackRowLength = YES;
+#endif
+ glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+ [self setupTextures];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ glDeleteTextures(kNumTextures, _textures);
+}
+
+- (void)setupTextures {
+ glGenTextures(kNumTextures, _textures);
+ // Set parameters for each of the textures we created.
+ for (GLsizei i = 0; i < kNumTextures; i++) {
+ glBindTexture(GL_TEXTURE_2D, _textures[i]);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ }
+}
+
+- (void)uploadPlane:(const uint8_t *)plane
+ texture:(GLuint)texture
+ width:(size_t)width
+ height:(size_t)height
+ stride:(int32_t)stride {
+ glBindTexture(GL_TEXTURE_2D, texture);
+
+ const uint8_t *uploadPlane = plane;
+ if ((size_t)stride != width) {
+ if (_hasUnpackRowLength) {
+ // GLES3 allows us to specify stride.
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ static_cast<GLsizei>(width),
+ static_cast<GLsizei>(height),
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ uploadPlane);
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
+ return;
+ } else {
+ // Make an unpadded copy and upload that instead. Quick profiling showed
+ // that this is faster than uploading row by row using glTexSubImage2D.
+ uint8_t *unpaddedPlane = _planeBuffer.data();
+ for (size_t y = 0; y < height; ++y) {
+ memcpy(unpaddedPlane + y * width, plane + y * stride, width);
+ }
+ uploadPlane = unpaddedPlane;
+ }
+ }
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ static_cast<GLsizei>(width),
+ static_cast<GLsizei>(height),
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ uploadPlane);
+}
+
+- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
+
+ id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
+
+ const int chromaWidth = buffer.chromaWidth;
+ const int chromaHeight = buffer.chromaHeight;
+ if (buffer.strideY != frame.width || buffer.strideU != chromaWidth ||
+ buffer.strideV != chromaWidth) {
+ _planeBuffer.resize(buffer.width * buffer.height);
+ }
+
+ [self uploadPlane:buffer.dataY
+ texture:self.yTexture
+ width:buffer.width
+ height:buffer.height
+ stride:buffer.strideY];
+
+ [self uploadPlane:buffer.dataU
+ texture:self.uTexture
+ width:chromaWidth
+ height:chromaHeight
+ stride:buffer.strideU];
+
+ [self uploadPlane:buffer.dataV
+ texture:self.vTexture
+ width:chromaWidth
+ height:chromaHeight
+ stride:buffer.strideV];
+}
+
+@end
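
The cache pairs naturally with the RTCVideoViewShading protocol; a minimal
draw-path sketch, assuming a current EAGLContext `glContext`, an incoming
`frame`, and a `shader` conforming to RTC_OBJC_TYPE(RTCVideoViewShading):

    RTCI420TextureCache *cache =
        [[RTCI420TextureCache alloc] initWithContext:glContext];
    // -uploadFrameToTextures: converts the buffer via -toI420 and uploads
    // the Y/U/V planes into the next of the two texture sets.
    [cache uploadFrameToTextures:frame];
    [shader applyShadingForFrameWithWidth:frame.width
                                   height:frame.height
                                 rotation:frame.rotation
                                   yPlane:cache.yTexture
                                   uPlane:cache.uTexture
                                   vPlane:cache.vTexture];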
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
new file mode 100644
index 0000000000..c9ee986f88
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#if !TARGET_OS_IPHONE
+
+#import <AppKit/NSOpenGLView.h>
+
+#import "RTCVideoRenderer.h"
+#import "RTCVideoViewShading.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCNSGLVideoView);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCNSGLVideoViewDelegate)<RTC_OBJC_TYPE(RTCVideoViewDelegate)> @end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView <RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+
+- (instancetype)initWithFrame:(NSRect)frameRect
+ pixelFormat:(NSOpenGLPixelFormat *)format
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#endif
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
new file mode 100644
index 0000000000..168c73126f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#if !TARGET_OS_IPHONE
+
+#import "RTCNSGLVideoView.h"
+
+#import <AppKit/NSOpenGL.h>
+#import <CoreVideo/CVDisplayLink.h>
+#import <OpenGL/gl3.h>
+
+#import "RTCDefaultShader.h"
+#import "RTCI420TextureCache.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+
+@interface RTC_OBJC_TYPE (RTCNSGLVideoView)
+()
+ // `videoFrame` is set when we receive a frame from a worker thread and is read
+ // from the display link callback so atomicity is required.
+ @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *
+ videoFrame;
+@property(atomic, strong) RTCI420TextureCache *i420TextureCache;
+
+- (void)drawFrame;
+@end
+
+static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
+ const CVTimeStamp *now,
+ const CVTimeStamp *outputTime,
+ CVOptionFlags flagsIn,
+ CVOptionFlags *flagsOut,
+ void *displayLinkContext) {
+ RTC_OBJC_TYPE(RTCNSGLVideoView) *view =
+ (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext;
+ [view drawFrame];
+ return kCVReturnSuccess;
+}
+
+@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) {
+ CVDisplayLinkRef _displayLink;
+ RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame;
+ id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize i420TextureCache = _i420TextureCache;
+
+- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format {
+ return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]];
+}
+
+- (instancetype)initWithFrame:(NSRect)frame
+ pixelFormat:(NSOpenGLPixelFormat *)format
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
+ if (self = [super initWithFrame:frame pixelFormat:format]) {
+ _shader = shader;
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self teardownDisplayLink];
+}
+
+- (void)drawRect:(NSRect)rect {
+ [self drawFrame];
+}
+
+- (void)reshape {
+ [super reshape];
+ NSRect frame = [self frame];
+ [self ensureGLContext];
+ CGLLockContext([[self openGLContext] CGLContextObj]);
+ glViewport(0, 0, frame.size.width, frame.size.height);
+ CGLUnlockContext([[self openGLContext] CGLContextObj]);
+}
+
+- (void)lockFocus {
+ NSOpenGLContext *context = [self openGLContext];
+ [super lockFocus];
+ if ([context view] != self) {
+ [context setView:self];
+ }
+ [context makeCurrentContext];
+}
+
+- (void)prepareOpenGL {
+ [super prepareOpenGL];
+ [self ensureGLContext];
+ glDisable(GL_DITHER);
+ [self setupDisplayLink];
+}
+
+- (void)clearGLContext {
+ [self ensureGLContext];
+ self.i420TextureCache = nil;
+ [super clearGLContext];
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [self.delegate videoView:self didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)drawFrame {
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
+ if (!frame || frame == _lastDrawnFrame) {
+ return;
+ }
+ // This method may be called from CVDisplayLink callback which isn't on the
+ // main thread so we have to lock the GL context before drawing.
+ NSOpenGLContext *context = [self openGLContext];
+ CGLLockContext([context CGLContextObj]);
+
+ [self ensureGLContext];
+ glClear(GL_COLOR_BUFFER_BIT);
+
+ // Rendering native CVPixelBuffer is not supported on OS X.
+ // TODO(magjed): Add support for NV12 texture cache on OS X.
+ frame = [frame newI420VideoFrame];
+ if (!self.i420TextureCache) {
+ self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context];
+ }
+ RTCI420TextureCache *i420TextureCache = self.i420TextureCache;
+ if (i420TextureCache) {
+ [i420TextureCache uploadFrameToTextures:frame];
+ [_shader applyShadingForFrameWithWidth:frame.width
+ height:frame.height
+ rotation:frame.rotation
+ yPlane:i420TextureCache.yTexture
+ uPlane:i420TextureCache.uTexture
+ vPlane:i420TextureCache.vTexture];
+ [context flushBuffer];
+ _lastDrawnFrame = frame;
+ }
+ CGLUnlockContext([context CGLContextObj]);
+}
+
+- (void)setupDisplayLink {
+ if (_displayLink) {
+ return;
+ }
+ // Synchronize buffer swaps with vertical refresh rate.
+ GLint swapInt = 1;
+ [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+ // Create display link.
+ CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+ CVDisplayLinkSetOutputCallback(_displayLink,
+ &OnDisplayLinkFired,
+ (__bridge void *)self);
+ // Set the display link for the current renderer.
+ CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
+ CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
+ CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
+ _displayLink, cglContext, cglPixelFormat);
+ CVDisplayLinkStart(_displayLink);
+}
+
+- (void)teardownDisplayLink {
+ if (!_displayLink) {
+ return;
+ }
+ CVDisplayLinkRelease(_displayLink);
+ _displayLink = NULL;
+}
+
+- (void)ensureGLContext {
+ NSOpenGLContext* context = [self openGLContext];
+ NSAssert(context, @"context shouldn't be nil");
+ if ([NSOpenGLContext currentContext] != context) {
+ [context makeCurrentContext];
+ }
+}
+
+@end
+
+#endif // !TARGET_OS_IPHONE
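
Since RTCShader.mm compiles its desktop shaders as "#version 150", a client
would create this view with a 3.2 Core profile pixel format. A macOS-only
sketch; `contentRect` and the delegate assignment are assumed context:

    #import <AppKit/AppKit.h>
    #import "RTCNSGLVideoView.h"

    NSOpenGLPixelFormatAttribute attrs[] = {
        NSOpenGLPFAOpenGLProfile, NSOpenGLProfileVersion3_2Core,
        NSOpenGLPFADoubleBuffer, 0};
    NSOpenGLPixelFormat *format =
        [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
    RTC_OBJC_TYPE(RTCNSGLVideoView) *view =
        [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:contentRect
                                                   pixelFormat:format];
    view.delegate = self;  // for -videoView:didChangeVideoSize: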
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h
new file mode 100644
index 0000000000..f202b836b5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <GLKit/GLKit.h>
+
+#import "base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCVideoFrame);
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCNV12TextureCache : NSObject
+
+@property(nonatomic, readonly) GLuint yTexture;
+@property(nonatomic, readonly) GLuint uvTexture;
+
+- (instancetype)init NS_UNAVAILABLE;
+- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
+
+- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+- (void)releaseTextures;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m
new file mode 100644
index 0000000000..a520ac45b4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNV12TextureCache.h"
+
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+@implementation RTCNV12TextureCache {
+ CVOpenGLESTextureCacheRef _textureCache;
+ CVOpenGLESTextureRef _yTextureRef;
+ CVOpenGLESTextureRef _uvTextureRef;
+}
+
+- (GLuint)yTexture {
+ return CVOpenGLESTextureGetName(_yTextureRef);
+}
+
+- (GLuint)uvTexture {
+ return CVOpenGLESTextureGetName(_uvTextureRef);
+}
+
+- (instancetype)initWithContext:(EAGLContext *)context {
+ if (self = [super init]) {
+ CVReturn ret = CVOpenGLESTextureCacheCreate(
+ kCFAllocatorDefault, NULL,
+#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
+ context,
+#else
+ (__bridge void *)context,
+#endif
+ NULL, &_textureCache);
+ if (ret != kCVReturnSuccess) {
+ self = nil;
+ }
+ }
+ return self;
+}
+
+- (BOOL)loadTexture:(CVOpenGLESTextureRef *)textureOut
+ pixelBuffer:(CVPixelBufferRef)pixelBuffer
+ planeIndex:(int)planeIndex
+ pixelFormat:(GLenum)pixelFormat {
+ const int width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex);
+ const int height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);
+
+ if (*textureOut) {
+ CFRelease(*textureOut);
+ *textureOut = nil;
+ }
+ CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, pixelFormat, width,
+ height, pixelFormat, GL_UNSIGNED_BYTE, planeIndex, textureOut);
+ if (ret != kCVReturnSuccess) {
+ if (*textureOut) {
+ CFRelease(*textureOut);
+ *textureOut = nil;
+ }
+ return NO;
+ }
+ NSAssert(CVOpenGLESTextureGetTarget(*textureOut) == GL_TEXTURE_2D,
+ @"Unexpected GLES texture target");
+ glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(*textureOut));
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ return YES;
+}
+
+- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]],
+ @"frame must be CVPixelBuffer backed");
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
+ return [self loadTexture:&_yTextureRef
+ pixelBuffer:pixelBuffer
+ planeIndex:0
+ pixelFormat:GL_LUMINANCE] &&
+ [self loadTexture:&_uvTextureRef
+ pixelBuffer:pixelBuffer
+ planeIndex:1
+ pixelFormat:GL_LUMINANCE_ALPHA];
+}
+
+- (void)releaseTextures {
+ if (_uvTextureRef) {
+ CFRelease(_uvTextureRef);
+ _uvTextureRef = nil;
+ }
+ if (_yTextureRef) {
+ CFRelease(_yTextureRef);
+ _yTextureRef = nil;
+ }
+}
+
+- (void)dealloc {
+ [self releaseTextures];
+ if (_textureCache) {
+ CFRelease(_textureCache);
+ _textureCache = nil;
+ }
+}
+
+@end
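
The NV12 path differs from the I420 cache in one important way: the textures
are zero-copy views into the CVPixelBuffer, so they must be released after
each draw. A per-frame sketch, assuming `frame.buffer` is CVPixelBuffer
backed and `shader` conforms to RTC_OBJC_TYPE(RTCVideoViewShading):

    if ([nv12Cache uploadFrameToTextures:frame]) {
      [shader applyShadingForFrameWithWidth:frame.width
                                     height:frame.height
                                   rotation:frame.rotation
                                     yPlane:nv12Cache.yTexture
                                   uvPlane:nv12Cache.uvTexture];
      // The texture refs are CFRetained by the cache; release them once the
      // draw call has consumed them.
      [nv12Cache releaseTextures];
    }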
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h
new file mode 100644
index 0000000000..4088535861
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#if TARGET_OS_IPHONE
+#define RTC_PIXEL_FORMAT GL_LUMINANCE
+#define SHADER_VERSION
+#define VERTEX_SHADER_IN "attribute"
+#define VERTEX_SHADER_OUT "varying"
+#define FRAGMENT_SHADER_IN "varying"
+#define FRAGMENT_SHADER_OUT
+#define FRAGMENT_SHADER_COLOR "gl_FragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture2D"
+
+@class EAGLContext;
+typedef EAGLContext GlContextType;
+#else
+#define RTC_PIXEL_FORMAT GL_RED
+#define SHADER_VERSION "#version 150\n"
+#define VERTEX_SHADER_IN "in"
+#define VERTEX_SHADER_OUT "out"
+#define FRAGMENT_SHADER_IN "in"
+#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
+#define FRAGMENT_SHADER_COLOR "fragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture"
+
+@class NSOpenGLContext;
+typedef NSOpenGLContext GlContextType;
+#endif
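
These macros let one shader source compile as both GLSL ES (iOS) and GLSL
1.50 (macOS). A hypothetical grayscale fragment shader showing how they
compose; the uniform name is illustrative:

    static const char kGrayFragmentShaderSource[] =
        SHADER_VERSION
        "precision mediump float;\n"  // accepted and ignored by desktop GLSL
        FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
        "uniform lowp sampler2D s_textureY;\n"
        FRAGMENT_SHADER_OUT
        "void main() {\n"
        "  float y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
        "  " FRAGMENT_SHADER_COLOR " = vec4(y, y, y, 1.0);\n"
        "}\n";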
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h
new file mode 100644
index 0000000000..d1b91fb643
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "base/RTCVideoFrame.h"
+
+RTC_EXTERN const char kRTCVertexShaderSource[];
+
+RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar* source);
+RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
+RTC_EXTERN GLuint
+RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
+RTC_EXTERN BOOL RTCCreateVertexBuffer(GLuint* vertexBuffer,
+ GLuint* vertexArray);
+RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation);
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm
new file mode 100644
index 0000000000..8eccd7fbec
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm
@@ -0,0 +1,189 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCShader.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+
+#include <algorithm>
+#include <array>
+#include <memory>
+
+#import "RTCOpenGLDefines.h"
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+// Vertex shader doesn't do anything except pass coordinates through.
+const char kRTCVertexShaderSource[] =
+ SHADER_VERSION
+ VERTEX_SHADER_IN " vec2 position;\n"
+ VERTEX_SHADER_IN " vec2 texcoord;\n"
+ VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
+ "void main() {\n"
+ " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
+ " v_texcoord = texcoord;\n"
+ "}\n";
+
+// Compiles a shader of the given `type` with GLSL source `source` and returns
+// the shader handle or 0 on error.
+GLuint RTCCreateShader(GLenum type, const GLchar *source) {
+ GLuint shader = glCreateShader(type);
+ if (!shader) {
+ return 0;
+ }
+ glShaderSource(shader, 1, &source, NULL);
+ glCompileShader(shader);
+ GLint compileStatus = GL_FALSE;
+ glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
+ if (compileStatus == GL_FALSE) {
+ GLint logLength = 0;
+ // The null termination character is included in the returned log length.
+ glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
+ if (logLength > 0) {
+ std::unique_ptr<char[]> compileLog(new char[logLength]);
+ // The returned string is null terminated.
+ glGetShaderInfoLog(shader, logLength, NULL, compileLog.get());
+ RTC_LOG(LS_ERROR) << "Shader compile error: " << compileLog.get();
+ }
+ glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+}
+
+// Links a shader program with the given vertex and fragment shaders and
+// returns the program handle or 0 on error.
+GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) {
+ if (vertexShader == 0 || fragmentShader == 0) {
+ return 0;
+ }
+ GLuint program = glCreateProgram();
+ if (!program) {
+ return 0;
+ }
+ glAttachShader(program, vertexShader);
+ glAttachShader(program, fragmentShader);
+ glLinkProgram(program);
+ GLint linkStatus = GL_FALSE;
+ glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+ if (linkStatus == GL_FALSE) {
+ glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+}
+
+// Creates and links a shader program with the given fragment shader source and
+// a plain vertex shader. Returns the program handle or 0 on error.
+GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
+ GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource);
+ RTC_CHECK(vertexShader) << "failed to create vertex shader";
+ GLuint fragmentShader =
+ RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
+ RTC_CHECK(fragmentShader) << "failed to create fragment shader";
+ GLuint program = RTCCreateProgram(vertexShader, fragmentShader);
+  // The shaders were created only to build the program and can be deleted.
+ if (vertexShader) {
+ glDeleteShader(vertexShader);
+ }
+ if (fragmentShader) {
+ glDeleteShader(fragmentShader);
+ }
+
+ // Set vertex shader variables 'position' and 'texcoord' in program.
+ GLint position = glGetAttribLocation(program, "position");
+ GLint texcoord = glGetAttribLocation(program, "texcoord");
+ if (position < 0 || texcoord < 0) {
+ glDeleteProgram(program);
+ return 0;
+ }
+
+  // Read the position attribute: 2 components with a stride of 4 floats, starting
+  // at the beginning of the array. The last argument is the offset within the vertex buffer.
+ glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
+ glEnableVertexAttribArray(position);
+
+  // Read the texcoord attribute: 2 components with a stride of 4 floats, starting at
+  // the first texcoord in the array (an offset of 2 floats into the vertex buffer).
+ glVertexAttribPointer(
+ texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)(2 * sizeof(GLfloat)));
+ glEnableVertexAttribArray(texcoord);
+
+ return program;
+}
+
+BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) {
+#if !TARGET_OS_IPHONE
+ glGenVertexArrays(1, vertexArray);
+ if (*vertexArray == 0) {
+ return NO;
+ }
+ glBindVertexArray(*vertexArray);
+#endif
+ glGenBuffers(1, vertexBuffer);
+ if (*vertexBuffer == 0) {
+ glDeleteVertexArrays(1, vertexArray);
+ return NO;
+ }
+ glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer);
+ glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW);
+ return YES;
+}
+
+// Set vertex data to the currently bound vertex buffer.
+void RTCSetVertexData(RTCVideoRotation rotation) {
+ // When modelview and projection matrices are identity (default) the world is
+ // contained in the square around origin with unit size 2. Drawing to these
+ // coordinates is equivalent to drawing to the entire screen. The texture is
+ // stretched over that square using texture coordinates (u, v) that range
+ // from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
+ // here because the incoming frame has origin in upper left hand corner but
+ // OpenGL expects origin in bottom left corner.
+ std::array<std::array<GLfloat, 2>, 4> UVCoords = {{
+ {{0, 1}}, // Lower left.
+ {{1, 1}}, // Lower right.
+ {{1, 0}}, // Upper right.
+ {{0, 0}}, // Upper left.
+ }};
+
+ // Rotate the UV coordinates.
+ int rotation_offset;
+ switch (rotation) {
+ case RTCVideoRotation_0:
+ rotation_offset = 0;
+ break;
+ case RTCVideoRotation_90:
+ rotation_offset = 1;
+ break;
+ case RTCVideoRotation_180:
+ rotation_offset = 2;
+ break;
+ case RTCVideoRotation_270:
+ rotation_offset = 3;
+ break;
+ }
+ std::rotate(UVCoords.begin(), UVCoords.begin() + rotation_offset,
+ UVCoords.end());
+
+ const GLfloat gVertices[] = {
+ // X, Y, U, V.
+ -1, -1, UVCoords[0][0], UVCoords[0][1],
+ 1, -1, UVCoords[1][0], UVCoords[1][1],
+ 1, 1, UVCoords[2][0], UVCoords[2][1],
+ -1, 1, UVCoords[3][0], UVCoords[3][1],
+ };
+
+ glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(gVertices), gVertices);
+}
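
Putting the helpers together, roughly as the default shader does: create the
vertex buffer first (program creation wires the position/texcoord attributes
against the currently bound buffer), then build the program and rewrite the
vertex data whenever the rotation changes. A sketch assuming a current GL
context and the grayscale fragment source sketched earlier:

    GLuint vertexBuffer = 0;
    GLuint vertexArray = 0;
    if (RTCCreateVertexBuffer(&vertexBuffer, &vertexArray)) {
      GLuint program =
          RTCCreateProgramFromFragmentSource(kGrayFragmentShaderSource);
      if (program != 0) {
        glUseProgram(program);
        // Bind the plane textures to their texture units here, then:
        RTCSetVertexData(RTCVideoRotation_90);  // UV coords rotated one quadrant
        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
      }
    }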
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h
new file mode 100644
index 0000000000..9df30a8fa0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES)
+ * shaders used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoViewShading)<NSObject>
+
+/** Callback for I420 frames. Each plane is given as a texture. */
+- (void)applyShadingForFrameWithWidth:(int)width height:(int)height
+    rotation:(RTCVideoRotation)rotation yPlane:(GLuint)yPlane
+    uPlane:(GLuint)uPlane vPlane:(GLuint)vPlane;
+
+/** Callback for NV12 frames. Each plane is given as a texture. */
+- (void)applyShadingForFrameWithWidth:(int)width
+ height:(int)height
+ rotation:(RTCVideoRotation)rotation
+ yPlane:(GLuint)yPlane
+ uvPlane:(GLuint)uvPlane;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h
new file mode 100644
index 0000000000..a0cd8515d1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCodecSpecificInfoH264.h"
+
+#include "modules/video_coding/include/video_codec_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/* Interfaces for converting to/from internal C++ formats. */
+@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
+()
+
+ - (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h
new file mode 100644
index 0000000000..ae3003a115
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCodecSpecificInfo.h"
+#import "RTCMacros.h"
+
+/** Class for H264 specific config. */
+typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) {
+ RTCH264PacketizationModeNonInterleaved = 0, // Mode 1 - STAP-A, FU-A is allowed
+ RTCH264PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject <RTC_OBJC_TYPE(RTCCodecSpecificInfo)>
+
+@property(nonatomic, assign) RTCH264PacketizationMode packetizationMode;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm
new file mode 100644
index 0000000000..e38ed307b3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCodecSpecificInfoH264+Private.h"
+
+#import "RTCH264ProfileLevelId.h"
+
+// H264 specific settings.
+@implementation RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
+
+@synthesize packetizationMode = _packetizationMode;
+
+- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo {
+ webrtc::CodecSpecificInfo codecSpecificInfo;
+ codecSpecificInfo.codecType = webrtc::kVideoCodecH264;
+ codecSpecificInfo.codecSpecific.H264.packetization_mode =
+ (webrtc::H264PacketizationMode)_packetizationMode;
+
+ return codecSpecificInfo;
+}
+
+@end
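
Usage is a simple value object handed along with encoded frames; the private
category above bridges it to webrtc::CodecSpecificInfo. A brief sketch:

    RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *info =
        [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init];
    info.packetizationMode = RTCH264PacketizationModeNonInterleaved;  // mode 1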
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h
new file mode 100644
index 0000000000..de5a9c4684
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderFactory.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** This decoder factory includes support for all codecs bundled with WebRTC. If using custom
+ * codecs, create custom implementations of RTCVideoEncoderFactory and
+ * RTCVideoDecoderFactory.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m
new file mode 100644
index 0000000000..6e3baa8750
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDefaultVideoDecoderFactory.h"
+
+#import "RTCH264ProfileLevelId.h"
+#import "RTCVideoDecoderH264.h"
+#import "api/video_codec/RTCVideoCodecConstants.h"
+#import "api/video_codec/RTCVideoDecoderVP8.h"
+#import "api/video_codec/RTCVideoDecoderVP9.h"
+#import "base/RTCVideoCodecInfo.h"
+
+#if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY)
+#import "api/video_codec/RTCVideoDecoderAV1.h" // nogncheck
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory)
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+ parameters:constrainedHighParams];
+
+ NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+ parameters:constrainedBaselineParams];
+
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
+
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *result = [@[
+ constrainedHighInfo,
+ constrainedBaselineInfo,
+ vp8Info,
+ ] mutableCopy];
+
+ if ([RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) {
+ [result
+ addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]];
+ }
+
+#if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY)
+ [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]];
+#endif
+
+ return result;
+}
+
+- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
+ return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
+ } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
+ return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder];
+ } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name] &&
+ [RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) {
+ return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder];
+ }
+
+#if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY)
+ if ([info.name isEqualToString:kRTCVideoCodecAv1Name]) {
+ return [RTC_OBJC_TYPE(RTCVideoDecoderAV1) av1Decoder];
+ }
+#endif
+
+ return nil;
+}
+
+@end
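
Enumerating what the factory offers is often useful for codec negotiation or
logging. A small sketch using only the factory's public methods:

    RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) *factory =
        [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init];
    for (RTC_OBJC_TYPE(RTCVideoCodecInfo) *info in [factory supportedCodecs]) {
      NSLog(@"decoder available: %@ %@", info.name, info.parameters);
    }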
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h
new file mode 100644
index 0000000000..92ab40c95b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderFactory.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** This encoder factory includes support for all codecs bundled with WebRTC. If using custom
+ * codecs, create custom implementations of RTCVideoEncoderFactory and
+ * RTCVideoDecoderFactory.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
+
+@property(nonatomic, retain) RTC_OBJC_TYPE(RTCVideoCodecInfo) *preferredCodec;
+
++ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m
new file mode 100644
index 0000000000..8de55bde4a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDefaultVideoEncoderFactory.h"
+
+#import "RTCH264ProfileLevelId.h"
+#import "RTCVideoEncoderH264.h"
+#import "api/video_codec/RTCVideoCodecConstants.h"
+#import "api/video_codec/RTCVideoEncoderVP8.h"
+#import "api/video_codec/RTCVideoEncoderVP9.h"
+#import "base/RTCVideoCodecInfo.h"
+
+#if defined(RTC_USE_LIBAOM_AV1_ENCODER)
+#import "api/video_codec/RTCVideoEncoderAV1.h" // nogncheck
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory)
+
+@synthesize preferredCodec;
+
++ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+ parameters:constrainedHighParams];
+
+ NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+ parameters:constrainedBaselineParams];
+
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
+
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *result = [@[
+ constrainedHighInfo,
+ constrainedBaselineInfo,
+ vp8Info,
+ ] mutableCopy];
+
+ if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) {
+ [result
+ addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]];
+ }
+
+#if defined(RTC_USE_LIBAOM_AV1_ENCODER)
+ [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]];
+#endif
+
+ return result;
+}
+
+- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
+ return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
+ } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
+ return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder];
+ } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name] &&
+ [RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) {
+ return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder];
+ }
+
+#if defined(RTC_USE_LIBAOM_AV1_ENCODER)
+ if ([info.name isEqualToString:kRTCVideoCodecAv1Name]) {
+ return [RTC_OBJC_TYPE(RTCVideoEncoderAV1) av1Encoder];
+ }
+#endif
+
+ return nil;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
+ [[[self class] supportedCodecs] mutableCopy];
+
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs = [NSMutableArray array];
+ NSUInteger index = [codecs indexOfObject:self.preferredCodec];
+ if (index != NSNotFound) {
+ [orderedCodecs addObject:[codecs objectAtIndex:index]];
+ [codecs removeObjectAtIndex:index];
+ }
+ [orderedCodecs addObjectsFromArray:codecs];
+
+ return [orderedCodecs copy];
+}
+
+@end
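
The preferredCodec hook is what the instance method -supportedCodecs uses to
reorder the list. A sketch that prefers VP9 when available and then feeds
both factories into the standard peer connection factory initializer:

    RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
        [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
    for (RTC_OBJC_TYPE(RTCVideoCodecInfo) *info in
         [RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) supportedCodecs]) {
      if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
        encoderFactory.preferredCodec = info;
      }
    }

    RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) *decoderFactory =
        [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init];
    RTC_OBJC_TYPE(RTCPeerConnectionFactory) *pcFactory =
        [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
            initWithEncoderFactory:encoderFactory
                    decoderFactory:decoderFactory];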
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h
new file mode 100644
index 0000000000..dac7bb5610
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+RTC_OBJC_EXPORT extern NSString *const kRTCVideoCodecH264Name;
+RTC_OBJC_EXPORT extern NSString *const kRTCLevel31ConstrainedHigh;
+RTC_OBJC_EXPORT extern NSString *const kRTCLevel31ConstrainedBaseline;
+RTC_OBJC_EXPORT extern NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh;
+RTC_OBJC_EXPORT extern NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline;
+
+/** H264 Profiles and levels. */
+typedef NS_ENUM(NSUInteger, RTCH264Profile) {
+ RTCH264ProfileConstrainedBaseline,
+ RTCH264ProfileBaseline,
+ RTCH264ProfileMain,
+ RTCH264ProfileConstrainedHigh,
+ RTCH264ProfileHigh,
+};
+
+typedef NS_ENUM(NSUInteger, RTCH264Level) {
+ RTCH264Level1_b = 0,
+ RTCH264Level1 = 10,
+ RTCH264Level1_1 = 11,
+ RTCH264Level1_2 = 12,
+ RTCH264Level1_3 = 13,
+ RTCH264Level2 = 20,
+ RTCH264Level2_1 = 21,
+ RTCH264Level2_2 = 22,
+ RTCH264Level3 = 30,
+ RTCH264Level3_1 = 31,
+ RTCH264Level3_2 = 32,
+ RTCH264Level4 = 40,
+ RTCH264Level4_1 = 41,
+ RTCH264Level4_2 = 42,
+ RTCH264Level5 = 50,
+ RTCH264Level5_1 = 51,
+ RTCH264Level5_2 = 52
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject
+
+@property(nonatomic, readonly) RTCH264Profile profile;
+@property(nonatomic, readonly) RTCH264Level level;
+@property(nonatomic, readonly) NSString *hexString;
+
+- (instancetype)initWithHexString:(NSString *)hexString;
+- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm
new file mode 100644
index 0000000000..f0ef3ec232
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "RTCH264ProfileLevelId.h"
+
+#import "helpers/NSString+StdString.h"
+#if defined(WEBRTC_IOS)
+#import "UIDevice+H264Profile.h"
+#endif
+
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "media/base/media_constants.h"
+
+namespace {
+
+NSString *MaxSupportedProfileLevelConstrainedHigh();
+NSString *MaxSupportedProfileLevelConstrainedBaseline();
+
+} // namespace
+
+NSString *const kRTCVideoCodecH264Name = @(cricket::kH264CodecName);
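+// The hex strings encode profile_idc, constraint flags, and level_idc:
+// 0x64/0x0c/0x1f is Constrained High level 3.1 and 0x42/0xe0/0x1f is
+// Constrained Baseline level 3.1.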
+NSString *const kRTCLevel31ConstrainedHigh = @"640c1f";
+NSString *const kRTCLevel31ConstrainedBaseline = @"42e01f";
+NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh =
+ MaxSupportedProfileLevelConstrainedHigh();
+NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline =
+ MaxSupportedProfileLevelConstrainedBaseline();
+
+namespace {
+
+#if defined(WEBRTC_IOS)
+
+NSString *MaxSupportedLevelForProfile(webrtc::H264Profile profile) {
+ const absl::optional<webrtc::H264ProfileLevelId> profileLevelId =
+ [UIDevice maxSupportedH264Profile];
+ if (profileLevelId && profileLevelId->profile >= profile) {
+ const absl::optional<std::string> profileString =
+ H264ProfileLevelIdToString(webrtc::H264ProfileLevelId(profile, profileLevelId->level));
+ if (profileString) {
+ return [NSString stringForStdString:*profileString];
+ }
+ }
+ return nil;
+}
+#endif
+
+NSString *MaxSupportedProfileLevelConstrainedBaseline() {
+#if defined(WEBRTC_IOS)
+ NSString *profile = MaxSupportedLevelForProfile(webrtc::H264Profile::kProfileConstrainedBaseline);
+ if (profile != nil) {
+ return profile;
+ }
+#endif
+ return kRTCLevel31ConstrainedBaseline;
+}
+
+NSString *MaxSupportedProfileLevelConstrainedHigh() {
+#if defined(WEBRTC_IOS)
+ NSString *profile = MaxSupportedLevelForProfile(webrtc::H264Profile::kProfileConstrainedHigh);
+ if (profile != nil) {
+ return profile;
+ }
+#endif
+ return kRTCLevel31ConstrainedHigh;
+}
+
+} // namespace
+
+@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) ()
+
+@property(nonatomic, assign) RTCH264Profile profile;
+@property(nonatomic, assign) RTCH264Level level;
+@property(nonatomic, strong) NSString *hexString;
+
+@end
+
+@implementation RTC_OBJC_TYPE (RTCH264ProfileLevelId)
+
+@synthesize profile = _profile;
+@synthesize level = _level;
+@synthesize hexString = _hexString;
+
+- (instancetype)initWithHexString:(NSString *)hexString {
+ if (self = [super init]) {
+ self.hexString = hexString;
+
+ absl::optional<webrtc::H264ProfileLevelId> profile_level_id =
+ webrtc::ParseH264ProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
+ if (profile_level_id.has_value()) {
+ self.profile = static_cast<RTCH264Profile>(profile_level_id->profile);
+ self.level = static_cast<RTCH264Level>(profile_level_id->level);
+ }
+ }
+ return self;
+}
+
+- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level {
+ if (self = [super init]) {
+ self.profile = profile;
+ self.level = level;
+
+ absl::optional<std::string> hex_string =
+ webrtc::H264ProfileLevelIdToString(webrtc::H264ProfileLevelId(
+ static_cast<webrtc::H264Profile>(profile), static_cast<webrtc::H264Level>(level)));
+ self.hexString =
+ [NSString stringWithCString:hex_string.value_or("").c_str() encoding:NSUTF8StringEncoding];
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h
new file mode 100644
index 0000000000..88bacbbdfe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderFactory.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m
new file mode 100644
index 0000000000..bdae19d687
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoDecoderFactoryH264.h"
+
+#import "RTCH264ProfileLevelId.h"
+#import "RTCVideoDecoderH264.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264)
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
+ NSString *codecName = kRTCVideoCodecH264Name;
+
+ NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+ parameters:constrainedHighParams];
+ [codecs addObject:constrainedHighInfo];
+
+ NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+ parameters:constrainedBaselineParams];
+ [codecs addObject:constrainedBaselineInfo];
+
+ return [codecs copy];
+}
+
+- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.h
new file mode 100644
index 0000000000..a12e4212a7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoder)>
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm
new file mode 100644
index 0000000000..09e642bc37
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm
@@ -0,0 +1,276 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "RTCVideoDecoderH264.h"
+
+#import <VideoToolbox/VideoToolbox.h>
+
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#import "helpers.h"
+#import "helpers/scoped_cftyperef.h"
+
+#if defined(WEBRTC_IOS)
+#import "helpers/UIDevice+RTCDevice.h"
+#endif
+
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/objc/components/video_codec/nalu_rewriter.h"
+
+// Struct that we pass to the decoder per frame to decode. We receive it again
+// in the decoder callback.
+struct RTCFrameDecodeParams {
+ RTCFrameDecodeParams(RTCVideoDecoderCallback cb, int64_t ts) : callback(cb), timestamp(ts) {}
+ RTCVideoDecoderCallback callback;
+ int64_t timestamp;
+};
+
+@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) ()
+- (void)setError:(OSStatus)error;
+@end
+
+// This is the callback function that VideoToolbox calls when decode is
+// complete.
+void decompressionOutputCallback(void *decoderRef,
+ void *params,
+ OSStatus status,
+ VTDecodeInfoFlags infoFlags,
+ CVImageBufferRef imageBuffer,
+ CMTime timestamp,
+ CMTime duration) {
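+ // Take ownership of the params that were handed to
+ // VTDecompressionSessionDecodeFrame so they are freed on every exit path.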
+ std::unique_ptr<RTCFrameDecodeParams> decodeParams(
+ reinterpret_cast<RTCFrameDecodeParams *>(params));
+ if (status != noErr) {
+ RTC_OBJC_TYPE(RTCVideoDecoderH264) *decoder =
+ (__bridge RTC_OBJC_TYPE(RTCVideoDecoderH264) *)decoderRef;
+ [decoder setError:status];
+ RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
+ return;
+ }
+ // TODO(tkchin): Handle CVO properly.
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *frameBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer];
+ RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
+ initWithBuffer:frameBuffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
+ decodedFrame.timeStamp = decodeParams->timestamp;
+ decodeParams->callback(decodedFrame);
+}
+
+// Decoder.
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderH264) {
+ CMVideoFormatDescriptionRef _videoFormat;
+ CMMemoryPoolRef _memoryPool;
+ VTDecompressionSessionRef _decompressionSession;
+ RTCVideoDecoderCallback _callback;
+ OSStatus _error;
+}
+
+- (instancetype)init {
+ self = [super init];
+ if (self) {
+ _memoryPool = CMMemoryPoolCreate(nil);
+ }
+ return self;
+}
+
+- (void)dealloc {
+ CMMemoryPoolInvalidate(_memoryPool);
+ CFRelease(_memoryPool);
+ [self destroyDecompressionSession];
+ [self setVideoFormat:nullptr];
+}
+
+- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)inputImage
+ missingFrames:(BOOL)missingFrames
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ renderTimeMs:(int64_t)renderTimeMs {
+ RTC_DCHECK(inputImage.buffer);
+
+ if (_error != noErr) {
+ RTC_LOG(LS_WARNING) << "Last frame decode failed.";
+ _error = noErr;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
+ rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
+ inputImage.buffer.length));
+ if (inputFormat) {
+ // Check if the video format has changed, and reinitialize decoder if
+ // needed.
+ if (!CMFormatDescriptionEqual(inputFormat.get(), _videoFormat)) {
+ [self setVideoFormat:inputFormat.get()];
+ int resetDecompressionSessionError = [self resetDecompressionSession];
+ if (resetDecompressionSessionError != WEBRTC_VIDEO_CODEC_OK) {
+ return resetDecompressionSessionError;
+ }
+ }
+ }
+ if (!_videoFormat) {
+ // We received a frame but we don't have format information so we can't
+ // decode it.
+ // This can happen after backgrounding. We need to wait for the next
+ // sps/pps before we can resume so we request a keyframe by returning an
+ // error.
+ RTC_LOG(LS_WARNING) << "Missing video format. Frame with sps/pps required.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ CMSampleBufferRef sampleBuffer = nullptr;
+ if (!webrtc::H264AnnexBBufferToCMSampleBuffer((uint8_t *)inputImage.buffer.bytes,
+ inputImage.buffer.length,
+ _videoFormat,
+ &sampleBuffer,
+ _memoryPool)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ RTC_DCHECK(sampleBuffer);
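+ // Decode asynchronously; decompressionOutputCallback receives the result on
+ // a VideoToolbox-owned queue.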
+ VTDecodeFrameFlags decodeFlags = kVTDecodeFrame_EnableAsynchronousDecompression;
+ std::unique_ptr<RTCFrameDecodeParams> frameDecodeParams;
+ frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
+ OSStatus status = VTDecompressionSessionDecodeFrame(
+ _decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr);
+#if defined(WEBRTC_IOS)
+ // If the session is invalid while the app is active, or the decoder has
+ // malfunctioned, re-initialize the decoder and retry the decode request.
+ if ((status == kVTInvalidSessionErr || status == kVTVideoDecoderMalfunctionErr) &&
+ [self resetDecompressionSession] == WEBRTC_VIDEO_CODEC_OK) {
+ RTC_LOG(LS_INFO) << "Failed to decode frame with code: " << status
+ << " retrying decode after decompression session reset";
+ frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
+ status = VTDecompressionSessionDecodeFrame(
+ _decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr);
+ }
+#endif
+ CFRelease(sampleBuffer);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to decode frame with code: " << status;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (void)setCallback:(RTCVideoDecoderCallback)callback {
+ _callback = callback;
+}
+
+- (void)setError:(OSStatus)error {
+ _error = error;
+}
+
+- (NSInteger)releaseDecoder {
+ // Need to invalidate the session so that callbacks no longer occur and it
+ // is safe to null out the callback.
+ [self destroyDecompressionSession];
+ [self setVideoFormat:nullptr];
+ _callback = nullptr;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+#pragma mark - Private
+
+- (int)resetDecompressionSession {
+ [self destroyDecompressionSession];
+
+ // Need to wait for the first SPS to initialize decoder.
+ if (!_videoFormat) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ // Set keys for OpenGL and IOSurface compatibility, which makes the decoder
+ // create pixel buffers with GPU backed memory. The intent here is to pass
+ // the pixel buffers directly so we avoid a texture upload later during
+ // rendering. This is currently moot because we convert back to an I420
+ // frame after decode, but eventually we will be able to plumb
+ // CVPixelBuffers directly to the renderer.
+ // TODO(tkchin): Maybe only set OpenGL/IOSurface keys if we know that we can
+ // pass CVPixelBuffers as native handles in decoder output.
+ NSDictionary *attributes = @{
+#if defined(WEBRTC_IOS) && (TARGET_OS_MACCATALYST || TARGET_OS_SIMULATOR)
+ (NSString *)kCVPixelBufferMetalCompatibilityKey : @(YES),
+#elif defined(WEBRTC_IOS)
+ (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @(YES),
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_ARCH_ARM64)
+ (NSString *)kCVPixelBufferOpenGLCompatibilityKey : @(YES),
+#endif
+#if !(TARGET_OS_SIMULATOR)
+ (NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
+#endif
+ (NSString *)
+ kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+ };
+
+ VTDecompressionOutputCallbackRecord record = {
+ decompressionOutputCallback, (__bridge void *)self,
+ };
+ OSStatus status = VTDecompressionSessionCreate(nullptr,
+ _videoFormat,
+ nullptr,
+ (__bridge CFDictionaryRef)attributes,
+ &record,
+ &_decompressionSession);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create decompression session: " << status;
+ [self destroyDecompressionSession];
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ [self configureDecompressionSession];
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (void)configureDecompressionSession {
+ RTC_DCHECK(_decompressionSession);
+#if defined(WEBRTC_IOS)
+ VTSessionSetProperty(_decompressionSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
+#endif
+}
+
+- (void)destroyDecompressionSession {
+ if (_decompressionSession) {
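+ // Drain any in-flight asynchronous decodes first so the output callback
+ // cannot fire after the session is invalidated.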
+#if defined(WEBRTC_IOS)
+ if ([UIDevice isIOS11OrLater]) {
+ VTDecompressionSessionWaitForAsynchronousFrames(_decompressionSession);
+ }
+#endif
+ VTDecompressionSessionInvalidate(_decompressionSession);
+ CFRelease(_decompressionSession);
+ _decompressionSession = nullptr;
+ }
+}
+
+- (void)setVideoFormat:(CMVideoFormatDescriptionRef)videoFormat {
+ if (_videoFormat == videoFormat) {
+ return;
+ }
+ if (_videoFormat) {
+ CFRelease(_videoFormat);
+ }
+ _videoFormat = videoFormat;
+ if (_videoFormat) {
+ CFRetain(_videoFormat);
+ }
+}
+
+- (NSString *)implementationName {
+ return @"VideoToolbox";
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h
new file mode 100644
index 0000000000..45fc4be2ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderFactory.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m
new file mode 100644
index 0000000000..9843849307
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoEncoderFactoryH264.h"
+
+#import "RTCH264ProfileLevelId.h"
+#import "RTCVideoEncoderH264.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264)
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
+ NSString *codecName = kRTCVideoCodecH264Name;
+
+ NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+ parameters:constrainedHighParams];
+ [codecs addObject:constrainedHighInfo];
+
+ NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+ parameters:constrainedBaselineParams];
+ [codecs addObject:constrainedBaselineInfo];
+
+ return [codecs copy];
+}
+
+- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h
new file mode 100644
index 0000000000..9f4f4c7c8d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCodecInfo.h"
+#import "RTCVideoEncoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoder)>
+
+- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
new file mode 100644
index 0000000000..7dbbfaf019
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
@@ -0,0 +1,819 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "RTCVideoEncoderH264.h"
+
+#import <VideoToolbox/VideoToolbox.h>
+#include <vector>
+
+#if defined(WEBRTC_IOS)
+#import "helpers/UIDevice+RTCDevice.h"
+#endif
+#import "RTCCodecSpecificInfoH264.h"
+#import "RTCH264ProfileLevelId.h"
+#import "api/peerconnection/RTCVideoCodecInfo+Private.h"
+#import "base/RTCCodecSpecificInfo.h"
+#import "base/RTCI420Buffer.h"
+#import "base/RTCVideoEncoder.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#import "helpers.h"
+
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "common_video/h264/h264_bitstream_parser.h"
+#include "common_video/include/bitrate_adjuster.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/objc/components/video_codec/nalu_rewriter.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+
+@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) ()
+
+- (void)frameWasEncoded:(OSStatus)status
+ flags:(VTEncodeInfoFlags)infoFlags
+ sampleBuffer:(CMSampleBufferRef)sampleBuffer
+ codecSpecificInfo:(id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
+ width:(int32_t)width
+ height:(int32_t)height
+ renderTimeMs:(int64_t)renderTimeMs
+ timestamp:(uint32_t)timestamp
+ rotation:(RTCVideoRotation)rotation;
+
+@end
+
+namespace { // anonymous namespace
+
+// The ratio between kVTCompressionPropertyKey_DataRateLimits and
+// kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher
+// than the average bit rate to avoid undershooting the target.
+const float kLimitToAverageBitRateFactor = 1.5f;
+// These thresholds deviate from the default H264 QP thresholds, as they
+// have been found to work better on devices that support VideoToolbox.
+const int kLowH264QpThreshold = 28;
+const int kHighH264QpThreshold = 39;
+
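+// NV12 (biplanar Y plus interleaved CbCr, full range) is the format we feed
+// to the compression session when the input is not a native pixel buffer.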
+const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
+
+// Struct that we pass to the encoder per frame to encode. We receive it again
+// in the encoder callback.
+struct RTCFrameEncodeParams {
+ RTCFrameEncodeParams(RTC_OBJC_TYPE(RTCVideoEncoderH264) * e,
+ RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * csi,
+ int32_t w,
+ int32_t h,
+ int64_t rtms,
+ uint32_t ts,
+ RTCVideoRotation r)
+ : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) {
+ if (csi) {
+ codecSpecificInfo = csi;
+ } else {
+ codecSpecificInfo = [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init];
+ }
+ }
+
+ RTC_OBJC_TYPE(RTCVideoEncoderH264) * encoder;
+ RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * codecSpecificInfo;
+ int32_t width;
+ int32_t height;
+ int64_t render_time_ms;
+ uint32_t timestamp;
+ RTCVideoRotation rotation;
+};
+
+// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
+// encoder. This performs the copy and format conversion.
+// TODO(tkchin): See if encoder will accept i420 frames and compare performance.
+bool CopyVideoFrameToNV12PixelBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frameBuffer,
+ CVPixelBufferRef pixelBuffer) {
+ RTC_DCHECK(pixelBuffer);
+ RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat);
+ RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
+ RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), frameBuffer.width);
+
+ CVReturn cvRet = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+ if (cvRet != kCVReturnSuccess) {
+ RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
+ return false;
+ }
+ uint8_t *dstY = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
+ int dstStrideY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
+ uint8_t *dstUV = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
+ int dstStrideUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
+ // Convert I420 to NV12.
+ int ret = libyuv::I420ToNV12(frameBuffer.dataY,
+ frameBuffer.strideY,
+ frameBuffer.dataU,
+ frameBuffer.strideU,
+ frameBuffer.dataV,
+ frameBuffer.strideV,
+ dstY,
+ dstStrideY,
+ dstUV,
+ dstStrideUV,
+ frameBuffer.width,
+ frameBuffer.height);
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+ if (ret) {
+ RTC_LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
+ return false;
+ }
+ return true;
+}
+
+CVPixelBufferRef CreatePixelBuffer(CVPixelBufferPoolRef pixel_buffer_pool) {
+ if (!pixel_buffer_pool) {
+ RTC_LOG(LS_ERROR) << "Failed to get pixel buffer pool.";
+ return nullptr;
+ }
+ CVPixelBufferRef pixel_buffer;
+ CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, &pixel_buffer);
+ if (ret != kCVReturnSuccess) {
+ RTC_LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
+ // We probably want to drop frames here, since failure probably means
+ // that the pool is empty.
+ return nullptr;
+ }
+ return pixel_buffer;
+}
+
+// This is the callback function that VideoToolbox calls when encode is
+// complete. From inspection this happens on its own queue.
+void compressionOutputCallback(void *encoder,
+ void *params,
+ OSStatus status,
+ VTEncodeInfoFlags infoFlags,
+ CMSampleBufferRef sampleBuffer) {
+ if (!params) {
+ // This can happen if there are pending callbacks when the encoder is destroyed.
+ return;
+ }
+ std::unique_ptr<RTCFrameEncodeParams> encodeParams(
+ reinterpret_cast<RTCFrameEncodeParams *>(params));
+ [encodeParams->encoder frameWasEncoded:status
+ flags:infoFlags
+ sampleBuffer:sampleBuffer
+ codecSpecificInfo:encodeParams->codecSpecificInfo
+ width:encodeParams->width
+ height:encodeParams->height
+ renderTimeMs:encodeParams->render_time_ms
+ timestamp:encodeParams->timestamp
+ rotation:encodeParams->rotation];
+}
+
+// Extract VideoToolbox profile out of the webrtc::SdpVideoFormat. If there is
+// no specific VideoToolbox profile for the specified level, AutoLevel will be
+// returned. The user must initialize the encoder with a resolution and
+// framerate conforming to the selected H264 level regardless.
+CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id) {
+ switch (profile_level_id.profile) {
+ case webrtc::H264Profile::kProfileConstrainedBaseline:
+ case webrtc::H264Profile::kProfileBaseline:
+ switch (profile_level_id.level) {
+ case webrtc::H264Level::kLevel3:
+ return kVTProfileLevel_H264_Baseline_3_0;
+ case webrtc::H264Level::kLevel3_1:
+ return kVTProfileLevel_H264_Baseline_3_1;
+ case webrtc::H264Level::kLevel3_2:
+ return kVTProfileLevel_H264_Baseline_3_2;
+ case webrtc::H264Level::kLevel4:
+ return kVTProfileLevel_H264_Baseline_4_0;
+ case webrtc::H264Level::kLevel4_1:
+ return kVTProfileLevel_H264_Baseline_4_1;
+ case webrtc::H264Level::kLevel4_2:
+ return kVTProfileLevel_H264_Baseline_4_2;
+ case webrtc::H264Level::kLevel5:
+ return kVTProfileLevel_H264_Baseline_5_0;
+ case webrtc::H264Level::kLevel5_1:
+ return kVTProfileLevel_H264_Baseline_5_1;
+ case webrtc::H264Level::kLevel5_2:
+ return kVTProfileLevel_H264_Baseline_5_2;
+ case webrtc::H264Level::kLevel1:
+ case webrtc::H264Level::kLevel1_b:
+ case webrtc::H264Level::kLevel1_1:
+ case webrtc::H264Level::kLevel1_2:
+ case webrtc::H264Level::kLevel1_3:
+ case webrtc::H264Level::kLevel2:
+ case webrtc::H264Level::kLevel2_1:
+ case webrtc::H264Level::kLevel2_2:
+ return kVTProfileLevel_H264_Baseline_AutoLevel;
+ }
+
+ case webrtc::H264Profile::kProfileMain:
+ switch (profile_level_id.level) {
+ case webrtc::H264Level::kLevel3:
+ return kVTProfileLevel_H264_Main_3_0;
+ case webrtc::H264Level::kLevel3_1:
+ return kVTProfileLevel_H264_Main_3_1;
+ case webrtc::H264Level::kLevel3_2:
+ return kVTProfileLevel_H264_Main_3_2;
+ case webrtc::H264Level::kLevel4:
+ return kVTProfileLevel_H264_Main_4_0;
+ case webrtc::H264Level::kLevel4_1:
+ return kVTProfileLevel_H264_Main_4_1;
+ case webrtc::H264Level::kLevel4_2:
+ return kVTProfileLevel_H264_Main_4_2;
+ case webrtc::H264Level::kLevel5:
+ return kVTProfileLevel_H264_Main_5_0;
+ case webrtc::H264Level::kLevel5_1:
+ return kVTProfileLevel_H264_Main_5_1;
+ case webrtc::H264Level::kLevel5_2:
+ return kVTProfileLevel_H264_Main_5_2;
+ case webrtc::H264Level::kLevel1:
+ case webrtc::H264Level::kLevel1_b:
+ case webrtc::H264Level::kLevel1_1:
+ case webrtc::H264Level::kLevel1_2:
+ case webrtc::H264Level::kLevel1_3:
+ case webrtc::H264Level::kLevel2:
+ case webrtc::H264Level::kLevel2_1:
+ case webrtc::H264Level::kLevel2_2:
+ return kVTProfileLevel_H264_Main_AutoLevel;
+ }
+
+ case webrtc::H264Profile::kProfileConstrainedHigh:
+ case webrtc::H264Profile::kProfileHigh:
+ case webrtc::H264Profile::kProfilePredictiveHigh444:
+ switch (profile_level_id.level) {
+ case webrtc::H264Level::kLevel3:
+ return kVTProfileLevel_H264_High_3_0;
+ case webrtc::H264Level::kLevel3_1:
+ return kVTProfileLevel_H264_High_3_1;
+ case webrtc::H264Level::kLevel3_2:
+ return kVTProfileLevel_H264_High_3_2;
+ case webrtc::H264Level::kLevel4:
+ return kVTProfileLevel_H264_High_4_0;
+ case webrtc::H264Level::kLevel4_1:
+ return kVTProfileLevel_H264_High_4_1;
+ case webrtc::H264Level::kLevel4_2:
+ return kVTProfileLevel_H264_High_4_2;
+ case webrtc::H264Level::kLevel5:
+ return kVTProfileLevel_H264_High_5_0;
+ case webrtc::H264Level::kLevel5_1:
+ return kVTProfileLevel_H264_High_5_1;
+ case webrtc::H264Level::kLevel5_2:
+ return kVTProfileLevel_H264_High_5_2;
+ case webrtc::H264Level::kLevel1:
+ case webrtc::H264Level::kLevel1_b:
+ case webrtc::H264Level::kLevel1_1:
+ case webrtc::H264Level::kLevel1_2:
+ case webrtc::H264Level::kLevel1_3:
+ case webrtc::H264Level::kLevel2:
+ case webrtc::H264Level::kLevel2_1:
+ case webrtc::H264Level::kLevel2_2:
+ return kVTProfileLevel_H264_High_AutoLevel;
+ }
+ }
+}
+
+// Returns the max allowed sample rate (pixels per second) that can be
+// processed by a given encoder with `profile_level_id`.
+// See https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-H.264-201610-S!!PDF-E&type=items
+// for details.
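+// The values are the spec's MaxMBPS limits multiplied by 256 (pixels per
+// 16x16 macroblock).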
+NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id) {
+ switch (profile_level_id.level) {
+ case webrtc::H264Level::kLevel3:
+ return 10368000;
+ case webrtc::H264Level::kLevel3_1:
+ return 27648000;
+ case webrtc::H264Level::kLevel3_2:
+ return 55296000;
+ case webrtc::H264Level::kLevel4:
+ case webrtc::H264Level::kLevel4_1:
+ return 62914560;
+ case webrtc::H264Level::kLevel4_2:
+ return 133693440;
+ case webrtc::H264Level::kLevel5:
+ return 150994944;
+ case webrtc::H264Level::kLevel5_1:
+ return 251658240;
+ case webrtc::H264Level::kLevel5_2:
+ return 530841600;
+ case webrtc::H264Level::kLevel1:
+ case webrtc::H264Level::kLevel1_b:
+ case webrtc::H264Level::kLevel1_1:
+ case webrtc::H264Level::kLevel1_2:
+ case webrtc::H264Level::kLevel1_3:
+ case webrtc::H264Level::kLevel2:
+ case webrtc::H264Level::kLevel2_1:
+ case webrtc::H264Level::kLevel2_2:
+ // Zero means auto rate setting.
+ return 0;
+ }
+}
+} // namespace
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo;
+ std::unique_ptr<webrtc::BitrateAdjuster> _bitrateAdjuster;
+ uint32_t _targetBitrateBps;
+ uint32_t _encoderBitrateBps;
+ uint32_t _encoderFrameRate;
+ uint32_t _maxAllowedFrameRate;
+ RTCH264PacketizationMode _packetizationMode;
+ absl::optional<webrtc::H264ProfileLevelId> _profile_level_id;
+ RTCVideoEncoderCallback _callback;
+ int32_t _width;
+ int32_t _height;
+ VTCompressionSessionRef _compressionSession;
+ CVPixelBufferPoolRef _pixelBufferPool;
+ RTCVideoCodecMode _mode;
+
+ webrtc::H264BitstreamParser _h264BitstreamParser;
+ std::vector<uint8_t> _frameScaleBuffer;
+}
+
+// .5 is set as a minimum to prevent overcompensating for large temporary
+// overshoots. We don't want to degrade video quality too badly.
+// .95 is set to prevent oscillations. When a lower bitrate is set on the
+// encoder than previously set, its output seems to have a brief period of
+// drastically reduced bitrate, so we want to avoid that. In steady state
+// conditions, 0.95 seems to give us better overall bitrate over long periods
+// of time.
+- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo {
+ if (self = [super init]) {
+ _codecInfo = codecInfo;
+ _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
+ _packetizationMode = RTCH264PacketizationModeNonInterleaved;
+ _profile_level_id =
+ webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters);
+ RTC_DCHECK(_profile_level_id);
+ RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(ExtractProfile(*_profile_level_id));
+ RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]);
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self destroyCompressionSession];
+}
+
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
+ numberOfCores:(int)numberOfCores {
+ RTC_DCHECK(settings);
+ RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]);
+
+ _width = settings.width;
+ _height = settings.height;
+ _mode = settings.mode;
+
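+ // Round the configured dimensions up to a multiple of 16 (the H.264
+ // macroblock size) before deriving the per-level frame rate cap below.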
+ uint32_t aligned_width = (((_width + 15) >> 4) << 4);
+ uint32_t aligned_height = (((_height + 15) >> 4) << 4);
+ _maxAllowedFrameRate = static_cast<uint32_t>(GetMaxSampleRate(*_profile_level_id) /
+ (aligned_width * aligned_height));
+
+ // We can only set average bitrate on the HW encoder.
+ _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps.
+ _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
+ _encoderFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate);
+ if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) {
+ RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate
+ << " is larger than the "
+ << "maximal allowed frame rate " << _maxAllowedFrameRate << ".";
+ }
+
+ // TODO(tkchin): Try setting payload size via
+ // kVTCompressionPropertyKey_MaxH264SliceBytes.
+
+ return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat];
+}
+
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
+ frameTypes:(NSArray<NSNumber *> *)frameTypes {
+ if (!_callback || !_compressionSession) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+ BOOL isKeyframeRequired = NO;
+
+ // Get a pixel buffer from the pool and copy frame data over.
+ if ([self resetCompressionSessionIfNeededWithFrame:frame]) {
+ isKeyframeRequired = YES;
+ }
+
+ CVPixelBufferRef pixelBuffer = nullptr;
+ if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ // Native frame buffer
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ if (![rtcPixelBuffer requiresCropping]) {
+ // This pixel buffer might have a higher resolution than what the
+ // compression session is configured to. The compression session can
+ // handle that and will output encoded frames in the configured
+ // resolution regardless of the input pixel buffer resolution.
+ pixelBuffer = rtcPixelBuffer.pixelBuffer;
+ CVBufferRetain(pixelBuffer);
+ } else {
+ // Cropping required, we need to crop and scale to a new pixel buffer.
+ pixelBuffer = CreatePixelBuffer(_pixelBufferPool);
+ if (!pixelBuffer) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ int dstWidth = CVPixelBufferGetWidth(pixelBuffer);
+ int dstHeight = CVPixelBufferGetHeight(pixelBuffer);
+ if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) {
+ int size =
+ [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight];
+ _frameScaleBuffer.resize(size);
+ } else {
+ _frameScaleBuffer.clear();
+ }
+ _frameScaleBuffer.shrink_to_fit();
+ if (![rtcPixelBuffer cropAndScaleTo:pixelBuffer withTempBuffer:_frameScaleBuffer.data()]) {
+ CVBufferRelease(pixelBuffer);
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+ }
+
+ if (!pixelBuffer) {
+ // We did not have a native frame buffer
+ RTC_DCHECK_EQ(frame.width, _width);
+ RTC_DCHECK_EQ(frame.height, _height);
+ pixelBuffer = CreatePixelBuffer(_pixelBufferPool);
+ if (!pixelBuffer) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ RTC_DCHECK(pixelBuffer);
+ if (!CopyVideoFrameToNV12PixelBuffer([frame.buffer toI420], pixelBuffer)) {
+ RTC_LOG(LS_ERROR) << "Failed to copy frame data.";
+ CVBufferRelease(pixelBuffer);
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+
+ // Check if we need a keyframe.
+ if (!isKeyframeRequired && frameTypes) {
+ for (NSNumber *frameType in frameTypes) {
+ if ((RTCFrameType)frameType.intValue == RTCFrameTypeVideoFrameKey) {
+ isKeyframeRequired = YES;
+ break;
+ }
+ }
+ }
+
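+ // Express the capture time as a CMTime with a millisecond timescale;
+ // frame.timeStampNs is in nanoseconds.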
+ CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000);
+ CFDictionaryRef frameProperties = nullptr;
+ if (isKeyframeRequired) {
+ CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
+ CFTypeRef values[] = {kCFBooleanTrue};
+ frameProperties = CreateCFTypeDictionary(keys, values, 1);
+ }
+
+ std::unique_ptr<RTCFrameEncodeParams> encodeParams;
+ encodeParams.reset(new RTCFrameEncodeParams(self,
+ codecSpecificInfo,
+ _width,
+ _height,
+ frame.timeStampNs / rtc::kNumNanosecsPerMillisec,
+ frame.timeStamp,
+ frame.rotation));
+ encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode;
+
+ // Update the bitrate if needed.
+ [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:_encoderFrameRate];
+
+ OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession,
+ pixelBuffer,
+ presentationTimeStamp,
+ kCMTimeInvalid,
+ frameProperties,
+ encodeParams.release(),
+ nullptr);
+ if (frameProperties) {
+ CFRelease(frameProperties);
+ }
+ if (pixelBuffer) {
+ CVBufferRelease(pixelBuffer);
+ }
+
+ if (status == kVTInvalidSessionErr) {
+ // This error occurs when entering foreground after backgrounding the app.
+ RTC_LOG(LS_ERROR) << "Invalid compression session, resetting.";
+ [self resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
+
+ return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+ } else if (status == kVTVideoEncoderMalfunctionErr) {
+ // Sometimes the encoder malfunctions and needs to be restarted.
+ RTC_LOG(LS_ERROR)
+ << "Encountered video encoder malfunction error. Resetting compression session.";
+ [self resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
+
+ return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+ } else if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to encode frame with code: " << status;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (void)setCallback:(RTCVideoEncoderCallback)callback {
+ _callback = callback;
+}
+
+- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
+ _targetBitrateBps = 1000 * bitrateKbit;
+ _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
+ if (framerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) {
+ RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate << " is larger than the "
+ << "maximal allowed frame rate " << _maxAllowedFrameRate << ".";
+ }
+ framerate = MIN(framerate, _maxAllowedFrameRate);
+ [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:framerate];
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (NSInteger)resolutionAlignment {
+ return 1;
+}
+
+- (BOOL)applyAlignmentToAllSimulcastLayers {
+ return NO;
+}
+
+- (BOOL)supportsNativeHandle {
+ return YES;
+}
+
+#pragma mark - Private
+
+- (NSInteger)releaseEncoder {
+ // Need to destroy so that the session is invalidated and won't use the
+ // callback anymore. Do not remove callback until the session is invalidated
+ // since async encoder callbacks can occur until invalidation.
+ [self destroyCompressionSession];
+ _callback = nullptr;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (OSType)pixelFormatOfFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ // Use NV12 for non-native frames.
+ if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ return CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
+ }
+
+ return kNV12PixelFormat;
+}
+
+- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ BOOL resetCompressionSession = NO;
+
+ // If we're capturing native frames in another pixel format than the compression session is
+ // configured with, make sure the compression session is reset using the correct pixel format.
+ OSType framePixelFormat = [self pixelFormatOfFrame:frame];
+
+ if (_compressionSession) {
+ // The pool attribute `kCVPixelBufferPixelFormatTypeKey` can contain either an array of pixel
+ // formats or a single pixel format.
+ NSDictionary *poolAttributes =
+ (__bridge NSDictionary *)CVPixelBufferPoolGetPixelBufferAttributes(_pixelBufferPool);
+ id pixelFormats =
+ [poolAttributes objectForKey:(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey];
+ NSArray<NSNumber *> *compressionSessionPixelFormats = nil;
+ if ([pixelFormats isKindOfClass:[NSArray class]]) {
+ compressionSessionPixelFormats = (NSArray *)pixelFormats;
+ } else if ([pixelFormats isKindOfClass:[NSNumber class]]) {
+ compressionSessionPixelFormats = @[ (NSNumber *)pixelFormats ];
+ }
+
+ if (![compressionSessionPixelFormats
+ containsObject:[NSNumber numberWithLong:framePixelFormat]]) {
+ resetCompressionSession = YES;
+ RTC_LOG(LS_INFO) << "Resetting compression session due to non-matching pixel format.";
+ }
+ } else {
+ resetCompressionSession = YES;
+ }
+
+ if (resetCompressionSession) {
+ [self resetCompressionSessionWithPixelFormat:framePixelFormat];
+ }
+ return resetCompressionSession;
+}
+
+- (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat {
+ [self destroyCompressionSession];
+
+ // Set source image buffer attributes. These attributes will be present on
+ // buffers retrieved from the encoder's pixel buffer pool.
+ NSDictionary *sourceAttributes = @{
+#if defined(WEBRTC_IOS) && (TARGET_OS_MACCATALYST || TARGET_OS_SIMULATOR)
+ (NSString *)kCVPixelBufferMetalCompatibilityKey : @(YES),
+#elif defined(WEBRTC_IOS)
+ (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @(YES),
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_ARCH_ARM64)
+ (NSString *)kCVPixelBufferOpenGLCompatibilityKey : @(YES),
+#endif
+ (NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
+ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(framePixelFormat),
+ };
+
+ NSDictionary *encoder_specs;
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ // Currently HW acceleration is supported above 360p on Mac; below 360p the
+ // compression session will be created with HW acceleration disabled.
+ encoder_specs = @{
+ (NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES),
+ };
+
+#endif
+ OSStatus status = VTCompressionSessionCreate(
+ nullptr, // use default allocator
+ _width,
+ _height,
+ kCMVideoCodecType_H264,
+ (__bridge CFDictionaryRef)encoder_specs, // use hardware accelerated encoder if available
+ (__bridge CFDictionaryRef)sourceAttributes,
+ nullptr, // use default compressed data allocator
+ compressionOutputCallback,
+ nullptr,
+ &_compressionSession);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create compression session: " << status;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ CFBooleanRef hwaccl_enabled = nullptr;
+ status = VTSessionCopyProperty(_compressionSession,
+ kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder,
+ nullptr,
+ &hwaccl_enabled);
+ if (status == noErr && (CFBooleanGetValue(hwaccl_enabled))) {
+ RTC_LOG(LS_INFO) << "Compression session created with hw accl enabled";
+ } else {
+ RTC_LOG(LS_INFO) << "Compression session created with hw accl disabled";
+ }
+#endif
+ [self configureCompressionSession];
+
+ // The pixel buffer pool is dependent on the compression session so if the session is reset, the
+ // pool should be reset as well.
+ _pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession);
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (void)configureCompressionSession {
+ RTC_DCHECK(_compressionSession);
+ SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true);
+ SetVTSessionProperty(_compressionSession,
+ kVTCompressionPropertyKey_ProfileLevel,
+ ExtractProfile(*_profile_level_id));
+ SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false);
+ [self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate];
+ // TODO(tkchin): Look at entropy mode and colorspace matrices.
+ // TODO(tkchin): Investigate to see if there's any way to make this work.
+ // May need it to interop with Android. Currently this call just fails.
+ // On inspecting encoder output on iOS8, this value is set to 6.
+ // internal::SetVTSessionProperty(compression_session_,
+ // kVTCompressionPropertyKey_MaxFrameDelayCount,
+ // 1);
+
+ // Set a relatively large keyframe interval: at most one forced keyframe every
+ // 7200 frames (4 minutes at 30 fps) or every 240 seconds, whichever comes first.
+ SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
+ SetVTSessionProperty(
+ _compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, 240);
+}
+
+- (void)destroyCompressionSession {
+ if (_compressionSession) {
+ VTCompressionSessionInvalidate(_compressionSession);
+ CFRelease(_compressionSession);
+ _compressionSession = nullptr;
+ _pixelBufferPool = nullptr;
+ }
+}
+
+- (NSString *)implementationName {
+ return @"VideoToolbox";
+}
+
+- (void)setBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate {
+ if (_encoderBitrateBps != bitrateBps || _encoderFrameRate != frameRate) {
+ [self setEncoderBitrateBps:bitrateBps frameRate:frameRate];
+ }
+}
+
+- (void)setEncoderBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate {
+ if (_compressionSession) {
+ SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps);
+
+ // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection.
+ if (_maxAllowedFrameRate > 0) {
+ SetVTSessionProperty(
+ _compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, frameRate);
+ }
+
+ // TODO(tkchin): Add a helper method to set array value.
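+ // kVTCompressionPropertyKey_DataRateLimits takes (byte count, seconds)
+ // pairs; this single pair caps output at 1.5x the average bit rate over any
+ // one-second window.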
+ int64_t dataLimitBytesPerSecondValue =
+ static_cast<int64_t>(bitrateBps * kLimitToAverageBitRateFactor / 8);
+ CFNumberRef bytesPerSecond =
+ CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue);
+ int64_t oneSecondValue = 1;
+ CFNumberRef oneSecond =
+ CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue);
+ const void *nums[2] = {bytesPerSecond, oneSecond};
+ CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
+ OSStatus status = VTSessionSetProperty(
+ _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits);
+ if (bytesPerSecond) {
+ CFRelease(bytesPerSecond);
+ }
+ if (oneSecond) {
+ CFRelease(oneSecond);
+ }
+ if (dataRateLimits) {
+ CFRelease(dataRateLimits);
+ }
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status;
+ }
+
+ _encoderBitrateBps = bitrateBps;
+ _encoderFrameRate = frameRate;
+ }
+}
+
+- (void)frameWasEncoded:(OSStatus)status
+ flags:(VTEncodeInfoFlags)infoFlags
+ sampleBuffer:(CMSampleBufferRef)sampleBuffer
+ codecSpecificInfo:(id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
+ width:(int32_t)width
+ height:(int32_t)height
+ renderTimeMs:(int64_t)renderTimeMs
+ timestamp:(uint32_t)timestamp
+ rotation:(RTCVideoRotation)rotation {
+ RTCVideoEncoderCallback callback = _callback;
+ if (!callback) {
+ return;
+ }
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "H264 encode failed with code: " << status;
+ return;
+ }
+ if (infoFlags & kVTEncodeInfo_FrameDropped) {
+ RTC_LOG(LS_INFO) << "H264 encode dropped frame.";
+ return;
+ }
+
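+ // A sample is a keyframe unless it carries the
+ // kCMSampleAttachmentKey_NotSync attachment.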
+ BOOL isKeyframe = NO;
+ CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
+ if (attachments != nullptr && CFArrayGetCount(attachments)) {
+ CFDictionaryRef attachment =
+ static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0));
+ isKeyframe = !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
+ }
+
+ if (isKeyframe) {
+ RTC_LOG(LS_INFO) << "Generated keyframe";
+ }
+
+ __block std::unique_ptr<rtc::Buffer> buffer = std::make_unique<rtc::Buffer>();
+ if (!webrtc::H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) {
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCEncodedImage) *frame = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
+ // The NSData assumes ownership of `buffer`; the deallocator block frees it
+ // when done.
+ frame.buffer = [[NSData alloc] initWithBytesNoCopy:buffer->data()
+ length:buffer->size()
+ deallocator:^(void *bytes, NSUInteger size) {
+ buffer.reset();
+ }];
+ frame.encodedWidth = width;
+ frame.encodedHeight = height;
+ frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta;
+ frame.captureTimeMs = renderTimeMs;
+ frame.timeStamp = timestamp;
+ frame.rotation = rotation;
+ frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
+ RTCVideoContentTypeUnspecified;
+ frame.flags = webrtc::VideoSendTiming::kInvalid;
+
+ _h264BitstreamParser.ParseBitstream(*buffer);
+ frame.qp = @(_h264BitstreamParser.GetLastSliceQp().value_or(-1));
+
+ BOOL res = callback(frame, codecSpecificInfo);
+ if (!res) {
+ RTC_LOG(LS_ERROR) << "Encode callback failed";
+ return;
+ }
+ _bitrateAdjuster->Update(frame.buffer.length);
+}
+
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
+ return [[RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) alloc]
+ initWithThresholdsLow:kLowH264QpThreshold
+ high:kHighH264QpThreshold];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.h b/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.h
new file mode 100644
index 0000000000..a51debb9fa
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.h
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#include "api/video_codecs/h264_profile_level_id.h"
+
+@interface UIDevice (H264Profile)
+
++ (absl::optional<webrtc::H264ProfileLevelId>)maxSupportedH264Profile;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.mm
new file mode 100644
index 0000000000..0ef6a8d77c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.mm
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "UIDevice+H264Profile.h"
+#import "helpers/UIDevice+RTCDevice.h"
+
+#include <algorithm>
+
+namespace {
+
+using namespace webrtc;
+
+struct SupportedH264Profile {
+ const RTCDeviceType deviceType;
+ const H264ProfileLevelId profile;
+};
+
+constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
+ // iPhones with at least iOS 9
+ {RTCDeviceTypeIPhone13ProMax,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP848
+ {RTCDeviceTypeIPhone13Pro,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP852
+ {RTCDeviceTypeIPhone13,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP851
+ {RTCDeviceTypeIPhone13Mini,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP847
+ {RTCDeviceTypeIPhoneSE2Gen,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP820
+ {RTCDeviceTypeIPhone12ProMax,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP832
+ {RTCDeviceTypeIPhone12Pro,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP831
+ {RTCDeviceTypeIPhone12,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP830
+ {RTCDeviceTypeIPhone12Mini,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP829
+ {RTCDeviceTypeIPhone11ProMax,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP806
+ {RTCDeviceTypeIPhone11Pro,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP805
+ {RTCDeviceTypeIPhone11,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP804
+ {RTCDeviceTypeIPhoneXS,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP779
+ {RTCDeviceTypeIPhoneXSMax,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP780
+ {RTCDeviceTypeIPhoneXR,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP781
+ {RTCDeviceTypeIPhoneX,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP770
+ {RTCDeviceTypeIPhone8,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP767
+ {RTCDeviceTypeIPhone8Plus,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP768
+ {RTCDeviceTypeIPhone7,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_1}}, // https://support.apple.com/kb/SP743
+ {RTCDeviceTypeIPhone7Plus,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_1}}, // https://support.apple.com/kb/SP744
+ {RTCDeviceTypeIPhoneSE,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP738
+ {RTCDeviceTypeIPhone6S,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP726
+ {RTCDeviceTypeIPhone6SPlus,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP727
+ {RTCDeviceTypeIPhone6,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP705
+ {RTCDeviceTypeIPhone6Plus,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP706
+ {RTCDeviceTypeIPhone5SGSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP685
+ {RTCDeviceTypeIPhone5SGSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP685
+ {RTCDeviceTypeIPhone5GSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP655
+ {RTCDeviceTypeIPhone5GSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP655
+ {RTCDeviceTypeIPhone5CGSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP684
+ {RTCDeviceTypeIPhone5CGSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP684
+ {RTCDeviceTypeIPhone4S,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP643
+
+ // iPods with at least iOS 9
+ {RTCDeviceTypeIPodTouch7G,
+ {H264Profile::kProfileMain, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP796
+ {RTCDeviceTypeIPodTouch6G,
+ {H264Profile::kProfileMain, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP720
+ {RTCDeviceTypeIPodTouch5G,
+ {H264Profile::kProfileMain, H264Level::kLevel3_1}}, // https://support.apple.com/kb/SP657
+
+ // iPads with at least iOS 9
+ {RTCDeviceTypeIPadAir4Gen,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP828
+ {RTCDeviceTypeIPad8,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP822
+ {RTCDeviceTypeIPadPro4Gen12Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP815
+ {RTCDeviceTypeIPadPro4Gen11Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP814
+ {RTCDeviceTypeIPadAir3Gen,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP787
+ {RTCDeviceTypeIPadMini5Gen,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP788
+ {RTCDeviceTypeIPadPro3Gen12Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP785
+ {RTCDeviceTypeIPadPro3Gen11Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP784
+ {RTCDeviceTypeIPad7Gen10Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP807
+ {RTCDeviceTypeIPad2Wifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP622
+ {RTCDeviceTypeIPad2GSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP622
+ {RTCDeviceTypeIPad2CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP622
+ {RTCDeviceTypeIPad2Wifi2,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP622
+ {RTCDeviceTypeIPadMiniWifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP661
+ {RTCDeviceTypeIPadMiniGSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP661
+ {RTCDeviceTypeIPadMiniGSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP661
+ {RTCDeviceTypeIPad3Wifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP647
+ {RTCDeviceTypeIPad3GSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP647
+ {RTCDeviceTypeIPad3GSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP647
+ {RTCDeviceTypeIPad4Wifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP662
+ {RTCDeviceTypeIPad4GSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP662
+ {RTCDeviceTypeIPad4GSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP662
+ {RTCDeviceTypeIPad5,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP751
+ {RTCDeviceTypeIPad6,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP774
+ {RTCDeviceTypeIPadAirWifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP692
+ {RTCDeviceTypeIPadAirCellular,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP692
+ {RTCDeviceTypeIPadAirWifiCellular,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP692
+ {RTCDeviceTypeIPadAir2,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP708
+ {RTCDeviceTypeIPadMini2GWifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP693
+ {RTCDeviceTypeIPadMini2GCellular,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP693
+ {RTCDeviceTypeIPadMini2GWifiCellular,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP693
+ {RTCDeviceTypeIPadMini3,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP709
+ {RTCDeviceTypeIPadMini4,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP725
+ {RTCDeviceTypeIPadPro9Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP739
+ {RTCDeviceTypeIPadPro12Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/sp723
+ {RTCDeviceTypeIPadPro12Inch2,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP761
+ {RTCDeviceTypeIPadPro10Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP762
+ {RTCDeviceTypeIPadMini6,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP850
+ {RTCDeviceTypeIPad9,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP849
+ {RTCDeviceTypeIPadPro5Gen12Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP844
+ {RTCDeviceTypeIPadPro5Gen11Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP843
+};
+
+absl::optional<H264ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
+ const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles),
+ std::end(kH264MaxSupportedProfiles),
+ [deviceType](const SupportedH264Profile& supportedProfile) {
+ return supportedProfile.deviceType == deviceType;
+ });
+ if (result != std::end(kH264MaxSupportedProfiles)) {
+ return result->profile;
+ }
+ return absl::nullopt;
+}
+
+} // namespace
+
+@implementation UIDevice (H264Profile)
+
++ (absl::optional<webrtc::H264ProfileLevelId>)maxSupportedH264Profile {
+ return FindMaxSupportedProfileForDevice([self deviceType]);
+}
+
+@end
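
A minimal consumption sketch for the category above, e.g. when deciding which H.264 profile to advertise; the kProfileHigh/kLevel5_1 gate shown is illustrative, not the encoder's actual policy:

    // Sketch: gate a High-profile offer on the device's H.264 ceiling.
    absl::optional<webrtc::H264ProfileLevelId> maxProfile =
        [UIDevice maxSupportedH264Profile];
    if (maxProfile && maxProfile->profile == webrtc::H264Profile::kProfileHigh &&
        maxProfile->level >= webrtc::H264Level::kLevel5_1) {
      // This device can handle High profile at level 5.1 or above.
    }
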
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.cc b/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.cc
new file mode 100644
index 0000000000..ac957f1b49
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.cc
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "helpers.h"
+
+#include <string>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+// Copies characters from a CFStringRef into a std::string.
+std::string CFStringToString(const CFStringRef cf_string) {
+ RTC_DCHECK(cf_string);
+ std::string std_string;
+  // Get the size needed for UTF-8 plus the terminating character.
+ size_t buffer_size =
+ CFStringGetMaximumSizeForEncoding(CFStringGetLength(cf_string),
+ kCFStringEncodingUTF8) +
+ 1;
+ std::unique_ptr<char[]> buffer(new char[buffer_size]);
+ if (CFStringGetCString(cf_string, buffer.get(), buffer_size,
+ kCFStringEncodingUTF8)) {
+ // Copy over the characters.
+ std_string.assign(buffer.get());
+ }
+ return std_string;
+}
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ int32_t value) {
+ CFNumberRef cfNum =
+ CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &value);
+ OSStatus status = VTSessionSetProperty(session, key, cfNum);
+ CFRelease(cfNum);
+ if (status != noErr) {
+ std::string key_string = CFStringToString(key);
+ RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+ << " to " << value << ": " << status;
+ }
+}
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ uint32_t value) {
+ int64_t value_64 = value;
+ CFNumberRef cfNum =
+ CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &value_64);
+ OSStatus status = VTSessionSetProperty(session, key, cfNum);
+ CFRelease(cfNum);
+ if (status != noErr) {
+ std::string key_string = CFStringToString(key);
+ RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+ << " to " << value << ": " << status;
+ }
+}
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value) {
+ CFBooleanRef cf_bool = (value) ? kCFBooleanTrue : kCFBooleanFalse;
+ OSStatus status = VTSessionSetProperty(session, key, cf_bool);
+ if (status != noErr) {
+ std::string key_string = CFStringToString(key);
+ RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+ << " to " << value << ": " << status;
+ }
+}
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ CFStringRef value) {
+ OSStatus status = VTSessionSetProperty(session, key, value);
+ if (status != noErr) {
+ std::string key_string = CFStringToString(key);
+ std::string val_string = CFStringToString(value);
+ RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+ << " to " << val_string << ": " << status;
+ }
+}
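
A short usage sketch for the overloads above, assuming a live VTCompressionSessionRef named `session`; the property keys are standard VideoToolbox constants and the values are illustrative:

    // Sketch: configuring an encoder session via the helpers above.
    SetVTSessionProperty(session, kVTCompressionPropertyKey_RealTime, true);
    SetVTSessionProperty(session, kVTCompressionPropertyKey_AverageBitRate,
                         static_cast<uint32_t>(500000));
    SetVTSessionProperty(session, kVTCompressionPropertyKey_ExpectedFrameRate,
                         static_cast<int32_t>(30));
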
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.h b/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.h
new file mode 100644
index 0000000000..7c9ef1cd87
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
+#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <VideoToolbox/VideoToolbox.h>
+#include <string>
+
+// Convenience function for creating a dictionary.
+inline CFDictionaryRef CreateCFTypeDictionary(CFTypeRef* keys,
+ CFTypeRef* values,
+ size_t size) {
+ return CFDictionaryCreate(kCFAllocatorDefault, keys, values, size,
+ &kCFTypeDictionaryKeyCallBacks,
+ &kCFTypeDictionaryValueCallBacks);
+}
+
+// Copies characters from a CFStringRef into a std::string.
+std::string CFStringToString(CFStringRef cf_string);
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session, CFStringRef key, int32_t value);
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ uint32_t value);
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value);
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ CFStringRef value);
+
+#endif // SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
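
For CreateCFTypeDictionary, a sketch building a one-entry pixel-buffer attribute dictionary; the NV12 format choice is illustrative and <CoreVideo/CoreVideo.h> is assumed to be included:

    // Sketch: a one-entry attribute dictionary for CVPixelBufferCreate().
    int64_t pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
    CFNumberRef formatNumber =
        CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &pixelFormat);
    CFTypeRef keys[] = {kCVPixelBufferPixelFormatTypeKey};
    CFTypeRef values[] = {formatNumber};
    CFDictionaryRef attributes = CreateCFTypeDictionary(keys, values, 1);
    // ... pass `attributes` to CVPixelBufferCreate(), then release both.
    CFRelease(formatNumber);
    CFRelease(attributes);
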
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.cc b/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.cc
new file mode 100644
index 0000000000..b7330e1f9c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.cc
@@ -0,0 +1,327 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "sdk/objc/components/video_codec/nalu_rewriter.h"
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <memory>
+#include <vector>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+using H264::kAud;
+using H264::kSps;
+using H264::NaluIndex;
+using H264::NaluType;
+using H264::ParseNaluType;
+
+const char kAnnexBHeaderBytes[4] = {0, 0, 0, 1};
+const size_t kAvccHeaderByteSize = sizeof(uint32_t);
+
+bool H264CMSampleBufferToAnnexBBuffer(CMSampleBufferRef avcc_sample_buffer,
+ bool is_keyframe,
+ rtc::Buffer* annexb_buffer) {
+ RTC_DCHECK(avcc_sample_buffer);
+
+ // Get format description from the sample buffer.
+ CMVideoFormatDescriptionRef description =
+ CMSampleBufferGetFormatDescription(avcc_sample_buffer);
+ if (description == nullptr) {
+ RTC_LOG(LS_ERROR) << "Failed to get sample buffer's description.";
+ return false;
+ }
+
+ // Get parameter set information.
+ int nalu_header_size = 0;
+ size_t param_set_count = 0;
+ OSStatus status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
+ description, 0, nullptr, nullptr, &param_set_count, &nalu_header_size);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to get parameter set.";
+ return false;
+ }
+ RTC_CHECK_EQ(nalu_header_size, kAvccHeaderByteSize);
+ RTC_DCHECK_EQ(param_set_count, 2);
+
+ // Truncate any previous data in the buffer without changing its capacity.
+ annexb_buffer->SetSize(0);
+
+ // Place all parameter sets at the front of buffer.
+ if (is_keyframe) {
+ size_t param_set_size = 0;
+ const uint8_t* param_set = nullptr;
+ for (size_t i = 0; i < param_set_count; ++i) {
+ status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
+ description, i, &param_set, &param_set_size, nullptr, nullptr);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to get parameter set.";
+ return false;
+ }
+ // Update buffer.
+ annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes));
+ annexb_buffer->AppendData(reinterpret_cast<const char*>(param_set),
+ param_set_size);
+ }
+ }
+
+ // Get block buffer from the sample buffer.
+ CMBlockBufferRef block_buffer =
+ CMSampleBufferGetDataBuffer(avcc_sample_buffer);
+ if (block_buffer == nullptr) {
+ RTC_LOG(LS_ERROR) << "Failed to get sample buffer's block buffer.";
+ return false;
+ }
+ CMBlockBufferRef contiguous_buffer = nullptr;
+ // Make sure block buffer is contiguous.
+ if (!CMBlockBufferIsRangeContiguous(block_buffer, 0, 0)) {
+ status = CMBlockBufferCreateContiguous(
+ nullptr, block_buffer, nullptr, nullptr, 0, 0, 0, &contiguous_buffer);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
+ << status;
+ return false;
+ }
+ } else {
+ contiguous_buffer = block_buffer;
+ // Retain to make cleanup easier.
+ CFRetain(contiguous_buffer);
+ block_buffer = nullptr;
+ }
+
+ // Now copy the actual data.
+ char* data_ptr = nullptr;
+ size_t block_buffer_size = CMBlockBufferGetDataLength(contiguous_buffer);
+ status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr, nullptr,
+ &data_ptr);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to get block buffer data.";
+ CFRelease(contiguous_buffer);
+ return false;
+ }
+ size_t bytes_remaining = block_buffer_size;
+ while (bytes_remaining > 0) {
+    // The size type here must match `nalu_header_size`; we expect 4 bytes.
+ // Read the length of the next packet of data. Must convert from big endian
+ // to host endian.
+ RTC_DCHECK_GE(bytes_remaining, (size_t)nalu_header_size);
+ uint32_t* uint32_data_ptr = reinterpret_cast<uint32_t*>(data_ptr);
+ uint32_t packet_size = CFSwapInt32BigToHost(*uint32_data_ptr);
+ // Update buffer.
+ annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes));
+ annexb_buffer->AppendData(data_ptr + nalu_header_size, packet_size);
+
+ size_t bytes_written = packet_size + sizeof(kAnnexBHeaderBytes);
+ bytes_remaining -= bytes_written;
+ data_ptr += bytes_written;
+ }
+ RTC_DCHECK_EQ(bytes_remaining, (size_t)0);
+
+ CFRelease(contiguous_buffer);
+ return true;
+}
+
+bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size,
+ CMVideoFormatDescriptionRef video_format,
+ CMSampleBufferRef* out_sample_buffer,
+ CMMemoryPoolRef memory_pool) {
+ RTC_DCHECK(annexb_buffer);
+ RTC_DCHECK(out_sample_buffer);
+ RTC_DCHECK(video_format);
+ *out_sample_buffer = nullptr;
+
+ AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size);
+ if (reader.SeekToNextNaluOfType(kSps)) {
+ // Buffer contains an SPS NALU - skip it and the following PPS
+ const uint8_t* data;
+ size_t data_len;
+ if (!reader.ReadNalu(&data, &data_len)) {
+ RTC_LOG(LS_ERROR) << "Failed to read SPS";
+ return false;
+ }
+ if (!reader.ReadNalu(&data, &data_len)) {
+ RTC_LOG(LS_ERROR) << "Failed to read PPS";
+ return false;
+ }
+ } else {
+ // No SPS NALU - start reading from the first NALU in the buffer
+ reader.SeekToStart();
+ }
+
+ // Allocate memory as a block buffer.
+ CMBlockBufferRef block_buffer = nullptr;
+ CFAllocatorRef block_allocator = CMMemoryPoolGetAllocator(memory_pool);
+ OSStatus status = CMBlockBufferCreateWithMemoryBlock(
+ kCFAllocatorDefault, nullptr, reader.BytesRemaining(), block_allocator,
+ nullptr, 0, reader.BytesRemaining(), kCMBlockBufferAssureMemoryNowFlag,
+ &block_buffer);
+ if (status != kCMBlockBufferNoErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create block buffer.";
+ return false;
+ }
+
+ // Make sure block buffer is contiguous.
+ CMBlockBufferRef contiguous_buffer = nullptr;
+ if (!CMBlockBufferIsRangeContiguous(block_buffer, 0, 0)) {
+ status = CMBlockBufferCreateContiguous(kCFAllocatorDefault, block_buffer,
+ block_allocator, nullptr, 0, 0, 0,
+ &contiguous_buffer);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
+ << status;
+ CFRelease(block_buffer);
+ return false;
+ }
+ } else {
+ contiguous_buffer = block_buffer;
+ block_buffer = nullptr;
+ }
+
+ // Get a raw pointer into allocated memory.
+ size_t block_buffer_size = 0;
+ char* data_ptr = nullptr;
+ status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr,
+ &block_buffer_size, &data_ptr);
+ if (status != kCMBlockBufferNoErr) {
+ RTC_LOG(LS_ERROR) << "Failed to get block buffer data pointer.";
+ CFRelease(contiguous_buffer);
+ return false;
+ }
+ RTC_DCHECK(block_buffer_size == reader.BytesRemaining());
+
+ // Write Avcc NALUs into block buffer memory.
+ AvccBufferWriter writer(reinterpret_cast<uint8_t*>(data_ptr),
+ block_buffer_size);
+ while (reader.BytesRemaining() > 0) {
+ const uint8_t* nalu_data_ptr = nullptr;
+ size_t nalu_data_size = 0;
+ if (reader.ReadNalu(&nalu_data_ptr, &nalu_data_size)) {
+ writer.WriteNalu(nalu_data_ptr, nalu_data_size);
+ }
+ }
+
+ // Create sample buffer.
+ status = CMSampleBufferCreate(kCFAllocatorDefault, contiguous_buffer, true,
+ nullptr, nullptr, video_format, 1, 0, nullptr,
+ 0, nullptr, out_sample_buffer);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create sample buffer.";
+ CFRelease(contiguous_buffer);
+ return false;
+ }
+ CFRelease(contiguous_buffer);
+ return true;
+}
+
+CMVideoFormatDescriptionRef CreateVideoFormatDescription(
+ const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size) {
+ const uint8_t* param_set_ptrs[2] = {};
+ size_t param_set_sizes[2] = {};
+ AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size);
+  // Skip everything before the SPS, then read the SPS and PPS.
+ if (!reader.SeekToNextNaluOfType(kSps)) {
+ return nullptr;
+ }
+ if (!reader.ReadNalu(&param_set_ptrs[0], &param_set_sizes[0])) {
+ RTC_LOG(LS_ERROR) << "Failed to read SPS";
+ return nullptr;
+ }
+ if (!reader.ReadNalu(&param_set_ptrs[1], &param_set_sizes[1])) {
+ RTC_LOG(LS_ERROR) << "Failed to read PPS";
+ return nullptr;
+ }
+
+ // Parse the SPS and PPS into a CMVideoFormatDescription.
+ CMVideoFormatDescriptionRef description = nullptr;
+ OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
+ kCFAllocatorDefault, 2, param_set_ptrs, param_set_sizes, 4, &description);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create video format description.";
+ return nullptr;
+ }
+ return description;
+}
+
+AnnexBBufferReader::AnnexBBufferReader(const uint8_t* annexb_buffer,
+ size_t length)
+ : start_(annexb_buffer), length_(length) {
+ RTC_DCHECK(annexb_buffer);
+ offsets_ = H264::FindNaluIndices(annexb_buffer, length);
+ offset_ = offsets_.begin();
+}
+
+AnnexBBufferReader::~AnnexBBufferReader() = default;
+
+bool AnnexBBufferReader::ReadNalu(const uint8_t** out_nalu,
+ size_t* out_length) {
+ RTC_DCHECK(out_nalu);
+ RTC_DCHECK(out_length);
+ *out_nalu = nullptr;
+ *out_length = 0;
+
+ if (offset_ == offsets_.end()) {
+ return false;
+ }
+ *out_nalu = start_ + offset_->payload_start_offset;
+ *out_length = offset_->payload_size;
+ ++offset_;
+ return true;
+}
+
+size_t AnnexBBufferReader::BytesRemaining() const {
+ if (offset_ == offsets_.end()) {
+ return 0;
+ }
+ return length_ - offset_->start_offset;
+}
+
+void AnnexBBufferReader::SeekToStart() {
+ offset_ = offsets_.begin();
+}
+
+bool AnnexBBufferReader::SeekToNextNaluOfType(NaluType type) {
+ for (; offset_ != offsets_.end(); ++offset_) {
+ if (offset_->payload_size < 1)
+ continue;
+ if (ParseNaluType(*(start_ + offset_->payload_start_offset)) == type)
+ return true;
+ }
+ return false;
+}
+
+AvccBufferWriter::AvccBufferWriter(uint8_t* const avcc_buffer, size_t length)
+ : start_(avcc_buffer), offset_(0), length_(length) {
+ RTC_DCHECK(avcc_buffer);
+}
+
+bool AvccBufferWriter::WriteNalu(const uint8_t* data, size_t data_size) {
+ // Check if we can write this length of data.
+ if (data_size + kAvccHeaderByteSize > BytesRemaining()) {
+ return false;
+ }
+ // Write length header, which needs to be big endian.
+ uint32_t big_endian_length = CFSwapInt32HostToBig(data_size);
+ memcpy(start_ + offset_, &big_endian_length, sizeof(big_endian_length));
+ offset_ += sizeof(big_endian_length);
+ // Write data.
+ memcpy(start_ + offset_, data, data_size);
+ offset_ += data_size;
+ return true;
+}
+
+size_t AvccBufferWriter::BytesRemaining() const {
+ return length_ - offset_;
+}
+
+} // namespace webrtc
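
A sketch of the decoder-side flow these functions support; `payload`, `payload_size`, and `pool` (created with CMMemoryPoolCreate) are assumed:

    // Sketch: in-band SPS/PPS -> format description -> CMSampleBuffer.
    CMVideoFormatDescriptionRef format =
        webrtc::CreateVideoFormatDescription(payload, payload_size);
    if (format) {
      CMSampleBufferRef sample = nullptr;
      if (webrtc::H264AnnexBBufferToCMSampleBuffer(
              payload, payload_size, format, &sample, pool)) {
        // Feed `sample` to VTDecompressionSessionDecodeFrame(); the caller
        // owns the created sample buffer and must release it.
        CFRelease(sample);
      }
      CFRelease(format);
    }
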
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.h b/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.h
new file mode 100644
index 0000000000..c6474971e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
+#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
+
+#include "modules/video_coding/codecs/h264/include/h264.h"
+
+#include <CoreMedia/CoreMedia.h>
+#include <vector>
+
+#include "common_video/h264/h264_common.h"
+#include "rtc_base/buffer.h"
+
+using webrtc::H264::NaluIndex;
+
+namespace webrtc {
+
+// Converts a sample buffer emitted from the VideoToolbox encoder into a buffer
+// suitable for RTP. The sample buffer is in AVCC format, whereas the RTP
+// buffer needs to be in Annex B format. Data is written directly to
+// `annexb_buffer`.
+bool H264CMSampleBufferToAnnexBBuffer(CMSampleBufferRef avcc_sample_buffer,
+ bool is_keyframe,
+ rtc::Buffer* annexb_buffer);
+
+// Converts a buffer received from RTP into a sample buffer suitable for the
+// VideoToolbox decoder. The RTP buffer is in Annex B format, whereas the
+// sample buffer is in AVCC format.
+// If `is_keyframe` is true then `video_format` is ignored since the format will
+// be read from the buffer. Otherwise `video_format` must be provided.
+// Caller is responsible for releasing the created sample buffer.
+bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size,
+ CMVideoFormatDescriptionRef video_format,
+ CMSampleBufferRef* out_sample_buffer,
+ CMMemoryPoolRef memory_pool);
+
+// Returns a video format description created from the sps/pps information in
+// the Annex B buffer. If there is no such information, nullptr is returned.
+// The caller is responsible for releasing the description.
+CMVideoFormatDescriptionRef CreateVideoFormatDescription(
+ const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size);
+
+// Helper class for reading NALUs from an RTP Annex B buffer.
+class AnnexBBufferReader final {
+ public:
+ AnnexBBufferReader(const uint8_t* annexb_buffer, size_t length);
+ ~AnnexBBufferReader();
+ AnnexBBufferReader(const AnnexBBufferReader& other) = delete;
+ void operator=(const AnnexBBufferReader& other) = delete;
+
+ // Returns a pointer to the beginning of the next NALU slice without the
+ // header bytes and its length. Returns false if no more slices remain.
+ bool ReadNalu(const uint8_t** out_nalu, size_t* out_length);
+
+ // Returns the number of unread NALU bytes, including the size of the header.
+ // If the buffer has no remaining NALUs this will return zero.
+ size_t BytesRemaining() const;
+
+  // Resets the reader to start reading from the first NALU.
+ void SeekToStart();
+
+  // Seeks to the next position that holds a NALU of the desired type,
+  // or to the end if no such NALU is found.
+  // Returns true if a NALU of the desired type is found, false if we
+  // reached the end instead.
+ bool SeekToNextNaluOfType(H264::NaluType type);
+
+ private:
+  // Returns the next offset that contains NALU data.
+ size_t FindNextNaluHeader(const uint8_t* start,
+ size_t length,
+ size_t offset) const;
+
+ const uint8_t* const start_;
+ std::vector<NaluIndex> offsets_;
+ std::vector<NaluIndex>::iterator offset_;
+ const size_t length_;
+};
+
+// Helper class for writing NALUs using avcc format into a buffer.
+class AvccBufferWriter final {
+ public:
+ AvccBufferWriter(uint8_t* const avcc_buffer, size_t length);
+ ~AvccBufferWriter() {}
+ AvccBufferWriter(const AvccBufferWriter& other) = delete;
+ void operator=(const AvccBufferWriter& other) = delete;
+
+ // Writes the data slice into the buffer. Returns false if there isn't
+ // enough space left.
+ bool WriteNalu(const uint8_t* data, size_t data_size);
+
+ // Returns the unused bytes in the buffer.
+ size_t BytesRemaining() const;
+
+ private:
+ uint8_t* const start_;
+ size_t offset_;
+ const size_t length_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
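
A reader-iteration sketch for AnnexBBufferReader; `buffer` and `buffer_size` are assumed:

    // Sketch: enumerate every NALU in an Annex B buffer.
    webrtc::AnnexBBufferReader reader(buffer, buffer_size);
    const uint8_t* nalu = nullptr;
    size_t nalu_size = 0;
    while (reader.ReadNalu(&nalu, &nalu_size)) {
      // ReadNalu skips the start code; byte 0 carries nal_unit_type.
      webrtc::H264::NaluType type = webrtc::H264::ParseNaluType(nalu[0]);
      (void)type;  // e.g. branch on kSps/kPps/kIdr here.
    }
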
diff --git a/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h b/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h
new file mode 100644
index 0000000000..664d9bb904
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrameBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** RTCVideoFrameBuffer containing a CVPixelBufferRef */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCVPixelBuffer) : NSObject <RTC_OBJC_TYPE(RTCVideoFrameBuffer)>
+
+@property(nonatomic, readonly) CVPixelBufferRef pixelBuffer;
+@property(nonatomic, readonly) int cropX;
+@property(nonatomic, readonly) int cropY;
+@property(nonatomic, readonly) int cropWidth;
+@property(nonatomic, readonly) int cropHeight;
+
++ (NSSet<NSNumber *> *)supportedPixelFormats;
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer;
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ adaptedWidth:(int)adaptedWidth
+ adaptedHeight:(int)adaptedHeight
+ cropWidth:(int)cropWidth
+ cropHeight:(int)cropHeight
+ cropX:(int)cropX
+ cropY:(int)cropY;
+
+- (BOOL)requiresCropping;
+- (BOOL)requiresScalingToWidth:(int)width height:(int)height;
+- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height;
+
+/** The minimum size of the `tmpBuffer` must be the number of bytes returned from the
+ * bufferSizeForCroppingAndScalingToWidth:height: method.
+ * If that size is 0, the `tmpBuffer` may be nil.
+ */
+- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
+ withTempBuffer:(nullable uint8_t *)tmpBuffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
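
A sketch of the tmpBuffer contract described above, assuming `src` and `dst` are CVPixelBufferRefs, `dst` is sized 640x360, and <vector> is included:

    // Sketch: allocate the temp buffer only when the buffer reports a need.
    RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
        [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:src];
    int tmpSize =
        [buffer bufferSizeForCroppingAndScalingToWidth:640 height:360];
    std::vector<uint8_t> tmp(tmpSize);
    [buffer cropAndScaleTo:dst
            withTempBuffer:(tmpSize > 0 ? tmp.data() : nullptr)];
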
diff --git a/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm b/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm
new file mode 100644
index 0000000000..20b97d02b7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm
@@ -0,0 +1,352 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCVPixelBuffer.h"
+
+#import "api/video_frame_buffer/RTCNativeMutableI420Buffer.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "libyuv/include/libyuv.h"
+
+#if !defined(NDEBUG) && defined(WEBRTC_IOS)
+#import <UIKit/UIKit.h>
+#import <VideoToolbox/VideoToolbox.h>
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCCVPixelBuffer) {
+ int _width;
+ int _height;
+ int _bufferWidth;
+ int _bufferHeight;
+ int _cropWidth;
+ int _cropHeight;
+}
+
+@synthesize pixelBuffer = _pixelBuffer;
+@synthesize cropX = _cropX;
+@synthesize cropY = _cropY;
+@synthesize cropWidth = _cropWidth;
+@synthesize cropHeight = _cropHeight;
+
++ (NSSet<NSNumber*>*)supportedPixelFormats {
+ return [NSSet setWithObjects:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+ @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
+ @(kCVPixelFormatType_32BGRA),
+ @(kCVPixelFormatType_32ARGB),
+ nil];
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
+ return [self initWithPixelBuffer:pixelBuffer
+ adaptedWidth:CVPixelBufferGetWidth(pixelBuffer)
+ adaptedHeight:CVPixelBufferGetHeight(pixelBuffer)
+ cropWidth:CVPixelBufferGetWidth(pixelBuffer)
+ cropHeight:CVPixelBufferGetHeight(pixelBuffer)
+ cropX:0
+ cropY:0];
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ adaptedWidth:(int)adaptedWidth
+ adaptedHeight:(int)adaptedHeight
+ cropWidth:(int)cropWidth
+ cropHeight:(int)cropHeight
+ cropX:(int)cropX
+ cropY:(int)cropY {
+ if (self = [super init]) {
+ _width = adaptedWidth;
+ _height = adaptedHeight;
+ _pixelBuffer = pixelBuffer;
+ _bufferWidth = CVPixelBufferGetWidth(_pixelBuffer);
+ _bufferHeight = CVPixelBufferGetHeight(_pixelBuffer);
+ _cropWidth = cropWidth;
+ _cropHeight = cropHeight;
+    // Can only crop at even pixel offsets, since NV12 chroma is 2x2-subsampled.
+ _cropX = cropX & ~1;
+ _cropY = cropY & ~1;
+ CVBufferRetain(_pixelBuffer);
+ }
+
+ return self;
+}
+
+- (void)dealloc {
+ CVBufferRelease(_pixelBuffer);
+}
+
+- (int)width {
+ return _width;
+}
+
+- (int)height {
+ return _height;
+}
+
+- (BOOL)requiresCropping {
+ return _cropWidth != _bufferWidth || _cropHeight != _bufferHeight;
+}
+
+- (BOOL)requiresScalingToWidth:(int)width height:(int)height {
+ return _cropWidth != width || _cropHeight != height;
+}
+
+- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height {
+ const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+ switch (srcPixelFormat) {
+ case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ int srcChromaWidth = (_cropWidth + 1) / 2;
+ int srcChromaHeight = (_cropHeight + 1) / 2;
+ int dstChromaWidth = (width + 1) / 2;
+ int dstChromaHeight = (height + 1) / 2;
+
+ return srcChromaWidth * srcChromaHeight * 2 + dstChromaWidth * dstChromaHeight * 2;
+ }
+ case kCVPixelFormatType_32BGRA:
+ case kCVPixelFormatType_32ARGB: {
+ return 0; // Scaling RGBA frames does not require a temporary buffer.
+ }
+ }
+ RTC_DCHECK_NOTREACHED() << "Unsupported pixel format.";
+ return 0;
+}
+
+- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
+ withTempBuffer:(nullable uint8_t*)tmpBuffer {
+ const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+ const OSType dstPixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer);
+
+ switch (srcPixelFormat) {
+ case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ size_t dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
+ size_t dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
+ if (dstWidth > 0 && dstHeight > 0) {
+ RTC_DCHECK(dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
+ dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
+ if ([self requiresScalingToWidth:dstWidth height:dstHeight]) {
+ RTC_DCHECK(tmpBuffer);
+ }
+ [self cropAndScaleNV12To:outputPixelBuffer withTempBuffer:tmpBuffer];
+ }
+ break;
+ }
+ case kCVPixelFormatType_32BGRA:
+ case kCVPixelFormatType_32ARGB: {
+ RTC_DCHECK(srcPixelFormat == dstPixelFormat);
+ [self cropAndScaleARGBTo:outputPixelBuffer];
+ break;
+ }
+ default: {
+ RTC_DCHECK_NOTREACHED() << "Unsupported pixel format.";
+ }
+ }
+
+ return YES;
+}
+
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420 {
+ const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+
+ CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+ RTC_OBJC_TYPE(RTCMutableI420Buffer)* i420Buffer =
+ [[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:[self width] height:[self height]];
+
+ switch (pixelFormat) {
+ case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ const uint8_t* srcY =
+ static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
+ const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
+ const uint8_t* srcUV =
+ static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
+ const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
+
+ // Crop just by modifying pointers.
+ srcY += srcYStride * _cropY + _cropX;
+ srcUV += srcUVStride * (_cropY / 2) + _cropX;
+
+ // TODO(magjed): Use a frame buffer pool.
+ webrtc::NV12ToI420Scaler nv12ToI420Scaler;
+ nv12ToI420Scaler.NV12ToI420Scale(srcY,
+ srcYStride,
+ srcUV,
+ srcUVStride,
+ _cropWidth,
+ _cropHeight,
+ i420Buffer.mutableDataY,
+ i420Buffer.strideY,
+ i420Buffer.mutableDataU,
+ i420Buffer.strideU,
+ i420Buffer.mutableDataV,
+ i420Buffer.strideV,
+ i420Buffer.width,
+ i420Buffer.height);
+ break;
+ }
+ case kCVPixelFormatType_32BGRA:
+ case kCVPixelFormatType_32ARGB: {
+ CVPixelBufferRef scaledPixelBuffer = NULL;
+ CVPixelBufferRef sourcePixelBuffer = NULL;
+ if ([self requiresCropping] ||
+ [self requiresScalingToWidth:i420Buffer.width height:i420Buffer.height]) {
+ CVPixelBufferCreate(
+ NULL, i420Buffer.width, i420Buffer.height, pixelFormat, NULL, &scaledPixelBuffer);
+ [self cropAndScaleTo:scaledPixelBuffer withTempBuffer:NULL];
+
+ CVPixelBufferLockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
+ sourcePixelBuffer = scaledPixelBuffer;
+ } else {
+ sourcePixelBuffer = _pixelBuffer;
+ }
+ const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(sourcePixelBuffer));
+ const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(sourcePixelBuffer);
+
+ if (pixelFormat == kCVPixelFormatType_32BGRA) {
+ // Corresponds to libyuv::FOURCC_ARGB
+ libyuv::ARGBToI420(src,
+ bytesPerRow,
+ i420Buffer.mutableDataY,
+ i420Buffer.strideY,
+ i420Buffer.mutableDataU,
+ i420Buffer.strideU,
+ i420Buffer.mutableDataV,
+ i420Buffer.strideV,
+ i420Buffer.width,
+ i420Buffer.height);
+ } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
+ // Corresponds to libyuv::FOURCC_BGRA
+ libyuv::BGRAToI420(src,
+ bytesPerRow,
+ i420Buffer.mutableDataY,
+ i420Buffer.strideY,
+ i420Buffer.mutableDataU,
+ i420Buffer.strideU,
+ i420Buffer.mutableDataV,
+ i420Buffer.strideV,
+ i420Buffer.width,
+ i420Buffer.height);
+ }
+
+ if (scaledPixelBuffer) {
+ CVPixelBufferUnlockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
+ CVBufferRelease(scaledPixelBuffer);
+ }
+ break;
+ }
+ default: {
+ RTC_DCHECK_NOTREACHED() << "Unsupported pixel format.";
+ }
+ }
+
+ CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+ return i420Buffer;
+}
+
+#pragma mark - Debugging
+
+#if !defined(NDEBUG) && defined(WEBRTC_IOS)
+- (id)debugQuickLookObject {
+ CGImageRef cgImage;
+ VTCreateCGImageFromCVPixelBuffer(_pixelBuffer, NULL, &cgImage);
+ UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
+ CGImageRelease(cgImage);
+ return image;
+}
+#endif
+
+#pragma mark - Private
+
+- (void)cropAndScaleNV12To:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t*)tmpBuffer {
+ // Prepare output pointers.
+ CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
+ if (cvRet != kCVReturnSuccess) {
+ RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
+ }
+ const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
+ const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
+ uint8_t* dstY =
+ reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
+ const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
+ uint8_t* dstUV =
+ reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1));
+ const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
+
+ // Prepare source pointers.
+ CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ const uint8_t* srcY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
+ const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
+ const uint8_t* srcUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
+ const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
+
+ // Crop just by modifying pointers.
+ srcY += srcYStride * _cropY + _cropX;
+ srcUV += srcUVStride * (_cropY / 2) + _cropX;
+
+ webrtc::NV12Scale(tmpBuffer,
+ srcY,
+ srcYStride,
+ srcUV,
+ srcUVStride,
+ _cropWidth,
+ _cropHeight,
+ dstY,
+ dstYStride,
+ dstUV,
+ dstUVStride,
+ dstWidth,
+ dstHeight);
+
+ CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
+}
+
+- (void)cropAndScaleARGBTo:(CVPixelBufferRef)outputPixelBuffer {
+ // Prepare output pointers.
+ CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
+ if (cvRet != kCVReturnSuccess) {
+ RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
+ }
+ const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
+ const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
+
+ uint8_t* dst = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddress(outputPixelBuffer));
+ const int dstStride = CVPixelBufferGetBytesPerRow(outputPixelBuffer);
+
+ // Prepare source pointers.
+ CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(_pixelBuffer));
+ const int srcStride = CVPixelBufferGetBytesPerRow(_pixelBuffer);
+
+ // Crop just by modifying pointers. Need to ensure that src pointer points to a byte corresponding
+ // to the start of a new pixel (byte with B for BGRA) so that libyuv scales correctly.
+ const int bytesPerPixel = 4;
+ src += srcStride * _cropY + (_cropX * bytesPerPixel);
+
+ // kCVPixelFormatType_32BGRA corresponds to libyuv::FOURCC_ARGB
+ libyuv::ARGBScale(src,
+ srcStride,
+ _cropWidth,
+ _cropHeight,
+ dst,
+ dstStride,
+ dstWidth,
+ dstHeight,
+ libyuv::kFilterBox);
+
+ CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
+}
+
+@end
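
For context, crop offsets are rounded down to even values in the initializer because the NV12 chroma plane is 2x2-subsampled. A minimal wrapping sketch, with `pixelBuffer` assumed to come from an AVCaptureVideoDataOutput callback:

    // Sketch: wrap a camera frame and convert it to I420.
    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcBuffer =
        [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
            initWithPixelBuffer:pixelBuffer];
    id<RTC_OBJC_TYPE(RTCI420Buffer)> i420 = [rtcBuffer toI420];
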
diff --git a/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.h b/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.h
new file mode 100644
index 0000000000..32ab6877f0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <CoreMedia/CoreMedia.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface AVCaptureSession (DevicePosition)
+
+// Inspects the sample buffer's EXIF metadata to determine which camera the
+// image came from.
++ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm b/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm
new file mode 100644
index 0000000000..0814ecc6c5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "AVCaptureSession+DevicePosition.h"
+
+BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) {
+ return CFStringFindWithOptions(theString,
+ stringToFind,
+ CFRangeMake(0, CFStringGetLength(theString)),
+ kCFCompareCaseInsensitive,
+ nil);
+}
+
+@implementation AVCaptureSession (DevicePosition)
+
++ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+  // Check the image's EXIF metadata to determine which camera it came from.
+ AVCaptureDevicePosition cameraPosition = AVCaptureDevicePositionUnspecified;
+ CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
+ kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
+ if (attachments) {
+ int size = CFDictionaryGetCount(attachments);
+ if (size > 0) {
+ CFDictionaryRef cfExifDictVal = nil;
+ if (CFDictionaryGetValueIfPresent(
+ attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) {
+ CFStringRef cfLensModelStrVal;
+ if (CFDictionaryGetValueIfPresent(cfExifDictVal,
+ (const void *)CFSTR("LensModel"),
+ (const void **)&cfLensModelStrVal)) {
+ if (CFStringContainsString(cfLensModelStrVal, CFSTR("front"))) {
+ cameraPosition = AVCaptureDevicePositionFront;
+ } else if (CFStringContainsString(cfLensModelStrVal, CFSTR("back"))) {
+ cameraPosition = AVCaptureDevicePositionBack;
+ }
+ }
+ }
+ }
+ CFRelease(attachments);
+ }
+ return cameraPosition;
+}
+
+@end
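
A usage sketch inside the standard AVCaptureVideoDataOutputSampleBufferDelegate callback; the hosting delegate class is assumed:

    // Sketch: tag each frame with the camera it came from.
    - (void)captureOutput:(AVCaptureOutput *)output
        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
               fromConnection:(AVCaptureConnection *)connection {
      AVCaptureDevicePosition position =
          [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
      BOOL isFrontCamera = (position == AVCaptureDevicePositionFront);
      (void)isFrontCamera;  // e.g. mirror the local preview when YES.
    }
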
diff --git a/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.h b/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.h
new file mode 100644
index 0000000000..b0324e8a19
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface NSString (StdString)
+
+@property(nonatomic, readonly) std::string stdString;
+
++ (std::string)stdStringForString:(NSString *)nsString;
++ (NSString *)stringForStdString:(const std::string &)stdString;
+
+@end
+
+@interface NSString (AbslStringView)
+
++ (NSString *)stringForAbslStringView:(const absl::string_view)abslStringView;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm b/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm
new file mode 100644
index 0000000000..c98432c445
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "NSString+StdString.h"
+
+#include "absl/strings/string_view.h"
+
+@implementation NSString (StdString)
+
+- (std::string)stdString {
+ return [NSString stdStringForString:self];
+}
+
++ (std::string)stdStringForString:(NSString *)nsString {
+ NSData *charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
+ return std::string(reinterpret_cast<const char *>(charData.bytes),
+ charData.length);
+}
+
++ (NSString *)stringForStdString:(const std::string&)stdString {
+  // A std::string may contain embedded null characters, so construct the
+  // NSString from the explicit byte length rather than as a C string.
+ return [[NSString alloc] initWithBytes:stdString.data()
+ length:stdString.length()
+ encoding:NSUTF8StringEncoding];
+}
+
+@end
+
+@implementation NSString (AbslStringView)
+
++ (NSString *)stringForAbslStringView:(const absl::string_view)abslStringView {
+ return [[NSString alloc] initWithBytes:abslStringView.data()
+ length:abslStringView.length()
+ encoding:NSUTF8StringEncoding];
+}
+
+@end
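
A round-trip sketch for the categories above; the literal is illustrative:

    // Sketch: NSString <-> std::string round trip.
    std::string sdpLine = [@"a=mid:video" stdString];
    NSString *back = [NSString stringForStdString:sdpLine];
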
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.h b/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.h
new file mode 100644
index 0000000000..db9b15a45c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCMacros.h"
+
+@class AVCaptureSession;
+
+/** RTCCameraPreviewView is a view that renders local video from an
+ * AVCaptureSession.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCameraPreviewView) : UIView
+
+/** The capture session being rendered in the view. The capture session
+ * is assigned to the AVCaptureVideoPreviewLayer asynchronously, on the same
+ * queue on which the AVCaptureSession is started/stopped.
+ */
+@property(nonatomic, strong) AVCaptureSession* captureSession;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.m b/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.m
new file mode 100644
index 0000000000..12e87d8d64
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.m
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCameraPreviewView.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCDispatcher+Private.h"
+
+@implementation RTC_OBJC_TYPE (RTCCameraPreviewView)
+
+@synthesize captureSession = _captureSession;
+
++ (Class)layerClass {
+ return [AVCaptureVideoPreviewLayer class];
+}
+
+- (instancetype)initWithFrame:(CGRect)aRect {
+ self = [super initWithFrame:aRect];
+ if (self) {
+ [self addOrientationObserver];
+ }
+ return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder*)aDecoder {
+ self = [super initWithCoder:aDecoder];
+ if (self) {
+ [self addOrientationObserver];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self removeOrientationObserver];
+}
+
+- (void)setCaptureSession:(AVCaptureSession *)captureSession {
+ if (_captureSession == captureSession) {
+ return;
+ }
+ _captureSession = captureSession;
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeMain
+ block:^{
+ AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ previewLayer.session = captureSession;
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeMain
+ block:^{
+ [self setCorrectVideoOrientation];
+ }];
+ }];
+ }];
+}
+
+- (void)layoutSubviews {
+ [super layoutSubviews];
+
+ // Update the video orientation based on the device orientation.
+ [self setCorrectVideoOrientation];
+}
+
+- (void)orientationChanged:(NSNotification *)notification {
+ [self setCorrectVideoOrientation];
+}
+
+- (void)setCorrectVideoOrientation {
+ // Get current device orientation.
+ UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
+ AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
+
+ // First check if we are allowed to set the video orientation.
+ if (previewLayer.connection.isVideoOrientationSupported) {
+    // Set the video orientation based on device orientation. Note: UIKit
+    // defines UIInterfaceOrientationLandscapeRight to equal
+    // UIDeviceOrientationLandscapeLeft (and vice versa), so comparing the
+    // device orientation against the interface-orientation constants below
+    // yields the intended AVCaptureVideoOrientation mapping.
+ if (deviceOrientation == UIInterfaceOrientationPortraitUpsideDown) {
+ previewLayer.connection.videoOrientation =
+ AVCaptureVideoOrientationPortraitUpsideDown;
+ } else if (deviceOrientation == UIInterfaceOrientationLandscapeRight) {
+ previewLayer.connection.videoOrientation =
+ AVCaptureVideoOrientationLandscapeRight;
+ } else if (deviceOrientation == UIInterfaceOrientationLandscapeLeft) {
+ previewLayer.connection.videoOrientation =
+ AVCaptureVideoOrientationLandscapeLeft;
+ } else if (deviceOrientation == UIInterfaceOrientationPortrait) {
+ previewLayer.connection.videoOrientation =
+ AVCaptureVideoOrientationPortrait;
+ }
+ // If device orientation switches to FaceUp or FaceDown, don't change video orientation.
+ }
+}
+
+#pragma mark - Private
+
+- (void)addOrientationObserver {
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(orientationChanged:)
+ name:UIDeviceOrientationDidChangeNotification
+ object:nil];
+}
+
+- (void)removeOrientationObserver {
+ [[NSNotificationCenter defaultCenter] removeObserver:self
+ name:UIDeviceOrientationDidChangeNotification
+ object:nil];
+}
+
+- (AVCaptureVideoPreviewLayer *)previewLayer {
+ return (AVCaptureVideoPreviewLayer *)self.layer;
+}
+
+@end
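
An embedding sketch for the view above; `self.view` and `session` (an AVCaptureSession) are assumed to exist in the host view controller:

    // Sketch: add the preview view and attach a session.
    RTC_OBJC_TYPE(RTCCameraPreviewView) *preview =
        [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc]
            initWithFrame:self.view.bounds];
    [self.view addSubview:preview];
    preview.captureSession = session;  // assigned on the capture queue, per above
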
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher+Private.h b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher+Private.h
new file mode 100644
index 0000000000..195c651790
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher+Private.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDispatcher.h"
+
+@interface RTC_OBJC_TYPE (RTCDispatcher) ()
+
++ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.h b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.h
new file mode 100644
index 0000000000..e148af6dea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
+ // Main dispatcher queue.
+ RTCDispatcherTypeMain,
+ // Used for starting/stopping AVCaptureSession, and assigning
+ // capture session to AVCaptureVideoPreviewLayer.
+ RTCDispatcherTypeCaptureSession,
+ // Used for operations on AVAudioSession.
+ RTCDispatcherTypeAudioSession,
+ // Used for operations on NWPathMonitor.
+ RTCDispatcherTypeNetworkMonitor,
+};
+
+/** Dispatcher that asynchronously dispatches blocks to a specific
+ * shared dispatch queue.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDispatcher) : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Dispatch the block asynchronously on the queue for dispatchType.
+ * @param dispatchType The queue type to dispatch on.
+ * @param block The block to dispatch asynchronously.
+ */
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType block:(dispatch_block_t)block;
+
+/** Returns YES if run on the queue for the dispatchType, otherwise NO.
+ * Useful for asserting that a method runs on the correct queue.
+ */
++ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType;
+
+@end
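
A minimal usage sketch of this interface. The AVCaptureSession argument and the helper function are assumptions; the dispatch pattern mirrors the RTCCameraPreviewView change earlier in this patch:

#import <AVFoundation/AVFoundation.h>
#import "RTCDispatcher.h"

// Start a capture session off the main thread, then hop back for UI work.
static void StartCaptureSession(AVCaptureSession *session) {
  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      [session startRunning];
                      [RTC_OBJC_TYPE(RTCDispatcher)
                          dispatchAsyncOnType:RTCDispatcherTypeMain
                                        block:^{
                                          NSLog(@"Capture session is running.");
                                        }];
                    }];
}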
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m
new file mode 100644
index 0000000000..4df19bc297
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDispatcher+Private.h"
+
+static dispatch_queue_t kAudioSessionQueue = nil;
+static dispatch_queue_t kCaptureSessionQueue = nil;
+static dispatch_queue_t kNetworkMonitorQueue = nil;
+
+@implementation RTC_OBJC_TYPE (RTCDispatcher)
+
++ (void)initialize {
+ static dispatch_once_t onceToken;
+ dispatch_once(&onceToken, ^{
+ kAudioSessionQueue = dispatch_queue_create(
+ "org.webrtc.RTCDispatcherAudioSession",
+ DISPATCH_QUEUE_SERIAL);
+ kCaptureSessionQueue = dispatch_queue_create(
+ "org.webrtc.RTCDispatcherCaptureSession",
+ DISPATCH_QUEUE_SERIAL);
+ kNetworkMonitorQueue =
+ dispatch_queue_create("org.webrtc.RTCDispatcherNetworkMonitor", DISPATCH_QUEUE_SERIAL);
+ });
+}
+
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
+ block:(dispatch_block_t)block {
+ dispatch_queue_t queue = [self dispatchQueueForType:dispatchType];
+ dispatch_async(queue, block);
+}
+
++ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType {
+ dispatch_queue_t targetQueue = [self dispatchQueueForType:dispatchType];
+ const char* targetLabel = dispatch_queue_get_label(targetQueue);
+ const char* currentLabel = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);
+
+ NSAssert(strlen(targetLabel) > 0, @"Label is required for the target queue.");
+ NSAssert(strlen(currentLabel) > 0, @"Label is required for the current queue.");
+
+ return strcmp(targetLabel, currentLabel) == 0;
+}
+
+#pragma mark - Private
+
++ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
+ switch (dispatchType) {
+ case RTCDispatcherTypeMain:
+ return dispatch_get_main_queue();
+ case RTCDispatcherTypeCaptureSession:
+ return kCaptureSessionQueue;
+ case RTCDispatcherTypeAudioSession:
+ return kAudioSessionQueue;
+ case RTCDispatcherTypeNetworkMonitor:
+ return kNetworkMonitorQueue;
+ }
+}
+
+@end
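
isOnQueueForType: compares queue labels via DISPATCH_CURRENT_QUEUE_LABEL because dispatch_get_current_queue() is deprecated; label comparison also covers the main queue, which carries the label com.apple.main-thread. A sketch of the intended assertion pattern, with a hypothetical capture controller standing in for a real caller:

#import <AVFoundation/AVFoundation.h>
#import "RTCDispatcher.h"

// Hypothetical class demonstrating a queue assertion before session mutation.
@interface MyCaptureController : NSObject
- (void)reconfigureCaptureSession;
@end

@implementation MyCaptureController

- (void)reconfigureCaptureSession {
  NSAssert([RTC_OBJC_TYPE(RTCDispatcher)
               isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"Must run on the capture session queue.");
  // Safe to mutate AVCaptureSession state here.
}

@end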
diff --git a/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.h b/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.h
new file mode 100644
index 0000000000..ab477e2ada
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+typedef NS_ENUM(NSInteger, RTCDeviceType) {
+ RTCDeviceTypeUnknown,
+ RTCDeviceTypeIPhone1G,
+ RTCDeviceTypeIPhone3G,
+ RTCDeviceTypeIPhone3GS,
+ RTCDeviceTypeIPhone4,
+ RTCDeviceTypeIPhone4Verizon,
+ RTCDeviceTypeIPhone4S,
+ RTCDeviceTypeIPhone5GSM,
+ RTCDeviceTypeIPhone5GSM_CDMA,
+ RTCDeviceTypeIPhone5CGSM,
+ RTCDeviceTypeIPhone5CGSM_CDMA,
+ RTCDeviceTypeIPhone5SGSM,
+ RTCDeviceTypeIPhone5SGSM_CDMA,
+ RTCDeviceTypeIPhone6Plus,
+ RTCDeviceTypeIPhone6,
+ RTCDeviceTypeIPhone6S,
+ RTCDeviceTypeIPhone6SPlus,
+ RTCDeviceTypeIPhone7,
+ RTCDeviceTypeIPhone7Plus,
+ RTCDeviceTypeIPhoneSE,
+ RTCDeviceTypeIPhone8,
+ RTCDeviceTypeIPhone8Plus,
+ RTCDeviceTypeIPhoneX,
+ RTCDeviceTypeIPhoneXS,
+ RTCDeviceTypeIPhoneXSMax,
+ RTCDeviceTypeIPhoneXR,
+ RTCDeviceTypeIPhone11,
+ RTCDeviceTypeIPhone11Pro,
+ RTCDeviceTypeIPhone11ProMax,
+ RTCDeviceTypeIPhone12Mini,
+ RTCDeviceTypeIPhone12,
+ RTCDeviceTypeIPhone12Pro,
+ RTCDeviceTypeIPhone12ProMax,
+ RTCDeviceTypeIPhoneSE2Gen,
+ RTCDeviceTypeIPhone13,
+ RTCDeviceTypeIPhone13Mini,
+ RTCDeviceTypeIPhone13Pro,
+ RTCDeviceTypeIPhone13ProMax,
+
+ RTCDeviceTypeIPodTouch1G,
+ RTCDeviceTypeIPodTouch2G,
+ RTCDeviceTypeIPodTouch3G,
+ RTCDeviceTypeIPodTouch4G,
+ RTCDeviceTypeIPodTouch5G,
+ RTCDeviceTypeIPodTouch6G,
+ RTCDeviceTypeIPodTouch7G,
+ RTCDeviceTypeIPad,
+ RTCDeviceTypeIPad2Wifi,
+ RTCDeviceTypeIPad2GSM,
+ RTCDeviceTypeIPad2CDMA,
+ RTCDeviceTypeIPad2Wifi2,
+ RTCDeviceTypeIPadMiniWifi,
+ RTCDeviceTypeIPadMiniGSM,
+ RTCDeviceTypeIPadMiniGSM_CDMA,
+ RTCDeviceTypeIPad3Wifi,
+ RTCDeviceTypeIPad3GSM_CDMA,
+ RTCDeviceTypeIPad3GSM,
+ RTCDeviceTypeIPad4Wifi,
+ RTCDeviceTypeIPad4GSM,
+ RTCDeviceTypeIPad4GSM_CDMA,
+ RTCDeviceTypeIPad5,
+ RTCDeviceTypeIPad6,
+ RTCDeviceTypeIPadAirWifi,
+ RTCDeviceTypeIPadAirCellular,
+ RTCDeviceTypeIPadAirWifiCellular,
+ RTCDeviceTypeIPadAir2,
+ RTCDeviceTypeIPadMini2GWifi,
+ RTCDeviceTypeIPadMini2GCellular,
+ RTCDeviceTypeIPadMini2GWifiCellular,
+ RTCDeviceTypeIPadMini3,
+ RTCDeviceTypeIPadMini4,
+ RTCDeviceTypeIPadPro9Inch,
+ RTCDeviceTypeIPadPro12Inch,
+ RTCDeviceTypeIPadPro12Inch2,
+ RTCDeviceTypeIPadPro10Inch,
+ RTCDeviceTypeIPad7Gen10Inch,
+ RTCDeviceTypeIPadPro3Gen11Inch,
+ RTCDeviceTypeIPadPro3Gen12Inch,
+ RTCDeviceTypeIPadPro4Gen11Inch,
+ RTCDeviceTypeIPadPro4Gen12Inch,
+ RTCDeviceTypeIPadMini5Gen,
+ RTCDeviceTypeIPadAir3Gen,
+ RTCDeviceTypeIPad8,
+ RTCDeviceTypeIPad9,
+ RTCDeviceTypeIPadMini6,
+ RTCDeviceTypeIPadAir4Gen,
+ RTCDeviceTypeIPadPro5Gen11Inch,
+ RTCDeviceTypeIPadPro5Gen12Inch,
+ RTCDeviceTypeSimulatori386,
+ RTCDeviceTypeSimulatorx86_64,
+};
+
+@interface UIDevice (RTCDevice)
+
++ (RTCDeviceType)deviceType;
++ (BOOL)isIOS11OrLater;
+
+@end
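
A small illustrative use of this category; the helper below is not part of the SDK. Note that the machine-name map in the implementation that follows only recognizes the i386 and x86_64 simulator strings, so an Apple-silicon simulator reporting arm64 falls back to RTCDeviceTypeUnknown:

#import "UIDevice+RTCDevice.h"

// Hypothetical helper: coarse check used to gate device-specific workarounds.
static BOOL IsKnownSimulator(void) {
  RTCDeviceType type = [UIDevice deviceType];
  return type == RTCDeviceTypeSimulatori386 ||
         type == RTCDeviceTypeSimulatorx86_64;
}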
diff --git a/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.mm b/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.mm
new file mode 100644
index 0000000000..77a5d79f79
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.mm
@@ -0,0 +1,171 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "UIDevice+RTCDevice.h"
+
+#import <sys/utsname.h>
+#include <memory>
+
+@implementation UIDevice (RTCDevice)
+
++ (RTCDeviceType)deviceType {
+ NSDictionary *machineNameToType = @{
+ @"iPhone1,1" : @(RTCDeviceTypeIPhone1G),
+ @"iPhone1,2" : @(RTCDeviceTypeIPhone3G),
+ @"iPhone2,1" : @(RTCDeviceTypeIPhone3GS),
+ @"iPhone3,1" : @(RTCDeviceTypeIPhone4),
+ @"iPhone3,2" : @(RTCDeviceTypeIPhone4),
+ @"iPhone3,3" : @(RTCDeviceTypeIPhone4Verizon),
+ @"iPhone4,1" : @(RTCDeviceTypeIPhone4S),
+ @"iPhone5,1" : @(RTCDeviceTypeIPhone5GSM),
+ @"iPhone5,2" : @(RTCDeviceTypeIPhone5GSM_CDMA),
+ @"iPhone5,3" : @(RTCDeviceTypeIPhone5CGSM),
+ @"iPhone5,4" : @(RTCDeviceTypeIPhone5CGSM_CDMA),
+ @"iPhone6,1" : @(RTCDeviceTypeIPhone5SGSM),
+ @"iPhone6,2" : @(RTCDeviceTypeIPhone5SGSM_CDMA),
+ @"iPhone7,1" : @(RTCDeviceTypeIPhone6Plus),
+ @"iPhone7,2" : @(RTCDeviceTypeIPhone6),
+ @"iPhone8,1" : @(RTCDeviceTypeIPhone6S),
+ @"iPhone8,2" : @(RTCDeviceTypeIPhone6SPlus),
+ @"iPhone8,4" : @(RTCDeviceTypeIPhoneSE),
+ @"iPhone9,1" : @(RTCDeviceTypeIPhone7),
+ @"iPhone9,2" : @(RTCDeviceTypeIPhone7Plus),
+ @"iPhone9,3" : @(RTCDeviceTypeIPhone7),
+ @"iPhone9,4" : @(RTCDeviceTypeIPhone7Plus),
+ @"iPhone10,1" : @(RTCDeviceTypeIPhone8),
+ @"iPhone10,2" : @(RTCDeviceTypeIPhone8Plus),
+ @"iPhone10,3" : @(RTCDeviceTypeIPhoneX),
+ @"iPhone10,4" : @(RTCDeviceTypeIPhone8),
+ @"iPhone10,5" : @(RTCDeviceTypeIPhone8Plus),
+ @"iPhone10,6" : @(RTCDeviceTypeIPhoneX),
+ @"iPhone11,2" : @(RTCDeviceTypeIPhoneXS),
+ @"iPhone11,4" : @(RTCDeviceTypeIPhoneXSMax),
+ @"iPhone11,6" : @(RTCDeviceTypeIPhoneXSMax),
+ @"iPhone11,8" : @(RTCDeviceTypeIPhoneXR),
+ @"iPhone12,1" : @(RTCDeviceTypeIPhone11),
+ @"iPhone12,3" : @(RTCDeviceTypeIPhone11Pro),
+ @"iPhone12,5" : @(RTCDeviceTypeIPhone11ProMax),
+ @"iPhone12,8" : @(RTCDeviceTypeIPhoneSE2Gen),
+ @"iPhone13,1" : @(RTCDeviceTypeIPhone12Mini),
+ @"iPhone13,2" : @(RTCDeviceTypeIPhone12),
+ @"iPhone13,3" : @(RTCDeviceTypeIPhone12Pro),
+ @"iPhone13,4" : @(RTCDeviceTypeIPhone12ProMax),
+ @"iPhone14,5" : @(RTCDeviceTypeIPhone13),
+ @"iPhone14,4" : @(RTCDeviceTypeIPhone13Mini),
+ @"iPhone14,2" : @(RTCDeviceTypeIPhone13Pro),
+ @"iPhone14,3" : @(RTCDeviceTypeIPhone13ProMax),
+ @"iPod1,1" : @(RTCDeviceTypeIPodTouch1G),
+ @"iPod2,1" : @(RTCDeviceTypeIPodTouch2G),
+ @"iPod3,1" : @(RTCDeviceTypeIPodTouch3G),
+ @"iPod4,1" : @(RTCDeviceTypeIPodTouch4G),
+ @"iPod5,1" : @(RTCDeviceTypeIPodTouch5G),
+ @"iPod7,1" : @(RTCDeviceTypeIPodTouch6G),
+ @"iPod9,1" : @(RTCDeviceTypeIPodTouch7G),
+ @"iPad1,1" : @(RTCDeviceTypeIPad),
+ @"iPad2,1" : @(RTCDeviceTypeIPad2Wifi),
+ @"iPad2,2" : @(RTCDeviceTypeIPad2GSM),
+ @"iPad2,3" : @(RTCDeviceTypeIPad2CDMA),
+ @"iPad2,4" : @(RTCDeviceTypeIPad2Wifi2),
+ @"iPad2,5" : @(RTCDeviceTypeIPadMiniWifi),
+ @"iPad2,6" : @(RTCDeviceTypeIPadMiniGSM),
+ @"iPad2,7" : @(RTCDeviceTypeIPadMiniGSM_CDMA),
+ @"iPad3,1" : @(RTCDeviceTypeIPad3Wifi),
+ @"iPad3,2" : @(RTCDeviceTypeIPad3GSM_CDMA),
+ @"iPad3,3" : @(RTCDeviceTypeIPad3GSM),
+ @"iPad3,4" : @(RTCDeviceTypeIPad4Wifi),
+ @"iPad3,5" : @(RTCDeviceTypeIPad4GSM),
+ @"iPad3,6" : @(RTCDeviceTypeIPad4GSM_CDMA),
+ @"iPad4,1" : @(RTCDeviceTypeIPadAirWifi),
+ @"iPad4,2" : @(RTCDeviceTypeIPadAirCellular),
+ @"iPad4,3" : @(RTCDeviceTypeIPadAirWifiCellular),
+ @"iPad4,4" : @(RTCDeviceTypeIPadMini2GWifi),
+ @"iPad4,5" : @(RTCDeviceTypeIPadMini2GCellular),
+ @"iPad4,6" : @(RTCDeviceTypeIPadMini2GWifiCellular),
+ @"iPad4,7" : @(RTCDeviceTypeIPadMini3),
+ @"iPad4,8" : @(RTCDeviceTypeIPadMini3),
+ @"iPad4,9" : @(RTCDeviceTypeIPadMini3),
+ @"iPad5,1" : @(RTCDeviceTypeIPadMini4),
+ @"iPad5,2" : @(RTCDeviceTypeIPadMini4),
+ @"iPad5,3" : @(RTCDeviceTypeIPadAir2),
+ @"iPad5,4" : @(RTCDeviceTypeIPadAir2),
+ @"iPad6,3" : @(RTCDeviceTypeIPadPro9Inch),
+ @"iPad6,4" : @(RTCDeviceTypeIPadPro9Inch),
+ @"iPad6,7" : @(RTCDeviceTypeIPadPro12Inch),
+ @"iPad6,8" : @(RTCDeviceTypeIPadPro12Inch),
+ @"iPad6,11" : @(RTCDeviceTypeIPad5),
+ @"iPad6,12" : @(RTCDeviceTypeIPad5),
+ @"iPad7,1" : @(RTCDeviceTypeIPadPro12Inch2),
+ @"iPad7,2" : @(RTCDeviceTypeIPadPro12Inch2),
+ @"iPad7,3" : @(RTCDeviceTypeIPadPro10Inch),
+ @"iPad7,4" : @(RTCDeviceTypeIPadPro10Inch),
+ @"iPad7,5" : @(RTCDeviceTypeIPad6),
+ @"iPad7,6" : @(RTCDeviceTypeIPad6),
+ @"iPad7,11" : @(RTCDeviceTypeIPad7Gen10Inch),
+ @"iPad7,12" : @(RTCDeviceTypeIPad7Gen10Inch),
+ @"iPad8,1" : @(RTCDeviceTypeIPadPro3Gen11Inch),
+ @"iPad8,2" : @(RTCDeviceTypeIPadPro3Gen11Inch),
+ @"iPad8,3" : @(RTCDeviceTypeIPadPro3Gen11Inch),
+ @"iPad8,4" : @(RTCDeviceTypeIPadPro3Gen11Inch),
+ @"iPad8,5" : @(RTCDeviceTypeIPadPro3Gen12Inch),
+ @"iPad8,6" : @(RTCDeviceTypeIPadPro3Gen12Inch),
+ @"iPad8,7" : @(RTCDeviceTypeIPadPro3Gen12Inch),
+ @"iPad8,8" : @(RTCDeviceTypeIPadPro3Gen12Inch),
+ @"iPad8,9" : @(RTCDeviceTypeIPadPro4Gen11Inch),
+ @"iPad8,10" : @(RTCDeviceTypeIPadPro4Gen11Inch),
+ @"iPad8,11" : @(RTCDeviceTypeIPadPro4Gen12Inch),
+ @"iPad8,12" : @(RTCDeviceTypeIPadPro4Gen12Inch),
+ @"iPad11,1" : @(RTCDeviceTypeIPadMini5Gen),
+ @"iPad11,2" : @(RTCDeviceTypeIPadMini5Gen),
+ @"iPad11,3" : @(RTCDeviceTypeIPadAir3Gen),
+ @"iPad11,4" : @(RTCDeviceTypeIPadAir3Gen),
+ @"iPad11,6" : @(RTCDeviceTypeIPad8),
+ @"iPad11,7" : @(RTCDeviceTypeIPad8),
+ @"iPad12,1" : @(RTCDeviceTypeIPad9),
+ @"iPad12,2" : @(RTCDeviceTypeIPad9),
+ @"iPad13,1" : @(RTCDeviceTypeIPadAir4Gen),
+ @"iPad13,2" : @(RTCDeviceTypeIPadAir4Gen),
+ @"iPad13,4" : @(RTCDeviceTypeIPadPro5Gen11Inch),
+ @"iPad13,5" : @(RTCDeviceTypeIPadPro5Gen11Inch),
+ @"iPad13,6" : @(RTCDeviceTypeIPadPro5Gen11Inch),
+ @"iPad13,7" : @(RTCDeviceTypeIPadPro5Gen11Inch),
+ @"iPad13,8" : @(RTCDeviceTypeIPadPro5Gen12Inch),
+ @"iPad13,9" : @(RTCDeviceTypeIPadPro5Gen12Inch),
+ @"iPad13,10" : @(RTCDeviceTypeIPadPro5Gen12Inch),
+ @"iPad13,11" : @(RTCDeviceTypeIPadPro5Gen12Inch),
+ @"iPad14,1" : @(RTCDeviceTypeIPadMini6),
+ @"iPad14,2" : @(RTCDeviceTypeIPadMini6),
+ @"i386" : @(RTCDeviceTypeSimulatori386),
+ @"x86_64" : @(RTCDeviceTypeSimulatorx86_64),
+ };
+
+ RTCDeviceType deviceType = RTCDeviceTypeUnknown;
+ NSNumber *typeNumber = machineNameToType[[self machineName]];
+ if (typeNumber) {
+ deviceType = static_cast<RTCDeviceType>(typeNumber.integerValue);
+ }
+ return deviceType;
+}
+
++ (NSString *)machineName {
+ struct utsname systemInfo;
+ uname(&systemInfo);
+ return [[NSString alloc] initWithCString:systemInfo.machine
+ encoding:NSUTF8StringEncoding];
+}
+
++ (double)currentDeviceSystemVersion {
+ return [self currentDevice].systemVersion.doubleValue;
+}
+
++ (BOOL)isIOS11OrLater {
+ return [self currentDeviceSystemVersion] >= 11.0;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/noop.mm b/third_party/libwebrtc/sdk/objc/helpers/noop.mm
new file mode 100644
index 0000000000..16a8e6d5c1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/noop.mm
@@ -0,0 +1,13 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file is only needed to make ninja happy on some platforms.
+// On some platforms it is not possible to link an rtc_static_library
+// without any source file listed in the GN target.
diff --git a/third_party/libwebrtc/sdk/objc/helpers/scoped_cftyperef.h b/third_party/libwebrtc/sdk/objc/helpers/scoped_cftyperef.h
new file mode 100644
index 0000000000..092f02b3af
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/scoped_cftyperef.h
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef SDK_OBJC_HELPERS_SCOPED_CFTYPEREF_H_
+#define SDK_OBJC_HELPERS_SCOPED_CFTYPEREF_H_
+
+#include <CoreFoundation/CoreFoundation.h>
+
+#include "rtc_base/checks.h"  // For RTC_DCHECK used in InitializeInto().
+
+namespace rtc {
+
+// RETAIN: ScopedTypeRef should retain the object when it takes
+// ownership.
+// ASSUME: Assume the object has already been retained.
+// ScopedTypeRef takes over ownership.
+enum class RetainPolicy { RETAIN, ASSUME };
+
+namespace internal {
+template <typename T>
+struct CFTypeRefTraits {
+ static T InvalidValue() { return nullptr; }
+ static void Release(T ref) { CFRelease(ref); }
+ static T Retain(T ref) {
+ CFRetain(ref);
+ return ref;
+ }
+};
+
+template <typename T, typename Traits>
+class ScopedTypeRef {
+ public:
+ ScopedTypeRef() : ptr_(Traits::InvalidValue()) {}
+ explicit ScopedTypeRef(T ptr) : ptr_(ptr) {}
+ ScopedTypeRef(T ptr, RetainPolicy policy) : ScopedTypeRef(ptr) {
+ if (ptr_ && policy == RetainPolicy::RETAIN)
+ Traits::Retain(ptr_);
+ }
+
+ ScopedTypeRef(const ScopedTypeRef<T, Traits>& rhs) : ptr_(rhs.ptr_) {
+ if (ptr_)
+ ptr_ = Traits::Retain(ptr_);
+ }
+
+ ~ScopedTypeRef() {
+ if (ptr_) {
+ Traits::Release(ptr_);
+ }
+ }
+
+ T get() const { return ptr_; }
+ T operator->() const { return ptr_; }
+ explicit operator bool() const { return ptr_; }
+
+ bool operator!() const { return !ptr_; }
+
+ ScopedTypeRef& operator=(const T& rhs) {
+ if (ptr_)
+ Traits::Release(ptr_);
+ ptr_ = rhs;
+ return *this;
+ }
+
+ ScopedTypeRef& operator=(const ScopedTypeRef<T, Traits>& rhs) {
+ reset(rhs.get(), RetainPolicy::RETAIN);
+ return *this;
+ }
+
+ // This is intended to take ownership of objects that are
+ // created by pass-by-pointer initializers.
+ T* InitializeInto() {
+ RTC_DCHECK(!ptr_);
+ return &ptr_;
+ }
+
+ void reset(T ptr, RetainPolicy policy = RetainPolicy::ASSUME) {
+ if (ptr && policy == RetainPolicy::RETAIN)
+ Traits::Retain(ptr);
+ if (ptr_)
+ Traits::Release(ptr_);
+ ptr_ = ptr;
+ }
+
+ T release() {
+ T temp = ptr_;
+ ptr_ = Traits::InvalidValue();
+ return temp;
+ }
+
+ private:
+ T ptr_;
+};
+} // namespace internal
+
+template <typename T>
+using ScopedCFTypeRef =
+ internal::ScopedTypeRef<T, internal::CFTypeRefTraits<T>>;
+
+template <typename T>
+static ScopedCFTypeRef<T> AdoptCF(T cftype) {
+ return ScopedCFTypeRef<T>(cftype, RetainPolicy::RETAIN);
+}
+
+template <typename T>
+static ScopedCFTypeRef<T> ScopedCF(T cftype) {
+ return ScopedCFTypeRef<T>(cftype);
+}
+
+} // namespace rtc
+
+#endif // SDK_OBJC_HELPERS_SCOPED_CFTYPEREF_H_
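
A usage sketch under the conventions documented above: a Core Foundation "Create" function returns a +1 reference, so ScopedCF() (RetainPolicy::ASSUME) takes it over as-is, while AdoptCF() (RetainPolicy::RETAIN) is for borrowed references the scoper must keep alive. The function name is illustrative:

#include <CoreFoundation/CoreFoundation.h>

#include "sdk/objc/helpers/scoped_cftyperef.h"

void ScopedCFTypeRefExample(CFStringRef borrowed) {
  // Owns the +1 reference returned by the Create function; no extra retain.
  rtc::ScopedCFTypeRef<CFStringRef> owned = rtc::ScopedCF(
      CFStringCreateWithCString(kCFAllocatorDefault, "webrtc",
                                kCFStringEncodingUTF8));
  // Retains the borrowed reference so it stays alive for this scope.
  rtc::ScopedCFTypeRef<CFStringRef> shared = rtc::AdoptCF(borrowed);
  if (owned) {
    CFIndex length = CFStringGetLength(owned.get());
    (void)length;
  }
  // Both references are released when the scopers go out of scope.
}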
diff --git a/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.h b/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.h
new file mode 100644
index 0000000000..3405469709
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_AUDIO_DEVICE_MODULE_H_
+#define SDK_OBJC_NATIVE_API_AUDIO_DEVICE_MODULE_H_
+
+#include <memory>
+
+#include "modules/audio_device/include/audio_device.h"
+
+namespace webrtc {
+
+// If `bypass_voice_processing` is true, WebRTC will attempt to disable hardware
+// audio processing on iOS.
+// Warning: Setting `bypass_voice_processing` will have unpredictable
+// consequences for the audio path on the device. It is not advisable in
+// most scenarios.
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
+ bool bypass_voice_processing = false);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_AUDIO_DEVICE_MODULE_H_
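
A minimal sketch of the intended call site; the helper name is an assumption, and the returned ADM is normally handed to the peer connection factory during setup:

#include "api/scoped_refptr.h"
#include "sdk/objc/native/api/audio_device_module.h"

rtc::scoped_refptr<webrtc::AudioDeviceModule> CreateDefaultIOSAdm() {
  // Keep voice processing enabled (the default); bypassing it is discouraged
  // per the warning above.
  return webrtc::CreateAudioDeviceModule(/*bypass_voice_processing=*/false);
}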
diff --git a/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.mm b/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.mm
new file mode 100644
index 0000000000..4e7b681e69
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.mm
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_module.h"
+
+#include "api/make_ref_counted.h"
+#include "rtc_base/logging.h"
+
+#include "sdk/objc/native/src/audio/audio_device_module_ios.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(bool bypass_voice_processing) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+#if defined(WEBRTC_IOS)
+ return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing);
+#else
+  RTC_LOG(LS_ERROR) << "CreateAudioDeviceModule is only supported on iOS; returning nullptr.";
+ return nullptr;
+#endif
+}
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.h b/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.h
new file mode 100644
index 0000000000..903c66893d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_
+#define SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_
+
+#include <memory>
+
+#include "rtc_base/network_monitor_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::NetworkMonitorFactory> CreateNetworkMonitorFactory();
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_
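
A sketch of how the factory is typically wired in, assuming the network_monitor_factory field of webrtc::PeerConnectionFactoryDependencies available in this WebRTC revision; the function name is illustrative:

#include "api/peer_connection_interface.h"
#include "sdk/objc/native/api/network_monitor_factory.h"

void AddNetworkMonitor(webrtc::PeerConnectionFactoryDependencies& deps) {
  // On iOS this returns an NWPathMonitor-backed factory; elsewhere nullptr.
  deps.network_monitor_factory = webrtc::CreateNetworkMonitorFactory();
}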
diff --git a/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.mm b/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.mm
new file mode 100644
index 0000000000..acde634b1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.mm
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "network_monitor_factory.h"
+
+#if defined(WEBRTC_IOS)
+#include "sdk/objc/native/src/objc_network_monitor.h"
+#endif
+
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::NetworkMonitorFactory> CreateNetworkMonitorFactory() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+#if defined(WEBRTC_IOS)
+ return std::make_unique<ObjCNetworkMonitorFactory>();
+#else
+ return nullptr;
+#endif
+}
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.h b/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.h
new file mode 100644
index 0000000000..35ab1be9a8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_SSL_CERTIFICATE_VERIFIER_H_
+#define SDK_OBJC_NATIVE_API_SSL_CERTIFICATE_VERIFIER_H_
+
+#include <memory>
+
+#import "RTCSSLCertificateVerifier.h"
+#include "rtc_base/ssl_certificate.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::SSLCertificateVerifier> ObjCToNativeCertificateVerifier(
+ id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_SSL_CERTIFICATE_VERIFIER_H_
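
A sketch of an app-side verifier that could be handed to this function. The verify: selector is inferred from its use in the adapter implementation below; the pinning logic, resource name, and class name are illustrative assumptions:

#import <Foundation/Foundation.h>
#import "RTCSSLCertificateVerifier.h"

@interface MyPinningVerifier : NSObject <RTC_OBJC_TYPE (RTCSSLCertificateVerifier)>
@end

@implementation MyPinningVerifier

- (BOOL)verify:(NSData *)derCertificate {
  // Compare against a DER certificate bundled with the app (assumed path).
  NSString *path = [[NSBundle mainBundle] pathForResource:@"pinned_cert"
                                                   ofType:@"der"];
  NSData *pinned = path ? [NSData dataWithContentsOfFile:path] : nil;
  return pinned != nil && [derCertificate isEqualToData:pinned];
}

@end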
diff --git a/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.mm b/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.mm
new file mode 100644
index 0000000000..4437402b9c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.mm
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ssl_certificate_verifier.h"
+
+#include "rtc_base/buffer.h"
+
+namespace {
+
+class SSLCertificateVerifierAdapter final : public rtc::SSLCertificateVerifier {
+ public:
+ SSLCertificateVerifierAdapter(
+ id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier)
+ : objc_certificate_verifier_(objc_certificate_verifier) {
+ RTC_DCHECK(objc_certificate_verifier_ != nil);
+ }
+
+ bool Verify(const rtc::SSLCertificate& certificate) override {
+ @autoreleasepool {
+ rtc::Buffer der_buffer;
+ certificate.ToDER(&der_buffer);
+ NSData* serialized_certificate = [[NSData alloc] initWithBytes:der_buffer.data()
+ length:der_buffer.size()];
+ return [objc_certificate_verifier_ verify:serialized_certificate];
+ }
+ }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier_;
+};
+
+}  // namespace
+
+namespace webrtc {
+
+std::unique_ptr<rtc::SSLCertificateVerifier> ObjCToNativeCertificateVerifier(
+ id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier) {
+ return std::make_unique<SSLCertificateVerifierAdapter>(objc_certificate_verifier);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_capturer.h b/third_party/libwebrtc/sdk/objc/native/api/video_capturer.h
new file mode 100644
index 0000000000..9847d8148b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_capturer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_CAPTURER_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_CAPTURER_H_
+
+#import "base/RTCVideoCapturer.h"
+
+#include "api/media_stream_interface.h"
+#include "api/scoped_refptr.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
+ RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_CAPTURER_H_
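
A sketch of the bridge in use. The exact CreateVideoTrack overload varies across WebRTC versions, and the factory and threads are assumed to come from the app's existing native setup:

#include "api/peer_connection_interface.h"
#include "sdk/objc/native/api/video_capturer.h"

rtc::scoped_refptr<webrtc::VideoTrackInterface> MakeCameraTrack(
    webrtc::PeerConnectionFactoryInterface* factory,
    RTC_OBJC_TYPE(RTCVideoCapturer)* capturer,
    rtc::Thread* signaling_thread,
    rtc::Thread* worker_thread) {
  rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
      webrtc::ObjCToNativeVideoCapturer(capturer, signaling_thread,
                                        worker_thread);
  return factory->CreateVideoTrack("camera", source.get());
}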
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_capturer.mm b/third_party/libwebrtc/sdk/objc/native/api/video_capturer.mm
new file mode 100644
index 0000000000..a7260ab802
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_capturer.mm
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_capturer.h"
+
+#include "absl/memory/memory.h"
+#include "api/video_track_source_proxy_factory.h"
+#include "sdk/objc/native/src/objc_video_track_source.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
+ RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer,
+ rtc::Thread *signaling_thread,
+ rtc::Thread *worker_thread) {
+ RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
+ rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source =
+ rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(adapter);
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source =
+ webrtc::CreateVideoTrackSourceProxy(
+ signaling_thread, worker_thread, objc_video_track_source.get());
+
+ objc_video_capturer.delegate = adapter;
+
+ return video_source;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.h b/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.h
new file mode 100644
index 0000000000..03d8af3cfe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_DECODER_FACTORY_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_DECODER_FACTORY_H_
+
+#include <memory>
+
+#import "base/RTCVideoDecoderFactory.h"
+
+#include "api/video_codecs/video_decoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoDecoderFactory> ObjCToNativeVideoDecoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> objc_video_decoder_factory);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_DECODER_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.mm b/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.mm
new file mode 100644
index 0000000000..d418f2fe6f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.mm
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_decoder_factory.h"
+
+#include <memory>
+
+#include "sdk/objc/native/src/objc_video_decoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoDecoderFactory> ObjCToNativeVideoDecoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> objc_video_decoder_factory) {
+ return std::make_unique<ObjCVideoDecoderFactory>(objc_video_decoder_factory);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.h b/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.h
new file mode 100644
index 0000000000..6e551b288d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_ENCODER_FACTORY_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_ENCODER_FACTORY_H_
+
+#include <memory>
+
+#import "base/RTCVideoEncoderFactory.h"
+
+#include "api/video_codecs/video_encoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoEncoderFactory> ObjCToNativeVideoEncoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> objc_video_encoder_factory);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_ENCODER_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.mm b/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.mm
new file mode 100644
index 0000000000..6fa5563f75
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.mm
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_encoder_factory.h"
+
+#include <memory>
+
+#include "sdk/objc/native/src/objc_video_encoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoEncoderFactory> ObjCToNativeVideoEncoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> objc_video_encoder_factory) {
+ return std::make_unique<ObjCVideoEncoderFactory>(objc_video_encoder_factory);
+}
+
+} // namespace webrtc
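
The encoder and decoder conversions are usually performed together when building the native factory. A sketch, assuming the video_encoder_factory and video_decoder_factory fields of webrtc::PeerConnectionFactoryDependencies; the function name is an assumption:

#include "api/peer_connection_interface.h"
#include "sdk/objc/native/api/video_decoder_factory.h"
#include "sdk/objc/native/api/video_encoder_factory.h"

void AddCodecFactories(
    webrtc::PeerConnectionFactoryDependencies& deps,
    id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory,
    id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory) {
  deps.video_encoder_factory =
      webrtc::ObjCToNativeVideoEncoderFactory(encoder_factory);
  deps.video_decoder_factory =
      webrtc::ObjCToNativeVideoDecoderFactory(decoder_factory);
}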
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_frame.h b/third_party/libwebrtc/sdk/objc/native/api/video_frame.h
new file mode 100644
index 0000000000..b4416ffabe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_frame.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_FRAME_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_FRAME_H_
+
+#import "base/RTCVideoFrame.h"
+
+#include "api/video/video_frame.h"
+
+namespace webrtc {
+
+RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_FRAME_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_frame.mm b/third_party/libwebrtc/sdk/objc/native/api/video_frame.mm
new file mode 100644
index 0000000000..b82994fd5f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_frame.mm
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_frame.h"
+
+#include "sdk/objc/native/src/objc_video_frame.h"
+
+namespace webrtc {
+
+RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame) {
+ return ToObjCVideoFrame(frame);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.h b/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.h
new file mode 100644
index 0000000000..204d65d850
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_FRAME_BUFFER_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_FRAME_BUFFER_H_
+
+#import "base/RTCVideoFrameBuffer.h"
+
+#include "api/scoped_refptr.h"
+#include "common_video/include/video_frame_buffer.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
+ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer);
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_FRAME_BUFFER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.mm b/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.mm
new file mode 100644
index 0000000000..4fe9037bce
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.mm
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_frame_buffer.h"
+
+#include "api/make_ref_counted.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
+ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer) {
+ return rtc::make_ref_counted<ObjCFrameBuffer>(objc_video_frame_buffer);
+}
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(
+ const rtc::scoped_refptr<VideoFrameBuffer> &buffer) {
+ return ToObjCVideoFrameBuffer(buffer);
+}
+
+} // namespace webrtc
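
A round-trip sketch: because ObjCToNativeVideoFrameBuffer wraps the Objective-C object in an ObjCFrameBuffer that keeps a reference to it, converting back is expected to return the original object rather than a copy (an assumption about how ToObjCVideoFrameBuffer unwraps native buffers):

#include "sdk/objc/native/api/video_frame_buffer.h"

void RoundTripFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_buffer) {
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> native =
      webrtc::ObjCToNativeVideoFrameBuffer(objc_buffer);
  id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> back =
      webrtc::NativeToObjCVideoFrameBuffer(native);
  (void)back;  // Expected to be the original objc_buffer.
}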
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_renderer.h b/third_party/libwebrtc/sdk/objc/native/api/video_renderer.h
new file mode 100644
index 0000000000..04796b8049
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_renderer.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_RENDERER_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_RENDERER_H_
+
+#import "base/RTCVideoRenderer.h"
+
+#include <memory>
+
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> ObjCToNativeVideoRenderer(
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> objc_video_renderer);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_RENDERER_H_
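
A sketch of attaching an Objective-C renderer (for example an RTCMTLVideoView) to a native remote track. The caller owns the returned sink and must keep it alive while it is attached; the function name is illustrative:

#include <memory>

#include "api/media_stream_interface.h"
#include "sdk/objc/native/api/video_renderer.h"

std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> AttachRenderer(
    webrtc::VideoTrackInterface* track,
    id<RTC_OBJC_TYPE(RTCVideoRenderer)> objc_renderer) {
  std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink =
      webrtc::ObjCToNativeVideoRenderer(objc_renderer);
  track->AddOrUpdateSink(sink.get(), rtc::VideoSinkWants());
  return sink;
}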
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_renderer.mm b/third_party/libwebrtc/sdk/objc/native/api/video_renderer.mm
new file mode 100644
index 0000000000..e92d47d1e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_renderer.mm
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_renderer.h"
+
+#include <memory>
+
+#include "sdk/objc/native/src/objc_video_renderer.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> ObjCToNativeVideoRenderer(
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> objc_video_renderer) {
+ return std::make_unique<ObjCVideoRenderer>(objc_video_renderer);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.h b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.h
new file mode 100644
index 0000000000..dc9f462063
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.h
@@ -0,0 +1,308 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_IOS_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_IOS_H_
+
+#include <atomic>
+#include <memory>
+
+#include "api/sequence_checker.h"
+#include "audio_session_observer.h"
+#include "modules/audio_device/audio_device_generic.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/objc/base/RTCMacros.h"
+#include "voice_processing_audio_unit.h"
+
+RTC_FWD_DECL_OBJC_CLASS(RTCNativeAudioSessionDelegateAdapter);
+
+namespace webrtc {
+
+class FineAudioBuffer;
+
+namespace ios_adm {
+
+// Implements full-duplex 16-bit mono PCM audio support for iOS using a
+// Voice-Processing (VP) I/O audio unit in Core Audio. The VP I/O audio unit
+// supports audio echo cancellation. It also adds automatic gain control,
+// adjustment of voice-processing quality, and muting.
+//
+// An instance must be created and destroyed on one and the same thread.
+// All supported public methods must also be called on the same thread.
+// A thread checker will RTC_DCHECK if any supported method is called on an
+// invalid thread.
+//
+// Recorded audio will be delivered on a real-time internal I/O thread in the
+// audio unit. The audio unit will also ask for audio data to play out on this
+// same thread.
+class AudioDeviceIOS : public AudioDeviceGeneric,
+ public AudioSessionObserver,
+ public VoiceProcessingAudioUnitObserver,
+ public rtc::MessageHandler {
+ public:
+ explicit AudioDeviceIOS(bool bypass_voice_processing);
+ ~AudioDeviceIOS() override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ InitStatus Init() override;
+ int32_t Terminate() override;
+ bool Initialized() const override;
+
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int32_t StartPlayout() override;
+ int32_t StopPlayout() override;
+ bool Playing() const override;
+
+ int32_t StartRecording() override;
+ int32_t StopRecording() override;
+ bool Recording() const override;
+
+  // This method returns a hard-coded delay value and not a dynamic delay
+  // estimate. The reason is that iOS supports a built-in AEC and the WebRTC
+  // AEC will always be disabled in the Libjingle layer to avoid running two
+  // AEC implementations at the same time. It also saves resources to avoid
+  // updating these delay values continuously.
+  // TODO(henrika): it would be possible to mark this method as not
+  // implemented since it is only called for A/V-sync purposes today and
+  // A/V-sync is not supported on iOS. However, we avoid adding error messages
+  // to the log by using this dummy implementation instead.
+ int32_t PlayoutDelay(uint16_t& delayMS) const override;
+
+  // No implementation for playout underrun on iOS. We override it to avoid
+  // the base class periodically logging that it isn't available.
+ int32_t GetPlayoutUnderrunCount() const override { return -1; }
+
+ // Native audio parameters stored during construction.
+ // These methods are unique for the iOS implementation.
+ int GetPlayoutAudioParameters(AudioParameters* params) const override;
+ int GetRecordAudioParameters(AudioParameters* params) const override;
+
+ // These methods are currently not fully implemented on iOS:
+
+ // See audio_device_not_implemented.cc for trivial implementations.
+ int32_t ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer& audioLayer) const override;
+ int32_t PlayoutIsAvailable(bool& available) override;
+ int32_t RecordingIsAvailable(bool& available) override;
+ int16_t PlayoutDevices() override;
+ int16_t RecordingDevices() override;
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override;
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override;
+ int32_t SetPlayoutDevice(uint16_t index) override;
+ int32_t SetPlayoutDevice(
+ AudioDeviceModule::WindowsDeviceType device) override;
+ int32_t SetRecordingDevice(uint16_t index) override;
+ int32_t SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device) override;
+ int32_t InitSpeaker() override;
+ bool SpeakerIsInitialized() const override;
+ int32_t InitMicrophone() override;
+ bool MicrophoneIsInitialized() const override;
+ int32_t SpeakerVolumeIsAvailable(bool& available) override;
+ int32_t SetSpeakerVolume(uint32_t volume) override;
+ int32_t SpeakerVolume(uint32_t& volume) const override;
+ int32_t MaxSpeakerVolume(uint32_t& maxVolume) const override;
+ int32_t MinSpeakerVolume(uint32_t& minVolume) const override;
+ int32_t MicrophoneVolumeIsAvailable(bool& available) override;
+ int32_t SetMicrophoneVolume(uint32_t volume) override;
+ int32_t MicrophoneVolume(uint32_t& volume) const override;
+ int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const override;
+ int32_t MinMicrophoneVolume(uint32_t& minVolume) const override;
+ int32_t MicrophoneMuteIsAvailable(bool& available) override;
+ int32_t SetMicrophoneMute(bool enable) override;
+ int32_t MicrophoneMute(bool& enabled) const override;
+ int32_t SpeakerMuteIsAvailable(bool& available) override;
+ int32_t SetSpeakerMute(bool enable) override;
+ int32_t SpeakerMute(bool& enabled) const override;
+ int32_t StereoPlayoutIsAvailable(bool& available) override;
+ int32_t SetStereoPlayout(bool enable) override;
+ int32_t StereoPlayout(bool& enabled) const override;
+ int32_t StereoRecordingIsAvailable(bool& available) override;
+ int32_t SetStereoRecording(bool enable) override;
+ int32_t StereoRecording(bool& enabled) const override;
+
+ // AudioSessionObserver methods. May be called from any thread.
+ void OnInterruptionBegin() override;
+ void OnInterruptionEnd() override;
+ void OnValidRouteChange() override;
+ void OnCanPlayOrRecordChange(bool can_play_or_record) override;
+ void OnChangedOutputVolume() override;
+
+ // VoiceProcessingAudioUnitObserver methods.
+ OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) override;
+ OSStatus OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) override;
+
+  // Handles messages posted to `thread_`.
+ void OnMessage(rtc::Message* msg) override;
+
+ bool IsInterrupted();
+
+ private:
+ // Called by the relevant AudioSessionObserver methods on `thread_`.
+ void HandleInterruptionBegin();
+ void HandleInterruptionEnd();
+ void HandleValidRouteChange();
+ void HandleCanPlayOrRecordChange(bool can_play_or_record);
+ void HandleSampleRateChange();
+ void HandlePlayoutGlitchDetected();
+ void HandleOutputVolumeChange();
+
+ // Uses current `playout_parameters_` and `record_parameters_` to inform the
+ // audio device buffer (ADB) about our internal audio parameters.
+ void UpdateAudioDeviceBuffer();
+
+  // Since the preferred audio parameters are only hints to the OS, the actual
+  // values may be different once the AVAudioSession has been activated.
+  // This method asks for the current hardware parameters and takes action
+  // if they differ from what we initially asked for. It also
+  // defines `playout_parameters_` and `record_parameters_`.
+ void SetupAudioBuffersForActiveAudioSession();
+
+ // Creates the audio unit.
+ bool CreateAudioUnit();
+
+ // Updates the audio unit state based on current state.
+ void UpdateAudioUnit(bool can_play_or_record);
+
+ // Configures the audio session for WebRTC.
+ bool ConfigureAudioSession();
+
+  // Like above, but requires the caller to already hold the session lock.
+ bool ConfigureAudioSessionLocked();
+
+ // Unconfigures the audio session.
+ void UnconfigureAudioSession();
+
+ // Activates our audio session, creates and initializes the voice-processing
+ // audio unit and verifies that we got the preferred native audio parameters.
+ bool InitPlayOrRecord();
+
+ // Closes and deletes the voice-processing I/O unit.
+ void ShutdownPlayOrRecord();
+
+ // Resets thread-checkers before a call is restarted.
+ void PrepareForNewStart();
+
+ // Determines whether voice processing should be enabled or disabled.
+ const bool bypass_voice_processing_;
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker thread_checker_;
+
+ // Native I/O audio thread checker.
+ SequenceChecker io_thread_checker_;
+
+ // Thread that this object is created on.
+ rtc::Thread* thread_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create().
+ // The AudioDeviceBuffer is a member of the AudioDeviceModuleImpl instance
+ // and therefore outlives this object.
+ AudioDeviceBuffer* audio_device_buffer_;
+
+  // Contains audio parameters (sample rate, #channels, buffer size etc.) for
+  // the playout and recording sides. These structures are set in two steps:
+  // first, the native sample rate and #channels are defined in Init(). Next,
+  // the audio session is activated and we verify that the preferred
+  // parameters were granted by the OS. At this stage it is also possible to
+  // add a third component to the parameters: the native I/O buffer duration.
+  // An RTC_CHECK will be hit if we fail, for some reason, to open an audio
+  // session using the specified parameters.
+ AudioParameters playout_parameters_;
+ AudioParameters record_parameters_;
+
+ // The AudioUnit used to play and record audio.
+ std::unique_ptr<VoiceProcessingAudioUnit> audio_unit_;
+
+  // FineAudioBuffer takes an AudioDeviceBuffer, which delivers audio data
+  // in chunks of 10ms. It then allows this data to be pulled at a
+  // finer or coarser granularity. I.e., by interacting with this class instead
+  // of directly with the AudioDeviceBuffer, one can ask for any number of
+  // audio samples. It also supports a similar scheme for the recording
+  // side.
+  // Example: the native buffer size can be 128 audio frames at a 16kHz sample
+  // rate. WebRTC will provide 160 audio frames per 10ms, but iOS asks for 128
+  // in each callback (one every 8ms). This class can then ask for 128, and the
+  // FineAudioBuffer will ask WebRTC for new data only when needed, and will
+  // also cache non-utilized audio between callbacks. On the recording side,
+  // iOS can provide audio data frames of size 128, and these are accumulated
+  // until enough data exists to supply one 10ms chunk. This chunk is then sent
+  // to WebRTC and the remaining part is stored.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Temporary storage for recorded data. AudioUnitRender() renders into this
+ // array as soon as a frame of the desired buffer size has been recorded.
+  // On real iOS devices, the size will be fixed and set once. On iOS
+  // simulators, the size can vary from callback to callback, and the buffer
+  // is resized dynamically to account for this behavior.
+ rtc::BufferT<int16_t> record_audio_buffer_;
+
+ // Set to 1 when recording is active and 0 otherwise.
+ std::atomic<int> recording_;
+
+ // Set to 1 when playout is active and 0 otherwise.
+ std::atomic<int> playing_;
+
+  // Set to true after a successful call to Init(), false otherwise.
+  bool initialized_ RTC_GUARDED_BY(thread_checker_);
+
+  // Set to true after a successful call to InitRecording() or InitPlayout(),
+  // false otherwise.
+ bool audio_is_initialized_;
+
+ // Set to true if audio session is interrupted, false otherwise.
+ bool is_interrupted_;
+
+ // Audio interruption observer instance.
+ RTCNativeAudioSessionDelegateAdapter* audio_session_observer_
+ RTC_GUARDED_BY(thread_checker_);
+
+ // Set to true if we've activated the audio session.
+ bool has_configured_session_ RTC_GUARDED_BY(thread_checker_);
+
+  // Counts the number of detected audio glitches on the playout side.
+ int64_t num_detected_playout_glitches_ RTC_GUARDED_BY(thread_checker_);
+ int64_t last_playout_time_ RTC_GUARDED_BY(io_thread_checker_);
+
+  // Counts the number of playout callbacks per call.
+  // The value is updated on the native I/O thread and later read on the
+  // creating thread (see thread_checker_), but at that stage no audio is
+  // active. Hence, it is a "thread safe" design and no lock is needed.
+ int64_t num_playout_callbacks_;
+
+ // Contains the time for when the last output volume change was detected.
+ int64_t last_output_volume_change_time_ RTC_GUARDED_BY(thread_checker_);
+};
+} // namespace ios_adm
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_IOS_H_
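
The FineAudioBuffer comment above is easier to follow with the numbers written out. A standalone sketch of the 16 kHz arithmetic; the constants come from the comment, not from the class itself:

#include <cstdio>

int main() {
  const int sample_rate_hz = 16000;
  const int io_buffer_frames = 128;  // Native I/O-unit buffer size.
  // WebRTC exchanges audio in 10 ms chunks: 16000 / 100 = 160 frames.
  const int webrtc_chunk_frames = sample_rate_hz / 100;
  // Each 128-frame callback covers 128 / 16000 s = 8 ms.
  const double io_callback_ms = 1000.0 * io_buffer_frames / sample_rate_hz;
  std::printf("I/O callback every %.1f ms; WebRTC chunk = %d frames\n",
              io_callback_ms, webrtc_chunk_frames);
  return 0;
}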
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.mm b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.mm
new file mode 100644
index 0000000000..f3f87c04b6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.mm
@@ -0,0 +1,1165 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#include "audio_device_ios.h"
+
+#include <cmath>
+
+#include "api/array_view.h"
+#include "helpers.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/time_utils.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+
+#import "base/RTCLogging.h"
+#import "components/audio/RTCAudioSession+Private.h"
+#import "components/audio/RTCAudioSession.h"
+#import "components/audio/RTCAudioSessionConfiguration.h"
+#import "components/audio/RTCNativeAudioSessionDelegateAdapter.h"
+
+namespace webrtc {
+namespace ios_adm {
+
+#define LOGI() RTC_LOG(LS_INFO) << "AudioDeviceIOS::"
+
+#define LOG_AND_RETURN_IF_ERROR(error, message) \
+ do { \
+ OSStatus err = error; \
+ if (err) { \
+ RTC_LOG(LS_ERROR) << message << ": " << err; \
+ return false; \
+ } \
+ } while (0)
+
+#define LOG_IF_ERROR(error, message) \
+ do { \
+ OSStatus err = error; \
+ if (err) { \
+ RTC_LOG(LS_ERROR) << message << ": " << err; \
+ } \
+ } while (0)
+
+// Hardcoded delay estimates based on real measurements.
+// TODO(henrika): these values are not used in combination with the built-in
+// AEC and can most likely be removed.
+const UInt16 kFixedPlayoutDelayEstimate = 30;
+const UInt16 kFixedRecordDelayEstimate = 30;
+
+enum AudioDeviceMessageType : uint32_t {
+ kMessageTypeInterruptionBegin,
+ kMessageTypeInterruptionEnd,
+ kMessageTypeValidRouteChange,
+ kMessageTypeCanPlayOrRecordChange,
+ kMessageTypePlayoutGlitchDetected,
+ kMessageOutputVolumeChange,
+};
+
+using ios::CheckAndLogError;
+
+#if !defined(NDEBUG)
+// Returns true when the code runs on a device simulator.
+static bool DeviceIsSimulator() {
+ return ios::GetDeviceName() == "x86_64";
+}
+
+// Helper method that logs essential device information strings.
+static void LogDeviceInfo() {
+ RTC_LOG(LS_INFO) << "LogDeviceInfo";
+ @autoreleasepool {
+ RTC_LOG(LS_INFO) << " system name: " << ios::GetSystemName();
+ RTC_LOG(LS_INFO) << " system version: " << ios::GetSystemVersionAsString();
+ RTC_LOG(LS_INFO) << " device type: " << ios::GetDeviceType();
+ RTC_LOG(LS_INFO) << " device name: " << ios::GetDeviceName();
+ RTC_LOG(LS_INFO) << " process name: " << ios::GetProcessName();
+ RTC_LOG(LS_INFO) << " process ID: " << ios::GetProcessID();
+ RTC_LOG(LS_INFO) << " OS version: " << ios::GetOSVersionString();
+ RTC_LOG(LS_INFO) << " processing cores: " << ios::GetProcessorCount();
+ RTC_LOG(LS_INFO) << " low power mode: " << ios::GetLowPowerModeEnabled();
+#if TARGET_IPHONE_SIMULATOR
+ RTC_LOG(LS_INFO) << " TARGET_IPHONE_SIMULATOR is defined";
+#endif
+ RTC_LOG(LS_INFO) << " DeviceIsSimulator: " << DeviceIsSimulator();
+ }
+}
+#endif // !defined(NDEBUG)
+
+AudioDeviceIOS::AudioDeviceIOS(bool bypass_voice_processing)
+ : bypass_voice_processing_(bypass_voice_processing),
+ audio_device_buffer_(nullptr),
+ audio_unit_(nullptr),
+ recording_(0),
+ playing_(0),
+ initialized_(false),
+ audio_is_initialized_(false),
+ is_interrupted_(false),
+ has_configured_session_(false),
+ num_detected_playout_glitches_(0),
+ last_playout_time_(0),
+ num_playout_callbacks_(0),
+ last_output_volume_change_time_(0) {
+ LOGI() << "ctor" << ios::GetCurrentThreadDescription()
+ << ",bypass_voice_processing=" << bypass_voice_processing_;
+ io_thread_checker_.Detach();
+ thread_checker_.Detach();
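+ // Detaching allows each checker to re-bind to the first thread that uses it,
+ // since construction may happen on a different thread than the one that will
+ // own this object later.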
+ thread_ = rtc::Thread::Current();
+
+ audio_session_observer_ = [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this];
+}
+
+AudioDeviceIOS::~AudioDeviceIOS() {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ LOGI() << "~dtor" << ios::GetCurrentThreadDescription();
+ thread_->Clear(this);
+ Terminate();
+ audio_session_observer_ = nil;
+}
+
+void AudioDeviceIOS::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ LOGI() << "AttachAudioBuffer";
+ RTC_DCHECK(audioBuffer);
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+}
+
+AudioDeviceGeneric::InitStatus AudioDeviceIOS::Init() {
+ LOGI() << "Init";
+ io_thread_checker_.Detach();
+ thread_checker_.Detach();
+
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (initialized_) {
+ return InitStatus::OK;
+ }
+#if !defined(NDEBUG)
+ LogDeviceInfo();
+#endif
+ // Store the preferred sample rate and preferred number of channels here
+ // already. They have not yet been set and confirmed, since configureForWebRTC
+ // is not called until audio is about to start. However, it makes sense to
+ // store the parameters now and verify them at a later stage.
+ RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* config =
+ [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
+ playout_parameters_.reset(config.sampleRate, config.outputNumberOfChannels);
+ record_parameters_.reset(config.sampleRate, config.inputNumberOfChannels);
+ // Ensure that the audio device buffer (ADB) knows about the internal audio
+ // parameters. Note that, even if we are unable to get a mono audio session,
+ // we will always tell the I/O audio unit to do a channel format conversion
+ // to guarantee mono on the "input side" of the audio unit.
+ UpdateAudioDeviceBuffer();
+ initialized_ = true;
+ return InitStatus::OK;
+}
+
+int32_t AudioDeviceIOS::Terminate() {
+ LOGI() << "Terminate";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (!initialized_) {
+ return 0;
+ }
+ StopPlayout();
+ StopRecording();
+ initialized_ = false;
+ return 0;
+}
+
+bool AudioDeviceIOS::Initialized() const {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return initialized_;
+}
+
+int32_t AudioDeviceIOS::InitPlayout() {
+ LOGI() << "InitPlayout";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!audio_is_initialized_);
+ RTC_DCHECK(!playing_.load());
+ if (!audio_is_initialized_) {
+ if (!InitPlayOrRecord()) {
+ RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitPlayout!";
+ return -1;
+ }
+ }
+ audio_is_initialized_ = true;
+ return 0;
+}
+
+bool AudioDeviceIOS::PlayoutIsInitialized() const {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return audio_is_initialized_;
+}
+
+bool AudioDeviceIOS::RecordingIsInitialized() const {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return audio_is_initialized_;
+}
+
+int32_t AudioDeviceIOS::InitRecording() {
+ LOGI() << "InitRecording";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!audio_is_initialized_);
+ RTC_DCHECK(!recording_.load());
+ if (!audio_is_initialized_) {
+ if (!InitPlayOrRecord()) {
+ RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitRecording!";
+ return -1;
+ }
+ }
+ audio_is_initialized_ = true;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::StartPlayout() {
+ LOGI() << "StartPlayout";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(audio_is_initialized_);
+ RTC_DCHECK(!playing_.load());
+ RTC_DCHECK(audio_unit_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetPlayout();
+ }
+ if (!recording_.load() && audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ OSStatus result = audio_unit_->Start();
+ if (result != noErr) {
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session notifyAudioUnitStartFailedWithError:result];
+ RTCLogError(@"StartPlayout failed to start audio unit, reason %d", result);
+ return -1;
+ }
+ RTC_LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started";
+ }
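+ // The release store below pairs with the acquire load of `playing_` in
+ // OnGetPlayoutData() on the native I/O thread.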
+ playing_.store(1, std::memory_order_release);
+ num_playout_callbacks_ = 0;
+ num_detected_playout_glitches_ = 0;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::StopPlayout() {
+ LOGI() << "StopPlayout";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (!audio_is_initialized_ || !playing_.load()) {
+ return 0;
+ }
+ if (!recording_.load()) {
+ ShutdownPlayOrRecord();
+ audio_is_initialized_ = false;
+ }
+ playing_.store(0, std::memory_order_release);
+
+ // Derive average number of calls to OnGetPlayoutData() between detected
+ // audio glitches and add the result to a histogram.
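+ // 100000 (the histogram's upper bound) effectively acts as a sentinel for
+ // "no glitches detected".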
+ int average_number_of_playout_callbacks_between_glitches = 100000;
+ RTC_DCHECK_GE(num_playout_callbacks_, num_detected_playout_glitches_);
+ if (num_detected_playout_glitches_ > 0) {
+ average_number_of_playout_callbacks_between_glitches =
+ num_playout_callbacks_ / num_detected_playout_glitches_;
+ }
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Audio.AveragePlayoutCallbacksBetweenGlitches",
+ average_number_of_playout_callbacks_between_glitches);
+ RTCLog(@"Average number of playout callbacks between glitches: %d",
+ average_number_of_playout_callbacks_between_glitches);
+ return 0;
+}
+
+bool AudioDeviceIOS::Playing() const {
+ return playing_.load();
+}
+
+int32_t AudioDeviceIOS::StartRecording() {
+ LOGI() << "StartRecording";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(audio_is_initialized_);
+ RTC_DCHECK(!recording_.load());
+ RTC_DCHECK(audio_unit_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetRecord();
+ }
+ if (!playing_.load() && audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ OSStatus result = audio_unit_->Start();
+ if (result != noErr) {
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session notifyAudioUnitStartFailedWithError:result];
+ RTCLogError(@"StartRecording failed to start audio unit, reason %d", result);
+ return -1;
+ }
+ RTC_LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started";
+ }
+ recording_.store(1, std::memory_order_release);
+ return 0;
+}
+
+int32_t AudioDeviceIOS::StopRecording() {
+ LOGI() << "StopRecording";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (!audio_is_initialized_ || !recording_.load()) {
+ return 0;
+ }
+ if (!playing_.load()) {
+ ShutdownPlayOrRecord();
+ audio_is_initialized_ = false;
+ }
+ recording_.store(0, std::memory_order_release);
+ return 0;
+}
+
+bool AudioDeviceIOS::Recording() const {
+ return recording_.load();
+}
+
+int32_t AudioDeviceIOS::PlayoutDelay(uint16_t& delayMS) const {
+ delayMS = kFixedPlayoutDelayEstimate;
+ return 0;
+}
+
+int AudioDeviceIOS::GetPlayoutAudioParameters(AudioParameters* params) const {
+ LOGI() << "GetPlayoutAudioParameters";
+ RTC_DCHECK(playout_parameters_.is_valid());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ *params = playout_parameters_;
+ return 0;
+}
+
+int AudioDeviceIOS::GetRecordAudioParameters(AudioParameters* params) const {
+ LOGI() << "GetRecordAudioParameters";
+ RTC_DCHECK(record_parameters_.is_valid());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ *params = record_parameters_;
+ return 0;
+}
+
+void AudioDeviceIOS::OnInterruptionBegin() {
+ RTC_DCHECK(thread_);
+ LOGI() << "OnInterruptionBegin";
+ thread_->Post(RTC_FROM_HERE, this, kMessageTypeInterruptionBegin);
+}
+
+void AudioDeviceIOS::OnInterruptionEnd() {
+ RTC_DCHECK(thread_);
+ LOGI() << "OnInterruptionEnd";
+ thread_->Post(RTC_FROM_HERE, this, kMessageTypeInterruptionEnd);
+}
+
+void AudioDeviceIOS::OnValidRouteChange() {
+ RTC_DCHECK(thread_);
+ thread_->Post(RTC_FROM_HERE, this, kMessageTypeValidRouteChange);
+}
+
+void AudioDeviceIOS::OnCanPlayOrRecordChange(bool can_play_or_record) {
+ RTC_DCHECK(thread_);
+ thread_->Post(RTC_FROM_HERE,
+ this,
+ kMessageTypeCanPlayOrRecordChange,
+ new rtc::TypedMessageData<bool>(can_play_or_record));
+}
+
+void AudioDeviceIOS::OnChangedOutputVolume() {
+ RTC_DCHECK(thread_);
+ thread_->Post(RTC_FROM_HERE, this, kMessageOutputVolumeChange);
+}
+
+OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* /* io_data */) {
+ RTC_DCHECK_RUN_ON(&io_thread_checker_);
+ OSStatus result = noErr;
+ // Simply return if recording is not enabled.
+ if (!recording_.load(std::memory_order_acquire)) return result;
+
+ // Set the size of our own audio buffer and clear it first to avoid copying
+ // in combination with potential reallocations.
+ // On real iOS devices, the size will only be set once (at first callback).
+ record_audio_buffer_.Clear();
+ record_audio_buffer_.SetSize(num_frames);
+
+ // Allocate AudioBuffers to be used as storage for the received audio.
+ // The AudioBufferList structure works as a placeholder for the
+ // AudioBuffer structure, which holds a pointer to the actual data buffer
+ // in `record_audio_buffer_`. Recorded audio will be rendered into this memory
+ // at each input callback when calling AudioUnitRender().
+ AudioBufferList audio_buffer_list;
+ audio_buffer_list.mNumberBuffers = 1;
+ AudioBuffer* audio_buffer = &audio_buffer_list.mBuffers[0];
+ audio_buffer->mNumberChannels = record_parameters_.channels();
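+ // kBytesPerSample is the size of one 16-bit PCM sample (2 bytes), so e.g.
+ // 480 mono frames occupy 960 bytes.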
+ audio_buffer->mDataByteSize =
+ record_audio_buffer_.size() * VoiceProcessingAudioUnit::kBytesPerSample;
+ audio_buffer->mData = reinterpret_cast<int8_t*>(record_audio_buffer_.data());
+
+ // Obtain the recorded audio samples by initiating a rendering cycle.
+ // Since it happens on the input bus, the `io_data` parameter is a reference
+ // to the preallocated audio buffer list that the audio unit renders into.
+ // We could let the audio unit provide a buffer via `io_data` instead, but we
+ // currently just use our own.
+ // TODO(henrika): should error handling be improved?
+ result = audio_unit_->Render(flags, time_stamp, bus_number, num_frames, &audio_buffer_list);
+ if (result != noErr) {
+ RTCLogError(@"Failed to render audio.");
+ return result;
+ }
+
+ // Get a pointer to the recorded audio and send it to the WebRTC ADB.
+ // Use the FineAudioBuffer instance to convert between native buffer size
+ // and the 10ms buffer size used by WebRTC.
+ fine_audio_buffer_->DeliverRecordedData(record_audio_buffer_, kFixedRecordDelayEstimate);
+ return noErr;
+}
+
+OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ RTC_DCHECK_RUN_ON(&io_thread_checker_);
+ // Verify 16-bit, noninterleaved mono PCM signal format.
+ RTC_DCHECK_EQ(1, io_data->mNumberBuffers);
+ AudioBuffer* audio_buffer = &io_data->mBuffers[0];
+ RTC_DCHECK_EQ(1, audio_buffer->mNumberChannels);
+
+ // Produce silence and give audio unit a hint about it if playout is not
+ // activated.
+ if (!playing_.load(std::memory_order_acquire)) {
+ const size_t size_in_bytes = audio_buffer->mDataByteSize;
+ RTC_CHECK_EQ(size_in_bytes / VoiceProcessingAudioUnit::kBytesPerSample, num_frames);
+ *flags |= kAudioUnitRenderAction_OutputIsSilence;
+ memset(static_cast<int8_t*>(audio_buffer->mData), 0, size_in_bytes);
+ return noErr;
+ }
+
+ // Measure the time since the last call to OnGetPlayoutData() and check
+ // whether it is larger than a well-defined threshold that depends on the
+ // current IO buffer size. If so, we have an indication of a glitch in the
+ // output audio since the Core Audio layer has most likely run dry in this
+ // state.
+ ++num_playout_callbacks_;
+ const int64_t now_time = rtc::TimeMillis();
+ if (time_stamp->mSampleTime != num_frames) {
+ const int64_t delta_time = now_time - last_playout_time_;
+ const int glitch_threshold = 1.6 * playout_parameters_.GetBufferSizeInMilliseconds();
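+ // Example: with a 20 ms I/O buffer the threshold is 32 ms; a larger gap
+ // between callbacks suggests that the output has run dry at least once.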
+ if (delta_time > glitch_threshold) {
+ RTCLogWarning(@"Possible playout audio glitch detected.\n"
+ " Time since last OnGetPlayoutData was %lld ms.\n",
+ delta_time);
+ // Exclude extreme delta values since they most likely do not correspond
+ // to a real glitch. Instead, the most probable cause is that a headset
+ // has been plugged in or out. There are more direct ways to detect
+ // audio device changes (see HandleValidRouteChange()), but experiments
+ // show that using them leads to more complex implementations.
+ // TODO(henrika): more tests might be needed to come up with an even
+ // better upper limit.
+ if (glitch_threshold < 120 && delta_time > 120) {
+ RTCLog(@"Glitch warning is ignored. Probably caused by device switch.");
+ } else {
+ thread_->Post(RTC_FROM_HERE, this, kMessageTypePlayoutGlitchDetected);
+ }
+ }
+ }
+ last_playout_time_ = now_time;
+
+ // Read decoded 16-bit PCM samples from WebRTC (using a size that matches
+ // the native I/O audio unit) and copy the result to the audio buffer in the
+ // `io_data` destination.
+ fine_audio_buffer_->GetPlayoutData(
+ rtc::ArrayView<int16_t>(static_cast<int16_t*>(audio_buffer->mData), num_frames),
+ kFixedPlayoutDelayEstimate);
+ return noErr;
+}
+
+void AudioDeviceIOS::OnMessage(rtc::Message* msg) {
+ switch (msg->message_id) {
+ case kMessageTypeInterruptionBegin:
+ HandleInterruptionBegin();
+ break;
+ case kMessageTypeInterruptionEnd:
+ HandleInterruptionEnd();
+ break;
+ case kMessageTypeValidRouteChange:
+ HandleValidRouteChange();
+ break;
+ case kMessageTypeCanPlayOrRecordChange: {
+ rtc::TypedMessageData<bool>* data = static_cast<rtc::TypedMessageData<bool>*>(msg->pdata);
+ HandleCanPlayOrRecordChange(data->data());
+ delete data;
+ break;
+ }
+ case kMessageTypePlayoutGlitchDetected:
+ HandlePlayoutGlitchDetected();
+ break;
+ case kMessageOutputVolumeChange:
+ HandleOutputVolumeChange();
+ break;
+ }
+}
+
+void AudioDeviceIOS::HandleInterruptionBegin() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Interruption begin. IsInterrupted changed from %d to 1.", is_interrupted_);
+ if (audio_unit_ && audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ RTCLog(@"Stopping the audio unit due to interruption begin.");
+ if (!audio_unit_->Stop()) {
+ RTCLogError(@"Failed to stop the audio unit for interruption begin.");
+ }
+ PrepareForNewStart();
+ }
+ is_interrupted_ = true;
+}
+
+void AudioDeviceIOS::HandleInterruptionEnd() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Interruption ended. IsInterrupted changed from %d to 0. "
+ "Updating audio unit state.",
+ is_interrupted_);
+ is_interrupted_ = false;
+ if (!audio_unit_) return;
+ if (webrtc::field_trial::IsEnabled("WebRTC-Audio-iOS-Holding")) {
+ // Work around an issue where audio does not restart properly after an interruption
+ // by restarting the audio unit when the interruption ends.
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ audio_unit_->Stop();
+ PrepareForNewStart();
+ }
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ audio_unit_->Uninitialize();
+ }
+ // Allocate new buffers given the potentially new stream format.
+ SetupAudioBuffersForActiveAudioSession();
+ }
+ UpdateAudioUnit([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord);
+}
+
+void AudioDeviceIOS::HandleValidRouteChange() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ RTCLog(@"%@", session);
+ HandleSampleRateChange();
+}
+
+void AudioDeviceIOS::HandleCanPlayOrRecordChange(bool can_play_or_record) {
+ RTCLog(@"Handling CanPlayOrRecord change to: %d", can_play_or_record);
+ UpdateAudioUnit(can_play_or_record);
+}
+
+void AudioDeviceIOS::HandleSampleRateChange() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Handling sample rate change.");
+
+ // Don't do anything if we're interrupted.
+ if (is_interrupted_) {
+ RTCLog(@"Ignoring sample rate change due to interruption.");
+ return;
+ }
+
+ // If we don't have an audio unit yet, or the audio unit is uninitialized,
+ // there is no work to do.
+ if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) {
+ return;
+ }
+
+ // The audio unit is already initialized or started.
+ // Check to see if the sample rate or buffer size has changed.
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ const double new_sample_rate = session.sampleRate;
+ const NSTimeInterval session_buffer_duration = session.IOBufferDuration;
+ const size_t new_frames_per_buffer =
+ static_cast<size_t>(new_sample_rate * session_buffer_duration + .5);
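+ // Adding .5 before the truncating cast rounds to the nearest whole frame,
+ // e.g. 44100 Hz * 0.005 s = 220.5 -> 221 frames.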
+ const double current_sample_rate = playout_parameters_.sample_rate();
+ const size_t current_frames_per_buffer = playout_parameters_.frames_per_buffer();
+ RTCLog(@"Handling playout sample rate change:\n"
+ " Session sample rate: %f frames_per_buffer: %lu\n"
+ " ADM sample rate: %f frames_per_buffer: %lu",
+ new_sample_rate,
+ (unsigned long)new_frames_per_buffer,
+ current_sample_rate,
+ (unsigned long)current_frames_per_buffer);
+
+ // Sample rate and buffer size are the same, no work to do.
+ if (std::abs(current_sample_rate - new_sample_rate) <= DBL_EPSILON &&
+ current_frames_per_buffer == new_frames_per_buffer) {
+ RTCLog(@"Ignoring sample rate change since audio parameters are intact.");
+ return;
+ }
+
+ // Extra sanity check to ensure that the new sample rate is valid.
+ if (new_sample_rate <= 0.0) {
+ RTCLogError(@"Sample rate is invalid: %f", new_sample_rate);
+ return;
+ }
+
+ // We need to adjust our format and buffer sizes.
+ // The stream format is about to be changed and it requires that we first
+ // stop and uninitialize the audio unit to deallocate its resources.
+ RTCLog(@"Stopping and uninitializing audio unit to adjust buffers.");
+ bool restart_audio_unit = false;
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ audio_unit_->Stop();
+ restart_audio_unit = true;
+ PrepareForNewStart();
+ }
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ audio_unit_->Uninitialize();
+ }
+
+ // Allocate new buffers given the new stream format.
+ SetupAudioBuffersForActiveAudioSession();
+
+ // Initialize the audio unit again with the new sample rate.
+ if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) {
+ RTCLogError(@"Failed to initialize the audio unit with sample rate: %d",
+ playout_parameters_.sample_rate());
+ return;
+ }
+
+ // Restart the audio unit if it was already running.
+ if (restart_audio_unit) {
+ OSStatus result = audio_unit_->Start();
+ if (result != noErr) {
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session notifyAudioUnitStartFailedWithError:result];
+ RTCLogError(@"Failed to start audio unit with sample rate: %d, reason %d",
+ playout_parameters_.sample_rate(),
+ result);
+ return;
+ }
+ }
+ RTCLog(@"Successfully handled sample rate change.");
+}
+
+void AudioDeviceIOS::HandlePlayoutGlitchDetected() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ // Don't update metrics if we're interrupted since a "glitch" is expected
+ // in this state.
+ if (is_interrupted_) {
+ RTCLog(@"Ignoring audio glitch due to interruption.");
+ return;
+ }
+ // Avoid doing glitch detection for two seconds after a volume change
+ // has been detected to reduce the risk of false alarm.
+ if (last_output_volume_change_time_ > 0 &&
+ rtc::TimeSince(last_output_volume_change_time_) < 2000) {
+ RTCLog(@"Ignoring audio glitch due to recent output volume change.");
+ return;
+ }
+ num_detected_playout_glitches_++;
+ RTCLog(@"Number of detected playout glitches: %lld", num_detected_playout_glitches_);
+
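+ // Copy the counter into a local so the block below captures a stable value
+ // by value; reading the member directly from the main queue would race with
+ // updates on this thread.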
+ int64_t glitch_count = num_detected_playout_glitches_;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session notifyDidDetectPlayoutGlitch:glitch_count];
+ });
+}
+
+void AudioDeviceIOS::HandleOutputVolumeChange() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Output volume change detected.");
+ // Store time of this detection so it can be used to defer detection of
+ // glitches too close in time to this event.
+ last_output_volume_change_time_ = rtc::TimeMillis();
+}
+
+void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
+ LOGI() << "UpdateAudioDevicebuffer";
+ // AttachAudioBuffer() is called at construction by the main class but check
+ // just in case.
+ RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first";
+ RTC_DCHECK_GT(playout_parameters_.sample_rate(), 0);
+ RTC_DCHECK_GT(record_parameters_.sample_rate(), 0);
+ RTC_DCHECK_EQ(playout_parameters_.channels(), 1);
+ RTC_DCHECK_EQ(record_parameters_.channels(), 1);
+ // Inform the audio device buffer (ADB) about the new audio format.
+ audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate());
+ audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels());
+ audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate());
+ audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
+}
+
+void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
+ LOGI() << "SetupAudioBuffersForActiveAudioSession";
+ // Verify the current values once the audio session has been activated.
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ double sample_rate = session.sampleRate;
+ NSTimeInterval io_buffer_duration = session.IOBufferDuration;
+ RTCLog(@"%@", session);
+
+ // Log a warning message for the case when we are unable to set the preferred
+ // hardware sample rate but continue and use the non-ideal sample rate after
+ // reinitializing the audio parameters. Most BT headsets only support 8kHz or
+ // 16kHz.
+ RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* webRTCConfig =
+ [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
+ if (sample_rate != webRTCConfig.sampleRate) {
+ RTC_LOG(LS_WARNING) << "Unable to set the preferred sample rate";
+ }
+
+ // Crash reports indicate that, in rare cases, the reported sample rate can
+ // be less than or equal to zero. If that happens and a valid sample rate has
+ // already been set during initialization, the best we can do is to reuse the
+ // current sample rate.
+ if (sample_rate <= DBL_EPSILON && playout_parameters_.sample_rate() > 0) {
+ RTCLogError(@"Reported rate is invalid: %f. "
+ "Using %d as sample rate instead.",
+ sample_rate, playout_parameters_.sample_rate());
+ sample_rate = playout_parameters_.sample_rate();
+ }
+
+ // At this stage, we also know the exact IO buffer duration and can add
+ // that info to the existing audio parameters where it is converted into
+ // number of audio frames.
+ // Example: IO buffer size = 0.008 seconds <=> 128 audio frames at 16kHz.
+ // Hence, 128 is the size we expect to see in upcoming render callbacks.
+ playout_parameters_.reset(sample_rate, playout_parameters_.channels(), io_buffer_duration);
+ RTC_DCHECK(playout_parameters_.is_complete());
+ record_parameters_.reset(sample_rate, record_parameters_.channels(), io_buffer_duration);
+ RTC_DCHECK(record_parameters_.is_complete());
+ RTC_LOG(LS_INFO) << " frames per I/O buffer: " << playout_parameters_.frames_per_buffer();
+ RTC_LOG(LS_INFO) << " bytes per I/O buffer: " << playout_parameters_.GetBytesPerBuffer();
+ RTC_DCHECK_EQ(playout_parameters_.GetBytesPerBuffer(), record_parameters_.GetBytesPerBuffer());
+
+ // Update the ADB parameters since the sample rate might have changed.
+ UpdateAudioDeviceBuffer();
+
+ // Create a modified audio buffer class which allows us to ask for,
+ // or deliver, any number of samples (and not only multiple of 10ms) to match
+ // the native audio unit buffer size.
+ RTC_DCHECK(audio_device_buffer_);
+ fine_audio_buffer_.reset(new FineAudioBuffer(audio_device_buffer_));
+}
+
+bool AudioDeviceIOS::CreateAudioUnit() {
+ RTC_DCHECK(!audio_unit_);
+
+ audio_unit_.reset(new VoiceProcessingAudioUnit(bypass_voice_processing_, this));
+ if (!audio_unit_->Init()) {
+ audio_unit_.reset();
+ return false;
+ }
+
+ return true;
+}
+
+void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Updating audio unit state. CanPlayOrRecord=%d IsInterrupted=%d",
+ can_play_or_record,
+ is_interrupted_);
+
+ if (is_interrupted_) {
+ RTCLog(@"Ignoring audio unit update due to interruption.");
+ return;
+ }
+
+ // If we're not initialized we don't need to do anything. Audio unit will
+ // be initialized on initialization.
+ if (!audio_is_initialized_) return;
+
+ // If we're initialized, we must have an audio unit.
+ RTC_DCHECK(audio_unit_);
+
+ bool should_initialize_audio_unit = false;
+ bool should_uninitialize_audio_unit = false;
+ bool should_start_audio_unit = false;
+ bool should_stop_audio_unit = false;
+
+ switch (audio_unit_->GetState()) {
+ case VoiceProcessingAudioUnit::kInitRequired:
+ RTCLog(@"VPAU state: InitRequired");
+ RTC_DCHECK_NOTREACHED();
+ break;
+ case VoiceProcessingAudioUnit::kUninitialized:
+ RTCLog(@"VPAU state: Uninitialized");
+ should_initialize_audio_unit = can_play_or_record;
+ should_start_audio_unit =
+ should_initialize_audio_unit && (playing_.load() || recording_.load());
+ break;
+ case VoiceProcessingAudioUnit::kInitialized:
+ RTCLog(@"VPAU state: Initialized");
+ should_start_audio_unit = can_play_or_record && (playing_.load() || recording_.load());
+ should_uninitialize_audio_unit = !can_play_or_record;
+ break;
+ case VoiceProcessingAudioUnit::kStarted:
+ RTCLog(@"VPAU state: Started");
+ RTC_DCHECK(playing_.load() || recording_.load());
+ should_stop_audio_unit = !can_play_or_record;
+ should_uninitialize_audio_unit = should_stop_audio_unit;
+ break;
+ }
+
+ if (should_initialize_audio_unit) {
+ RTCLog(@"Initializing audio unit for UpdateAudioUnit");
+ ConfigureAudioSession();
+ SetupAudioBuffersForActiveAudioSession();
+ if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) {
+ RTCLogError(@"Failed to initialize audio unit.");
+ return;
+ }
+ }
+
+ if (should_start_audio_unit) {
+ RTCLog(@"Starting audio unit for UpdateAudioUnit");
+ // Log session settings before trying to start audio streaming.
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ RTCLog(@"%@", session);
+ OSStatus result = audio_unit_->Start();
+ if (result != noErr) {
+ [session notifyAudioUnitStartFailedWithError:result];
+ RTCLogError(@"Failed to start audio unit, reason %d", result);
+ return;
+ }
+ }
+
+ if (should_stop_audio_unit) {
+ RTCLog(@"Stopping audio unit for UpdateAudioUnit");
+ if (!audio_unit_->Stop()) {
+ RTCLogError(@"Failed to stop audio unit.");
+ PrepareForNewStart();
+ return;
+ }
+ PrepareForNewStart();
+ }
+
+ if (should_uninitialize_audio_unit) {
+ RTCLog(@"Uninitializing audio unit for UpdateAudioUnit");
+ audio_unit_->Uninitialize();
+ UnconfigureAudioSession();
+ }
+}
+
+bool AudioDeviceIOS::ConfigureAudioSession() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Configuring audio session.");
+ if (has_configured_session_) {
+ RTCLogWarning(@"Audio session already configured.");
+ return false;
+ }
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session lockForConfiguration];
+ bool success = [session configureWebRTCSession:nil];
+ [session unlockForConfiguration];
+ if (success) {
+ has_configured_session_ = true;
+ RTCLog(@"Configured audio session.");
+ } else {
+ RTCLog(@"Failed to configure audio session.");
+ }
+ return success;
+}
+
+bool AudioDeviceIOS::ConfigureAudioSessionLocked() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Configuring audio session.");
+ if (has_configured_session_) {
+ RTCLogWarning(@"Audio session already configured.");
+ return false;
+ }
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ bool success = [session configureWebRTCSession:nil];
+ if (success) {
+ has_configured_session_ = true;
+ RTCLog(@"Configured audio session.");
+ } else {
+ RTCLog(@"Failed to configure audio session.");
+ }
+ return success;
+}
+
+void AudioDeviceIOS::UnconfigureAudioSession() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Unconfiguring audio session.");
+ if (!has_configured_session_) {
+ RTCLogWarning(@"Audio session already unconfigured.");
+ return;
+ }
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session lockForConfiguration];
+ [session unconfigureWebRTCSession:nil];
+ [session endWebRTCSession:nil];
+ [session unlockForConfiguration];
+ has_configured_session_ = false;
+ RTCLog(@"Unconfigured audio session.");
+}
+
+bool AudioDeviceIOS::InitPlayOrRecord() {
+ LOGI() << "InitPlayOrRecord";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ // There should be no audio unit at this point.
+ if (!CreateAudioUnit()) {
+ return false;
+ }
+
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ // Subscribe to audio session events.
+ [session pushDelegate:audio_session_observer_];
+ is_interrupted_ = session.isInterrupted ? true : false;
+
+ // Lock the session to make configuration changes.
+ [session lockForConfiguration];
+ NSError* error = nil;
+ if (![session beginWebRTCSession:&error]) {
+ [session unlockForConfiguration];
+ RTCLogError(@"Failed to begin WebRTC session: %@", error.localizedDescription);
+ audio_unit_.reset();
+ return false;
+ }
+
+ // If we are ready to play or record, and if the audio session can be
+ // configured, then initialize the audio unit.
+ if (session.canPlayOrRecord) {
+ if (!ConfigureAudioSessionLocked()) {
+ // One possible reason for failure is if an attempt was made to use the
+ // audio session during or after a Media Services failure.
+ // See AVAudioSessionErrorCodeMediaServicesFailed for details.
+ [session unlockForConfiguration];
+ audio_unit_.reset();
+ return false;
+ }
+ SetupAudioBuffersForActiveAudioSession();
+ audio_unit_->Initialize(playout_parameters_.sample_rate());
+ }
+
+ // Release the lock.
+ [session unlockForConfiguration];
+ return true;
+}
+
+void AudioDeviceIOS::ShutdownPlayOrRecord() {
+ LOGI() << "ShutdownPlayOrRecord";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ // Stop the audio unit to prevent any additional audio callbacks.
+ audio_unit_->Stop();
+
+ // Close and delete the voice-processing I/O unit.
+ audio_unit_.reset();
+
+ // Detach thread checker for the AURemoteIO::IOThread to ensure that the
+ // next session uses a fresh thread id.
+ io_thread_checker_.Detach();
+
+ // Remove audio session notification observers.
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session removeDelegate:audio_session_observer_];
+
+ // All I/O should be stopped or paused prior to deactivating the audio
+ // session, hence we deactivate as last action.
+ UnconfigureAudioSession();
+}
+
+void AudioDeviceIOS::PrepareForNewStart() {
+ LOGI() << "PrepareForNewStart";
+ // The audio unit has been stopped and preparations are needed for an upcoming
+ // restart. It will result in audio callbacks from a new native I/O thread
+ // which means that we must detach thread checkers here to be prepared for an
+ // upcoming new audio stream.
+ io_thread_checker_.Detach();
+}
+
+bool AudioDeviceIOS::IsInterrupted() {
+ return is_interrupted_;
+}
+
+#pragma mark - Not Implemented
+
+int32_t AudioDeviceIOS::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const {
+ audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+ return 0;
+}
+
+int16_t AudioDeviceIOS::PlayoutDevices() {
+ // TODO(henrika): improve.
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return (int16_t)1;
+}
+
+int16_t AudioDeviceIOS::RecordingDevices() {
+ // TODO(henrika): improve.
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return (int16_t)1;
+}
+
+int32_t AudioDeviceIOS::InitSpeaker() {
+ return 0;
+}
+
+bool AudioDeviceIOS::SpeakerIsInitialized() const {
+ return true;
+}
+
+int32_t AudioDeviceIOS::SpeakerVolumeIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetSpeakerVolume(uint32_t volume) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SpeakerVolume(uint32_t& volume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MaxSpeakerVolume(uint32_t& maxVolume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MinSpeakerVolume(uint32_t& minVolume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SpeakerMuteIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetSpeakerMute(bool enable) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SpeakerMute(bool& enabled) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SetPlayoutDevice(uint16_t index) {
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::InitMicrophone() {
+ return 0;
+}
+
+bool AudioDeviceIOS::MicrophoneIsInitialized() const {
+ return true;
+}
+
+int32_t AudioDeviceIOS::MicrophoneMuteIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetMicrophoneMute(bool enable) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MicrophoneMute(bool& enabled) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::StereoRecordingIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetStereoRecording(bool enable) {
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::StereoRecording(bool& enabled) const {
+ enabled = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::StereoPlayoutIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetStereoPlayout(bool enable) {
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::StereoPlayout(bool& enabled) const {
+ enabled = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::MicrophoneVolumeIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetMicrophoneVolume(uint32_t volume) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MicrophoneVolume(uint32_t& volume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MaxMicrophoneVolume(uint32_t& maxVolume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MinMicrophoneVolume(uint32_t& minVolume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SetRecordingDevice(uint16_t index) {
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::PlayoutIsAvailable(bool& available) {
+ available = true;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::RecordingIsAvailable(bool& available) {
+ available = true;
+ return 0;
+}
+
+} // namespace ios_adm
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.h b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.h
new file mode 100644
index 0000000000..9bcf114e32
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_MODULE_IOS_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_MODULE_IOS_H_
+
+#include <memory>
+
+#include "audio_device_ios.h"
+
+#include "api/task_queue/task_queue_factory.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+class AudioDeviceGeneric;
+
+namespace ios_adm {
+
+class AudioDeviceModuleIOS : public AudioDeviceModule {
+ public:
+ int32_t AttachAudioBuffer();
+
+ explicit AudioDeviceModuleIOS(bool bypass_voice_processing);
+ ~AudioDeviceModuleIOS() override;
+
+ // Retrieve the currently utilized audio layer
+ int32_t ActiveAudioLayer(AudioLayer* audioLayer) const override;
+
+ // Full-duplex transportation of PCM audio
+ int32_t RegisterAudioCallback(AudioTransport* audioCallback) override;
+
+ // Main initialization and termination
+ int32_t Init() override;
+ int32_t Terminate() override;
+ bool Initialized() const override;
+
+ // Device enumeration
+ int16_t PlayoutDevices() override;
+ int16_t RecordingDevices() override;
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override;
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override;
+
+ // Device selection
+ int32_t SetPlayoutDevice(uint16_t index) override;
+ int32_t SetPlayoutDevice(WindowsDeviceType device) override;
+ int32_t SetRecordingDevice(uint16_t index) override;
+ int32_t SetRecordingDevice(WindowsDeviceType device) override;
+
+ // Audio transport initialization
+ int32_t PlayoutIsAvailable(bool* available) override;
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+ int32_t RecordingIsAvailable(bool* available) override;
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ // Audio transport control
+ int32_t StartPlayout() override;
+ int32_t StopPlayout() override;
+ bool Playing() const override;
+ int32_t StartRecording() override;
+ int32_t StopRecording() override;
+ bool Recording() const override;
+
+ // Audio mixer initialization
+ int32_t InitSpeaker() override;
+ bool SpeakerIsInitialized() const override;
+ int32_t InitMicrophone() override;
+ bool MicrophoneIsInitialized() const override;
+
+ // Speaker volume controls
+ int32_t SpeakerVolumeIsAvailable(bool* available) override;
+ int32_t SetSpeakerVolume(uint32_t volume) override;
+ int32_t SpeakerVolume(uint32_t* volume) const override;
+ int32_t MaxSpeakerVolume(uint32_t* maxVolume) const override;
+ int32_t MinSpeakerVolume(uint32_t* minVolume) const override;
+
+ // Microphone volume controls
+ int32_t MicrophoneVolumeIsAvailable(bool* available) override;
+ int32_t SetMicrophoneVolume(uint32_t volume) override;
+ int32_t MicrophoneVolume(uint32_t* volume) const override;
+ int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override;
+ int32_t MinMicrophoneVolume(uint32_t* minVolume) const override;
+
+ // Speaker mute control
+ int32_t SpeakerMuteIsAvailable(bool* available) override;
+ int32_t SetSpeakerMute(bool enable) override;
+ int32_t SpeakerMute(bool* enabled) const override;
+
+ // Microphone mute control
+ int32_t MicrophoneMuteIsAvailable(bool* available) override;
+ int32_t SetMicrophoneMute(bool enable) override;
+ int32_t MicrophoneMute(bool* enabled) const override;
+
+ // Stereo support
+ int32_t StereoPlayoutIsAvailable(bool* available) const override;
+ int32_t SetStereoPlayout(bool enable) override;
+ int32_t StereoPlayout(bool* enabled) const override;
+ int32_t StereoRecordingIsAvailable(bool* available) const override;
+ int32_t SetStereoRecording(bool enable) override;
+ int32_t StereoRecording(bool* enabled) const override;
+
+ // Delay information and control
+ int32_t PlayoutDelay(uint16_t* delayMS) const override;
+
+ bool BuiltInAECIsAvailable() const override;
+ int32_t EnableBuiltInAEC(bool enable) override;
+ bool BuiltInAGCIsAvailable() const override;
+ int32_t EnableBuiltInAGC(bool enable) override;
+ bool BuiltInNSIsAvailable() const override;
+ int32_t EnableBuiltInNS(bool enable) override;
+
+ int32_t GetPlayoutUnderrunCount() const override;
+
+#if defined(WEBRTC_IOS)
+ int GetPlayoutAudioParameters(AudioParameters* params) const override;
+ int GetRecordAudioParameters(AudioParameters* params) const override;
+#endif // WEBRTC_IOS
+ private:
+ const bool bypass_voice_processing_;
+ bool initialized_ = false;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ std::unique_ptr<AudioDeviceIOS> audio_device_;
+ std::unique_ptr<AudioDeviceBuffer> audio_device_buffer_;
+};
+} // namespace ios_adm
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_MODULE_IOS_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.mm b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.mm
new file mode 100644
index 0000000000..5effef3abd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.mm
@@ -0,0 +1,669 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_module_ios.h"
+
+#include "api/task_queue/default_task_queue_factory.h"
+#include "modules/audio_device/audio_device_config.h"
+#include "modules/audio_device/audio_device_generic.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ref_count.h"
+#include "system_wrappers/include/metrics.h"
+
+#if defined(WEBRTC_IOS)
+#include "audio_device_ios.h"
+#endif
+
+#define CHECKinitialized_() \
+ { \
+ if (!initialized_) { \
+ return -1; \
+ }; \
+ }
+
+#define CHECKinitialized__BOOL() \
+ { \
+ if (!initialized_) { \
+ return false; \
+ }; \
+ }
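+
+// These guards make every ADM API call fail (or return false) until Init()
+// has completed successfully, mirroring the similar guards used by WebRTC's
+// generic AudioDeviceModule implementation.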
+
+namespace webrtc {
+namespace ios_adm {
+
+AudioDeviceModuleIOS::AudioDeviceModuleIOS(bool bypass_voice_processing)
+ : bypass_voice_processing_(bypass_voice_processing),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()) {
+ RTC_LOG(LS_INFO) << "current platform is IOS";
+ RTC_LOG(LS_INFO) << "iPhone Audio APIs will be utilized.";
+}
+
+ int32_t AudioDeviceModuleIOS::AttachAudioBuffer() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ audio_device_->AttachAudioBuffer(audio_device_buffer_.get());
+ return 0;
+ }
+
+ AudioDeviceModuleIOS::~AudioDeviceModuleIOS() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ }
+
+ int32_t AudioDeviceModuleIOS::ActiveAudioLayer(AudioLayer* audioLayer) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ AudioLayer activeAudio;
+ if (audio_device_->ActiveAudioLayer(activeAudio) == -1) {
+ return -1;
+ }
+ *audioLayer = activeAudio;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::Init() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (initialized_)
+ return 0;
+
+ audio_device_buffer_.reset(new webrtc::AudioDeviceBuffer(task_queue_factory_.get()));
+ audio_device_.reset(new ios_adm::AudioDeviceIOS(bypass_voice_processing_));
+ RTC_CHECK(audio_device_);
+
+ this->AttachAudioBuffer();
+
+ AudioDeviceGeneric::InitStatus status = audio_device_->Init();
+ RTC_HISTOGRAM_ENUMERATION(
+ "WebRTC.Audio.InitializationResult", static_cast<int>(status),
+ static_cast<int>(AudioDeviceGeneric::InitStatus::NUM_STATUSES));
+ if (status != AudioDeviceGeneric::InitStatus::OK) {
+ RTC_LOG(LS_ERROR) << "Audio device initialization failed.";
+ return -1;
+ }
+ initialized_ = true;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::Terminate() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return 0;
+ if (audio_device_->Terminate() == -1) {
+ return -1;
+ }
+ initialized_ = false;
+ return 0;
+ }
+
+ bool AudioDeviceModuleIOS::Initialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << ": " << initialized_;
+ return initialized_;
+ }
+
+ int32_t AudioDeviceModuleIOS::InitSpeaker() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ return audio_device_->InitSpeaker();
+ }
+
+ int32_t AudioDeviceModuleIOS::InitMicrophone() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ return audio_device_->InitMicrophone();
+ }
+
+ int32_t AudioDeviceModuleIOS::SpeakerVolumeIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->SpeakerVolumeIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetSpeakerVolume(uint32_t volume) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")";
+ CHECKinitialized_();
+ return audio_device_->SetSpeakerVolume(volume);
+ }
+
+ int32_t AudioDeviceModuleIOS::SpeakerVolume(uint32_t* volume) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ uint32_t level = 0;
+ if (audio_device_->SpeakerVolume(level) == -1) {
+ return -1;
+ }
+ *volume = level;
+ RTC_DLOG(LS_INFO) << "output: " << *volume;
+ return 0;
+ }
+
+ bool AudioDeviceModuleIOS::SpeakerIsInitialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isInitialized = audio_device_->SpeakerIsInitialized();
+ RTC_DLOG(LS_INFO) << "output: " << isInitialized;
+ return isInitialized;
+ }
+
+ bool AudioDeviceModuleIOS::MicrophoneIsInitialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isInitialized = audio_device_->MicrophoneIsInitialized();
+ RTC_DLOG(LS_INFO) << "output: " << isInitialized;
+ return isInitialized;
+ }
+
+ int32_t AudioDeviceModuleIOS::MaxSpeakerVolume(uint32_t* maxVolume) const {
+ CHECKinitialized_();
+ uint32_t maxVol = 0;
+ if (audio_device_->MaxSpeakerVolume(maxVol) == -1) {
+ return -1;
+ }
+ *maxVolume = maxVol;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MinSpeakerVolume(uint32_t* minVolume) const {
+ CHECKinitialized_();
+ uint32_t minVol = 0;
+ if (audio_device_->MinSpeakerVolume(minVol) == -1) {
+ return -1;
+ }
+ *minVolume = minVol;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SpeakerMuteIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->SpeakerMuteIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetSpeakerMute(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ return audio_device_->SetSpeakerMute(enable);
+ }
+
+ int32_t AudioDeviceModuleIOS::SpeakerMute(bool* enabled) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool muted = false;
+ if (audio_device_->SpeakerMute(muted) == -1) {
+ return -1;
+ }
+ *enabled = muted;
+ RTC_DLOG(LS_INFO) << "output: " << muted;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MicrophoneMuteIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->MicrophoneMuteIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetMicrophoneMute(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ return (audio_device_->SetMicrophoneMute(enable));
+ }
+
+ int32_t AudioDeviceModuleIOS::MicrophoneMute(bool* enabled) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool muted = false;
+ if (audio_device_->MicrophoneMute(muted) == -1) {
+ return -1;
+ }
+ *enabled = muted;
+ RTC_DLOG(LS_INFO) << "output: " << muted;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MicrophoneVolumeIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->MicrophoneVolumeIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetMicrophoneVolume(uint32_t volume) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")";
+ CHECKinitialized_();
+ return (audio_device_->SetMicrophoneVolume(volume));
+ }
+
+ int32_t AudioDeviceModuleIOS::MicrophoneVolume(uint32_t* volume) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ uint32_t level = 0;
+ if (audio_device_->MicrophoneVolume(level) == -1) {
+ return -1;
+ }
+ *volume = level;
+ RTC_DLOG(LS_INFO) << "output: " << *volume;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::StereoRecordingIsAvailable(
+ bool* available) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->StereoRecordingIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetStereoRecording(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ if (enable) {
+ RTC_LOG(LS_WARNING) << "recording in stereo is not supported";
+ }
+ return -1;
+ }
+
+ int32_t AudioDeviceModuleIOS::StereoRecording(bool* enabled) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool stereo = false;
+ if (audio_device_->StereoRecording(stereo) == -1) {
+ return -1;
+ }
+ *enabled = stereo;
+ RTC_DLOG(LS_INFO) << "output: " << stereo;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::StereoPlayoutIsAvailable(bool* available) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->StereoPlayoutIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetStereoPlayout(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ if (audio_device_->PlayoutIsInitialized()) {
+ RTC_LOG(LS_ERROR) << "unable to set stereo mode while playing side is initialized";
+ return -1;
+ }
+ if (audio_device_->SetStereoPlayout(enable)) {
+ RTC_LOG(LS_WARNING) << "stereo playout is not supported";
+ return -1;
+ }
+ int8_t nChannels(1);
+ if (enable) {
+ nChannels = 2;
+ }
+ audio_device_buffer_.get()->SetPlayoutChannels(nChannels);
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::StereoPlayout(bool* enabled) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool stereo = false;
+ if (audio_device_->StereoPlayout(stereo) == -1) {
+ return -1;
+ }
+ *enabled = stereo;
+ RTC_DLOG(LS_INFO) << "output: " << stereo;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::PlayoutIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->PlayoutIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::RecordingIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->RecordingIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MaxMicrophoneVolume(uint32_t* maxVolume) const {
+ CHECKinitialized_();
+ uint32_t maxVol(0);
+ if (audio_device_->MaxMicrophoneVolume(maxVol) == -1) {
+ return -1;
+ }
+ *maxVolume = maxVol;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MinMicrophoneVolume(uint32_t* minVolume) const {
+ CHECKinitialized_();
+ uint32_t minVol(0);
+ if (audio_device_->MinMicrophoneVolume(minVol) == -1) {
+ return -1;
+ }
+ *minVolume = minVol;
+ return 0;
+ }
+
+ int16_t AudioDeviceModuleIOS::PlayoutDevices() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ uint16_t nPlayoutDevices = audio_device_->PlayoutDevices();
+ RTC_DLOG(LS_INFO) << "output: " << nPlayoutDevices;
+ return (int16_t)(nPlayoutDevices);
+ }
+
+ int32_t AudioDeviceModuleIOS::SetPlayoutDevice(uint16_t index) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")";
+ CHECKinitialized_();
+ return audio_device_->SetPlayoutDevice(index);
+ }
+
+ int32_t AudioDeviceModuleIOS::SetPlayoutDevice(WindowsDeviceType device) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ return audio_device_->SetPlayoutDevice(device);
+ }
+
+ int32_t AudioDeviceModuleIOS::PlayoutDeviceName(
+ uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ", ...)";
+ CHECKinitialized_();
+ if (name == NULL) {
+ return -1;
+ }
+ if (audio_device_->PlayoutDeviceName(index, name, guid) == -1) {
+ return -1;
+ }
+ if (name != NULL) {
+ RTC_DLOG(LS_INFO) << "output: name = " << name;
+ }
+ if (guid != NULL) {
+ RTC_DLOG(LS_INFO) << "output: guid = " << guid;
+ }
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::RecordingDeviceName(
+ uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ", ...)";
+ CHECKinitialized_();
+ if (name == NULL) {
+ return -1;
+ }
+ if (audio_device_->RecordingDeviceName(index, name, guid) == -1) {
+ return -1;
+ }
+ if (name != NULL) {
+ RTC_DLOG(LS_INFO) << "output: name = " << name;
+ }
+ if (guid != NULL) {
+ RTC_DLOG(LS_INFO) << "output: guid = " << guid;
+ }
+ return 0;
+ }
+
+ int16_t AudioDeviceModuleIOS::RecordingDevices() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ uint16_t nRecordingDevices = audio_device_->RecordingDevices();
+ RTC_DLOG(LS_INFO) << "output: " << nRecordingDevices;
+ return (int16_t)nRecordingDevices;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetRecordingDevice(uint16_t index) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")";
+ CHECKinitialized_();
+ return audio_device_->SetRecordingDevice(index);
+ }
+
+ int32_t AudioDeviceModuleIOS::SetRecordingDevice(WindowsDeviceType device) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ return audio_device_->SetRecordingDevice(device);
+ }
+
+ int32_t AudioDeviceModuleIOS::InitPlayout() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ if (PlayoutIsInitialized()) {
+ return 0;
+ }
+ int32_t result = audio_device_->InitPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ int32_t AudioDeviceModuleIOS::InitRecording() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ if (RecordingIsInitialized()) {
+ return 0;
+ }
+ int32_t result = audio_device_->InitRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool AudioDeviceModuleIOS::PlayoutIsInitialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ return audio_device_->PlayoutIsInitialized();
+ }
+
+ bool AudioDeviceModuleIOS::RecordingIsInitialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ return audio_device_->RecordingIsInitialized();
+ }
+
+ int32_t AudioDeviceModuleIOS::StartPlayout() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ if (Playing()) {
+ return 0;
+ }
+ audio_device_buffer_.get()->StartPlayout();
+ int32_t result = audio_device_->StartPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ int32_t AudioDeviceModuleIOS::StopPlayout() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ int32_t result = audio_device_->StopPlayout();
+ audio_device_buffer_.get()->StopPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool AudioDeviceModuleIOS::Playing() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ return audio_device_->Playing();
+ }
+
+ int32_t AudioDeviceModuleIOS::StartRecording() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ if (Recording()) {
+ return 0;
+ }
+ audio_device_buffer_.get()->StartRecording();
+ int32_t result = audio_device_->StartRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ int32_t AudioDeviceModuleIOS::StopRecording() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ int32_t result = audio_device_->StopRecording();
+ audio_device_buffer_.get()->StopRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool AudioDeviceModuleIOS::Recording() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ return audio_device_->Recording();
+ }
+
+ int32_t AudioDeviceModuleIOS::RegisterAudioCallback(
+ AudioTransport* audioCallback) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return audio_device_buffer_.get()->RegisterAudioCallback(audioCallback);
+ }
+
+ int32_t AudioDeviceModuleIOS::PlayoutDelay(uint16_t* delayMS) const {
+ CHECKinitialized_();
+ uint16_t delay = 0;
+ if (audio_device_->PlayoutDelay(delay) == -1) {
+ RTC_LOG(LS_ERROR) << "failed to retrieve the playout delay";
+ return -1;
+ }
+ *delayMS = delay;
+ return 0;
+ }
+
+ bool AudioDeviceModuleIOS::BuiltInAECIsAvailable() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isAvailable = audio_device_->BuiltInAECIsAvailable();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ int32_t AudioDeviceModuleIOS::EnableBuiltInAEC(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ int32_t ok = audio_device_->EnableBuiltInAEC(enable);
+ RTC_DLOG(LS_INFO) << "output: " << ok;
+ return ok;
+ }
+
+ bool AudioDeviceModuleIOS::BuiltInAGCIsAvailable() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isAvailable = audio_device_->BuiltInAGCIsAvailable();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ int32_t AudioDeviceModuleIOS::EnableBuiltInAGC(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ int32_t ok = audio_device_->EnableBuiltInAGC(enable);
+ RTC_DLOG(LS_INFO) << "output: " << ok;
+ return ok;
+ }
+
+ bool AudioDeviceModuleIOS::BuiltInNSIsAvailable() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isAvailable = audio_device_->BuiltInNSIsAvailable();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ int32_t AudioDeviceModuleIOS::EnableBuiltInNS(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ int32_t ok = audio_device_->EnableBuiltInNS(enable);
+ RTC_DLOG(LS_INFO) << "output: " << ok;
+ return ok;
+ }
+
+ int32_t AudioDeviceModuleIOS::GetPlayoutUnderrunCount() const {
+ // Don't log here, as this method can be called very often.
+ CHECKinitialized_();
+ int32_t ok = audio_device_->GetPlayoutUnderrunCount();
+ return ok;
+ }
+
+#if defined(WEBRTC_IOS)
+ int AudioDeviceModuleIOS::GetPlayoutAudioParameters(
+ AudioParameters* params) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ int r = audio_device_->GetPlayoutAudioParameters(params);
+ RTC_DLOG(LS_INFO) << "output: " << r;
+ return r;
+ }
+
+ int AudioDeviceModuleIOS::GetRecordAudioParameters(
+ AudioParameters* params) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ int r = audio_device_->GetRecordAudioParameters(params);
+ RTC_DLOG(LS_INFO) << "output: " << r;
+ return r;
+ }
+#endif // WEBRTC_IOS
+}
+}
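For orientation, the playout half of the wrapper above can be driven as in the following sketch. This is illustrative code, not part of the patch; it assumes an `adm` pointer to an already-created and initialized AudioDeviceModuleIOS, and relies only on the return convention visible above (0 for success, -1 for failure).

  // Hypothetical caller; `adm` is assumed to exist and to have been Init()ed.
  if (adm->InitPlayout() == 0 &&   // no-op when already initialized
      adm->StartPlayout() == 0) {  // starts the buffer, then the device
    RTC_DCHECK(adm->Playing());
    // ... audio plays out via the registered AudioTransport ...
    adm->StopPlayout();            // stops the device, then the buffer
  }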
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_session_observer.h b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_session_observer.h
new file mode 100644
index 0000000000..f7c44c8184
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_session_observer.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_SESSION_OBSERVER_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_SESSION_OBSERVER_H_
+
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// Observer interface for listening to AVAudioSession events.
+class AudioSessionObserver {
+ public:
+ // Called when audio session interruption begins.
+ virtual void OnInterruptionBegin() = 0;
+
+ // Called when audio session interruption ends.
+ virtual void OnInterruptionEnd() = 0;
+
+ // Called when audio route changes.
+ virtual void OnValidRouteChange() = 0;
+
+ // Called when the ability to play or record changes.
+ virtual void OnCanPlayOrRecordChange(bool can_play_or_record) = 0;
+
+ virtual void OnChangedOutputVolume() = 0;
+
+ protected:
+ virtual ~AudioSessionObserver() {}
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_SESSION_OBSERVER_H_
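To illustrate how this pure-virtual interface is meant to be consumed, here is a minimal sketch of a concrete observer; the class name and log messages are invented for the example, and RTC_LOG is assumed to be available via rtc_base/logging.h.

  // Illustrative only: logs every AVAudioSession event it receives.
  class LoggingAudioSessionObserver : public webrtc::AudioSessionObserver {
   public:
    void OnInterruptionBegin() override { RTC_LOG(LS_INFO) << "interruption begin"; }
    void OnInterruptionEnd() override { RTC_LOG(LS_INFO) << "interruption end"; }
    void OnValidRouteChange() override { RTC_LOG(LS_INFO) << "route changed"; }
    void OnCanPlayOrRecordChange(bool can_play_or_record) override {
      RTC_LOG(LS_INFO) << "can play or record: " << can_play_or_record;
    }
    void OnChangedOutputVolume() override { RTC_LOG(LS_INFO) << "output volume changed"; }
  };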
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.h b/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.h
new file mode 100644
index 0000000000..ac86258a5e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_HELPERS_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_HELPERS_H_
+
+#import <Foundation/Foundation.h>
+
+#include <string>
+
+namespace webrtc {
+namespace ios {
+
+bool CheckAndLogError(BOOL success, NSError* error);
+
+NSString* NSStringFromStdString(const std::string& stdString);
+std::string StdStringFromNSString(NSString* nsString);
+
+// Return thread ID as a string.
+std::string GetThreadId();
+
+// Return thread ID as string suitable for debug logging.
+std::string GetThreadInfo();
+
+// Returns [NSThread currentThread] description as string.
+// Example: <NSThread: 0x170066d80>{number = 1, name = main}
+std::string GetCurrentThreadDescription();
+
+#if defined(WEBRTC_IOS)
+// Returns the current name of the operating system.
+std::string GetSystemName();
+
+// Returns the current version of the operating system as a string.
+std::string GetSystemVersionAsString();
+
+// Returns the version of the operating system in double representation.
+// Uses a cached value of the system version.
+double GetSystemVersion();
+
+// Returns the device type.
+// Examples: "iPhone" and "iPod touch".
+std::string GetDeviceType();
+#endif // defined(WEBRTC_IOS)
+
+// Returns a more detailed device name.
+// Examples: "iPhone 5s (GSM)" and "iPhone 6 Plus".
+std::string GetDeviceName();
+
+// Returns the name of the process. Does not uniquely identify the process.
+std::string GetProcessName();
+
+// Returns the identifier of the process (often called process ID).
+int GetProcessID();
+
+// Returns a string containing the version of the operating system on which
+// the process is executing. The string is human-readable, localized, and
+// appropriate for displaying to the user.
+std::string GetOSVersionString();
+
+// Returns the number of processing cores available on the device.
+int GetProcessorCount();
+
+#if defined(WEBRTC_IOS)
+// Indicates whether Low Power Mode is enabled on the iOS device.
+bool GetLowPowerModeEnabled();
+#endif
+
+} // namespace ios
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_HELPERS_H_
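As a quick sketch of the string helpers declared above: both conversions go through UTF-8, and NSStringFromStdString passes an explicit byte length, so embedded '\0' bytes survive a round trip. The snippet below is illustrative and assumes rtc_base/checks.h for RTC_DCHECK_EQ.

  // Illustrative only: round-trip a std::string with an embedded NUL.
  std::string original("voice\0chat", 10);
  NSString* ns = webrtc::ios::NSStringFromStdString(original);
  std::string round_tripped = webrtc::ios::StdStringFromNSString(ns);
  RTC_DCHECK_EQ(original, round_tripped);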
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.mm b/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.mm
new file mode 100644
index 0000000000..cd0469656a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.mm
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <sys/sysctl.h>
+#if defined(WEBRTC_IOS)
+#import <UIKit/UIKit.h>
+#endif
+
+#include <memory>
+
+#include "helpers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace ios {
+
+NSString* NSStringFromStdString(const std::string& stdString) {
+  // std::string may contain embedded null characters, so construct the
+  // NSString from the explicit byte length instead of a C string.
+ return [[NSString alloc] initWithBytes:stdString.data()
+ length:stdString.length()
+ encoding:NSUTF8StringEncoding];
+}
+
+std::string StdStringFromNSString(NSString* nsString) {
+ NSData* charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
+ return std::string(reinterpret_cast<const char*>([charData bytes]),
+ [charData length]);
+}
+
+bool CheckAndLogError(BOOL success, NSError* error) {
+ if (!success) {
+ NSString* msg =
+ [NSString stringWithFormat:@"Error: %ld, %@, %@", (long)error.code,
+ error.localizedDescription,
+ error.localizedFailureReason];
+ RTC_LOG(LS_ERROR) << StdStringFromNSString(msg);
+ return false;
+ }
+ return true;
+}
+
+// TODO(henrika): see if it is possible to move to GetThreadName in
+// platform_thread.h and base it on pthread methods instead.
+std::string GetCurrentThreadDescription() {
+ NSString* name = [NSString stringWithFormat:@"%@", [NSThread currentThread]];
+ return StdStringFromNSString(name);
+}
+
+#if defined(WEBRTC_IOS)
+std::string GetSystemName() {
+ NSString* osName = [[UIDevice currentDevice] systemName];
+ return StdStringFromNSString(osName);
+}
+
+std::string GetSystemVersionAsString() {
+ NSString* osVersion = [[UIDevice currentDevice] systemVersion];
+ return StdStringFromNSString(osVersion);
+}
+
+std::string GetDeviceType() {
+ NSString* deviceModel = [[UIDevice currentDevice] model];
+ return StdStringFromNSString(deviceModel);
+}
+
+bool GetLowPowerModeEnabled() {
+ return [NSProcessInfo processInfo].lowPowerModeEnabled;
+}
+#endif
+
+std::string GetDeviceName() {
+ size_t size;
+ sysctlbyname("hw.machine", NULL, &size, NULL, 0);
+ std::unique_ptr<char[]> machine;
+ machine.reset(new char[size]);
+ sysctlbyname("hw.machine", machine.get(), &size, NULL, 0);
+ return std::string(machine.get());
+}
+
+std::string GetProcessName() {
+ NSString* processName = [NSProcessInfo processInfo].processName;
+ return StdStringFromNSString(processName);
+}
+
+int GetProcessID() {
+ return [NSProcessInfo processInfo].processIdentifier;
+}
+
+std::string GetOSVersionString() {
+ NSString* osVersion =
+ [NSProcessInfo processInfo].operatingSystemVersionString;
+ return StdStringFromNSString(osVersion);
+}
+
+int GetProcessorCount() {
+ return [NSProcessInfo processInfo].processorCount;
+}
+
+} // namespace ios
+} // namespace webrtc
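CheckAndLogError() exists to wrap Cocoa's BOOL-return-plus-NSError convention. A hedged usage sketch follows; the AVAudioSession call is just one example of such an API and assumes AVFoundation has been imported.

  // Illustrative only: funnel a Cocoa-style failure through CheckAndLogError.
  NSError* error = nil;
  BOOL success = [[AVAudioSession sharedInstance] setActive:YES error:&error];
  if (!webrtc::ios::CheckAndLogError(success, error)) {
    // Details were already logged; recover or propagate the failure here.
  }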
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.h b/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.h
new file mode 100644
index 0000000000..ed9dd98568
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.h
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_VOICE_PROCESSING_AUDIO_UNIT_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_VOICE_PROCESSING_AUDIO_UNIT_H_
+
+#include <AudioUnit/AudioUnit.h>
+
+namespace webrtc {
+namespace ios_adm {
+
+class VoiceProcessingAudioUnitObserver {
+ public:
+ // Callback function called on a real-time priority I/O thread from the audio
+ // unit. This method is used to signal that recorded audio is available.
+ virtual OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) = 0;
+
+ // Callback function called on a real-time priority I/O thread from the audio
+ // unit. This method is used to provide audio samples to the audio unit.
+ virtual OSStatus OnGetPlayoutData(AudioUnitRenderActionFlags* io_action_flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) = 0;
+
+ protected:
+ ~VoiceProcessingAudioUnitObserver() {}
+};
+
+// Convenience class to abstract away the management of a Voice Processing
+// I/O Audio Unit. The Voice Processing I/O unit has the same characteristics
+// as the Remote I/O unit (supports full duplex low-latency audio input and
+// output) and adds AEC for two-way duplex communication. It also adds AGC,
+// adjustment of voice-processing quality, and muting. Hence, ideal for
+// VoIP applications.
+class VoiceProcessingAudioUnit {
+ public:
+ VoiceProcessingAudioUnit(bool bypass_voice_processing,
+ VoiceProcessingAudioUnitObserver* observer);
+ ~VoiceProcessingAudioUnit();
+
+ // TODO(tkchin): enum for state and state checking.
+ enum State : int32_t {
+ // Init() should be called.
+ kInitRequired,
+ // Audio unit created but not initialized.
+ kUninitialized,
+ // Initialized but not started. Equivalent to stopped.
+ kInitialized,
+ // Initialized and started.
+ kStarted,
+ };
+
+ // Number of bytes per audio sample for 16-bit signed integer representation.
+ static const UInt32 kBytesPerSample;
+
+ // Initializes this class by creating the underlying audio unit instance.
+ // Creates a Voice-Processing I/O unit and configures it for full-duplex
+  // audio. The stream format is chosen to avoid internal resampling and to
+  // match WebRTC's 10 ms callback rate as closely as possible.
+  // Does not initialize the audio unit.
+ bool Init();
+
+ VoiceProcessingAudioUnit::State GetState() const;
+
+ // Initializes the underlying audio unit with the given sample rate.
+ bool Initialize(Float64 sample_rate);
+
+ // Starts the underlying audio unit.
+ OSStatus Start();
+
+ // Stops the underlying audio unit.
+ bool Stop();
+
+ // Uninitializes the underlying audio unit.
+ bool Uninitialize();
+
+ // Calls render on the underlying audio unit.
+ OSStatus Render(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 output_bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+
+ private:
+ // The C API used to set callbacks requires static functions. When these are
+ // called, they will invoke the relevant instance method by casting
+ // in_ref_con to VoiceProcessingAudioUnit*.
+ static OSStatus OnGetPlayoutData(void* in_ref_con,
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+ static OSStatus OnDeliverRecordedData(void* in_ref_con,
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+
+ // Notifies observer that samples are needed for playback.
+ OSStatus NotifyGetPlayoutData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+ // Notifies observer that recorded samples are available for render.
+ OSStatus NotifyDeliverRecordedData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+
+ // Returns the predetermined format with a specific sample rate. See
+ // implementation file for details on format.
+ AudioStreamBasicDescription GetFormat(Float64 sample_rate) const;
+
+ // Deletes the underlying audio unit.
+ void DisposeAudioUnit();
+
+ const bool bypass_voice_processing_;
+ VoiceProcessingAudioUnitObserver* observer_;
+ AudioUnit vpio_unit_;
+ VoiceProcessingAudioUnit::State state_;
+};
+} // namespace ios_adm
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_VOICE_PROCESSING_AUDIO_UNIT_H_
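The State enum above implies a strict lifecycle. The following sketch shows the intended ordering; it is illustrative only and assumes an `observer` implementing VoiceProcessingAudioUnitObserver plus a sample rate taken from the audio session (48 kHz here).

  // Illustrative only: expected state transitions of the audio unit.
  auto unit = std::make_unique<webrtc::ios_adm::VoiceProcessingAudioUnit>(
      /*bypass_voice_processing=*/false, observer);
  if (unit->Init()) {                 // kInitRequired  -> kUninitialized
    if (unit->Initialize(48000.0)) {  // kUninitialized -> kInitialized
      unit->Start();                  // kInitialized   -> kStarted
      // ... samples flow through the observer callbacks ...
      unit->Stop();                   // kStarted       -> kInitialized
    }
    unit->Uninitialize();             // kInitialized   -> kUninitialized
  }                                   // destructor disposes the unit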
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.mm
new file mode 100644
index 0000000000..3905b6857a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.mm
@@ -0,0 +1,488 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "voice_processing_audio_unit.h"
+
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/metrics.h"
+
+#import "base/RTCLogging.h"
+#import "sdk/objc/components/audio/RTCAudioSessionConfiguration.h"
+
+#if !defined(NDEBUG)
+static void LogStreamDescription(AudioStreamBasicDescription description) {
+ char formatIdString[5];
+ UInt32 formatId = CFSwapInt32HostToBig(description.mFormatID);
+ bcopy(&formatId, formatIdString, 4);
+ formatIdString[4] = '\0';
+ RTCLog(@"AudioStreamBasicDescription: {\n"
+ " mSampleRate: %.2f\n"
+ " formatIDString: %s\n"
+ " mFormatFlags: 0x%X\n"
+ " mBytesPerPacket: %u\n"
+ " mFramesPerPacket: %u\n"
+ " mBytesPerFrame: %u\n"
+ " mChannelsPerFrame: %u\n"
+ " mBitsPerChannel: %u\n"
+ " mReserved: %u\n}",
+ description.mSampleRate, formatIdString,
+ static_cast<unsigned int>(description.mFormatFlags),
+ static_cast<unsigned int>(description.mBytesPerPacket),
+ static_cast<unsigned int>(description.mFramesPerPacket),
+ static_cast<unsigned int>(description.mBytesPerFrame),
+ static_cast<unsigned int>(description.mChannelsPerFrame),
+ static_cast<unsigned int>(description.mBitsPerChannel),
+ static_cast<unsigned int>(description.mReserved));
+}
+#endif
+
+namespace webrtc {
+namespace ios_adm {
+
+// Calls to AudioUnitInitialize() can fail if called back-to-back on different
+// ADM instances. A fall-back solution is to allow multiple sequential calls
+// with a small delay between each. This constant sets the max number of allowed
+// initialization attempts.
+static const int kMaxNumberOfAudioUnitInitializeAttempts = 5;
+// A VP I/O unit's bus 1 connects to input hardware (microphone).
+static const AudioUnitElement kInputBus = 1;
+// A VP I/O unit's bus 0 connects to output hardware (speaker).
+static const AudioUnitElement kOutputBus = 0;
+
+// Returns the automatic gain control (AGC) state on the processed microphone
+// signal. Should be on by default for Voice Processing audio units.
+static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {
+ RTC_DCHECK(audio_unit);
+ UInt32 size = sizeof(*enabled);
+ OSStatus result = AudioUnitGetProperty(audio_unit,
+ kAUVoiceIOProperty_VoiceProcessingEnableAGC,
+ kAudioUnitScope_Global,
+ kInputBus,
+ enabled,
+ &size);
+ RTCLog(@"VPIO unit AGC: %u", static_cast<unsigned int>(*enabled));
+ return result;
+}
+
+VoiceProcessingAudioUnit::VoiceProcessingAudioUnit(bool bypass_voice_processing,
+ VoiceProcessingAudioUnitObserver* observer)
+ : bypass_voice_processing_(bypass_voice_processing),
+ observer_(observer),
+ vpio_unit_(nullptr),
+ state_(kInitRequired) {
+ RTC_DCHECK(observer);
+}
+
+VoiceProcessingAudioUnit::~VoiceProcessingAudioUnit() {
+ DisposeAudioUnit();
+}
+
+const UInt32 VoiceProcessingAudioUnit::kBytesPerSample = 2;
+
+bool VoiceProcessingAudioUnit::Init() {
+ RTC_DCHECK_EQ(state_, kInitRequired);
+
+ // Create an audio component description to identify the Voice Processing
+ // I/O audio unit.
+ AudioComponentDescription vpio_unit_description;
+ vpio_unit_description.componentType = kAudioUnitType_Output;
+ vpio_unit_description.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
+ vpio_unit_description.componentManufacturer = kAudioUnitManufacturer_Apple;
+ vpio_unit_description.componentFlags = 0;
+ vpio_unit_description.componentFlagsMask = 0;
+
+ // Obtain an audio unit instance given the description.
+ AudioComponent found_vpio_unit_ref =
+ AudioComponentFindNext(nullptr, &vpio_unit_description);
+
+ // Create a Voice Processing IO audio unit.
+ OSStatus result = noErr;
+ result = AudioComponentInstanceNew(found_vpio_unit_ref, &vpio_unit_);
+ if (result != noErr) {
+ vpio_unit_ = nullptr;
+ RTCLogError(@"AudioComponentInstanceNew failed. Error=%ld.", (long)result);
+ return false;
+ }
+
+ // Enable input on the input scope of the input element.
+ UInt32 enable_input = 1;
+ result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Input, kInputBus, &enable_input,
+ sizeof(enable_input));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to enable input on input scope of input element. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Enable output on the output scope of the output element.
+ UInt32 enable_output = 1;
+ result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Output, kOutputBus,
+ &enable_output, sizeof(enable_output));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to enable output on output scope of output element. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Specify the callback function that provides audio samples to the audio
+ // unit.
+ AURenderCallbackStruct render_callback;
+ render_callback.inputProc = OnGetPlayoutData;
+ render_callback.inputProcRefCon = this;
+ result = AudioUnitSetProperty(
+ vpio_unit_, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input,
+ kOutputBus, &render_callback, sizeof(render_callback));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to specify the render callback on the output bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+  // Disable AU buffer allocation for the recorder; we allocate our own.
+ // TODO(henrika): not sure that it actually saves resource to make this call.
+ UInt32 flag = 0;
+ result = AudioUnitSetProperty(
+ vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
+ kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to disable buffer allocation on the input bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+  // Specify the callback to be called by the I/O thread when input audio
+ // is available. The recorded samples can then be obtained by calling the
+ // AudioUnitRender() method.
+ AURenderCallbackStruct input_callback;
+ input_callback.inputProc = OnDeliverRecordedData;
+ input_callback.inputProcRefCon = this;
+ result = AudioUnitSetProperty(vpio_unit_,
+ kAudioOutputUnitProperty_SetInputCallback,
+ kAudioUnitScope_Global, kInputBus,
+ &input_callback, sizeof(input_callback));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to specify the input callback on the input bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ state_ = kUninitialized;
+ return true;
+}
+
+VoiceProcessingAudioUnit::State VoiceProcessingAudioUnit::GetState() const {
+ return state_;
+}
+
+bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
+ RTC_DCHECK_GE(state_, kUninitialized);
+ RTCLog(@"Initializing audio unit with sample rate: %f", sample_rate);
+
+ OSStatus result = noErr;
+ AudioStreamBasicDescription format = GetFormat(sample_rate);
+ UInt32 size = sizeof(format);
+#if !defined(NDEBUG)
+ LogStreamDescription(format);
+#endif
+
+ // Set the format on the output scope of the input element/bus.
+ result =
+ AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Output, kInputBus, &format, size);
+ if (result != noErr) {
+ RTCLogError(@"Failed to set format on output scope of input bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Set the format on the input scope of the output element/bus.
+ result =
+ AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Input, kOutputBus, &format, size);
+ if (result != noErr) {
+ RTCLogError(@"Failed to set format on input scope of output bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Initialize the Voice Processing I/O unit instance.
+ // Calls to AudioUnitInitialize() can fail if called back-to-back on
+ // different ADM instances. The error message in this case is -66635 which is
+ // undocumented. Tests have shown that calling AudioUnitInitialize a second
+ // time, after a short sleep, avoids this issue.
+ // See webrtc:5166 for details.
+  int failed_initialize_attempts = 0;
+ result = AudioUnitInitialize(vpio_unit_);
+ while (result != noErr) {
+ RTCLogError(@"Failed to initialize the Voice Processing I/O unit. "
+ "Error=%ld.",
+ (long)result);
+    ++failed_initialize_attempts;
+    if (failed_initialize_attempts == kMaxNumberOfAudioUnitInitializeAttempts) {
+ // Max number of initialization attempts exceeded, hence abort.
+ RTCLogError(@"Too many initialization attempts.");
+ return false;
+ }
+ RTCLog(@"Pause 100ms and try audio unit initialization again...");
+ [NSThread sleepForTimeInterval:0.1f];
+ result = AudioUnitInitialize(vpio_unit_);
+ }
+ if (result == noErr) {
+ RTCLog(@"Voice Processing I/O unit is now initialized.");
+ }
+
+ if (bypass_voice_processing_) {
+ // Attempt to disable builtin voice processing.
+ UInt32 toggle = 1;
+ result = AudioUnitSetProperty(vpio_unit_,
+ kAUVoiceIOProperty_BypassVoiceProcessing,
+ kAudioUnitScope_Global,
+ kInputBus,
+ &toggle,
+ sizeof(toggle));
+ if (result == noErr) {
+ RTCLog(@"Successfully bypassed voice processing.");
+ } else {
+ RTCLogError(@"Failed to bypass voice processing. Error=%ld.", (long)result);
+ }
+ state_ = kInitialized;
+ return true;
+ }
+
+ // AGC should be enabled by default for Voice Processing I/O units but it is
+ // checked below and enabled explicitly if needed. This scheme is used
+ // to be absolutely sure that the AGC is enabled since we have seen cases
+ // where only zeros are recorded and a disabled AGC could be one of the
+ // reasons why it happens.
+ int agc_was_enabled_by_default = 0;
+ UInt32 agc_is_enabled = 0;
+ result = GetAGCState(vpio_unit_, &agc_is_enabled);
+ if (result != noErr) {
+ RTCLogError(@"Failed to get AGC state (1st attempt). "
+ "Error=%ld.",
+ (long)result);
+ // Example of error code: kAudioUnitErr_NoConnection (-10876).
+ // All error codes related to audio units are negative and are therefore
+    // converted into a positive value to match the UMA APIs.
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Audio.GetAGCStateErrorCode1", (-1) * result);
+ } else if (agc_is_enabled) {
+ // Remember that the AGC was enabled by default. Will be used in UMA.
+ agc_was_enabled_by_default = 1;
+ } else {
+ // AGC was initially disabled => try to enable it explicitly.
+ UInt32 enable_agc = 1;
+ result =
+ AudioUnitSetProperty(vpio_unit_,
+ kAUVoiceIOProperty_VoiceProcessingEnableAGC,
+ kAudioUnitScope_Global, kInputBus, &enable_agc,
+ sizeof(enable_agc));
+ if (result != noErr) {
+ RTCLogError(@"Failed to enable the built-in AGC. "
+ "Error=%ld.",
+ (long)result);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Audio.SetAGCStateErrorCode", (-1) * result);
+ }
+ result = GetAGCState(vpio_unit_, &agc_is_enabled);
+ if (result != noErr) {
+ RTCLogError(@"Failed to get AGC state (2nd attempt). "
+ "Error=%ld.",
+ (long)result);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Audio.GetAGCStateErrorCode2", (-1) * result);
+ }
+ }
+
+ // Track if the built-in AGC was enabled by default (as it should) or not.
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.BuiltInAGCWasEnabledByDefault",
+ agc_was_enabled_by_default);
+ RTCLog(@"WebRTC.Audio.BuiltInAGCWasEnabledByDefault: %d",
+ agc_was_enabled_by_default);
+  // As a final step, add a UMA histogram for tracking the AGC state.
+ // At this stage, the AGC should be enabled, and if it is not, more work is
+ // needed to find out the root cause.
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.BuiltInAGCIsEnabled", agc_is_enabled);
+ RTCLog(@"WebRTC.Audio.BuiltInAGCIsEnabled: %u",
+ static_cast<unsigned int>(agc_is_enabled));
+
+ state_ = kInitialized;
+ return true;
+}
+
+OSStatus VoiceProcessingAudioUnit::Start() {
+ RTC_DCHECK_GE(state_, kUninitialized);
+ RTCLog(@"Starting audio unit.");
+
+ OSStatus result = AudioOutputUnitStart(vpio_unit_);
+ if (result != noErr) {
+ RTCLogError(@"Failed to start audio unit. Error=%ld", (long)result);
+ return result;
+ } else {
+ RTCLog(@"Started audio unit");
+ }
+ state_ = kStarted;
+ return noErr;
+}
+
+bool VoiceProcessingAudioUnit::Stop() {
+ RTC_DCHECK_GE(state_, kUninitialized);
+ RTCLog(@"Stopping audio unit.");
+
+ OSStatus result = AudioOutputUnitStop(vpio_unit_);
+ if (result != noErr) {
+ RTCLogError(@"Failed to stop audio unit. Error=%ld", (long)result);
+ return false;
+ } else {
+ RTCLog(@"Stopped audio unit");
+ }
+
+ state_ = kInitialized;
+ return true;
+}
+
+bool VoiceProcessingAudioUnit::Uninitialize() {
+ RTC_DCHECK_GE(state_, kUninitialized);
+ RTCLog(@"Unintializing audio unit.");
+
+ OSStatus result = AudioUnitUninitialize(vpio_unit_);
+ if (result != noErr) {
+ RTCLogError(@"Failed to uninitialize audio unit. Error=%ld", (long)result);
+ return false;
+ } else {
+ RTCLog(@"Uninitialized audio unit.");
+ }
+
+ state_ = kUninitialized;
+ return true;
+}
+
+OSStatus VoiceProcessingAudioUnit::Render(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 output_bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ RTC_DCHECK(vpio_unit_) << "Init() not called.";
+
+ OSStatus result = AudioUnitRender(vpio_unit_, flags, time_stamp,
+ output_bus_number, num_frames, io_data);
+ if (result != noErr) {
+ RTCLogError(@"Failed to render audio unit. Error=%ld", (long)result);
+ }
+ return result;
+}
+
+OSStatus VoiceProcessingAudioUnit::OnGetPlayoutData(
+ void* in_ref_con,
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ VoiceProcessingAudioUnit* audio_unit =
+ static_cast<VoiceProcessingAudioUnit*>(in_ref_con);
+ return audio_unit->NotifyGetPlayoutData(flags, time_stamp, bus_number,
+ num_frames, io_data);
+}
+
+OSStatus VoiceProcessingAudioUnit::OnDeliverRecordedData(
+ void* in_ref_con,
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ VoiceProcessingAudioUnit* audio_unit =
+ static_cast<VoiceProcessingAudioUnit*>(in_ref_con);
+ return audio_unit->NotifyDeliverRecordedData(flags, time_stamp, bus_number,
+ num_frames, io_data);
+}
+
+OSStatus VoiceProcessingAudioUnit::NotifyGetPlayoutData(
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ return observer_->OnGetPlayoutData(flags, time_stamp, bus_number, num_frames,
+ io_data);
+}
+
+OSStatus VoiceProcessingAudioUnit::NotifyDeliverRecordedData(
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ return observer_->OnDeliverRecordedData(flags, time_stamp, bus_number,
+ num_frames, io_data);
+}
+
+AudioStreamBasicDescription VoiceProcessingAudioUnit::GetFormat(
+ Float64 sample_rate) const {
+ // Set the application formats for input and output:
+ // - use same format in both directions
+ // - avoid resampling in the I/O unit by using the hardware sample rate
+ // - linear PCM => noncompressed audio data format with one frame per packet
+ // - no need to specify interleaving since only mono is supported
+ AudioStreamBasicDescription format;
+ RTC_DCHECK_EQ(1, kRTCAudioSessionPreferredNumberOfChannels);
+ format.mSampleRate = sample_rate;
+ format.mFormatID = kAudioFormatLinearPCM;
+ format.mFormatFlags =
+ kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
+ format.mBytesPerPacket = kBytesPerSample;
+ format.mFramesPerPacket = 1; // uncompressed.
+ format.mBytesPerFrame = kBytesPerSample;
+ format.mChannelsPerFrame = kRTCAudioSessionPreferredNumberOfChannels;
+ format.mBitsPerChannel = 8 * kBytesPerSample;
+ return format;
+}
+
+void VoiceProcessingAudioUnit::DisposeAudioUnit() {
+ if (vpio_unit_) {
+ switch (state_) {
+ case kStarted:
+ Stop();
+ [[fallthrough]];
+ case kInitialized:
+ Uninitialize();
+ break;
+ case kUninitialized:
+ case kInitRequired:
+ break;
+ }
+
+ RTCLog(@"Disposing audio unit.");
+ OSStatus result = AudioComponentInstanceDispose(vpio_unit_);
+ if (result != noErr) {
+ RTCLogError(@"AudioComponentInstanceDispose failed. Error=%ld.",
+ (long)result);
+ }
+ vpio_unit_ = nullptr;
+ }
+}
+
+} // namespace ios_adm
+} // namespace webrtc
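As a worked example of GetFormat(): with a 48 kHz hardware rate and mono 16-bit samples, kBytesPerSample is 2, so mBytesPerFrame and mBytesPerPacket are both 2 and mBitsPerChannel is 16; one 10 ms WebRTC callback therefore moves 480 frames, i.e. 960 bytes. In code form (illustrative only):

  // Buffer sizing implied by GetFormat() at 48 kHz mono, 16-bit.
  const Float64 sample_rate = 48000.0;
  const UInt32 bytes_per_frame = 2;  // VoiceProcessingAudioUnit::kBytesPerSample
  const UInt32 frames_per_10ms = static_cast<UInt32>(sample_rate / 100);  // 480
  const UInt32 bytes_per_10ms = frames_per_10ms * bytes_per_frame;        // 960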
diff --git a/third_party/libwebrtc/sdk/objc/native/src/network_monitor_observer.h b/third_party/libwebrtc/sdk/objc/native/src/network_monitor_observer.h
new file mode 100644
index 0000000000..7c411a1db1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/network_monitor_observer.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_NETWORK_MONITOR_OBSERVER_H_
+#define SDK_OBJC_NATIVE_SRC_NETWORK_MONITOR_OBSERVER_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/network_constants.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// Observer interface for listening to NWPathMonitor updates.
+class NetworkMonitorObserver {
+ public:
+  // Called when a path update occurs, on the network monitor dispatch queue.
+ //
+ // `adapter_type_by_name` is a map from interface name (i.e. "pdp_ip0") to
+ // adapter type, for all available interfaces on the current path. If an
+ // interface name isn't present it can be assumed to be unavailable.
+ virtual void OnPathUpdate(
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>
+ adapter_type_by_name) = 0;
+
+ protected:
+ virtual ~NetworkMonitorObserver() {}
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_NETWORK_MONITOR_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.h b/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.h
new file mode 100644
index 0000000000..9c1ff17876
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_FRAME_BUFFER_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_FRAME_BUFFER_H_
+
+#import <CoreVideo/CoreVideo.h>
+
+#import "base/RTCMacros.h"
+
+#include "common_video/include/video_frame_buffer.h"
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer);
+
+namespace webrtc {
+
+class ObjCFrameBuffer : public VideoFrameBuffer {
+ public:
+ explicit ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>);
+ ~ObjCFrameBuffer() override;
+
+ Type type() const override;
+
+ int width() const override;
+ int height() const override;
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> wrapped_frame_buffer() const;
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer_;
+ int width_;
+ int height_;
+};
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_FRAME_BUFFER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.mm
new file mode 100644
index 0000000000..566733d692
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.mm
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+#include "api/make_ref_counted.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h"
+
+namespace webrtc {
+
+namespace {
+
+/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTC_OBJC_TYPE(RTCI420Buffer) */
+class ObjCI420FrameBuffer : public I420BufferInterface {
+ public:
+ explicit ObjCI420FrameBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer)
+ : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
+ ~ObjCI420FrameBuffer() override {}
+
+ int width() const override { return width_; }
+
+ int height() const override { return height_; }
+
+ const uint8_t* DataY() const override { return frame_buffer_.dataY; }
+
+ const uint8_t* DataU() const override { return frame_buffer_.dataU; }
+
+ const uint8_t* DataV() const override { return frame_buffer_.dataV; }
+
+ int StrideY() const override { return frame_buffer_.strideY; }
+
+ int StrideU() const override { return frame_buffer_.strideU; }
+
+ int StrideV() const override { return frame_buffer_.strideV; }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer_;
+ int width_;
+ int height_;
+};
+
+} // namespace
+
+ObjCFrameBuffer::ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer)
+ : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
+
+ObjCFrameBuffer::~ObjCFrameBuffer() {}
+
+VideoFrameBuffer::Type ObjCFrameBuffer::type() const {
+ return Type::kNative;
+}
+
+int ObjCFrameBuffer::width() const {
+ return width_;
+}
+
+int ObjCFrameBuffer::height() const {
+ return height_;
+}
+
+rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
+ return rtc::make_ref_counted<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
+}
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ObjCFrameBuffer::wrapped_frame_buffer() const {
+ return frame_buffer_;
+}
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
+ if (buffer->type() == VideoFrameBuffer::Type::kNative) {
+ return static_cast<ObjCFrameBuffer*>(buffer.get())->wrapped_frame_buffer();
+ } else {
+ return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:buffer->ToI420()];
+ }
+}
+
+} // namespace webrtc
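ToObjCVideoFrameBuffer() is the inverse of the wrapping constructor: a buffer whose type() is kNative is assumed to be an ObjCFrameBuffer and is unwrapped directly, while any other buffer is converted through ToI420(). A minimal sketch, assuming some incoming `objc_buffer` conforming to RTCVideoFrameBuffer:

  // Illustrative only: a native wrap followed by an exact unwrap.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> wrapped =
      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(objc_buffer);
  id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> recovered =
      webrtc::ToObjCVideoFrameBuffer(wrapped);
  RTC_DCHECK(recovered == objc_buffer);  // kNative path returns the original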
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.h b/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.h
new file mode 100644
index 0000000000..709e9dfbe5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_
+
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/field_trials_view.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "rtc_base/network_monitor.h"
+#include "rtc_base/network_monitor_factory.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/objc/components/network/RTCNetworkMonitor+Private.h"
+#include "sdk/objc/native/src/network_monitor_observer.h"
+
+namespace webrtc {
+
+class ObjCNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+ ObjCNetworkMonitorFactory() = default;
+ ~ObjCNetworkMonitorFactory() override = default;
+
+ rtc::NetworkMonitorInterface* CreateNetworkMonitor(
+ const FieldTrialsView& field_trials) override;
+};
+
+class ObjCNetworkMonitor : public rtc::NetworkMonitorInterface,
+ public NetworkMonitorObserver {
+ public:
+ ObjCNetworkMonitor();
+ ~ObjCNetworkMonitor() override;
+
+ void Start() override;
+ void Stop() override;
+
+ InterfaceInfo GetInterfaceInfo(absl::string_view interface_name) override;
+
+ // NetworkMonitorObserver override.
+ // Fans out updates to observers on the correct thread.
+ void OnPathUpdate(
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>
+ adapter_type_by_name) override;
+
+ private:
+ rtc::Thread* thread_ = nullptr;
+ bool started_ = false;
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>
+ adapter_type_by_name_ RTC_GUARDED_BY(thread_);
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag_;
+ RTCNetworkMonitor* network_monitor_ = nil;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.mm
new file mode 100644
index 0000000000..535548c64c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.mm
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_network_monitor.h"
+#include "absl/strings/string_view.h"
+
+#include <algorithm>
+
+#include "rtc_base/logging.h"
+#include "rtc_base/string_utils.h"
+
+namespace webrtc {
+
+rtc::NetworkMonitorInterface* ObjCNetworkMonitorFactory::CreateNetworkMonitor(
+ const FieldTrialsView& field_trials) {
+ return new ObjCNetworkMonitor();
+}
+
+ObjCNetworkMonitor::ObjCNetworkMonitor() {
+ safety_flag_ = PendingTaskSafetyFlag::Create();
+}
+
+ObjCNetworkMonitor::~ObjCNetworkMonitor() {
+ [network_monitor_ stop];
+ network_monitor_ = nil;
+}
+
+void ObjCNetworkMonitor::Start() {
+ if (started_) {
+ return;
+ }
+ thread_ = rtc::Thread::Current();
+ RTC_DCHECK_RUN_ON(thread_);
+ safety_flag_->SetAlive();
+ network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this];
+ if (network_monitor_ == nil) {
+ RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?";
+ }
+ started_ = true;
+}
+
+void ObjCNetworkMonitor::Stop() {
+ RTC_DCHECK_RUN_ON(thread_);
+ if (!started_) {
+ return;
+ }
+ safety_flag_->SetNotAlive();
+ [network_monitor_ stop];
+ network_monitor_ = nil;
+ started_ = false;
+}
+
+rtc::NetworkMonitorInterface::InterfaceInfo ObjCNetworkMonitor::GetInterfaceInfo(
+ absl::string_view interface_name) {
+ RTC_DCHECK_RUN_ON(thread_);
+ if (adapter_type_by_name_.empty()) {
+ // If we have no path update, assume everything's available, because it's
+ // preferable for WebRTC to try all interfaces rather than none at all.
+ return {
+ .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN,
+ .available = true,
+ };
+ }
+ auto iter = adapter_type_by_name_.find(interface_name);
+ if (iter == adapter_type_by_name_.end()) {
+ return {
+ .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN,
+ .available = false,
+ };
+ }
+
+ return {
+ .adapter_type = iter->second,
+ .available = true,
+ };
+}
+
+void ObjCNetworkMonitor::OnPathUpdate(
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp> adapter_type_by_name) {
+ RTC_DCHECK(network_monitor_ != nil);
+ thread_->PostTask(SafeTask(safety_flag_, [this, adapter_type_by_name] {
+ RTC_DCHECK_RUN_ON(thread_);
+ adapter_type_by_name_ = adapter_type_by_name;
+ InvokeNetworksChangedCallback();
+ }));
+}
+
+} // namespace webrtc
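The PostTask in OnPathUpdate() above is the standard way to marshal a callback from an arbitrary dispatch queue onto an rtc::Thread without racing teardown: the SafeTask body runs only if the safety flag is still alive when the task is dequeued, and Stop() flips the flag first. A reduced sketch of the same pattern, with a hypothetical `network_thread`:

  // Illustrative only: the safety-flag pattern used by OnPathUpdate().
  auto flag = webrtc::PendingTaskSafetyFlag::Create();
  network_thread->PostTask(webrtc::SafeTask(flag, [] {
    // Runs on network_thread only while `flag` is alive.
  }));
  flag->SetNotAlive();  // queued-but-unrun SafeTask bodies are now skipped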
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.h
new file mode 100644
index 0000000000..30ad8c2a4b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
+
+#import "base/RTCMacros.h"
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "media/base/codec.h"
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory);
+
+namespace webrtc {
+
+class ObjCVideoDecoderFactory : public VideoDecoderFactory {
+ public:
+ explicit ObjCVideoDecoderFactory(id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>);
+ ~ObjCVideoDecoderFactory() override;
+
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> wrapped_decoder_factory() const;
+
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override;
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.mm
new file mode 100644
index 0000000000..da3b302275
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.mm
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_decoder_factory.h"
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoDecoder.h"
+#import "base/RTCVideoDecoderFactory.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_codec/RTCCodecSpecificInfoH264.h"
+#import "sdk/objc/api/peerconnection/RTCEncodedImage+Private.h"
+#import "sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h"
+#import "sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h"
+#import "sdk/objc/helpers/NSString+StdString.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+namespace webrtc {
+
+namespace {
+class ObjCVideoDecoder : public VideoDecoder {
+ public:
+ ObjCVideoDecoder(id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder)
+ : decoder_(decoder), implementation_name_([decoder implementationName].stdString) {}
+
+ bool Configure(const Settings &settings) override {
+ return
+ [decoder_ startDecodeWithNumberOfCores:settings.number_of_cores()] == WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Decode(const EncodedImage &input_image,
+ bool missing_frames,
+ int64_t render_time_ms = -1) override {
+ RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
+ [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:input_image];
+
+ return [decoder_ decode:encodedImage
+ missingFrames:missing_frames
+ codecSpecificInfo:nil
+ renderTimeMs:render_time_ms];
+ }
+
+ int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
+ [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
+ const auto buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
+ VideoFrame videoFrame =
+ VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_timestamp_rtp((uint32_t)(frame.timeStampNs / rtc::kNumNanosecsPerMicrosec))
+ .set_timestamp_ms(0)
+ .set_rotation((VideoRotation)frame.rotation)
+ .build();
+ videoFrame.set_timestamp(frame.timeStamp);
+
+ callback->Decoded(videoFrame);
+ }];
+
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Release() override { return [decoder_ releaseDecoder]; }
+
+ const char *ImplementationName() const override { return implementation_name_.c_str(); }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder_;
+ const std::string implementation_name_;
+};
+} // namespace
+
+ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory)
+ : decoder_factory_(decoder_factory) {}
+
+ObjCVideoDecoderFactory::~ObjCVideoDecoderFactory() {}
+
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> ObjCVideoDecoderFactory::wrapped_decoder_factory() const {
+ return decoder_factory_;
+}
+
+std::unique_ptr<VideoDecoder> ObjCVideoDecoderFactory::CreateVideoDecoder(
+ const SdpVideoFormat &format) {
+ NSString *codecName = [NSString stringWithUTF8String:format.name.c_str()];
+ for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * codecInfo in decoder_factory_.supportedCodecs) {
+ if ([codecName isEqualToString:codecInfo.name]) {
+ id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder = [decoder_factory_ createDecoder:codecInfo];
+
+ if ([decoder isKindOfClass:[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) class]]) {
+ return [(RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) *)decoder releaseWrappedDecoder];
+ } else {
+ return std::unique_ptr<ObjCVideoDecoder>(new ObjCVideoDecoder(decoder));
+ }
+ }
+ }
+
+ return nullptr;
+}
+
+std::vector<SdpVideoFormat> ObjCVideoDecoderFactory::GetSupportedFormats() const {
+ std::vector<SdpVideoFormat> supported_formats;
+ for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in decoder_factory_.supportedCodecs) {
+ SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
+ supported_formats.push_back(format);
+ }
+
+ return supported_formats;
+}
+
+} // namespace webrtc
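As a usage sketch: applications typically construct this adapter around an Objective-C factory and hand it to native code. Everything below is illustrative (the function name is invented); it relies only on the constructor and GetSupportedFormats() shown above.

  // Illustrative only: wrap an app-supplied Objective-C decoder factory.
  std::unique_ptr<webrtc::VideoDecoderFactory> WrapDecoderFactory(
      id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> objc_factory) {
    auto native_factory =
        std::make_unique<webrtc::ObjCVideoDecoderFactory>(objc_factory);
    for (const webrtc::SdpVideoFormat& format :
         native_factory->GetSupportedFormats()) {
      RTC_LOG(LS_INFO) << "Supported decoder: " << format.name;
    }
    return native_factory;
  }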
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.h
new file mode 100644
index 0000000000..38db5e6ae7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_ENCODER_FACTORY_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_ENCODER_FACTORY_H_
+
+#import <Foundation/Foundation.h>
+
+#import "base/RTCMacros.h"
+
+#include "api/video_codecs/video_encoder_factory.h"
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory);
+
+namespace webrtc {
+
+class ObjCVideoEncoderFactory : public VideoEncoderFactory {
+ public:
+ explicit ObjCVideoEncoderFactory(id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>);
+ ~ObjCVideoEncoderFactory() override;
+
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> wrapped_encoder_factory() const;
+
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ std::vector<SdpVideoFormat> GetImplementations() const override;
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override;
+ std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_ENCODER_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.mm
new file mode 100644
index 0000000000..d4ea79cc88
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.mm
@@ -0,0 +1,209 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_encoder_factory.h"
+
+#include <string>
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoEncoder.h"
+#import "base/RTCVideoEncoderFactory.h"
+#import "components/video_codec/RTCCodecSpecificInfoH264+Private.h"
+#import "sdk/objc/api/peerconnection/RTCEncodedImage+Private.h"
+#import "sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h"
+#import "sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h"
+#import "sdk/objc/api/video_codec/RTCVideoCodecConstants.h"
+#import "sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h"
+#import "sdk/objc/helpers/NSString+StdString.h"
+
+#include "api/video/video_frame.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/logging.h"
+#include "sdk/objc/native/src/objc_video_frame.h"
+
+namespace webrtc {
+
+namespace {
+
+class ObjCVideoEncoder : public VideoEncoder {
+ public:
+ ObjCVideoEncoder(id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder)
+ : encoder_(encoder), implementation_name_([encoder implementationName].stdString) {}
+
+ int32_t InitEncode(const VideoCodec *codec_settings, const Settings &encoder_settings) override {
+ RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings =
+ [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] initWithNativeVideoCodec:codec_settings];
+ return [encoder_ startEncodeWithSettings:settings
+ numberOfCores:encoder_settings.number_of_cores];
+ }
+
+ int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override {
+ if (callback) {
+ [encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame,
+ id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> _Nonnull info) {
+ EncodedImage encodedImage = [frame nativeEncodedImage];
+
+      // Handle types that can be converted into one of CodecSpecificInfo's hard-coded cases.
+ CodecSpecificInfo codecSpecificInfo;
+ if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) {
+ codecSpecificInfo =
+ [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo];
+ }
+
+ EncodedImageCallback::Result res = callback->OnEncodedImage(encodedImage, &codecSpecificInfo);
+ return res.error == EncodedImageCallback::Result::OK;
+ }];
+ } else {
+ [encoder_ setCallback:nil];
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Release() override { return [encoder_ releaseEncoder]; }
+
+ int32_t Encode(const VideoFrame &frame,
+ const std::vector<VideoFrameType> *frame_types) override {
+ NSMutableArray<NSNumber *> *rtcFrameTypes = [NSMutableArray array];
+ for (size_t i = 0; i < frame_types->size(); ++i) {
+ [rtcFrameTypes addObject:@(RTCFrameType(frame_types->at(i)))];
+ }
+
+ return [encoder_ encode:ToObjCVideoFrame(frame)
+ codecSpecificInfo:nil
+ frameTypes:rtcFrameTypes];
+ }
+
+ void SetRates(const RateControlParameters &parameters) override {
+ const uint32_t bitrate = parameters.bitrate.get_sum_kbps();
+ const uint32_t framerate = static_cast<uint32_t>(parameters.framerate_fps + 0.5);
+ [encoder_ setBitrate:bitrate framerate:framerate];
+ }
+
+ VideoEncoder::EncoderInfo GetEncoderInfo() const override {
+ EncoderInfo info;
+ info.implementation_name = implementation_name_;
+
+ RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = [encoder_ scalingSettings];
+ info.scaling_settings = qp_thresholds ? ScalingSettings(qp_thresholds.low, qp_thresholds.high) :
+ ScalingSettings::kOff;
+
+    info.requested_resolution_alignment =
+        encoder_.resolutionAlignment > 0 ? encoder_.resolutionAlignment : 1;
+ info.apply_alignment_to_all_simulcast_layers = encoder_.applyAlignmentToAllSimulcastLayers;
+ info.supports_native_handle = encoder_.supportsNativeHandle;
+ info.is_hardware_accelerated = true;
+ return info;
+ }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder_;
+ const std::string implementation_name_;
+};
+
+class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInterface {
+ public:
+ ObjcVideoEncoderSelector(id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector) {
+ selector_ = selector;
+ }
+ void OnCurrentEncoder(const SdpVideoFormat &format) override {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
+ [selector_ registerCurrentEncoderInfo:info];
+ }
+ absl::optional<SdpVideoFormat> OnEncoderBroken() override {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBrokenEncoder];
+ if (info) {
+ return [info nativeSdpVideoFormat];
+ }
+ return absl::nullopt;
+ }
+ absl::optional<SdpVideoFormat> OnAvailableBitrate(const DataRate &rate) override {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBitrate:rate.kbps<NSInteger>()];
+ if (info) {
+ return [info nativeSdpVideoFormat];
+ }
+ return absl::nullopt;
+ }
+
+ absl::optional<SdpVideoFormat> OnResolutionChange(const RenderResolution &resolution) override {
+ if ([selector_ respondsToSelector:@selector(encoderForResolutionChangeBySize:)]) {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_
+ encoderForResolutionChangeBySize:CGSizeMake(resolution.Width(), resolution.Height())];
+ if (info) {
+ return [info nativeSdpVideoFormat];
+ }
+ }
+ return absl::nullopt;
+ }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector_;
+};
+
+} // namespace
+
+ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory)
+ : encoder_factory_(encoder_factory) {}
+
+ObjCVideoEncoderFactory::~ObjCVideoEncoderFactory() {}
+
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> ObjCVideoEncoderFactory::wrapped_encoder_factory() const {
+ return encoder_factory_;
+}
+
+std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetSupportedFormats() const {
+ std::vector<SdpVideoFormat> supported_formats;
+ for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ supportedCodecs]) {
+ SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
+ supported_formats.push_back(format);
+ }
+
+ return supported_formats;
+}
+
+std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetImplementations() const {
+ if ([encoder_factory_ respondsToSelector:@selector(implementations)]) {
+ std::vector<SdpVideoFormat> supported_formats;
+ for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ implementations]) {
+ SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
+ supported_formats.push_back(format);
+ }
+ return supported_formats;
+ }
+ return GetSupportedFormats();
+}
+
+std::unique_ptr<VideoEncoder> ObjCVideoEncoderFactory::CreateVideoEncoder(
+ const SdpVideoFormat &format) {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
+ id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder = [encoder_factory_ createEncoder:info];
+ if ([encoder isKindOfClass:[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) class]]) {
+ return [(RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) *)encoder releaseWrappedEncoder];
+ } else {
+ return std::unique_ptr<ObjCVideoEncoder>(new ObjCVideoEncoder(encoder));
+ }
+}
+
+std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>
+ ObjCVideoEncoderFactory::GetEncoderSelector() const {
+ if ([encoder_factory_ respondsToSelector:@selector(encoderSelector)]) {
+ id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector = [encoder_factory_ encoderSelector];
+ if (selector) {
+ return absl::make_unique<ObjcVideoEncoderSelector>(selector);
+ }
+ }
+ return nullptr;
+}
+
+} // namespace webrtc
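
For orientation, here is a minimal sketch of how this wrapper is typically instantiated on the native side; CreateAppEncoderFactory() is a hypothetical application hook standing in for whatever id<RTCVideoEncoderFactory> the app supplies (e.g. an RTCDefaultVideoEncoderFactory):

#include <memory>

#include "sdk/objc/native/src/objc_video_encoder_factory.h"

// Hypothetical app-side hook; any object conforming to RTCVideoEncoderFactory works.
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateAppEncoderFactory();

std::unique_ptr<webrtc::VideoEncoderFactory> WrapAppEncoderFactory() {
  // The wrapper retains the Objective-C factory and forwards every
  // VideoEncoderFactory call (formats, encoder creation, selector) to it.
  return std::make_unique<webrtc::ObjCVideoEncoderFactory>(CreateAppEncoderFactory());
}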
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.h
new file mode 100644
index 0000000000..c2931cb2f8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_FRAME_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_FRAME_H_
+
+#import "base/RTCVideoFrame.h"
+
+#include "api/video/video_frame.h"
+
+namespace webrtc {
+
+RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame& frame);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_FRAME_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.mm
new file mode 100644
index 0000000000..2e8ce6153e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.mm
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_frame.h"
+
+#include "rtc_base/time_utils.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+namespace webrtc {
+
+RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) {
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
+ initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer())
+ rotation:RTCVideoRotation(frame.rotation())
+ timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
+ videoFrame.timeStamp = frame.timestamp();
+
+ return videoFrame;
+}
+
+} // namespace webrtc
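
Worth noting in ToObjCVideoFrame(): native VideoFrame timestamps are in microseconds while RTCVideoFrame stores nanoseconds, so the conversion multiplies by rtc::kNumNanosecsPerMicrosec (1000). A quick illustrative check, given rtc_base/time_utils.h, that this is the exact inverse of the division performed in ObjCVideoTrackSource::OnCapturedFrame() later in this patch:

// Illustrative only; the value is arbitrary.
constexpr int64_t timestamp_us = 1234567;  // native webrtc::VideoFrame unit
constexpr int64_t timestamp_ns = timestamp_us * rtc::kNumNanosecsPerMicrosec;
static_assert(timestamp_ns / rtc::kNumNanosecsPerMicrosec == timestamp_us,
              "ns->us division inverts the us->ns multiplication");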
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.h
new file mode 100644
index 0000000000..f9c35eae96
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_RENDERER_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_RENDERER_H_
+
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+#import "base/RTCMacros.h"
+
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
+
+namespace webrtc {
+
+class ObjCVideoRenderer : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer);
+ void OnFrame(const VideoFrame& nativeVideoFrame) override;
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer_;
+ CGSize size_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_RENDERER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.mm
new file mode 100644
index 0000000000..4a9b647ec3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.mm
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_renderer.h"
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoRenderer.h"
+
+#include "sdk/objc/native/src/objc_video_frame.h"
+
+namespace webrtc {
+
+ObjCVideoRenderer::ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer)
+ : renderer_(renderer), size_(CGSizeZero) {}
+
+void ObjCVideoRenderer::OnFrame(const VideoFrame& nativeVideoFrame) {
+ RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = ToObjCVideoFrame(nativeVideoFrame);
+
+ CGSize current_size = (videoFrame.rotation % 180 == 0) ?
+ CGSizeMake(videoFrame.width, videoFrame.height) :
+ CGSizeMake(videoFrame.height, videoFrame.width);
+
+ if (!CGSizeEqualToSize(size_, current_size)) {
+ size_ = current_size;
+ [renderer_ setSize:size_];
+ }
+ [renderer_ renderFrame:videoFrame];
+}
+
+} // namespace webrtc
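
The (rotation % 180) check above keeps the sink's notion of size in display orientation; a sketch of the mapping for a 1280x720 buffer:

// Size reported via -setSize: for a 1280x720 buffer (illustrative):
//   RTCVideoRotation_0  / _180  ->  CGSizeMake(1280, 720)
//   RTCVideoRotation_90 / _270  ->  CGSizeMake(720, 1280)
// so a portrait-rotated frame is announced with transposed dimensions before
// renderFrame: is called.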
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.h
new file mode 100644
index 0000000000..19a3d6db43
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_CLASSES_VIDEO_OBJC_VIDEO_TRACK_SOURCE_H_
+#define SDK_OBJC_CLASSES_VIDEO_OBJC_VIDEO_TRACK_SOURCE_H_
+
+#import "base/RTCVideoCapturer.h"
+
+#include "base/RTCMacros.h"
+#include "media/base/adapted_video_track_source.h"
+#include "rtc_base/timestamp_aligner.h"
+
+RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame));
+
+@interface RTCObjCVideoSourceAdapter : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
+@end
+
+namespace webrtc {
+
+class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
+ public:
+ ObjCVideoTrackSource();
+ explicit ObjCVideoTrackSource(bool is_screencast);
+ explicit ObjCVideoTrackSource(RTCObjCVideoSourceAdapter* adapter);
+
+ bool is_screencast() const override;
+
+ // Indicates that the encoder should denoise video before encoding it.
+ // If it is not set, the default configuration is used which is different
+ // depending on video codec.
+ absl::optional<bool> needs_denoising() const override;
+
+ SourceState state() const override;
+
+ bool remote() const override;
+
+ void OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame);
+
+ // Called by RTCVideoSource.
+ void OnOutputFormatRequest(int width, int height, int fps);
+
+ private:
+ rtc::VideoBroadcaster broadcaster_;
+ rtc::TimestampAligner timestamp_aligner_;
+
+ RTCObjCVideoSourceAdapter* adapter_;
+ bool is_screencast_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_CLASSES_VIDEO_OBJC_VIDEO_TRACK_SOURCE_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.mm
new file mode 100644
index 0000000000..7937e90505
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.mm
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_track_source.h"
+
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#include "api/video/i420_buffer.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+@interface RTCObjCVideoSourceAdapter ()
+@property(nonatomic) webrtc::ObjCVideoTrackSource *objCVideoTrackSource;
+@end
+
+@implementation RTCObjCVideoSourceAdapter
+
+@synthesize objCVideoTrackSource = _objCVideoTrackSource;
+
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+ didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ _objCVideoTrackSource->OnCapturedFrame(frame);
+}
+
+@end
+
+namespace webrtc {
+
+ObjCVideoTrackSource::ObjCVideoTrackSource() : ObjCVideoTrackSource(false) {}
+
+ObjCVideoTrackSource::ObjCVideoTrackSource(bool is_screencast)
+ : AdaptedVideoTrackSource(/* required resolution alignment */ 2),
+ is_screencast_(is_screencast) {}
+
+ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) {
+ adapter_.objCVideoTrackSource = this;
+}
+
+bool ObjCVideoTrackSource::is_screencast() const {
+ return is_screencast_;
+}
+
+absl::optional<bool> ObjCVideoTrackSource::needs_denoising() const {
+ return false;
+}
+
+MediaSourceInterface::SourceState ObjCVideoTrackSource::state() const {
+ return SourceState::kLive;
+}
+
+bool ObjCVideoTrackSource::remote() const {
+ return false;
+}
+
+void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) {
+ cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
+ video_adapter()->OnOutputFormatRequest(format);
+}
+
+void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
+ const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
+ const int64_t translated_timestamp_us =
+ timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros());
+
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+ if (!AdaptFrame(frame.width,
+ frame.height,
+ timestamp_us,
+ &adapted_width,
+ &adapted_height,
+ &crop_width,
+ &crop_height,
+ &crop_x,
+ &crop_y)) {
+ return;
+ }
+
+ rtc::scoped_refptr<VideoFrameBuffer> buffer;
+ if (adapted_width == frame.width && adapted_height == frame.height) {
+    // No adaptation - optimized path.
+ buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
+ } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ // Adapted CVPixelBuffer frame.
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ buffer = rtc::make_ref_counted<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+ initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
+ adaptedWidth:adapted_width
+ adaptedHeight:adapted_height
+ cropWidth:crop_width
+ cropHeight:crop_height
+ cropX:crop_x + rtcPixelBuffer.cropX
+ cropY:crop_y + rtcPixelBuffer.cropY]);
+ } else {
+ // Adapted I420 frame.
+ // TODO(magjed): Optimize this I420 path.
+ rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
+ buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
+ i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
+ buffer = i420_buffer;
+ }
+
+ // Applying rotation is only supported for legacy reasons and performance is
+ // not critical here.
+ VideoRotation rotation = static_cast<VideoRotation>(frame.rotation);
+ if (apply_rotation() && rotation != kVideoRotation_0) {
+ buffer = I420Buffer::Rotate(*buffer->ToI420(), rotation);
+ rotation = kVideoRotation_0;
+ }
+
+ OnFrame(VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_rotation(rotation)
+ .set_timestamp_us(translated_timestamp_us)
+ .build());
+}
+
+} // namespace webrtc
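
A minimal wiring sketch, using only types declared in this patch, showing how an application capturer ends up driving the AdaptFrame() path above (the capturer itself is assumed to come from the app, e.g. an RTCCameraVideoCapturer):

// Sketch: connect an app-provided capturer to ObjCVideoTrackSource.
RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> source =
    rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(adapter);

// Ask the adapter to scale anything larger down to at most 640x360 @ 30fps.
source->OnOutputFormatRequest(640, 360, 30);

// Any RTCVideoCapturer whose delegate is `adapter` now feeds OnCapturedFrame(),
// which crops/scales per AdaptFrame() and forwards the result via OnFrame().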
diff --git a/third_party/libwebrtc/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
new file mode 100644
index 0000000000..4c8bf348f4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
@@ -0,0 +1,469 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include "sdk/objc/native/src/objc_video_track_source.h"
+
+#import "api/video_frame_buffer/RTCNativeI420Buffer+Private.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#import "frame_buffer_helpers.h"
+
+#include "api/scoped_refptr.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "media/base/fake_video_renderer.h"
+#include "sdk/objc/native/api/video_frame.h"
+
+typedef void (^VideoSinkCallback)(RTC_OBJC_TYPE(RTCVideoFrame) *);
+
+namespace {
+
+class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ ObjCCallbackVideoSink(VideoSinkCallback callback) : callback_(callback) {}
+
+ void OnFrame(const webrtc::VideoFrame &frame) override {
+ callback_(NativeToObjCVideoFrame(frame));
+ }
+
+ private:
+ VideoSinkCallback callback_;
+};
+
+} // namespace
+
+@interface ObjCVideoTrackSourceTests : XCTestCase
+@end
+
+@implementation ObjCVideoTrackSourceTests {
+ rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> _video_source;
+}
+
+- (void)setUp {
+ _video_source = rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>();
+}
+
+- (void)tearDown {
+ _video_source = NULL;
+}
+
+- (void)testOnCapturedFrameAdaptsFrame {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ XCTAssertEqual(video_renderer->num_rendered_frames(), 1);
+ XCTAssertEqual(video_renderer->width(), 360);
+ XCTAssertEqual(video_renderer->height(), 640);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFrameAdaptsFrameWithAlignment {
+  // Requesting to adapt 1280x720 to 912x514 gives 639x360 without alignment. The 639 causes
+  // issues with some hardware encoders (e.g. HEVC), so in this test we verify that the
+  // alignment is set and respected.
+
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(912, 514, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ XCTAssertEqual(video_renderer->num_rendered_frames(), 1);
+ XCTAssertEqual(video_renderer->width(), 360);
+ XCTAssertEqual(video_renderer->height(), 640);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFrameAdaptationResultsInCommonResolutions {
+ // Some of the most common resolutions used in the wild are 640x360, 480x270 and 320x180.
+ // Make sure that we properly scale down to exactly these resolutions.
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ XCTAssertEqual(video_renderer->num_rendered_frames(), 1);
+ XCTAssertEqual(video_renderer->width(), 360);
+ XCTAssertEqual(video_renderer->height(), 640);
+
+ _video_source->OnOutputFormatRequest(480, 270, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ XCTAssertEqual(video_renderer->num_rendered_frames(), 2);
+ XCTAssertEqual(video_renderer->width(), 270);
+ XCTAssertEqual(video_renderer->height(), 480);
+
+ _video_source->OnOutputFormatRequest(320, 180, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ XCTAssertEqual(video_renderer->num_rendered_frames(), 3);
+ XCTAssertEqual(video_renderer->width(), 180);
+ XCTAssertEqual(video_renderer->height(), 320);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFrameWithoutAdaptation {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+ ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+ XCTAssertEqual(frame.width, outputFrame.width);
+ XCTAssertEqual(frame.height, outputFrame.height);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+ XCTAssertEqual(buffer.cropX, outputBuffer.cropX);
+ XCTAssertEqual(buffer.cropY, outputBuffer.cropY);
+ XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+ [callbackExpectation fulfill];
+ });
+
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFrameCVPixelBufferNeedsAdaptation {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+ ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+ XCTAssertEqual(outputFrame.width, 360);
+ XCTAssertEqual(outputFrame.height, 640);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+ XCTAssertEqual(outputBuffer.cropX, 0);
+ XCTAssertEqual(outputBuffer.cropY, 0);
+ XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+ [callbackExpectation fulfill];
+ });
+
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFrameCVPixelBufferNeedsCropping {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+ ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+ XCTAssertEqual(outputFrame.width, 360);
+ XCTAssertEqual(outputFrame.height, 640);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+ XCTAssertEqual(outputBuffer.cropX, 10);
+ XCTAssertEqual(outputBuffer.cropY, 0);
+ XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+ [callbackExpectation fulfill];
+ });
+
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFramePreAdaptedCVPixelBufferNeedsAdaptation {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ // Create a frame that's already adapted down.
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:640
+ adaptedHeight:360
+ cropWidth:720
+ cropHeight:1280
+ cropX:0
+ cropY:0];
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+ ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+ XCTAssertEqual(outputFrame.width, 480);
+ XCTAssertEqual(outputFrame.height, 270);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+ XCTAssertEqual(outputBuffer.cropX, 0);
+ XCTAssertEqual(outputBuffer.cropY, 0);
+ XCTAssertEqual(outputBuffer.cropWidth, 640);
+ XCTAssertEqual(outputBuffer.cropHeight, 360);
+ XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+ [callbackExpectation fulfill];
+ });
+
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(480, 270, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFramePreCroppedCVPixelBufferNeedsCropping {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:370
+ adaptedHeight:640
+ cropWidth:370
+ cropHeight:640
+ cropX:10
+ cropY:0];
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+ ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+ XCTAssertEqual(outputFrame.width, 360);
+ XCTAssertEqual(outputFrame.height, 640);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+ XCTAssertEqual(outputBuffer.cropX, 14);
+ XCTAssertEqual(outputBuffer.cropY, 0);
+ XCTAssertEqual(outputBuffer.cropWidth, 360);
+ XCTAssertEqual(outputBuffer.cropHeight, 640);
+ XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+ [callbackExpectation fulfill];
+ });
+
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFrameSmallerPreCroppedCVPixelBufferNeedsCropping {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:300
+ adaptedHeight:640
+ cropWidth:300
+ cropHeight:640
+ cropX:40
+ cropY:0];
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+ ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+ XCTAssertEqual(outputFrame.width, 300);
+ XCTAssertEqual(outputFrame.height, 534);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+ XCTAssertEqual(outputBuffer.cropX, 40);
+ XCTAssertEqual(outputBuffer.cropY, 52);
+ XCTAssertEqual(outputBuffer.cropWidth, 300);
+ XCTAssertEqual(outputBuffer.cropHeight, 534);
+ XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+ [callbackExpectation fulfill];
+ });
+
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testOnCapturedFrameI420BufferNeedsAdaptation {
+ rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
+ RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+ ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+ XCTAssertEqual(outputFrame.width, 360);
+ XCTAssertEqual(outputFrame.height, 640);
+
+ RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
+
+ double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
+ XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
+
+ [callbackExpectation fulfill];
+ });
+
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testOnCapturedFrameI420BufferNeedsCropping {
+ rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(380, 640);
+ RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+ ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+ XCTAssertEqual(outputFrame.width, 360);
+ XCTAssertEqual(outputFrame.height, 640);
+
+ RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
+
+ double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
+ XCTAssertGreaterThanOrEqual(psnr, 40);
+
+ [callbackExpectation fulfill];
+ });
+
+ const rtc::VideoSinkWants video_sink_wants;
+ rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+ video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+ _video_source->OnOutputFormatRequest(640, 360, 30);
+ _video_source->OnCapturedFrame(frame);
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+@end
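
The expected values in the pre-cropped tests fall out of the adapter arithmetic; as a worked instance (assuming the usual even-pixel alignment of 4:2:0 crop offsets), the numbers in testOnCapturedFrameSmallerPreCroppedCVPixelBufferNeedsCropping are:

// input after pre-crop:  300 x 640 (cropX 40)
// request, rotated:      360 x 640  => target aspect 9:16
// width already < 360, so only height is cropped to restore the aspect:
//   crop_height = 300 * 640 / 360 = 533.3  -> 534 (even value)
//   crop_y      = (640 - 534) / 2 = 53     -> 52 (aligned down to even)
// hence the asserted 300 x 534 output with cropY == 52 and cropX unchanged.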
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm
new file mode 100644
index 0000000000..f8ce844652
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm
@@ -0,0 +1,593 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#if defined(WEBRTC_IOS)
+#import "sdk/objc/native/api/audio_device_module.h"
+#endif
+
+#include "api/scoped_refptr.h"
+
+typedef int32_t(^NeedMorePlayDataBlock)(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms);
+
+typedef int32_t(^RecordedDataIsAvailableBlock)(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel);
+
+// This class implements the AudioTransport API and forwards all methods to the appropriate blocks.
+class MockAudioTransport : public webrtc::AudioTransport {
+ public:
+ MockAudioTransport() {}
+ ~MockAudioTransport() override {}
+
+ void expectNeedMorePlayData(NeedMorePlayDataBlock block) {
+ needMorePlayDataBlock = block;
+ }
+
+ void expectRecordedDataIsAvailable(RecordedDataIsAvailableBlock block) {
+ recordedDataIsAvailableBlock = block;
+ }
+
+ int32_t NeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {
+ return needMorePlayDataBlock(nSamples,
+ nBytesPerSample,
+ nChannels,
+ samplesPerSec,
+ audioSamples,
+ nSamplesOut,
+ elapsed_time_ms,
+ ntp_time_ms);
+ }
+
+ int32_t RecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) override {
+ return recordedDataIsAvailableBlock(audioSamples,
+ nSamples,
+ nBytesPerSample,
+ nChannels,
+ samplesPerSec,
+ totalDelayMS,
+ clockDrift,
+ currentMicLevel,
+ keyPressed,
+ newMicLevel);
+ }
+
+ void PullRenderData(int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {}
+
+ private:
+ NeedMorePlayDataBlock needMorePlayDataBlock;
+ RecordedDataIsAvailableBlock recordedDataIsAvailableBlock;
+};
+
+// Number of callbacks (input or output) the test waits for before we set
+// an event indicating that the test was OK.
+static const NSUInteger kNumCallbacks = 10;
+// Max amount of time we wait for an event to be set while counting callbacks.
+static const NSTimeInterval kTestTimeOutInSec = 20.0;
+// Number of bits per PCM audio sample.
+static const NSUInteger kBitsPerSample = 16;
+// Number of bytes per PCM audio sample.
+static const NSUInteger kBytesPerSample = kBitsPerSample / 8;
+// Average number of audio callbacks per second assuming 10ms packet size.
+static const NSUInteger kNumCallbacksPerSecond = 100;
+// Play out a test file during this time (unit is in seconds).
+static const NSUInteger kFilePlayTimeInSec = 15;
+// Run the full-duplex test during this time (unit is in seconds).
+// Note that the first `kNumIgnoreFirstCallbacks` callbacks are ignored.
+static const NSUInteger kFullDuplexTimeInSec = 10;
+// Wait for the callback sequence to stabilize by ignoring this number of
+// initial callbacks (avoids initial FIFO access).
+// Only used in the RunPlayoutAndRecordingInFullDuplex test.
+static const NSUInteger kNumIgnoreFirstCallbacks = 50;
+
+@interface RTCAudioDeviceModuleTests : XCTestCase {
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule;
+ MockAudioTransport mock;
+}
+
+@property(nonatomic, assign) webrtc::AudioParameters playoutParameters;
+@property(nonatomic, assign) webrtc::AudioParameters recordParameters;
+
+@end
+
+@implementation RTCAudioDeviceModuleTests
+
+@synthesize playoutParameters;
+@synthesize recordParameters;
+
+- (void)setUp {
+ [super setUp];
+ audioDeviceModule = webrtc::CreateAudioDeviceModule();
+ XCTAssertEqual(0, audioDeviceModule->Init());
+ XCTAssertEqual(0, audioDeviceModule->GetPlayoutAudioParameters(&playoutParameters));
+ XCTAssertEqual(0, audioDeviceModule->GetRecordAudioParameters(&recordParameters));
+}
+
+- (void)tearDown {
+ XCTAssertEqual(0, audioDeviceModule->Terminate());
+ audioDeviceModule = nullptr;
+ [super tearDown];
+}
+
+- (void)startPlayout {
+ XCTAssertFalse(audioDeviceModule->Playing());
+ XCTAssertEqual(0, audioDeviceModule->InitPlayout());
+ XCTAssertTrue(audioDeviceModule->PlayoutIsInitialized());
+ XCTAssertEqual(0, audioDeviceModule->StartPlayout());
+ XCTAssertTrue(audioDeviceModule->Playing());
+}
+
+- (void)stopPlayout {
+ XCTAssertEqual(0, audioDeviceModule->StopPlayout());
+ XCTAssertFalse(audioDeviceModule->Playing());
+}
+
+- (void)startRecording {
+ XCTAssertFalse(audioDeviceModule->Recording());
+ XCTAssertEqual(0, audioDeviceModule->InitRecording());
+ XCTAssertTrue(audioDeviceModule->RecordingIsInitialized());
+ XCTAssertEqual(0, audioDeviceModule->StartRecording());
+ XCTAssertTrue(audioDeviceModule->Recording());
+}
+
+- (void)stopRecording {
+ XCTAssertEqual(0, audioDeviceModule->StopRecording());
+ XCTAssertFalse(audioDeviceModule->Recording());
+}
+
+- (NSURL*)fileURLForSampleRate:(int)sampleRate {
+ XCTAssertTrue(sampleRate == 48000 || sampleRate == 44100 || sampleRate == 16000);
+ NSString *filename = [NSString stringWithFormat:@"audio_short%d", sampleRate / 1000];
+ NSURL *url = [[NSBundle mainBundle] URLForResource:filename withExtension:@"pcm"];
+ XCTAssertNotNil(url);
+
+ return url;
+}
+
+#pragma mark - Tests
+
+- (void)testConstructDestruct {
+ // Using the test fixture to create and destruct the audio device module.
+}
+
+- (void)testInitTerminate {
+ // Initialization is part of the test fixture.
+ XCTAssertTrue(audioDeviceModule->Initialized());
+ XCTAssertEqual(0, audioDeviceModule->Terminate());
+ XCTAssertFalse(audioDeviceModule->Initialized());
+}
+
+// Tests that playout can be initiated, started and stopped. No audio callback
+// is registered in this test.
+- (void)testStartStopPlayout {
+ [self startPlayout];
+ [self stopPlayout];
+ [self startPlayout];
+ [self stopPlayout];
+}
+
+// Tests that recording can be initiated, started and stopped. No audio callback
+// is registered in this test.
+- (void)testStartStopRecording {
+ [self startRecording];
+ [self stopRecording];
+ [self startRecording];
+ [self stopRecording];
+}
+
+// Verify that calling StopPlayout() will leave us in an uninitialized state
+// which will require a new call to InitPlayout(). This test does not call
+// StartPlayout() while being uninitialized since doing so will hit a
+// RTC_DCHECK.
+- (void)testStopPlayoutRequiresInitToRestart {
+ XCTAssertEqual(0, audioDeviceModule->InitPlayout());
+ XCTAssertEqual(0, audioDeviceModule->StartPlayout());
+ XCTAssertEqual(0, audioDeviceModule->StopPlayout());
+ XCTAssertFalse(audioDeviceModule->PlayoutIsInitialized());
+}
+
+// Verify that we can create two ADMs and start playing on the second ADM.
+// Only the first active instance shall activate an audio session and the
+// last active instance shall deactivate the audio session. The test does not
+// explicitly verify correct audio session calls but instead focuses on
+// ensuring that audio starts for both ADMs.
+- (void)testStartPlayoutOnTwoInstances {
+ // Create and initialize a second/extra ADM instance. The default ADM is
+ // created by the test harness.
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> secondAudioDeviceModule =
+ webrtc::CreateAudioDeviceModule();
+ XCTAssertNotEqual(secondAudioDeviceModule.get(), nullptr);
+ XCTAssertEqual(0, secondAudioDeviceModule->Init());
+
+ // Start playout for the default ADM but don't wait here. Instead use the
+ // upcoming second stream for that. We set the same expectation on number
+ // of callbacks as for the second stream.
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.playoutParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startPlayout];
+
+ // Initialize playout for the second ADM. If all is OK, the second ADM shall
+ // reuse the audio session activated when the first ADM started playing.
+ // This call will also ensure that we avoid a problem related to initializing
+ // two different audio unit instances back to back (see webrtc:5166 for
+ // details).
+ XCTAssertEqual(0, secondAudioDeviceModule->InitPlayout());
+ XCTAssertTrue(secondAudioDeviceModule->PlayoutIsInitialized());
+
+ // Start playout for the second ADM and verify that it starts as intended.
+ // Passing this test ensures that initialization of the second audio unit
+ // has been done successfully and that there is no conflict with the already
+ // playing first ADM.
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ __block int num_callbacks = 0;
+
+ MockAudioTransport mock2;
+ mock2.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.playoutParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ if (++num_callbacks == kNumCallbacks) {
+ [playoutExpectation fulfill];
+ }
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, secondAudioDeviceModule->RegisterAudioCallback(&mock2));
+ XCTAssertEqual(0, secondAudioDeviceModule->StartPlayout());
+ XCTAssertTrue(secondAudioDeviceModule->Playing());
+ [self waitForExpectationsWithTimeout:kTestTimeOutInSec handler:nil];
+ [self stopPlayout];
+ XCTAssertEqual(0, secondAudioDeviceModule->StopPlayout());
+ XCTAssertFalse(secondAudioDeviceModule->Playing());
+ XCTAssertFalse(secondAudioDeviceModule->PlayoutIsInitialized());
+
+ XCTAssertEqual(0, secondAudioDeviceModule->Terminate());
+}
+
+// Start playout and verify that the native audio layer starts asking for real
+// audio samples to play out using the NeedMorePlayData callback.
+- (void)testStartPlayoutVerifyCallbacks {
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ __block int num_callbacks = 0;
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.playoutParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ if (++num_callbacks == kNumCallbacks) {
+ [playoutExpectation fulfill];
+ }
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+
+ [self startPlayout];
+ [self waitForExpectationsWithTimeout:kTestTimeOutInSec handler:nil];
+ [self stopPlayout];
+}
+
+// Start recording and verify that the native audio layer starts feeding real
+// audio samples via the RecordedDataIsAvailable callback.
+- (void)testStartRecordingVerifyCallbacks {
+ XCTestExpectation *recordExpectation =
+ [self expectationWithDescription:@"RecordedDataIsAvailable"];
+ __block int num_callbacks = 0;
+
+ mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) {
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.recordParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
+ XCTAssertEqual(0, clockDrift);
+ XCTAssertEqual(0u, currentMicLevel);
+ XCTAssertFalse(keyPressed);
+ if (++num_callbacks == kNumCallbacks) {
+ [recordExpectation fulfill];
+ }
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startRecording];
+ [self waitForExpectationsWithTimeout:kTestTimeOutInSec handler:nil];
+ [self stopRecording];
+}
+
+// Start playout and recording (full-duplex audio) and verify that audio is
+// active in both directions.
+- (void)testStartPlayoutAndRecordingVerifyCallbacks {
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ __block NSUInteger callbackCount = 0;
+
+ XCTestExpectation *recordExpectation =
+ [self expectationWithDescription:@"RecordedDataIsAvailable"];
+ recordExpectation.expectedFulfillmentCount = kNumCallbacks;
+
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.playoutParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ if (callbackCount++ >= kNumCallbacks) {
+ [playoutExpectation fulfill];
+ }
+
+ return 0;
+ });
+
+ mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) {
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.recordParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
+ XCTAssertEqual(0, clockDrift);
+ XCTAssertEqual(0u, currentMicLevel);
+ XCTAssertFalse(keyPressed);
+ [recordExpectation fulfill];
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startPlayout];
+ [self startRecording];
+ [self waitForExpectationsWithTimeout:kTestTimeOutInSec handler:nil];
+ [self stopRecording];
+ [self stopPlayout];
+}
+
+// Start playout and read audio from an external PCM file when the audio layer
+// asks for data to play out. Real audio is played out in this test but it does
+// not contain any explicit verification that the audio quality is perfect.
+- (void)testRunPlayoutWithFileAsSource {
+ XCTAssertEqual(1u, playoutParameters.channels());
+
+ // Using XCTestExpectation to count callbacks is very slow.
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ const int expectedCallbackCount = kFilePlayTimeInSec * kNumCallbacksPerSecond;
+ __block int callbackCount = 0;
+
+ NSURL *fileURL = [self fileURLForSampleRate:playoutParameters.sample_rate()];
+ NSInputStream *inputStream = [[NSInputStream alloc] initWithURL:fileURL];
+
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ [inputStream read:(uint8_t *)audioSamples maxLength:nSamples*nBytesPerSample*nChannels];
+ nSamplesOut = nSamples;
+ if (callbackCount++ == expectedCallbackCount) {
+ [playoutExpectation fulfill];
+ }
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startPlayout];
+ NSTimeInterval waitTimeout = kFilePlayTimeInSec * 2.0;
+ [self waitForExpectationsWithTimeout:waitTimeout handler:nil];
+ [self stopPlayout];
+}
+
+- (void)testDevices {
+ // Device enumeration is not supported. Verify fixed values only.
+ XCTAssertEqual(1, audioDeviceModule->PlayoutDevices());
+ XCTAssertEqual(1, audioDeviceModule->RecordingDevices());
+}
+
+// Start playout and recording and store recorded data in an intermediate FIFO
+// buffer from which the playout side then reads its samples in the same order
+// as they were stored. Under ideal circumstances, a callback sequence would
+// look like: ...+-+-+-+-+-+-+-..., where '+' means 'packet recorded' and '-'
+// means 'packet played'. Under such conditions, the FIFO would only contain
+// one packet on average. However, under more realistic conditions, the size
+// of the FIFO will vary more due to an imbalance between the two sides.
+// This test tries to verify that the device maintains a balanced callback-
+// sequence by running in loopback for ten seconds while measuring the size
+// (max and average) of the FIFO. The size of the FIFO is increased by the
+// recording side and decreased by the playout side.
+// TODO(henrika): tune the final test parameters after running tests on several
+// different devices.
+- (void)testRunPlayoutAndRecordingInFullDuplex {
+ XCTAssertEqual(recordParameters.channels(), playoutParameters.channels());
+ XCTAssertEqual(recordParameters.sample_rate(), playoutParameters.sample_rate());
+
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ __block NSUInteger playoutCallbacks = 0;
+ NSUInteger expectedPlayoutCallbacks = kFullDuplexTimeInSec * kNumCallbacksPerSecond;
+
+ // FIFO queue and measurements
+ NSMutableArray *fifoBuffer = [NSMutableArray arrayWithCapacity:20];
+ __block NSUInteger fifoMaxSize = 0;
+ __block NSUInteger fifoTotalWrittenElements = 0;
+ __block NSUInteger fifoWriteCount = 0;
+
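+ // Recording side: append each captured packet to the FIFO and track its size.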
+ mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) {
+ if (fifoWriteCount++ < kNumIgnoreFirstCallbacks) {
+ return 0;
+ }
+
+ NSData *data = [NSData dataWithBytes:audioSamples length:nSamples*nBytesPerSample*nChannels];
+ @synchronized(fifoBuffer) {
+ [fifoBuffer addObject:data];
+ fifoMaxSize = MAX(fifoMaxSize, fifoBuffer.count);
+ fifoTotalWrittenElements += fifoBuffer.count;
+ }
+
+ return 0;
+ });
+
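+ // Playout side: pop the oldest packet from the FIFO, or play out silence if it is empty.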
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ NSData *data;
+ @synchronized(fifoBuffer) {
+ data = fifoBuffer.firstObject;
+ if (data) {
+ [fifoBuffer removeObjectAtIndex:0];
+ }
+ }
+
+ if (data) {
+ memcpy(audioSamples, (char*) data.bytes, data.length);
+ } else {
+ memset(audioSamples, 0, nSamples*nBytesPerSample*nChannels);
+ }
+
+ if (playoutCallbacks++ == expectedPlayoutCallbacks) {
+ [playoutExpectation fulfill];
+ }
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startRecording];
+ [self startPlayout];
+ NSTimeInterval waitTimeout = kFullDuplexTimeInSec * 2.0;
+ [self waitForExpectationsWithTimeout:waitTimeout handler:nil];
+
+ // Average number of packets in the FIFO, rounded to the nearest integer.
+ // The first kNumIgnoreFirstCallbacks records were skipped and are excluded.
+ size_t fifoAverageSize =
+ (fifoTotalWrittenElements == 0)
+ ? 0
+ : static_cast<size_t>(0.5 + (double)fifoTotalWrittenElements /
+ (fifoWriteCount - kNumIgnoreFirstCallbacks));
+
+ [self stopPlayout];
+ [self stopRecording];
+ XCTAssertLessThan(fifoAverageSize, 10u);
+ XCTAssertLessThan(fifoMaxSize, 20u);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDevice_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDevice_xctest.mm
new file mode 100644
index 0000000000..e01fdbd6e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDevice_xctest.mm
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#include "api/task_queue/default_task_queue_factory.h"
+
+#import "sdk/objc/components/audio/RTCAudioSession+Private.h"
+#import "sdk/objc/native/api/audio_device_module.h"
+#import "sdk/objc/native/src/audio/audio_device_ios.h"
+
+@interface RTCAudioDeviceTests : XCTestCase {
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
+ std::unique_ptr<webrtc::ios_adm::AudioDeviceIOS> _audio_device;
+}
+
+@property(nonatomic) RTC_OBJC_TYPE(RTCAudioSession) * audioSession;
+
+@end
+
+@implementation RTCAudioDeviceTests
+
+@synthesize audioSession = _audioSession;
+
+- (void)setUp {
+ [super setUp];
+
+ _audioDeviceModule = webrtc::CreateAudioDeviceModule();
+ _audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/false));
+ self.audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+
+ NSError *error = nil;
+ [self.audioSession lockForConfiguration];
+ [self.audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error];
+ XCTAssertNil(error);
+
+ [self.audioSession setMode:AVAudioSessionModeVoiceChat error:&error];
+ XCTAssertNil(error);
+
+ [self.audioSession setActive:YES error:&error];
+ XCTAssertNil(error);
+
+ [self.audioSession unlockForConfiguration];
+}
+
+- (void)tearDown {
+ _audio_device->Terminate();
+ _audio_device.reset(nullptr);
+ _audioDeviceModule = nullptr;
+ [self.audioSession notifyDidEndInterruptionWithShouldResumeSession:NO];
+
+ [super tearDown];
+}
+
+// Verifies that the AudioDeviceIOS is_interrupted_ flag is reset correctly
+// after an iOS AVAudioSessionInterruptionTypeEnded notification event.
+// AudioDeviceIOS handles RTC_OBJC_TYPE(RTCAudioSession) interruption notifications as follows:
+// - AudioDeviceIOS.InitPlayOrRecord registers its audio_session_observer_
+//   callback with RTC_OBJC_TYPE(RTCAudioSession)'s delegate list.
+// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interruption notification, it
+//   forwards the notification to the callbacks in its delegate list, which sets
+//   AudioDeviceIOS's is_interrupted_ flag to true.
+// - When AudioDeviceIOS.ShutdownPlayOrRecord is called, its
+//   audio_session_observer_ callback is removed from RTC_OBJC_TYPE(RTCAudioSession)'s
+//   delegate list.
+// So if RTC_OBJC_TYPE(RTCAudioSession) receives an end-of-interruption notification while
+// that callback is not in its delegate list, AudioDeviceIOS is never notified. This leaves
+// AudioDeviceIOS's is_interrupted_ flag in the wrong (true) state, and the audio session
+// then ignores audio changes.
+// Since RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is to
+// initialize AudioDeviceIOS's is_interrupted_ flag from RTC_OBJC_TYPE(RTCAudioSession)'s
+// isInterrupted flag in AudioDeviceIOS.InitPlayOrRecord.
+- (void)testInterruptedAudioSession {
+ XCTAssertTrue(self.audioSession.isActive);
+ XCTAssertTrue([self.audioSession.category isEqual:AVAudioSessionCategoryPlayAndRecord] ||
+ [self.audioSession.category isEqual:AVAudioSessionCategoryPlayback]);
+ XCTAssertEqual(AVAudioSessionModeVoiceChat, self.audioSession.mode);
+
+ std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory =
+ webrtc::CreateDefaultTaskQueueFactory();
+ std::unique_ptr<webrtc::AudioDeviceBuffer> audio_buffer;
+ audio_buffer.reset(new webrtc::AudioDeviceBuffer(task_queue_factory.get()));
+ _audio_device->AttachAudioBuffer(audio_buffer.get());
+ XCTAssertEqual(webrtc::AudioDeviceGeneric::InitStatus::OK, _audio_device->Init());
+ XCTAssertEqual(0, _audio_device->InitPlayout());
+ XCTAssertEqual(0, _audio_device->StartPlayout());
+
+ // Force interruption.
+ [self.audioSession notifyDidBeginInterruption];
+
+ // Wait for notification to propagate.
+ rtc::ThreadManager::ProcessAllMessageQueuesForTesting();
+ XCTAssertTrue(_audio_device->IsInterrupted());
+
+ // Stop playout directly; ShutdownPlayOrRecord removes the audio_session_observer_
+ // callback from the delegate list, so the end notification below is not delivered.
+ _audio_device->StopPlayout();
+
+ [self.audioSession notifyDidEndInterruptionWithShouldResumeSession:YES];
+ // Wait for notification to propagate.
+ rtc::ThreadManager::ProcessAllMessageQueuesForTesting();
+ XCTAssertTrue(_audio_device->IsInterrupted());
+
+ _audio_device->Init();
+ _audio_device->InitPlayout();
+ XCTAssertFalse(_audio_device->IsInterrupted());
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCAudioSessionTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioSessionTest.mm
new file mode 100644
index 0000000000..f62eb46bd5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioSessionTest.mm
@@ -0,0 +1,324 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/event.h"
+#include "rtc_base/gunit.h"
+
+#import "components/audio/RTCAudioSession+Private.h"
+
+#import "components/audio/RTCAudioSession.h"
+#import "components/audio/RTCAudioSessionConfiguration.h"
+
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+(UnitTesting)
+
+@property(nonatomic, readonly)
+ std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
+
+- (instancetype)initWithAudioSession:(id)audioSession;
+
+@end
+
+@interface MockAVAudioSession : NSObject
+
+@property (nonatomic, readwrite, assign) float outputVolume;
+
+@end
+
+@implementation MockAVAudioSession
+@synthesize outputVolume = _outputVolume;
+@end
+
+@interface RTCAudioSessionTestDelegate : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+
+@property (nonatomic, readonly) float outputVolume;
+
+@end
+
+@implementation RTCAudioSessionTestDelegate
+
+@synthesize outputVolume = _outputVolume;
+
+- (instancetype)init {
+ if (self = [super init]) {
+ _outputVolume = -1;
+ }
+ return self;
+}
+
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ shouldResumeSession:(BOOL)shouldResumeSession {
+}
+
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ reason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
+}
+
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionShouldConfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionShouldUnconfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didChangeOutputVolume:(float)outputVolume {
+ _outputVolume = outputVolume;
+}
+
+@end
+
+// A delegate that adds itself to the audio session on init and removes itself
+// in its dealloc.
+@interface RTCTestRemoveOnDeallocDelegate : RTCAudioSessionTestDelegate
+@end
+
+@implementation RTCTestRemoveOnDeallocDelegate
+
+- (instancetype)init {
+ if (self = [super init]) {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session addDelegate:self];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session removeDelegate:self];
+}
+
+@end
+
+@interface RTCAudioSessionTest : XCTestCase
+
+@end
+
+@implementation RTCAudioSessionTest
+
+- (void)testAddAndRemoveDelegates {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ NSMutableArray *delegates = [NSMutableArray array];
+ const size_t count = 5;
+ for (size_t i = 0; i < count; ++i) {
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ [delegates addObject:delegate];
+ EXPECT_EQ(i + 1, session.delegates.size());
+ }
+ [delegates enumerateObjectsUsingBlock:^(RTCAudioSessionTestDelegate *obj,
+ NSUInteger idx,
+ BOOL *stop) {
+ [session removeDelegate:obj];
+ }];
+ EXPECT_EQ(0u, session.delegates.size());
+}
+
+- (void)testPushDelegate {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ NSMutableArray *delegates = [NSMutableArray array];
+ const size_t count = 2;
+ for (size_t i = 0; i < count; ++i) {
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ [delegates addObject:delegate];
+ }
+ // Test that it gets added to the front of the list.
+ RTCAudioSessionTestDelegate *pushedDelegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session pushDelegate:pushedDelegate];
+ EXPECT_TRUE(pushedDelegate == session.delegates[0]);
+
+ // Test that it stays at the front of the list.
+ for (size_t i = 0; i < count; ++i) {
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ [delegates addObject:delegate];
+ }
+ EXPECT_TRUE(pushedDelegate == session.delegates[0]);
+
+ // Test that the next one goes to the front too.
+ pushedDelegate = [[RTCAudioSessionTestDelegate alloc] init];
+ [session pushDelegate:pushedDelegate];
+ EXPECT_TRUE(pushedDelegate == session.delegates[0]);
+}
+
+// Tests that delegates added to the audio session properly zero out. This is
+// checking an implementation detail (that vectors of __weak work as expected).
+- (void)testZeroingWeakDelegate {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ @autoreleasepool {
+ // Add a delegate to the session. There should be one delegate at this
+ // point.
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ EXPECT_EQ(1u, session.delegates.size());
+ EXPECT_TRUE(session.delegates[0]);
+ }
+ // The previously created delegate should've de-alloced, leaving a nil ptr.
+ EXPECT_FALSE(session.delegates[0]);
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ // On adding a new delegate, nil ptrs should've been cleared.
+ EXPECT_EQ(1u, session.delegates.size());
+ EXPECT_TRUE(session.delegates[0]);
+}
+
+// Tests that we don't crash when removing delegates in dealloc.
+// Added as a regression test.
+- (void)testRemoveDelegateOnDealloc {
+ @autoreleasepool {
+ RTCTestRemoveOnDeallocDelegate *delegate =
+ [[RTCTestRemoveOnDeallocDelegate alloc] init];
+ EXPECT_TRUE(delegate);
+ }
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ EXPECT_EQ(0u, session.delegates.size());
+}
+
+- (void)testAudioSessionActivation {
+ RTC_OBJC_TYPE(RTCAudioSession) *audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ EXPECT_EQ(0, audioSession.activationCount);
+ [audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]];
+ EXPECT_EQ(1, audioSession.activationCount);
+ [audioSession audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
+ EXPECT_EQ(0, audioSession.activationCount);
+}
+
+// Hack - fixes OCMVerify link error
+// Link error is: Undefined symbols for architecture i386:
+// "OCMMakeLocation(objc_object*, char const*, int)", referenced from:
+// -[RTCAudioSessionTest testConfigureWebRTCSession] in RTCAudioSessionTest.o
+// ld: symbol(s) not found for architecture i386
+// REASON: https://github.com/erikdoe/ocmock/issues/238
+OCMLocation *OCMMakeLocation(id testCase, const char *fileCString, int line) {
+ return [OCMLocation locationWithTestCase:testCase
+ file:[NSString stringWithUTF8String:fileCString]
+ line:line];
+}
+
+- (void)testConfigureWebRTCSession {
+ NSError *error = nil;
+
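+ // Stub block that makes -setActive:withOptions:error: fail. With NSInvocation,
+ // indices 0 and 1 are self and _cmd, so the NSError** out-parameter of this
+ // three-argument selector sits at index 4.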
+ void (^setActiveBlock)(NSInvocation *invocation) = ^(NSInvocation *invocation) {
+ __autoreleasing NSError **retError;
+ [invocation getArgument:&retError atIndex:4];
+ *retError = [NSError errorWithDomain:@"AVAudioSession"
+ code:AVAudioSessionErrorCodeCannotInterruptOthers
+ userInfo:nil];
+ BOOL failure = NO;
+ [invocation setReturnValue:&failure];
+ };
+
+ id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]);
+ OCMStub([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES
+ withOptions:0
+ error:([OCMArg anyObjectRef])])
+ .andDo(setActiveBlock);
+
+ id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
+ OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
+
+ RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession;
+ EXPECT_EQ(0, audioSession.activationCount);
+ [audioSession lockForConfiguration];
+ // configureWebRTCSession is forced to fail by the mock above, so
+ // activationCount should remain 0.
+ OCMExpect([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES
+ withOptions:0
+ error:([OCMArg anyObjectRef])])
+ .andDo(setActiveBlock);
+ OCMExpect([mockAudioSession session]).andReturn(mockAVAudioSession);
+ EXPECT_FALSE([audioSession configureWebRTCSession:&error]);
+ EXPECT_EQ(0, audioSession.activationCount);
+
+ id session = audioSession.session;
+ EXPECT_EQ(session, mockAVAudioSession);
+ EXPECT_EQ(NO, [mockAVAudioSession setActive:YES withOptions:0 error:&error]);
+ [audioSession unlockForConfiguration];
+
+ OCMVerify([mockAudioSession session]);
+ OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES withOptions:0 error:&error]);
+ OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:NO withOptions:0 error:&error]);
+
+ [mockAVAudioSession stopMocking];
+ [mockAudioSession stopMocking];
+}
+
+- (void)testConfigureWebRTCSessionWithoutLocking {
+ NSError *error = nil;
+
+ id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]);
+ id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
+ OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
+
+ RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession;
+
+ std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
+ EXPECT_TRUE(thread);
+ EXPECT_TRUE(thread->Start());
+
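+ // Take the configuration lock on another thread and hold it, so that the
+ // unlocked setCategory call below must fail with a lock-required error.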
+ rtc::Event waitLock;
+ rtc::Event waitCleanup;
+ constexpr int timeoutMs = 5000;
+ thread->PostTask([audioSession, &waitLock, &waitCleanup] {
+ [audioSession lockForConfiguration];
+ waitLock.Set();
+ waitCleanup.Wait(timeoutMs);
+ [audioSession unlockForConfiguration];
+ });
+
+ waitLock.Wait(timeoutMs);
+ [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error];
+ EXPECT_TRUE(error != nil);
+ EXPECT_EQ(error.domain, kRTCAudioSessionErrorDomain);
+ EXPECT_EQ(error.code, kRTCAudioSessionErrorLockRequired);
+ waitCleanup.Set();
+ thread->Stop();
+
+ [mockAVAudioSession stopMocking];
+ [mockAudioSession stopMocking];
+}
+
+- (void)testAudioVolumeDidNotify {
+ MockAVAudioSession *mockAVAudioSession = [[MockAVAudioSession alloc] init];
+ RTC_OBJC_TYPE(RTCAudioSession) *session =
+ [[RTC_OBJC_TYPE(RTCAudioSession) alloc] initWithAudioSession:mockAVAudioSession];
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+
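+ // RTC_OBJC_TYPE(RTCAudioSession) observes outputVolume via KVO, so assigning the
+ // mock's property is expected to notify the delegate synchronously.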
+ float expectedVolume = 0.75;
+ mockAVAudioSession.outputVolume = expectedVolume;
+
+ EXPECT_EQ(expectedVolume, delegate.outputVolume);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm
new file mode 100644
index 0000000000..3a1ab24773
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm
@@ -0,0 +1,308 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#import "api/video_frame_buffer/RTCNativeI420Buffer+Private.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "frame_buffer_helpers.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "third_party/libyuv/include/libyuv.h"
+
+@interface RTCCVPixelBufferTests : XCTestCase
+@end
+
+@implementation RTCCVPixelBufferTests {
+}
+
+- (void)testRequiresCroppingNoCrop {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+
+ XCTAssertFalse([buffer requiresCropping]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testRequiresCroppingWithCrop {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *croppedBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:720
+ adaptedHeight:1280
+ cropWidth:360
+ cropHeight:640
+ cropX:100
+ cropY:100];
+
+ XCTAssertTrue([croppedBuffer requiresCropping]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testRequiresScalingNoScale {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testRequiresScalingWithScale {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testRequiresScalingWithScaleAndMatchingCrop {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:720
+ adaptedHeight:1280
+ cropWidth:360
+ cropHeight:640
+ cropX:100
+ cropY:100];
+ XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testBufferSize_NV12 {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
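+ // 576000 is presumably the temp buffer needed for the NV12 chroma planes:
+ // 2 * (360 * 640) source chroma bytes + 2 * (180 * 320) destination chroma bytes.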
+ XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testBufferSize_RGB {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testCropAndScale_NV12 {
+ [self cropAndScaleTestWithNV12];
+}
+
+- (void)testCropAndScaleNoOp_NV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputSize:CGSizeMake(720, 1280)];
+}
+
+- (void)testCropAndScale_NV12FullToVideo {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
+}
+
+- (void)testCropAndScaleZeroSizeFrame_NV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputSize:CGSizeMake(0, 0)];
+}
+
+- (void)testCropAndScaleToSmallFormat_NV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputSize:CGSizeMake(148, 320)];
+}
+
+- (void)testCropAndScaleToOddFormat_NV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputSize:CGSizeMake(361, 640)];
+}
+
+- (void)testCropAndScale_32BGRA {
+ [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32BGRA];
+}
+
+- (void)testCropAndScale_32ARGB {
+ [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB];
+}
+
+- (void)testCropAndScaleWithSmallCropInfo_32ARGB {
+ [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB cropX:2 cropY:3];
+}
+
+- (void)testCropAndScaleWithLargeCropInfo_32ARGB {
+ [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB cropX:200 cropY:300];
+}
+
+- (void)testToI420_NV12 {
+ [self toI420WithPixelFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
+}
+
+- (void)testToI420_32BGRA {
+ [self toI420WithPixelFormat:kCVPixelFormatType_32BGRA];
+}
+
+- (void)testToI420_32ARGB {
+ [self toI420WithPixelFormat:kCVPixelFormatType_32ARGB];
+}
+
+#pragma mark - Shared test code
+
+- (void)cropAndScaleTestWithNV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
+}
+
+- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat outputFormat:(OSType)outputFormat {
+ [self cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat
+ outputFormat:(OSType)outputFormat
+ outputSize:CGSizeMake(360, 640)];
+}
+
+- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat
+ outputFormat:(OSType)outputFormat
+ outputSize:(CGSize)outputSize {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 720, 1280, inputFormat, NULL, &pixelBufferRef);
+
+ rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
+ CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertEqual(buffer.width, 720);
+ XCTAssertEqual(buffer.height, 1280);
+
+ CVPixelBufferRef outputPixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, outputSize.width, outputSize.height, outputFormat, NULL, &outputPixelBufferRef);
+
+ std::vector<uint8_t> frameScaleBuffer;
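+ // A temp buffer is only needed when cropping/scaling is required; otherwise it stays empty.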
+ if ([buffer requiresScalingToWidth:outputSize.width height:outputSize.height]) {
+ int size =
+ [buffer bufferSizeForCroppingAndScalingToWidth:outputSize.width height:outputSize.height];
+ frameScaleBuffer.resize(size);
+ } else {
+ frameScaleBuffer.clear();
+ }
+ frameScaleBuffer.shrink_to_fit();
+
+ [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()];
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
+ XCTAssertEqual(scaledBuffer.width, outputSize.width);
+ XCTAssertEqual(scaledBuffer.height, outputSize.height);
+
+ if (outputSize.width > 0 && outputSize.height > 0) {
+ RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
+ RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
+ double psnr =
+ I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
+ XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
+ }
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat {
+ [self cropAndScaleTestWithRGBPixelFormat:pixelFormat cropX:0 cropY:0];
+}
+
+- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat cropX:(int)cropX cropY:(int)cropY {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 720, 1280, pixelFormat, NULL, &pixelBufferRef);
+
+ DrawGradientInRGBPixelBuffer(pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+ initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef)
+ adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef)
+ cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX
+ cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY
+ cropX:cropX
+ cropY:cropY];
+
+ XCTAssertEqual(buffer.width, 720);
+ XCTAssertEqual(buffer.height, 1280);
+
+ CVPixelBufferRef outputPixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &outputPixelBufferRef);
+ [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL];
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
+ XCTAssertEqual(scaledBuffer.width, 360);
+ XCTAssertEqual(scaledBuffer.height, 640);
+
+ RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
+ RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
+ double psnr =
+ I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
+ XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)toI420WithPixelFormat:(OSType)pixelFormat {
+ rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(360, 640);
+
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &pixelBufferRef);
+
+ CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [buffer toI420];
+
+ double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]);
+ double target = webrtc::kPerfectPSNR;
+ if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
+ // libyuv's I420ToRGB functions seem to lose some quality.
+ target = 19.0;
+ }
+ XCTAssertGreaterThanOrEqual(psnr, target);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCCallbackLogger_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCCallbackLogger_xctest.m
new file mode 100644
index 0000000000..1b6fb1c07b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCCallbackLogger_xctest.m
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/logging/RTCCallbackLogger.h"
+
+#import <XCTest/XCTest.h>
+
+@interface RTCCallbackLoggerTests : XCTestCase
+
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCallbackLogger) * logger;
+
+@end
+
+@implementation RTCCallbackLoggerTests
+
+@synthesize logger;
+
+- (void)setUp {
+ self.logger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
+}
+
+- (void)tearDown {
+ self.logger = nil;
+}
+
+- (void)testDefaultSeverityLevel {
+ XCTAssertEqual(self.logger.severity, RTCLoggingSeverityInfo);
+}
+
+- (void)testCallbackGetsCalledForAppropriateLevel {
+ self.logger.severity = RTCLoggingSeverityWarning;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+
+ [self.logger start:^(NSString *message) {
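+ // Logged lines carry a runtime-generated prefix, so only the suffix is stable.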
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ [callbackExpectation fulfill];
+ }];
+
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testCallbackWithSeverityGetsCalledForAppropriateLevel {
+ self.logger.severity = RTCLoggingSeverityWarning;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ XCTAssertEqual(severity, RTCLoggingSeverityError);
+ [callbackExpectation fulfill];
+ }];
+
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testCallbackDoesNotGetCalledForOtherLevels {
+ self.logger.severity = RTCLoggingSeverityError;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackError"];
+
+ [self.logger start:^(NSString *message) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ [callbackExpectation fulfill];
+ }];
+
+ RTCLogInfo("Just some info");
+ RTCLogWarning("Warning warning");
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testCallbackWithSeverityDoesNotGetCalledForOtherLevels {
+ self.logger.severity = RTCLoggingSeverityError;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackError"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ XCTAssertEqual(severity, RTCLoggingSeverityError);
+ [callbackExpectation fulfill];
+ }];
+
+ RTCLogInfo("Just some info");
+ RTCLogWarning("Warning warning");
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testCallbackDoesNotGetCalledForSeverityNone {
+ self.logger.severity = RTCLoggingSeverityNone;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"unexpectedCallback"];
+
+ [self.logger start:^(NSString *message) {
+ [callbackExpectation fulfill];
+ XCTAssertTrue(false);
+ }];
+
+ RTCLogInfo("Just some info");
+ RTCLogWarning("Warning warning");
+ RTCLogError("Horrible error");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testCallbackWithSeverityDoesNotGetCalledForSeverityNone {
+ self.logger.severity = RTCLoggingSeverityNone;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"unexpectedCallback"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+ [callbackExpectation fulfill];
+ XCTAssertTrue(false);
+ }];
+
+ RTCLogInfo("Just some info");
+ RTCLogWarning("Warning warning");
+ RTCLogError("Horrible error");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testStartingWithNilCallbackDoesNotCrash {
+ [self.logger start:nil];
+
+ RTCLogError("Horrible error");
+}
+
+- (void)testStartingWithNilCallbackWithSeverityDoesNotCrash {
+ [self.logger startWithMessageAndSeverityHandler:nil];
+
+ RTCLogError("Horrible error");
+}
+
+- (void)testStopCallbackLogger {
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"stopped"];
+
+ [self.logger start:^(NSString *message) {
+ [callbackExpectation fulfill];
+ }];
+
+ [self.logger stop];
+
+ RTCLogInfo("Just some info");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testStopCallbackWithSeverityLogger {
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"stopped"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingSeverity) {
+ [callbackExpectation fulfill];
+ }];
+
+ [self.logger stop];
+
+ RTCLogInfo("Just some info");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testDestroyingCallbackLogger {
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"destroyed"];
+
+ [self.logger start:^(NSString *message) {
+ [callbackExpectation fulfill];
+ }];
+
+ self.logger = nil;
+
+ RTCLogInfo("Just some info");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testDestroyingCallbackWithSeverityLogger {
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"destroyed"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingSeverity) {
+ [callbackExpectation fulfill];
+ }];
+
+ self.logger = nil;
+
+ RTCLogInfo("Just some info");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testCallbackWithSeverityLoggerCannotStartTwice {
+ self.logger.severity = RTCLoggingSeverityWarning;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingSeverity) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ XCTAssertEqual(loggingSeverity, RTCLoggingSeverityError);
+ [callbackExpectation fulfill];
+ }];
+
+ [self.logger start:^(NSString *message) {
+ [callbackExpectation fulfill];
+ XCTAssertTrue(false);
+ }];
+
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm
new file mode 100644
index 0000000000..5018479157
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm
@@ -0,0 +1,569 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#endif
+
+#import "base/RTCVideoFrame.h"
+#import "components/capturer/RTCCameraVideoCapturer.h"
+#import "helpers/AVCaptureSession+DevicePosition.h"
+#import "helpers/RTCDispatcher.h"
+#import "helpers/scoped_cftyperef.h"
+
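+// Blocks the current test for roughly timeoutMs by waiting on a dummy
+// expectation that is never fulfilled; the wait is expected to time out.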
+#define WAIT(timeoutMs) \
+ do { \
+ id expectation = [[XCTestExpectation alloc] initWithDescription:@"Dummy"]; \
+ XCTWaiterResult res = [XCTWaiter waitForExpectations:@[ expectation ] \
+ timeout:timeoutMs / 1000.0]; \
+ XCTAssertEqual(XCTWaiterResultTimedOut, res); \
+ } while (false)
+
+#if TARGET_OS_IPHONE
+// Helper method.
+CMSampleBufferRef createTestSampleBufferRef() {
+ // This image is already in the testing bundle.
+ UIImage *image = [UIImage imageNamed:@"Default.png"];
+ CGSize size = image.size;
+ CGImageRef imageRef = [image CGImage];
+
+ CVPixelBufferRef pixelBuffer = nullptr;
+ CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, nil,
+ &pixelBuffer);
+
+ CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
+ // bitsPerComponent is 8; bytesPerRow of 8 * width is arbitrary but more than a 32-bit ARGB row needs.
+ CGContextRef context = CGBitmapContextCreate(nil, size.width, size.height, 8, 8 * size.width,
+ rgbColorSpace, kCGImageAlphaPremultipliedFirst);
+
+ CGContextDrawImage(
+ context, CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), imageRef);
+
+ CGColorSpaceRelease(rgbColorSpace);
+ CGContextRelease(context);
+
+ // We don't really care about the timing.
+ CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
+ CMVideoFormatDescriptionRef description = nullptr;
+ CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &description);
+
+ CMSampleBufferRef sampleBuffer = nullptr;
+ CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, NULL, NULL, description,
+ &timing, &sampleBuffer);
+ CFRelease(pixelBuffer);
+
+ return sampleBuffer;
+}
+#endif
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
+(Tests)<AVCaptureVideoDataOutputSampleBufferDelegate>
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
+ captureSession:(AVCaptureSession *)captureSession;
+@end
+
+@interface RTCCameraVideoCapturerTests : XCTestCase
+@property(nonatomic, strong) id delegateMock;
+@property(nonatomic, strong) id deviceMock;
+@property(nonatomic, strong) id captureConnectionMock;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
+@end
+
+@implementation RTCCameraVideoCapturerTests
+@synthesize delegateMock = _delegateMock;
+@synthesize deviceMock = _deviceMock;
+@synthesize captureConnectionMock = _captureConnectionMock;
+@synthesize capturer = _capturer;
+
+- (void)setUp {
+ self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
+ self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
+ self.capturer =
+ [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock];
+ self.deviceMock = [RTCCameraVideoCapturerTests createDeviceMock];
+}
+
+- (void)tearDown {
+ [self.delegateMock stopMocking];
+ [self.deviceMock stopMocking];
+ self.delegateMock = nil;
+ self.deviceMock = nil;
+ self.capturer = nil;
+}
+
+#pragma mark - utils
+
++ (id)createDeviceMock {
+ return OCMClassMock([AVCaptureDevice class]);
+}
+
+#pragma mark - test cases
+
+- (void)testSetupSession {
+ AVCaptureSession *session = self.capturer.captureSession;
+ XCTAssertTrue(session != nil);
+
+#if TARGET_OS_IPHONE
+ XCTAssertEqual(session.sessionPreset, AVCaptureSessionPresetInputPriority);
+ XCTAssertEqual(session.usesApplicationAudioSession, NO);
+#endif
+ XCTAssertEqual(session.outputs.count, 1u);
+}
+
+- (void)testSetupSessionOutput {
+ AVCaptureVideoDataOutput *videoOutput = self.capturer.captureSession.outputs[0];
+ XCTAssertEqual(videoOutput.alwaysDiscardsLateVideoFrames, NO);
+ XCTAssertEqual(videoOutput.sampleBufferDelegate, self.capturer);
+}
+
+- (void)testSupportedFormatsForDevice {
+ // given
+ id validFormat1 = OCMClassMock([AVCaptureDeviceFormat class]);
+ CMVideoFormatDescriptionRef format;
+
+ // We don't care about width and height, so use arbitrary values of 123 and 456.
+ int width = 123;
+ int height = 456;
+ CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8PlanarFullRange, width, height,
+ nil, &format);
+ OCMStub([validFormat1 formatDescription]).andReturn(format);
+
+ id validFormat2 = OCMClassMock([AVCaptureDeviceFormat class]);
+ CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, width,
+ height, nil, &format);
+ OCMStub([validFormat2 formatDescription]).andReturn(format);
+
+ id invalidFormat = OCMClassMock([AVCaptureDeviceFormat class]);
+ CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil,
+ &format);
+ OCMStub([invalidFormat formatDescription]).andReturn(format);
+
+ NSArray *formats = @[ validFormat1, validFormat2, invalidFormat ];
+ OCMStub([self.deviceMock formats]).andReturn(formats);
+
+ // when
+ NSArray *supportedFormats =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock];
+
+ // then
+ XCTAssertEqual(supportedFormats.count, 3u);
+ XCTAssertTrue([supportedFormats containsObject:validFormat1]);
+ XCTAssertTrue([supportedFormats containsObject:validFormat2]);
+ XCTAssertTrue([supportedFormats containsObject:invalidFormat]);
+
+ // cleanup
+ [validFormat1 stopMocking];
+ [validFormat2 stopMocking];
+ [invalidFormat stopMocking];
+ validFormat1 = nil;
+ validFormat2 = nil;
+ invalidFormat = nil;
+}
+
+- (void)testDelegateCallbackNotCalledWhenInvalidBuffer {
+ // given
+ CMSampleBufferRef sampleBuffer = nullptr;
+ [[self.delegateMock reject] capturer:[OCMArg any] didCaptureVideoFrame:[OCMArg any]];
+
+ // when
+ [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
+ didOutputSampleBuffer:sampleBuffer
+ fromConnection:self.captureConnectionMock];
+
+ // then
+ [self.delegateMock verify];
+}
+
+#if 0
+// See crbug.com/1404878 - XCTExpectFailure and XCTSkip are considered failures
+
+- (void)testDelegateCallbackWithValidBufferAndOrientationUpdate {
+#if TARGET_OS_IPHONE
+ XCTExpectFailure(@"Setting orientation on UIDevice is not supported");
+ [UIDevice.currentDevice setValue:@(UIDeviceOrientationPortraitUpsideDown) forKey:@"orientation"];
+ CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
+
+ // then
+ [[self.delegateMock expect] capturer:self.capturer
+ didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+ expectedFrame) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_270);
+ return YES;
+ }]];
+
+ // when
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+ [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
+
+ // We need to wait for the dispatch to finish.
+ WAIT(1000);
+
+ [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
+ didOutputSampleBuffer:sampleBuffer
+ fromConnection:self.captureConnectionMock];
+
+ [self.delegateMock verify];
+ CFRelease(sampleBuffer);
+#endif
+}
+
+// XCTest treats zero-argument methods prefixed with 'test' as test cases; this
+// parameterized helper is therefore not run directly.
+- (void)testRotationCamera:(AVCaptureDevicePosition)camera
+ withOrientation:(UIDeviceOrientation)deviceOrientation {
+#if TARGET_OS_IPHONE
+ // Mock the AVCaptureConnection as we will get the camera position from the connection's
+ // input ports.
+ AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
+ AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
+ NSArray *inputPortsArrayMock = @[captureInputPort];
+ AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
+ OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
+ andReturn(inputPortsArrayMock);
+ OCMStub(captureInputPort.input).andReturn(inputPortMock);
+ OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
+ OCMStub(captureDeviceMock.position).andReturn(camera);
+
+ XCTExpectFailure(@"Setting orientation on UIDevice is not supported");
+ [UIDevice.currentDevice setValue:@(deviceOrientation) forKey:@"orientation"];
+
+ CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
+
+ [[self.delegateMock expect] capturer:self.capturer
+ didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+ expectedFrame) {
+ if (camera == AVCaptureDevicePositionFront) {
+ if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180);
+ } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
+ }
+ } else if (camera == AVCaptureDevicePositionBack) {
+ if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
+ } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180);
+ }
+ }
+ return YES;
+ }]];
+
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+ [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
+
+ // We need to wait for the dispatch to finish.
+ WAIT(1000);
+
+ [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
+ didOutputSampleBuffer:sampleBuffer
+ fromConnection:self.captureConnectionMock];
+
+ [self.delegateMock verify];
+
+ CFRelease(sampleBuffer);
+#endif
+}
+
+- (void)testRotationCameraBackLandscapeLeft {
+ [self testRotationCamera:AVCaptureDevicePositionBack
+ withOrientation:UIDeviceOrientationLandscapeLeft];
+}
+
+- (void)testRotationCameraFrontLandscapeLeft {
+ [self testRotationCamera:AVCaptureDevicePositionFront
+ withOrientation:UIDeviceOrientationLandscapeLeft];
+}
+
+- (void)testRotationCameraBackLandscapeRight {
+ [self testRotationCamera:AVCaptureDevicePositionBack
+ withOrientation:UIDeviceOrientationLandscapeRight];
+}
+
+- (void)testRotationCameraFrontLandscapeRight {
+ [self testRotationCamera:AVCaptureDevicePositionFront
+ withOrientation:UIDeviceOrientationLandscapeRight];
+}
+
+#endif
+
+- (void)setExif:(CMSampleBufferRef)sampleBuffer {
+ rtc::ScopedCFTypeRef<CFMutableDictionaryRef> exif(CFDictionaryCreateMutable(
+ kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
+ CFDictionarySetValue(exif.get(), CFSTR("LensModel"), CFSTR("iPhone SE back camera 4.15mm f/2.2"));
+ CMSetAttachment(sampleBuffer, CFSTR("{Exif}"), exif.get(), kCMAttachmentMode_ShouldPropagate);
+}
+
+#if 0
+// See crbug.com/1404878 - XCTExpectFailure and XCTSkip are considered failures
+
+- (void)testRotationFrame {
+#if TARGET_OS_IPHONE
+ // Mock the AVCaptureConnection as we will get the camera position from the connection's
+ // input ports.
+ AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
+ AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
+ NSArray *inputPortsArrayMock = @[captureInputPort];
+ AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
+ OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
+ andReturn(inputPortsArrayMock);
+ OCMStub(captureInputPort.input).andReturn(inputPortMock);
+ OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
+ OCMStub(captureDeviceMock.position).andReturn(AVCaptureDevicePositionFront);
+
+ XCTExpectFailure(@"Setting orientation on UIDevice is not supported");
+ [UIDevice.currentDevice setValue:@(UIDeviceOrientationLandscapeLeft) forKey:@"orientation"];
+
+ CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
+
+ [[self.delegateMock expect] capturer:self.capturer
+ didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+ expectedFrame) {
+ // Front camera and landscape left should return 180. But the frame's exif
+ // we add below says its from the back camera, so rotation should be 0.
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
+ return YES;
+ }]];
+
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+ [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
+
+ // We need to wait for the dispatch to finish.
+ WAIT(1000);
+
+ [self setExif:sampleBuffer];
+
+ [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
+ didOutputSampleBuffer:sampleBuffer
+ fromConnection:self.captureConnectionMock];
+
+ [self.delegateMock verify];
+ CFRelease(sampleBuffer);
+#endif
+}
+
+#endif
+
+- (void)testImageExif {
+#if TARGET_OS_IPHONE
+ CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
+ [self setExif:sampleBuffer];
+
+ AVCaptureDevicePosition cameraPosition = [AVCaptureSession
+ devicePositionForSampleBuffer:sampleBuffer];
+ XCTAssertEqual(cameraPosition, AVCaptureDevicePositionBack);
+#endif
+}
+
+@end
+
+@interface RTCCameraVideoCapturerTestsWithMockedCaptureSession : XCTestCase
+@property(nonatomic, strong) id delegateMock;
+@property(nonatomic, strong) id deviceMock;
+@property(nonatomic, strong) id captureSessionMock;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
+@end
+
+@implementation RTCCameraVideoCapturerTestsWithMockedCaptureSession
+@synthesize delegateMock = _delegateMock;
+@synthesize deviceMock = _deviceMock;
+@synthesize captureSessionMock = _captureSessionMock;
+@synthesize capturer = _capturer;
+
+- (void)setUp {
+ self.captureSessionMock = OCMStrictClassMock([AVCaptureSession class]);
+ OCMStub([self.captureSessionMock setSessionPreset:[OCMArg any]]);
+ OCMStub([self.captureSessionMock setUsesApplicationAudioSession:NO]);
+ OCMStub([self.captureSessionMock canAddOutput:[OCMArg any]]).andReturn(YES);
+ OCMStub([self.captureSessionMock addOutput:[OCMArg any]]);
+ OCMStub([self.captureSessionMock beginConfiguration]);
+ OCMStub([self.captureSessionMock commitConfiguration]);
+ self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
+ self.capturer =
+ [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock
+ captureSession:self.captureSessionMock];
+ self.deviceMock = [RTCCameraVideoCapturerTests createDeviceMock];
+}
+
+- (void)tearDown {
+ [self.delegateMock stopMocking];
+ [self.deviceMock stopMocking];
+ self.delegateMock = nil;
+ self.deviceMock = nil;
+ self.capturer = nil;
+ self.captureSessionMock = nil;
+}
+
+#pragma mark - test cases
+
+- (void)testStartingAndStoppingCapture {
+ id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
+ .andReturn(expectedDeviceInputMock);
+
+ OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ OCMStub([self.deviceMock unlockForConfiguration]);
+ OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
+ OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
+ OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
+
+ // Set expectation that the capture session should be started with correct device.
+ OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
+ OCMExpect([_captureSessionMock startRunning]);
+ OCMExpect([_captureSessionMock stopRunning]);
+
+ id format = OCMClassMock([AVCaptureDeviceFormat class]);
+ [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];
+ [self.capturer stopCapture];
+
+ // Start capture code is dispatched async.
+ OCMVerifyAllWithDelay(_captureSessionMock, 15);
+}
+
+- (void)testStartCaptureFailingToLockForConfiguration {
+ // The captureSessionMock is a strict mock, so this test will crash if the startCapture
+ // method does not return when failing to lock for configuration.
+ OCMExpect([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(NO);
+
+ id format = OCMClassMock([AVCaptureDeviceFormat class]);
+ [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];
+
+ // Start capture code is dispatched async.
+ OCMVerifyAllWithDelay(self.deviceMock, 15);
+}
+
+- (void)testStartingAndStoppingCaptureWithCallbacks {
+ id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
+ .andReturn(expectedDeviceInputMock);
+
+ OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ OCMStub([self.deviceMock unlockForConfiguration]);
+ OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
+ OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
+ OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
+
+ // Set expectation that the capture session should be started with correct device.
+ OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
+ OCMExpect([_captureSessionMock startRunning]);
+ OCMExpect([_captureSessionMock stopRunning]);
+
+ dispatch_semaphore_t completedStopSemaphore = dispatch_semaphore_create(0);
+
+ __block BOOL completedStart = NO;
+ id format = OCMClassMock([AVCaptureDeviceFormat class]);
+ [self.capturer startCaptureWithDevice:self.deviceMock
+ format:format
+ fps:30
+ completionHandler:^(NSError *error) {
+ XCTAssertEqual(error, nil);
+ completedStart = YES;
+ }];
+
+ __block BOOL completedStop = NO;
+ [self.capturer stopCaptureWithCompletionHandler:^{
+ completedStop = YES;
+ dispatch_semaphore_signal(completedStopSemaphore);
+ }];
+
+ dispatch_semaphore_wait(completedStopSemaphore,
+ dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
+ OCMVerifyAllWithDelay(_captureSessionMock, 15);
+ XCTAssertTrue(completedStart);
+ XCTAssertTrue(completedStop);
+}
+
+- (void)testStartCaptureFailingToLockForConfigurationWithCallback {
+ id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
+ .andReturn(expectedDeviceInputMock);
+
+ id errorMock = OCMClassMock([NSError class]);
+
+ OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:errorMock]]).andReturn(NO);
+ OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
+ OCMStub([self.deviceMock unlockForConfiguration]);
+
+ OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
+
+ dispatch_semaphore_t completedStartSemaphore = dispatch_semaphore_create(0);
+ __block NSError *callbackError = nil;
+
+ id format = OCMClassMock([AVCaptureDeviceFormat class]);
+ [self.capturer startCaptureWithDevice:self.deviceMock
+ format:format
+ fps:30
+ completionHandler:^(NSError *error) {
+ callbackError = error;
+ dispatch_semaphore_signal(completedStartSemaphore);
+ }];
+
+ long ret = dispatch_semaphore_wait(completedStartSemaphore,
+ dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
+ XCTAssertEqual(ret, 0);
+ XCTAssertEqual(callbackError, errorMock);
+}
+
+- (void)testStartCaptureSetsOutputDimensionsInvalidPixelFormat {
+ id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([captureDeviceInputMock deviceInputWithDevice:_deviceMock error:[OCMArg setTo:nil]])
+ .andReturn(expectedDeviceInputMock);
+
+ OCMStub([_deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ OCMStub([_deviceMock unlockForConfiguration]);
+ OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
+ OCMStub([_captureSessionMock addInput:expectedDeviceInputMock]);
+ OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
+ OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
+ OCMStub([_captureSessionMock startRunning]);
+ OCMStub([_captureSessionMock stopRunning]);
+
+ id deviceFormatMock = OCMClassMock([AVCaptureDeviceFormat class]);
+ CMVideoFormatDescriptionRef formatDescription;
+
+ int width = 110;
+ int height = 220;
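+ // 0x18000000 is not a valid pixel format code, so the capturer is expected to
+ // fall back to its NV12 default (checked against the output settings below).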
+ FourCharCode pixelFormat = 0x18000000;
+ CMVideoFormatDescriptionCreate(nil, pixelFormat, width, height, nil, &formatDescription);
+ OCMStub([deviceFormatMock formatDescription]).andReturn(formatDescription);
+
+ [_capturer startCaptureWithDevice:_deviceMock format:deviceFormatMock fps:30];
+
+ XCTestExpectation *expectation = [self expectationWithDescription:@"StopCompletion"];
+ [_capturer stopCaptureWithCompletionHandler:^(void) {
+ [expectation fulfill];
+ }];
+
+ [self waitForExpectationsWithTimeout:15 handler:nil];
+
+ OCMVerify([_captureSessionMock
+ addOutput:[OCMArg checkWithBlock:^BOOL(AVCaptureVideoDataOutput *output) {
+ if (@available(iOS 16, *)) {
+ XCTAssertEqual(width, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]);
+ XCTAssertEqual(height, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]);
+ } else {
+ XCTAssertEqual(0, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]);
+ XCTAssertEqual(0, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]);
+ }
+ XCTAssertEqual(
+ (FourCharCode)kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ [output.videoSettings[(id)kCVPixelBufferPixelFormatTypeKey] unsignedIntValue]);
+ return YES;
+ }]]);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCCertificateTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCCertificateTest.mm
new file mode 100644
index 0000000000..bc1347336c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCCertificateTest.mm
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCConfiguration+Private.h"
+#import "api/peerconnection/RTCConfiguration.h"
+#import "api/peerconnection/RTCIceServer.h"
+#import "api/peerconnection/RTCMediaConstraints.h"
+#import "api/peerconnection/RTCPeerConnection.h"
+#import "api/peerconnection/RTCPeerConnectionFactory.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCCertificateTest : XCTestCase
+@end
+
+@implementation RTCCertificateTest
+
+- (void)testCertificateIsUsedInConfig {
+ RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+ originalConfig.iceServers = @[ server ];
+
+ // Generate a new certificate.
+ RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = [RTC_OBJC_TYPE(RTCCertificate)
+ generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
+
+ // Store certificate in configuration.
+ originalConfig.certificate = originalCertificate;
+
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ // Create PeerConnection with this certificate.
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+ [factory peerConnectionWithConfiguration:originalConfig constraints:constraints delegate:nil];
+
+ // Retrieve certificate from the configuration.
+ RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = peerConnection.configuration;
+
+ // Extract PEM strings from original certificate.
+ std::string originalPrivateKeyField = [[originalCertificate private_key] UTF8String];
+ std::string originalCertificateField = [[originalCertificate certificate] UTF8String];
+
+ // Extract PEM strings from certificate retrieved from configuration.
+ RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = retrievedConfig.certificate;
+ std::string retrievedPrivateKeyField = [[retrievedCertificate private_key] UTF8String];
+ std::string retrievedCertificateField = [[retrievedCertificate certificate] UTF8String];
+
+ // Check that the original certificate and retrieved certificate match.
+ EXPECT_EQ(originalPrivateKeyField, retrievedPrivateKeyField);
+ EXPECT_EQ(originalCertificateField, retrievedCertificateField);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCConfigurationTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCConfigurationTest.mm
new file mode 100644
index 0000000000..18cc97191e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCConfigurationTest.mm
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCConfiguration+Private.h"
+#import "api/peerconnection/RTCConfiguration.h"
+#import "api/peerconnection/RTCIceServer.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCConfigurationTest : XCTestCase
+@end
+
+@implementation RTCConfigurationTest
+
+- (void)testConversionToNativeConfiguration {
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.iceServers = @[ server ];
+ config.iceTransportPolicy = RTCIceTransportPolicyRelay;
+ config.bundlePolicy = RTCBundlePolicyMaxBundle;
+ config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
+ config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
+ config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
+ const int maxPackets = 60;
+ const int timeout = 1;
+ const int interval = 2;
+ config.audioJitterBufferMaxPackets = maxPackets;
+ config.audioJitterBufferFastAccelerate = YES;
+ config.iceConnectionReceivingTimeout = timeout;
+ config.iceBackupCandidatePairPingInterval = interval;
+ config.continualGatheringPolicy =
+ RTCContinualGatheringPolicyGatherContinually;
+ config.shouldPruneTurnPorts = YES;
+ config.cryptoOptions =
+ [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+ srtpEnableAes128Sha1_32CryptoCipher:YES
+ srtpEnableEncryptedRtpHeaderExtensions:YES
+ sframeRequireFrameEncryption:YES];
+ config.rtcpAudioReportIntervalMs = 2500;
+ config.rtcpVideoReportIntervalMs = 3750;
+
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
+ nativeConfig([config createNativeConfiguration]);
+ EXPECT_TRUE(nativeConfig.get());
+ EXPECT_EQ(1u, nativeConfig->servers.size());
+ webrtc::PeerConnectionInterface::IceServer nativeServer =
+ nativeConfig->servers.front();
+ EXPECT_EQ(1u, nativeServer.urls.size());
+ EXPECT_EQ("stun:stun1.example.net", nativeServer.urls.front());
+
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kRelay, nativeConfig->type);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle,
+ nativeConfig->bundle_policy);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate,
+ nativeConfig->rtcp_mux_policy);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled,
+ nativeConfig->tcp_candidate_policy);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost,
+ nativeConfig->candidate_network_policy);
+ EXPECT_EQ(maxPackets, nativeConfig->audio_jitter_buffer_max_packets);
+ EXPECT_EQ(true, nativeConfig->audio_jitter_buffer_fast_accelerate);
+ EXPECT_EQ(timeout, nativeConfig->ice_connection_receiving_timeout);
+ EXPECT_EQ(interval, nativeConfig->ice_backup_candidate_pair_ping_interval);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::GATHER_CONTINUALLY,
+ nativeConfig->continual_gathering_policy);
+ EXPECT_EQ(true, nativeConfig->prune_turn_ports);
+ EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_gcm_crypto_suites);
+ EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_aes128_sha1_32_crypto_cipher);
+ EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_encrypted_rtp_header_extensions);
+ EXPECT_EQ(true, nativeConfig->crypto_options->sframe.require_frame_encryption);
+ EXPECT_EQ(2500, nativeConfig->audio_rtcp_report_interval_ms());
+ EXPECT_EQ(3750, nativeConfig->video_rtcp_report_interval_ms());
+}
+
+- (void)testNativeConversionToConfiguration {
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.iceServers = @[ server ];
+ config.iceTransportPolicy = RTCIceTransportPolicyRelay;
+ config.bundlePolicy = RTCBundlePolicyMaxBundle;
+ config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
+ config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
+ config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
+ const int maxPackets = 60;
+ const int timeout = 1;
+ const int interval = 2;
+ config.audioJitterBufferMaxPackets = maxPackets;
+ config.audioJitterBufferFastAccelerate = YES;
+ config.iceConnectionReceivingTimeout = timeout;
+ config.iceBackupCandidatePairPingInterval = interval;
+ config.continualGatheringPolicy =
+ RTCContinualGatheringPolicyGatherContinually;
+ config.shouldPruneTurnPorts = YES;
+ config.cryptoOptions =
+ [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+ srtpEnableAes128Sha1_32CryptoCipher:NO
+ srtpEnableEncryptedRtpHeaderExtensions:NO
+ sframeRequireFrameEncryption:NO];
+ config.rtcpAudioReportIntervalMs = 1500;
+ config.rtcpVideoReportIntervalMs = 2150;
+
+ webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig =
+ [config createNativeConfiguration];
+ RTC_OBJC_TYPE(RTCConfiguration) *newConfig =
+ [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:*nativeConfig];
+ EXPECT_EQ([config.iceServers count], newConfig.iceServers.count);
+ RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
+ RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
+ EXPECT_EQ(origServer.urlStrings.count, newServer.urlStrings.count);
+ std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
+ std::string url = newServer.urlStrings.firstObject.UTF8String;
+ EXPECT_EQ(origUrl, url);
+
+ EXPECT_EQ(config.iceTransportPolicy, newConfig.iceTransportPolicy);
+ EXPECT_EQ(config.bundlePolicy, newConfig.bundlePolicy);
+ EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy);
+ EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy);
+ EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy);
+ EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets);
+ EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate);
+ EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout);
+ EXPECT_EQ(config.iceBackupCandidatePairPingInterval,
+ newConfig.iceBackupCandidatePairPingInterval);
+ EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy);
+ EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableGcmCryptoSuites,
+ newConfig.cryptoOptions.srtpEnableGcmCryptoSuites);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher,
+ newConfig.cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions,
+ newConfig.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions);
+ EXPECT_EQ(config.cryptoOptions.sframeRequireFrameEncryption,
+ newConfig.cryptoOptions.sframeRequireFrameEncryption);
+ EXPECT_EQ(config.rtcpAudioReportIntervalMs, newConfig.rtcpAudioReportIntervalMs);
+ EXPECT_EQ(config.rtcpVideoReportIntervalMs, newConfig.rtcpVideoReportIntervalMs);
+}
+
+- (void)testDefaultValues {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ EXPECT_EQ(config.cryptoOptions, nil);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm
new file mode 100644
index 0000000000..ccebd74198
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCDataChannelConfiguration+Private.h"
+#import "api/peerconnection/RTCDataChannelConfiguration.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCDataChannelConfigurationTest : XCTestCase
+@end
+
+@implementation RTCDataChannelConfigurationTest
+
+- (void)testConversionToNativeDataChannelInit {
+ BOOL isOrdered = NO;
+ int maxPacketLifeTime = 5;
+ int maxRetransmits = 4;
+ BOOL isNegotiated = YES;
+ int channelId = 4;
+ NSString *protocol = @"protocol";
+
+ RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
+ [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
+ dataChannelConfig.isOrdered = isOrdered;
+ dataChannelConfig.maxPacketLifeTime = maxPacketLifeTime;
+ dataChannelConfig.maxRetransmits = maxRetransmits;
+ dataChannelConfig.isNegotiated = isNegotiated;
+ dataChannelConfig.channelId = channelId;
+ dataChannelConfig.protocol = protocol;
+
+ webrtc::DataChannelInit nativeInit = dataChannelConfig.nativeDataChannelInit;
+ EXPECT_EQ(isOrdered, nativeInit.ordered);
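+ // The native DataChannelInit keeps the legacy field name maxRetransmitTime for maxPacketLifeTime.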
+ EXPECT_EQ(maxPacketLifeTime, nativeInit.maxRetransmitTime);
+ EXPECT_EQ(maxRetransmits, nativeInit.maxRetransmits);
+ EXPECT_EQ(isNegotiated, nativeInit.negotiated);
+ EXPECT_EQ(channelId, nativeInit.id);
+ EXPECT_EQ(protocol.stdString, nativeInit.protocol);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m
new file mode 100644
index 0000000000..02bef9bfb7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#import <Foundation/Foundation.h>
+
+#import <WebRTC/WebRTC.h>
+
+@interface RTCDoNotPutCPlusPlusInFrameworkHeaders : XCTestCase
+@end
+
+@implementation RTCDoNotPutCPlusPlusInFrameworkHeaders
+
+- (void)testNoCPlusPlusInFrameworkHeaders {
+ NSString *fullPath = [NSString stringWithFormat:@"%s", __FILE__];
+ NSString *extension = fullPath.pathExtension;
+
+ XCTAssertEqualObjects(
+ @"m", extension, @"Do not rename %@. It should end with .m.", fullPath.lastPathComponent);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCEncodedImage_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCEncodedImage_xctest.mm
new file mode 100644
index 0000000000..84804fee87
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCEncodedImage_xctest.mm
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/peerconnection/RTCEncodedImage+Private.h"
+
+#import <XCTest/XCTest.h>
+
+@interface RTCEncodedImageTests : XCTestCase
+@end
+
+@implementation RTCEncodedImageTests
+
+- (void)testInitializedWithNativeEncodedImage {
+ const auto encoded_data = webrtc::EncodedImageBuffer::Create();
+ webrtc::EncodedImage encoded_image;
+ encoded_image.SetEncodedData(encoded_data);
+
+ RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
+ [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
+
+ XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), encoded_data);
+}
+
+- (void)testInitWithNSData {
+ NSData *bufferData = [NSData data];
+ RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
+ encodedImage.buffer = bufferData;
+
+ webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
+ XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr);
+ XCTAssertEqual(result_encoded_image.GetEncodedData()->data(), bufferData.bytes);
+}
+
+- (void)testRetainsNativeEncodedImage {
+ RTC_OBJC_TYPE(RTCEncodedImage) * encodedImage;
+ {
+ const auto encoded_data = webrtc::EncodedImageBuffer::Create();
+ webrtc::EncodedImage encoded_image;
+ encoded_image.SetEncodedData(encoded_data);
+ encodedImage =
+ [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
+ }
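+ // The native image and its buffer are out of scope here; the ObjC wrapper must be keeping the encoded data alive.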
+ webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
+ XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr);
+ XCTAssertTrue(result_encoded_image.GetEncodedData()->data() != nullptr);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm
new file mode 100644
index 0000000000..2407c88c1a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/capturer/RTCFileVideoCapturer.h"
+
+#import <XCTest/XCTest.h>
+
+#include "rtc_base/gunit.h"
+
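+// "foreman" is presumably the standard test clip bundled with the test target;
+// the missing-file case is covered separately below.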
+NSString *const kTestFileName = @"foreman.mp4";
+static const int kTestTimeoutMs = 5 * 1000; // 5 seconds.
+
+@interface MockCapturerDelegate : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
+
+@property(nonatomic, assign) NSInteger capturedFramesCount;
+
+@end
+
+@implementation MockCapturerDelegate
+@synthesize capturedFramesCount = _capturedFramesCount;
+
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+ didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ self.capturedFramesCount++;
+}
+
+@end
+
+NS_CLASS_AVAILABLE_IOS(10)
+@interface RTCFileVideoCapturerTests : XCTestCase
+
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * capturer;
+@property(nonatomic, strong) MockCapturerDelegate *mockDelegate;
+
+@end
+
+@implementation RTCFileVideoCapturerTests
+@synthesize capturer = _capturer;
+@synthesize mockDelegate = _mockDelegate;
+
+- (void)setUp {
+ self.mockDelegate = [[MockCapturerDelegate alloc] init];
+ self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:self.mockDelegate];
+}
+
+- (void)tearDown {
+ self.capturer = nil;
+ self.mockDelegate = nil;
+}
+
+- (void)testCaptureWhenFileNotInBundle {
+ __block BOOL errorOccurred = NO;
+
+ RTCFileVideoCapturerErrorBlock errorBlock = ^void(NSError *error) {
+ errorOccurred = YES;
+ };
+
+ [self.capturer startCapturingFromFileNamed:@"not_in_bundle.mov" onError:errorBlock];
+ ASSERT_TRUE_WAIT(errorOccurred, kTestTimeoutMs);
+}
+
+- (void)testSecondStartCaptureCallFails {
+ __block BOOL secondError = NO;
+
+ RTCFileVideoCapturerErrorBlock firstErrorBlock = ^void(NSError *error) {
+ // This block should never be called.
+ NSLog(@"Error: %@", [error userInfo]);
+ ASSERT_TRUE(false);
+ };
+
+ RTCFileVideoCapturerErrorBlock secondErrorBlock = ^void(NSError *error) {
+ secondError = YES;
+ };
+
+ [self.capturer startCapturingFromFileNamed:kTestFileName onError:firstErrorBlock];
+ [self.capturer startCapturingFromFileNamed:kTestFileName onError:secondErrorBlock];
+
+ ASSERT_TRUE_WAIT(secondError, kTestTimeoutMs);
+}
+
+- (void)testStartStopCapturer {
+#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 10, *)) {
+ [self.capturer startCapturingFromFileNamed:kTestFileName onError:nil];
+
+ __block BOOL done = NO;
+ __block NSInteger capturedFrames = -1;
+ NSInteger capturedFramesAfterStop = -1;
+
+ // Dispatch `stopCapture` after a delay so that the capturer has had a chance
+ // to capture several frames first.
+ dispatch_time_t captureDelay = dispatch_time(DISPATCH_TIME_NOW, 2 * NSEC_PER_SEC); // 2 seconds.
+ dispatch_after(captureDelay, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ capturedFrames = self.mockDelegate.capturedFramesCount;
+ [self.capturer stopCapture];
+ done = YES;
+ });
+ WAIT(done, kTestTimeoutMs);
+
+ capturedFramesAfterStop = self.mockDelegate.capturedFramesCount;
+ ASSERT_TRUE(capturedFrames != -1);
+ ASSERT_EQ(capturedFrames, capturedFramesAfterStop);
+ }
+#endif
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m
new file mode 100644
index 0000000000..ec9dc41796
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/video_codec/RTCH264ProfileLevelId.h"
+
+#import <XCTest/XCTest.h>
+
+@interface RTCH264ProfileLevelIdTests : XCTestCase
+
+@end
+
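+// The hex strings encode three bytes: profile_idc, the profile-iop constraint
+// flags, and level_idc (0x1f = Level 3.1).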
+static NSString *level31ConstrainedHigh = @"640c1f";
+static NSString *level31ConstrainedBaseline = @"42e01f";
+
+@implementation RTCH264ProfileLevelIdTests
+
+- (void)testInitWithString {
+ RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+ [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedHigh];
+ XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedHigh);
+ XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
+
+ profileLevelId =
+ [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedBaseline];
+ XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedBaseline);
+ XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
+}
+
+- (void)testInitWithProfileAndLevel {
+ RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+ [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithProfile:RTCH264ProfileConstrainedHigh
+ level:RTCH264Level3_1];
+ XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedHigh);
+
+ profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
+ initWithProfile:RTCH264ProfileConstrainedBaseline
+ level:RTCH264Level3_1];
+ XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedBaseline);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCIceCandidateTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCIceCandidateTest.mm
new file mode 100644
index 0000000000..576411985d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCIceCandidateTest.mm
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <memory>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCIceCandidate+Private.h"
+#import "api/peerconnection/RTCIceCandidate.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCIceCandidateTest : XCTestCase
+@end
+
+@implementation RTCIceCandidateTest
+
+- (void)testCandidate {
+ NSString *sdp = @"candidate:4025901590 1 udp 2122265343 "
+ "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
+ "59052 typ host generation 0";
+
+ RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
+ [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp sdpMLineIndex:0 sdpMid:@"audio"];
+
+ std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate =
+ candidate.nativeCandidate;
+ EXPECT_EQ("audio", nativeCandidate->sdp_mid());
+ EXPECT_EQ(0, nativeCandidate->sdp_mline_index());
+
+ std::string sdpString;
+ nativeCandidate->ToString(&sdpString);
+ EXPECT_EQ(sdp.stdString, sdpString);
+}
+
+- (void)testInitFromNativeCandidate {
+ std::string sdp("candidate:4025901590 1 udp 2122265343 "
+ "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
+ "59052 typ host generation 0");
+ webrtc::IceCandidateInterface *nativeCandidate =
+ webrtc::CreateIceCandidate("audio", 0, sdp, nullptr);
+
+ RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate =
+ [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:nativeCandidate];
+ EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]);
+ EXPECT_EQ(0, iceCandidate.sdpMLineIndex);
+
+ EXPECT_EQ(sdp, iceCandidate.sdp.stdString);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCIceServerTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCIceServerTest.mm
new file mode 100644
index 0000000000..772653c4dc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCIceServerTest.mm
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCIceServer+Private.h"
+#import "api/peerconnection/RTCIceServer.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCIceServerTest : XCTestCase
+@end
+
+@implementation RTCIceServerTest
+
+- (void)testOneURLServer {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"stun:stun1.example.net" ]];
+
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("stun:stun1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("", iceStruct.username);
+ EXPECT_EQ("", iceStruct.password);
+}
+
+- (void)testTwoURLServer {
+ RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
+ initWithURLStrings:@[ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
+
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(2u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("turn2:turn2.example.net", iceStruct.urls.back());
+ EXPECT_EQ("", iceStruct.username);
+ EXPECT_EQ("", iceStruct.password);
+}
+
+- (void)testPasswordCredential {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+}
+
+- (void)testHostname {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"
+ tlsCertPolicy:RTCTlsCertPolicySecure
+ hostname:@"hostname"];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+ EXPECT_EQ("hostname", iceStruct.hostname);
+}
+
+- (void)testTlsAlpnProtocols {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"
+ tlsCertPolicy:RTCTlsCertPolicySecure
+ hostname:@"hostname"
+ tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+ EXPECT_EQ("hostname", iceStruct.hostname);
+ EXPECT_EQ(2u, iceStruct.tls_alpn_protocols.size());
+}
+
+- (void)testTlsEllipticCurves {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"
+ tlsCertPolicy:RTCTlsCertPolicySecure
+ hostname:@"hostname"
+ tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
+ tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+ EXPECT_EQ("hostname", iceStruct.hostname);
+ EXPECT_EQ(2u, iceStruct.tls_alpn_protocols.size());
+ EXPECT_EQ(2u, iceStruct.tls_elliptic_curves.size());
+}
+
+- (void)testInitFromNativeServer {
+ webrtc::PeerConnectionInterface::IceServer nativeServer;
+ nativeServer.username = "username";
+ nativeServer.password = "password";
+ nativeServer.urls.push_back("stun:stun.example.net");
+ nativeServer.hostname = "hostname";
+ nativeServer.tls_alpn_protocols.push_back("proto1");
+ nativeServer.tls_alpn_protocols.push_back("proto2");
+ nativeServer.tls_elliptic_curves.push_back("curve1");
+ nativeServer.tls_elliptic_curves.push_back("curve2");
+
+ RTC_OBJC_TYPE(RTCIceServer) *iceServer =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:nativeServer];
+ EXPECT_EQ(1u, iceServer.urlStrings.count);
+ EXPECT_EQ("stun:stun.example.net",
+ [NSString stdStringForString:iceServer.urlStrings.firstObject]);
+ EXPECT_EQ("username", [NSString stdStringForString:iceServer.username]);
+ EXPECT_EQ("password", [NSString stdStringForString:iceServer.credential]);
+ EXPECT_EQ("hostname", [NSString stdStringForString:iceServer.hostname]);
+ EXPECT_EQ(2u, iceServer.tlsAlpnProtocols.count);
+ EXPECT_EQ(2u, iceServer.tlsEllipticCurves.count);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCMTLVideoView_xctest.m
new file mode 100644
index 0000000000..f152eeec91
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCMTLVideoView_xctest.m
@@ -0,0 +1,298 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#import <Foundation/Foundation.h>
+#import <MetalKit/MetalKit.h>
+#import <OCMock/OCMock.h>
+
+#import "components/renderer/metal/RTCMTLVideoView.h"
+
+#import "api/video_frame_buffer/RTCNativeI420Buffer.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/renderer/metal/RTCMTLNV12Renderer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+static size_t kBufferWidth = 200;
+static size_t kBufferHeight = 200;
+
+// Extension of RTC_OBJC_TYPE(RTCMTLVideoView) for testing purposes.
+@interface RTC_OBJC_TYPE (RTCMTLVideoView) (Testing)
+
+@property(nonatomic, readonly) MTKView *metalView;
+
++ (BOOL)isMetalAvailable;
++ (UIView *)createMetalView:(CGRect)frame;
++ (id<RTCMTLRenderer>)createNV12Renderer;
++ (id<RTCMTLRenderer>)createI420Renderer;
+- (void)drawInMTKView:(id)view;
+@end
+
+@interface RTCMTLVideoViewTests : XCTestCase
+@property(nonatomic, strong) id classMock;
+@property(nonatomic, strong) id rendererNV12Mock;
+@property(nonatomic, strong) id rendererI420Mock;
+@property(nonatomic, strong) id frameMock;
+@end
+
+@implementation RTCMTLVideoViewTests
+
+@synthesize classMock = _classMock;
+@synthesize rendererNV12Mock = _rendererNV12Mock;
+@synthesize rendererI420Mock = _rendererI420Mock;
+@synthesize frameMock = _frameMock;
+
+- (void)setUp {
+ self.classMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLVideoView) class]);
+ [self startMockingNilView];
+}
+
+- (void)tearDown {
+ [self.classMock stopMocking];
+ [self.rendererI420Mock stopMocking];
+ [self.rendererNV12Mock stopMocking];
+ [self.frameMock stopMocking];
+ self.classMock = nil;
+ self.rendererI420Mock = nil;
+ self.rendererNV12Mock = nil;
+ self.frameMock = nil;
+}
+
+- (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer {
+ id frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
+ if (hasCVPixelBuffer) {
+ CVPixelBufferRef pixelBufferRef;
+ CVPixelBufferCreate(kCFAllocatorDefault,
+ kBufferWidth,
+ kBufferHeight,
+ kCVPixelFormatType_420YpCbCr8Planar,
+ nil,
+ &pixelBufferRef);
+ OCMStub([frameMock buffer])
+ .andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]);
+ } else {
+ OCMStub([frameMock buffer])
+ .andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithWidth:kBufferWidth
+ height:kBufferHeight]);
+ }
+ OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) width]).andReturn(kBufferWidth);
+ OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) height]).andReturn(kBufferHeight);
+ OCMStub([frameMock timeStampNs]).andReturn(arc4random_uniform(INT_MAX));
+ return frameMock;
+}
+
+- (id)rendererMockWithSuccessfulSetup:(BOOL)success {
+ id rendererMock = OCMClassMock([RTCMTLRenderer class]);
+ OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(success);
+ return rendererMock;
+}
+
+- (void)startMockingNilView {
+ // Use OCMock 2 syntax here until OCMock is upgraded to 3.4
+ [[[self.classMock stub] andReturn:nil] createMetalView:CGRectZero];
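+ // (The OCMock 3 equivalent would be roughly:
+ // OCMStub([self.classMock createMetalView:CGRectZero]).andReturn(nil).)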
+}
+
+#pragma mark - Test cases
+
+- (void)testInitAssertsIfMetalUnavailable {
+ // given
+ OCMStub([self.classMock isMetalAvailable]).andReturn(NO);
+
+ // when
+ BOOL asserts = NO;
+ @try {
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
+ (void)realView;
+ } @catch (NSException *ex) {
+ asserts = YES;
+ }
+
+ XCTAssertTrue(asserts);
+}
+
+- (void)testRTCVideoRenderNilFrameCallback {
+ // given
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ self.frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
+
+ [[self.frameMock reject] buffer];
+ [[self.classMock reject] createNV12Renderer];
+ [[self.classMock reject] createI420Renderer];
+
+ // when
+ [realView renderFrame:nil];
+ [realView drawInMTKView:realView.metalView];
+
+ // then
+ [self.frameMock verify];
+ [self.classMock verify];
+}
+
+- (void)testRTCVideoRenderFrameCallbackI420 {
+ // given
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererI420Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:NO];
+
+ OCMExpect([self.rendererI420Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
+ [[self.classMock reject] createNV12Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+
+ // when
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ // then
+ [self.rendererI420Mock verify];
+ [self.classMock verify];
+}
+
+- (void)testRTCVideoRenderFrameCallbackNV12 {
+ // given
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+ [[self.classMock reject] createI420Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+
+ // when
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ // then
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+}
+
+- (void)testRTCVideoRenderWorksAfterReconstruction {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+ [[self.classMock reject] createI420Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+
+ // Recreate view.
+ realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ // View should reinit the renderer.
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+}
+
+- (void)testDontRedrawOldFrame {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+ [[self.classMock reject] createI420Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+
+ [[self.rendererNV12Mock reject] drawFrame:[OCMArg any]];
+
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ [self.rendererNV12Mock verify];
+}
+
+- (void)testDoDrawNewFrame {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+ [[self.classMock reject] createI420Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+
+ // Get new frame.
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ [self.rendererNV12Mock verify];
+}
+
+- (void)testReportsSizeChangesToDelegate {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+
+ id delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate)));
+ CGSize size = CGSizeMake(640, 480);
+ OCMExpect([delegateMock videoView:[OCMArg any] didChangeVideoSize:size]);
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ realView.delegate = delegateMock;
+ [realView setSize:size];
+
+ // Delegate method is invoked with a dispatch_async.
+ OCMVerifyAllWithDelay(delegateMock, 1);
+}
+
+- (void)testSetContentMode {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ id metalKitView = OCMClassMock([MTKView class]);
+ [[[[self.classMock stub] ignoringNonObjectArgs] andReturn:metalKitView]
+ createMetalView:CGRectZero];
+ OCMExpect([metalKitView setContentMode:UIViewContentModeScaleAspectFill]);
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init];
+ [realView setVideoContentMode:UIViewContentModeScaleAspectFill];
+
+ OCMVerifyAll(metalKitView);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCMediaConstraintsTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCMediaConstraintsTest.mm
new file mode 100644
index 0000000000..6ed7859ba1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCMediaConstraintsTest.mm
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <memory>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCMediaConstraints+Private.h"
+#import "api/peerconnection/RTCMediaConstraints.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCMediaConstraintsTests : XCTestCase
+@end
+
+@implementation RTCMediaConstraintsTests
+
+- (void)testMediaConstraints {
+ NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"};
+ NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"};
+
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatory
+ optionalConstraints:optional];
+ std::unique_ptr<webrtc::MediaConstraints> nativeConstraints =
+ [constraints nativeConstraints];
+
+ webrtc::MediaConstraints::Constraints nativeMandatory = nativeConstraints->GetMandatory();
+ [self expectConstraints:mandatory inNativeConstraints:nativeMandatory];
+
+ webrtc::MediaConstraints::Constraints nativeOptional = nativeConstraints->GetOptional();
+ [self expectConstraints:optional inNativeConstraints:nativeOptional];
+}
+
+- (void)expectConstraints:(NSDictionary *)constraints
+ inNativeConstraints:(webrtc::MediaConstraints::Constraints)nativeConstraints {
+ EXPECT_EQ(constraints.count, nativeConstraints.size());
+
+ for (NSString *key in constraints) {
+ NSString *value = [constraints objectForKey:key];
+
+ std::string nativeValue;
+ bool found = nativeConstraints.FindFirst(key.stdString, &nativeValue);
+ EXPECT_TRUE(found);
+ EXPECT_EQ(value.stdString, nativeValue);
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCNV12TextureCache_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCNV12TextureCache_xctest.m
new file mode 100644
index 0000000000..7bdc538f67
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCNV12TextureCache_xctest.m
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <CoreVideo/CoreVideo.h>
+#import <Foundation/Foundation.h>
+#import <GLKit/GLKit.h>
+#import <XCTest/XCTest.h>
+
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/renderer/opengl/RTCNV12TextureCache.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+@interface RTCNV12TextureCacheTests : XCTestCase
+@end
+
+@implementation RTCNV12TextureCacheTests {
+ EAGLContext *_glContext;
+ RTCNV12TextureCache *_nv12TextureCache;
+}
+
+- (void)setUp {
+ [super setUp];
+ _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
+ if (!_glContext) {
+ _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ }
+ _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
+}
+
+- (void)tearDown {
+ _nv12TextureCache = nil;
+ _glContext = nil;
+ [super tearDown];
+}
+
+- (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame {
+ CVPixelBufferRef nullPixelBuffer = NULL;
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:nullPixelBuffer];
+ RTC_OBJC_TYPE(RTCVideoFrame) *badFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+ [_nv12TextureCache uploadFrameToTextures:badFrame];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
new file mode 100644
index 0000000000..5ba5a52a53
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
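+// The extern "C" guard presumably keeps OCMock's declarations at C linkage
+// when this file is compiled as Objective-C++.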
+#ifdef __cplusplus
+extern "C" {
+#endif
+#import <OCMock/OCMock.h>
+#ifdef __cplusplus
+}
+#endif
+#import "api/peerconnection/RTCPeerConnectionFactory+Native.h"
+#import "api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h"
+#import "api/peerconnection/RTCPeerConnectionFactoryBuilder.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+#include "rtc_base/gunit.h"
+#include "rtc_base/system/unused.h"
+
+@interface RTCPeerConnectionFactoryBuilderTests : XCTestCase
+@end
+
+@implementation RTCPeerConnectionFactoryBuilderTests
+
+- (void)testBuilder {
+ id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
+ OCMExpect([factoryMock alloc]).andReturn(factoryMock);
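+ // RTC_UNUSED silences the unused-value warning from the bare OCMock 2-style expectation below.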
+ RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] ignoringNonObjectArgs]
+ initWithNativeAudioEncoderFactory:nullptr
+ nativeAudioDecoderFactory:nullptr
+ nativeVideoEncoderFactory:nullptr
+ nativeVideoDecoderFactory:nullptr
+ audioDeviceModule:nullptr
+ audioProcessingModule:nullptr]);
+ RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
+ [builder createPeerConnectionFactory];
+ EXPECT_TRUE(peerConnectionFactory != nil);
+ OCMVerifyAll(factoryMock);
+}
+
+- (void)testDefaultComponentsBuilder {
+ id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
+ OCMExpect([factoryMock alloc]).andReturn(factoryMock);
+ RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] ignoringNonObjectArgs]
+ initWithNativeAudioEncoderFactory:nullptr
+ nativeAudioDecoderFactory:nullptr
+ nativeVideoEncoderFactory:nullptr
+ nativeVideoDecoderFactory:nullptr
+ audioDeviceModule:nullptr
+ audioProcessingModule:nullptr]);
+ RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
+ [builder createPeerConnectionFactory];
+ EXPECT_TRUE(peerConnectionFactory != nil);
+ OCMVerifyAll(factoryMock);
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m
new file mode 100644
index 0000000000..0c5a96cd2f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m
@@ -0,0 +1,381 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/peerconnection/RTCAudioSource.h"
+#import "api/peerconnection/RTCConfiguration.h"
+#import "api/peerconnection/RTCDataChannel.h"
+#import "api/peerconnection/RTCDataChannelConfiguration.h"
+#import "api/peerconnection/RTCMediaConstraints.h"
+#import "api/peerconnection/RTCMediaStreamTrack.h"
+#import "api/peerconnection/RTCPeerConnection.h"
+#import "api/peerconnection/RTCPeerConnectionFactory.h"
+#import "api/peerconnection/RTCRtpReceiver.h"
+#import "api/peerconnection/RTCRtpSender.h"
+#import "api/peerconnection/RTCRtpTransceiver.h"
+#import "api/peerconnection/RTCSessionDescription.h"
+#import "api/peerconnection/RTCVideoSource.h"
+#import "rtc_base/system/unused.h"
+
+#import <XCTest/XCTest.h>
+
+@interface RTCPeerConnectionFactoryTests : XCTestCase
+@end
+
+@implementation RTCPeerConnectionFactoryTests
+
+- (void)testPeerConnectionLifetime {
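+ // Nested autoreleasepools release the factory before the outer references;
+ // the test passes if teardown in this order does not crash.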
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ [peerConnection close];
+ factory = nil;
+ }
+ peerConnection = nil;
+ }
+
+ XCTAssertTrue(true, @"Expect test does not crash");
+}
+
+- (void)testMediaStreamLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCMediaStream) * mediaStream;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ mediaStream = [factory mediaStreamWithStreamId:@"mediaStream"];
+ factory = nil;
+ }
+ mediaStream = nil;
+ RTC_UNUSED(mediaStream);
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testDataChannelLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
+ [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+ RTC_OBJC_TYPE(RTCDataChannel) * dataChannel;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ dataChannel =
+ [peerConnection dataChannelForLabel:@"test_channel" configuration:dataChannelConfig];
+ XCTAssertNotNil(dataChannel);
+ [peerConnection close];
+ peerConnection = nil;
+ factory = nil;
+ }
+ dataChannel = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testRTCRtpTransceiverLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
+ [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+ RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ transceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio init:init];
+ XCTAssertNotNil(transceiver);
+ [peerConnection close];
+ peerConnection = nil;
+ factory = nil;
+ }
+    transceiver = nil;
+ }
+
+  XCTAssertTrue(true, @"Expect test does not crash");
+}
+
+// TODO(crbug.com/webrtc/13989): Remove call to CreateSender in senderWithKind.
+#if !TARGET_IPHONE_SIMULATOR
+- (void)testRTCRtpSenderLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+ RTC_OBJC_TYPE(RTCRtpSender) * sender;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"];
+ XCTAssertNotNil(sender);
+ [peerConnection close];
+ peerConnection = nil;
+ factory = nil;
+ }
+ sender = nil;
+ }
+
+  XCTAssertTrue(true, @"Expect test does not crash");
+}
+
+- (void)testRTCRtpReceiverLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
+ RTC_OBJC_TYPE(RTCPeerConnection) * pc2;
+
+ NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers1;
+ NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers2;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ [pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];
+
+ pc2 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ [pc2 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];
+
+ NSTimeInterval negotiationTimeout = 15;
+ XCTAssertTrue([self negotiatePeerConnection:pc1
+ withPeerConnection:pc2
+ negotiationTimeout:negotiationTimeout]);
+
+ XCTAssertEqual(pc1.signalingState, RTCSignalingStateStable);
+ XCTAssertEqual(pc2.signalingState, RTCSignalingStateStable);
+
+ receivers1 = pc1.receivers;
+ receivers2 = pc2.receivers;
+ XCTAssertTrue(receivers1.count > 0);
+ XCTAssertTrue(receivers2.count > 0);
+ [pc1 close];
+ [pc2 close];
+ pc1 = nil;
+ pc2 = nil;
+ factory = nil;
+ }
+ receivers1 = nil;
+ receivers2 = nil;
+ }
+
+  XCTAssertTrue(true, @"Expect test does not crash");
+}
+#endif
+
+- (void)testAudioSourceLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCAudioSource) * audioSource;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ audioSource = [factory audioSourceWithConstraints:nil];
+ XCTAssertNotNil(audioSource);
+ factory = nil;
+ }
+ audioSource = nil;
+ }
+
+  XCTAssertTrue(true, @"Expect test does not crash");
+}
+
+- (void)testVideoSourceLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCVideoSource) * videoSource;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ videoSource = [factory videoSource];
+ XCTAssertNotNil(videoSource);
+ factory = nil;
+ }
+ videoSource = nil;
+ }
+
+  XCTAssertTrue(true, @"Expect test does not crash");
+}
+
+- (void)testAudioTrackLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCAudioTrack) * audioTrack;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ audioTrack = [factory audioTrackWithTrackId:@"audioTrack"];
+ XCTAssertNotNil(audioTrack);
+ factory = nil;
+ }
+ audioTrack = nil;
+ }
+
+  XCTAssertTrue(true, @"Expect test does not crash");
+}
+
+- (void)testVideoTrackLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCVideoTrack) * videoTrack;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ videoTrack = [factory videoTrackWithSource:[factory videoSource] trackId:@"videoTrack"];
+ XCTAssertNotNil(videoTrack);
+ factory = nil;
+ }
+ videoTrack = nil;
+ }
+
+  XCTAssertTrue(true, @"Expect test does not crash");
+}
+
+- (void)testRollback {
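+  // Applying a rollback description after setLocalDescription(offer) should
+  // return the signaling state to stable.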
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
+ kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
+ }
+ optionalConstraints:nil];
+
+ __block RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ __block RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
+    RTC_OBJC_TYPE(RTCSessionDescription) *rollback =
+        [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeRollback sdp:@""];
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+      dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
+      __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
+      [pc1 offerForConstraints:constraints
+          completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
+            XCTAssertNil(error);
+            XCTAssertNotNil(offer);
+
+            [weakPC1 setLocalDescription:offer
+                       completionHandler:^(NSError *error) {
+                         XCTAssertNil(error);
+                         [weakPC1 setLocalDescription:rollback
+                                    completionHandler:^(NSError *error) {
+                                      XCTAssertNil(error);
+                                      dispatch_semaphore_signal(negotiatedSem);
+                                    }];
+                       }];
+          }];
+      // Wait outside the completion handlers; blocking inside them would stall
+      // the thread that delivers the nested setLocalDescription callbacks.
+      NSTimeInterval negotiationTimeout = 15;
+      dispatch_semaphore_wait(
+          negotiatedSem,
+          dispatch_time(DISPATCH_TIME_NOW, (int64_t)(negotiationTimeout * NSEC_PER_SEC)));
+
+      XCTAssertEqual(pc1.signalingState, RTCSignalingStateStable);
+
+      [pc1 close];
+      pc1 = nil;
+      factory = nil;
+    }
+  }
+
+  XCTAssertTrue(true, @"Expect test does not crash");
+}
+
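+// Drives a complete offer/answer exchange between the two peer connections and
+// returns true if negotiation finishes before `timeout` elapses.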
+- (bool)negotiatePeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc1
+ withPeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc2
+ negotiationTimeout:(NSTimeInterval)timeout {
+ __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
+ __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2;
+ RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
+ kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
+ }
+ optionalConstraints:nil];
+
+ dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
+ [weakPC1 offerForConstraints:sdpConstraints
+ completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
+ XCTAssertNil(error);
+ XCTAssertNotNil(offer);
+ [weakPC1
+ setLocalDescription:offer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ [weakPC2
+ setRemoteDescription:offer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ [weakPC2
+ answerForConstraints:sdpConstraints
+ completionHandler:^(
+ RTC_OBJC_TYPE(RTCSessionDescription) * answer,
+ NSError * error) {
+ XCTAssertNil(error);
+ XCTAssertNotNil(answer);
+ [weakPC2
+ setLocalDescription:answer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ [weakPC1
+ setRemoteDescription:answer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ dispatch_semaphore_signal(negotiatedSem);
+ }];
+ }];
+ }];
+ }];
+ }];
+ }];
+
+ return 0 ==
+ dispatch_semaphore_wait(negotiatedSem,
+ dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC)));
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionTest.mm
new file mode 100644
index 0000000000..9ca8403559
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionTest.mm
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <memory>
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCConfiguration+Private.h"
+#import "api/peerconnection/RTCConfiguration.h"
+#import "api/peerconnection/RTCCryptoOptions.h"
+#import "api/peerconnection/RTCIceCandidate.h"
+#import "api/peerconnection/RTCIceServer.h"
+#import "api/peerconnection/RTCMediaConstraints.h"
+#import "api/peerconnection/RTCPeerConnection.h"
+#import "api/peerconnection/RTCPeerConnectionFactory+Native.h"
+#import "api/peerconnection/RTCPeerConnectionFactory.h"
+#import "api/peerconnection/RTCSessionDescription.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCPeerConnectionTests : XCTestCase
+@end
+
+@implementation RTCPeerConnectionTests
+
+- (void)testConfigurationGetter {
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ config.iceServers = @[ server ];
+ config.iceTransportPolicy = RTCIceTransportPolicyRelay;
+ config.bundlePolicy = RTCBundlePolicyMaxBundle;
+ config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
+ config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
+ config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
+ const int maxPackets = 60;
+ const int timeout = 1500;
+ const int interval = 2000;
+ config.audioJitterBufferMaxPackets = maxPackets;
+ config.audioJitterBufferFastAccelerate = YES;
+ config.iceConnectionReceivingTimeout = timeout;
+ config.iceBackupCandidatePairPingInterval = interval;
+ config.continualGatheringPolicy =
+ RTCContinualGatheringPolicyGatherContinually;
+ config.shouldPruneTurnPorts = YES;
+ config.activeResetSrtpParams = YES;
+ config.cryptoOptions =
+ [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+ srtpEnableAes128Sha1_32CryptoCipher:YES
+ srtpEnableEncryptedRtpHeaderExtensions:NO
+ sframeRequireFrameEncryption:NO];
+
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ RTC_OBJC_TYPE(RTCConfiguration) * newConfig;
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+        [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ newConfig = peerConnection.configuration;
+
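+    // The bitrate triple must satisfy min <= current <= max; the second call
+    // violates that ordering and is expected to fail.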
+ EXPECT_TRUE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:100000]
+ currentBitrateBps:[NSNumber numberWithInt:5000000]
+ maxBitrateBps:[NSNumber numberWithInt:500000000]]);
+ EXPECT_FALSE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:2]
+ currentBitrateBps:[NSNumber numberWithInt:1]
+ maxBitrateBps:nil]);
+ }
+
+ EXPECT_EQ([config.iceServers count], [newConfig.iceServers count]);
+ RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
+ RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
+ std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
+ std::string url = newServer.urlStrings.firstObject.UTF8String;
+ EXPECT_EQ(origUrl, url);
+
+ EXPECT_EQ(config.iceTransportPolicy, newConfig.iceTransportPolicy);
+ EXPECT_EQ(config.bundlePolicy, newConfig.bundlePolicy);
+ EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy);
+ EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy);
+ EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy);
+ EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets);
+ EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate);
+ EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout);
+ EXPECT_EQ(config.iceBackupCandidatePairPingInterval,
+ newConfig.iceBackupCandidatePairPingInterval);
+ EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy);
+ EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts);
+ EXPECT_EQ(config.activeResetSrtpParams, newConfig.activeResetSrtpParams);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableGcmCryptoSuites,
+ newConfig.cryptoOptions.srtpEnableGcmCryptoSuites);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher,
+ newConfig.cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions,
+ newConfig.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions);
+ EXPECT_EQ(config.cryptoOptions.sframeRequireFrameEncryption,
+ newConfig.cryptoOptions.sframeRequireFrameEncryption);
+}
+
+- (void)testWithDependencies {
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ config.iceServers = @[ server ];
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ std::unique_ptr<webrtc::PeerConnectionDependencies> pc_dependencies =
+ std::make_unique<webrtc::PeerConnectionDependencies>(nullptr);
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+ [factory peerConnectionWithDependencies:config
+                                    constraints:constraints
+ dependencies:std::move(pc_dependencies)
+ delegate:nil];
+ ASSERT_NE(peerConnection, nil);
+ }
+}
+
+- (void)testWithInvalidSDP {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+      [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+
+ dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
+ [peerConnection setRemoteDescription:[[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
+ initWithType:RTCSdpTypeOffer
+ sdp:@"invalid"]
+ completionHandler:^(NSError *error) {
+ ASSERT_NE(error, nil);
+ if (error != nil) {
+ dispatch_semaphore_signal(negotiatedSem);
+ }
+ }];
+
+ NSTimeInterval timeout = 5;
+ ASSERT_EQ(
+ 0,
+ dispatch_semaphore_wait(negotiatedSem,
+ dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC))));
+ [peerConnection close];
+}
+
+- (void)testWithInvalidIceCandidate {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+      [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+
+ dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
+ [peerConnection addIceCandidate:[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:@"invalid"
+ sdpMLineIndex:-1
+ sdpMid:nil]
+ completionHandler:^(NSError *error) {
+ ASSERT_NE(error, nil);
+ if (error != nil) {
+ dispatch_semaphore_signal(negotiatedSem);
+ }
+ }];
+
+ NSTimeInterval timeout = 5;
+ ASSERT_EQ(
+ 0,
+ dispatch_semaphore_wait(negotiatedSem,
+ dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC))));
+ [peerConnection close];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCSessionDescriptionTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCSessionDescriptionTest.mm
new file mode 100644
index 0000000000..70c82f78ce
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCSessionDescriptionTest.mm
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCSessionDescription+Private.h"
+#import "api/peerconnection/RTCSessionDescription.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCSessionDescriptionTests : XCTestCase
+@end
+
+@implementation RTCSessionDescriptionTests
+
+/**
+ * Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a native
+ * SessionDescriptionInterface (based on the types and SDP strings being equal).
+ */
+- (void)testSessionDescriptionConversion {
+ RTC_OBJC_TYPE(RTCSessionDescription) *description =
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:[self sdp]];
+
+ std::unique_ptr<webrtc::SessionDescriptionInterface> nativeDescription =
+ description.nativeDescription;
+
+ EXPECT_EQ(RTCSdpTypeAnswer,
+ [RTC_OBJC_TYPE(RTCSessionDescription) typeForStdString:nativeDescription->type()]);
+
+ std::string sdp;
+ nativeDescription->ToString(&sdp);
+ EXPECT_EQ([self sdp].stdString, sdp);
+}
+
+- (void)testInitFromNativeSessionDescription {
+  webrtc::SessionDescriptionInterface *nativeDescription =
+      webrtc::CreateSessionDescription(webrtc::SessionDescriptionInterface::kAnswer,
+                                       [self sdp].stdString,
+                                       nullptr);
+
+ RTC_OBJC_TYPE(RTCSessionDescription) *description =
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:nativeDescription];
+ EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer,
+ [RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]);
+ EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]);
+}
+
+- (NSString *)sdp {
+ return @"v=0\r\n"
+ "o=- 5319989746393411314 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE audio video\r\n"
+ "a=msid-semantic: WMS ARDAMS\r\n"
+ "m=audio 9 UDP/TLS/RTP/SAVPF 111 103 9 0 8 126\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:f3o+0HG7l9nwIWFY\r\n"
+ "a=ice-pwd:VDctmJNCptR2TB7+meDpw7w5\r\n"
+ "a=fingerprint:sha-256 A9:D5:8D:A8:69:22:39:60:92:AD:94:1A:22:2D:5E:"
+ "A5:4A:A9:18:C2:35:5D:46:5E:59:BD:1C:AF:38:9F:E6:E1\r\n"
+ "a=setup:active\r\n"
+ "a=mid:audio\r\n"
+ "a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\n"
+ "a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/"
+ "abs-send-time\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=fmtp:111 minptime=10;useinbandfec=1\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:9 G722/8000\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=maxptime:60\r\n"
+ "a=ssrc:1504474588 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:1504474588 msid:ARDAMS ARDAMSa0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 100 116 117 96\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:f3o+0HG7l9nwIWFY\r\n"
+ "a=ice-pwd:VDctmJNCptR2TB7+meDpw7w5\r\n"
+ "a=fingerprint:sha-256 A9:D5:8D:A8:69:22:39:60:92:AD:94:1A:22:2D:5E:"
+ "A5:4A:A9:18:C2:35:5D:46:5E:59:BD:1C:AF:38:9F:E6:E1\r\n"
+ "a=setup:active\r\n"
+ "a=mid:video\r\n"
+ "a=extmap:2 urn:ietf:params:rtp-hdrext:toffset\r\n"
+ "a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/"
+ "abs-send-time\r\n"
+ "a=extmap:4 urn:3gpp:video-orientation\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:100 VP8/90000\r\n"
+ "a=rtcp-fb:100 ccm fir\r\n"
+ "a=rtcp-fb:100 goog-lntf\r\n"
+ "a=rtcp-fb:100 nack\r\n"
+ "a=rtcp-fb:100 nack pli\r\n"
+ "a=rtcp-fb:100 goog-remb\r\n"
+ "a=rtpmap:116 red/90000\r\n"
+ "a=rtpmap:117 ulpfec/90000\r\n"
+ "a=rtpmap:96 rtx/90000\r\n"
+ "a=fmtp:96 apt=100\r\n"
+ "a=ssrc-group:FID 498297514 1644357692\r\n"
+ "a=ssrc:498297514 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:498297514 msid:ARDAMS ARDAMSv0\r\n"
+ "a=ssrc:1644357692 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:1644357692 msid:ARDAMS ARDAMSv0\r\n";
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCTracingTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCTracingTest.mm
new file mode 100644
index 0000000000..ff93047bdf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCTracingTest.mm
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCTracing.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCTracingTests : XCTestCase
+@end
+
+@implementation RTCTracingTests
+
+- (NSString *)documentsFilePathForFileName:(NSString *)fileName {
+ NSParameterAssert(fileName.length);
+ NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+ NSString *documentsDirPath = paths.firstObject;
+ NSString *filePath =
+ [documentsDirPath stringByAppendingPathComponent:fileName];
+ return filePath;
+}
+
+- (void)testTracingTestNoInitialization {
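+  // Capture cannot start before the internal tracer has been set up, so this
+  // is expected to fail.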
+ NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
+ EXPECT_EQ(NO, RTCStartInternalCapture(filePath));
+ RTCStopInternalCapture();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/audio_short16.pcm b/third_party/libwebrtc/sdk/objc/unittests/audio_short16.pcm
new file mode 100644
index 0000000000..15a0f1811c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/audio_short16.pcm
Binary files differ
diff --git a/third_party/libwebrtc/sdk/objc/unittests/audio_short44.pcm b/third_party/libwebrtc/sdk/objc/unittests/audio_short44.pcm
new file mode 100644
index 0000000000..011cdce959
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/audio_short44.pcm
Binary files differ
diff --git a/third_party/libwebrtc/sdk/objc/unittests/audio_short48.pcm b/third_party/libwebrtc/sdk/objc/unittests/audio_short48.pcm
new file mode 100644
index 0000000000..06fd8261cd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/audio_short48.pcm
Binary files differ
diff --git a/third_party/libwebrtc/sdk/objc/unittests/avformatmappertests.mm b/third_party/libwebrtc/sdk/objc/unittests/avformatmappertests.mm
new file mode 100644
index 0000000000..35e95a8c22
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/avformatmappertests.mm
@@ -0,0 +1,243 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+
+#include "rtc_base/gunit.h"
+
+// Width and height don't play any role here, so let's use predefined values
+// throughout the tests.
+static const int kFormatWidth = 789;
+static const int kFormatHeight = 987;
+
+// Hardcoded framerate used throughout the tests.
+static const int kFramerate = 30;
+
+// The same width and height are used everywhere, so it's OK to expect the
+// same cricket::VideoFormat.
+static cricket::VideoFormat expectedFormat =
+ cricket::VideoFormat(kFormatWidth,
+ kFormatHeight,
+ cricket::VideoFormat::FpsToInterval(kFramerate),
+ cricket::FOURCC_NV12);
+
+// Mock class for AVCaptureDeviceFormat.
+// A custom implementation is needed because OCMock cannot mock the
+// CMVideoFormatDescriptionRef property.
+@interface AVCaptureDeviceFormatMock : NSObject
+
+@property (nonatomic, assign) CMVideoFormatDescriptionRef format;
+@property (nonatomic, strong) OCMockObject *rangeMock;
+
+- (instancetype)initWithMediaSubtype:(FourCharCode)subtype
+ minFps:(float)minFps
+ maxFps:(float)maxFps;
++ (instancetype)validFormat;
++ (instancetype)invalidFpsFormat;
++ (instancetype)invalidMediaSubtypeFormat;
+
+@end
+
+@implementation AVCaptureDeviceFormatMock
+
+@synthesize format = _format;
+@synthesize rangeMock = _rangeMock;
+
+- (instancetype)initWithMediaSubtype:(FourCharCode)subtype
+ minFps:(float)minFps
+ maxFps:(float)maxFps {
+ if (self = [super init]) {
+ CMVideoFormatDescriptionCreate(nil, subtype, kFormatWidth, kFormatHeight,
+ nil, &_format);
+ // We can use OCMock for the range.
+ _rangeMock = [OCMockObject mockForClass:[AVFrameRateRange class]];
+ [[[_rangeMock stub] andReturnValue:@(minFps)] minFrameRate];
+ [[[_rangeMock stub] andReturnValue:@(maxFps)] maxFrameRate];
+ }
+
+ return self;
+}
+
++ (instancetype)validFormat {
+ AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ minFps:0.0
+ maxFps:30.0];
+ return instance;
+}
+
++ (instancetype)invalidFpsFormat {
+ AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ minFps:0.0
+ maxFps:22.0];
+ return instance;
+}
+
++ (instancetype)invalidMediaSubtypeFormat {
+ AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8Planar
+ minFps:0.0
+ maxFps:60.0];
+ return instance;
+}
+
+- (void)dealloc {
+ if (_format != nil) {
+ CFRelease(_format);
+ _format = nil;
+ }
+}
+
+// Redefinition of AVCaptureDevice methods we want to mock.
+- (CMVideoFormatDescriptionRef)formatDescription {
+ return self.format;
+}
+
+- (NSArray *)videoSupportedFrameRateRanges {
+ return @[ self.rangeMock ];
+}
+
+@end
+
+TEST(AVFormatMapperTest, SupportedCricketFormatsWithInvalidFramerateFormats) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+
+ // Valid media subtype, invalid framerate
+ AVCaptureDeviceFormatMock* mock =
+ [AVCaptureDeviceFormatMock invalidFpsFormat];
+ OCMStub([mockDevice formats]).andReturn(@[ mock ]);
+
+ // when
+ std::set<cricket::VideoFormat> result =
+ webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
+
+ // then
+ EXPECT_TRUE(result.empty());
+}
+
+TEST(AVFormatMapperTest, SupportedCricketFormatsWithInvalidFormats) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+
+ // Invalid media subtype, valid framerate
+ AVCaptureDeviceFormatMock* mock =
+ [AVCaptureDeviceFormatMock invalidMediaSubtypeFormat];
+ OCMStub([mockDevice formats]).andReturn(@[ mock ]);
+
+ // when
+ std::set<cricket::VideoFormat> result =
+ webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
+
+ // then
+ EXPECT_TRUE(result.empty());
+}
+
+TEST(AVFormatMapperTest, SupportedCricketFormats) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+
+ // valid media subtype, valid framerate
+ AVCaptureDeviceFormatMock* mock = [AVCaptureDeviceFormatMock validFormat];
+ OCMStub([mockDevice formats]).andReturn(@[ mock ]);
+
+ // when
+ std::set<cricket::VideoFormat> result =
+ webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
+
+ // then
+ EXPECT_EQ(1u, result.size());
+ // make sure the set has the expected format
+ EXPECT_EQ(expectedFormat, *result.begin());
+}
+
+TEST(AVFormatMapperTest, MediaSubtypePreference) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+
+ // valid media subtype, valid framerate
+ AVCaptureDeviceFormatMock* mockOne = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
+ minFps:0.0
+ maxFps:30.0];
+ // valid media subtype, valid framerate.
+  // This media subtype should be the preferred one.
+ AVCaptureDeviceFormatMock* mockTwo = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ minFps:0.0
+ maxFps:30.0];
+ OCMStub([mockDevice lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ OCMStub([mockDevice unlockForConfiguration]);
+ NSArray* array = @[ mockOne, mockTwo ];
+ OCMStub([mockDevice formats]).andReturn(array);
+
+ // to verify
+ OCMExpect([mockDevice setActiveFormat:(AVCaptureDeviceFormat*)mockTwo]);
+ OCMExpect(
+ [mockDevice setActiveVideoMinFrameDuration:CMTimeMake(1, kFramerate)]);
+
+ // when
+ bool resultFormat =
+ webrtc::SetFormatForCaptureDevice(mockDevice, nil, expectedFormat);
+
+ // then
+ EXPECT_TRUE(resultFormat);
+ [mockDevice verify];
+}
+
+TEST(AVFormatMapperTest, SetFormatWhenDeviceCannotLock) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ [[[mockDevice stub] andReturnValue:@(NO)]
+ lockForConfiguration:[OCMArg setTo:nil]];
+ [[[mockDevice stub] andReturn:@[]] formats];
+
+ // when
+ bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil,
+ cricket::VideoFormat());
+
+ // then
+ EXPECT_FALSE(resultFormat);
+}
+
+TEST(AVFormatMapperTest, SetFormatWhenFormatIsIncompatible) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice formats]).andReturn(@[]);
+ OCMStub([mockDevice lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ NSException* testException =
+ [NSException exceptionWithName:@"Test exception"
+ reason:@"Raised from unit tests"
+ userInfo:nil];
+ OCMStub([mockDevice setActiveFormat:[OCMArg any]]).andThrow(testException);
+ OCMExpect([mockDevice unlockForConfiguration]);
+
+ // when
+ bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil,
+ cricket::VideoFormat());
+
+ // then
+ EXPECT_FALSE(resultFormat);
+
+ // TODO(denicija): Remove try-catch when Chromium rolls this change:
+ // https://github.com/erikdoe/ocmock/commit/de1419415581dc307045e54bfe9c98c86efea96b
+ // Without it, stubbed exceptions are being re-raised on [mock verify].
+ // More information here:
+  // https://github.com/erikdoe/ocmock/issues/241
+ @try {
+ [mockDevice verify];
+ } @catch (NSException* exception) {
+ if ([exception.reason isEqual:testException.reason]) {
+      // The stubbed exception leaked through -verify as documented above;
+      // treat it as expected.
+      EXPECT_TRUE([exception.reason isEqualToString:testException.reason]);
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/objc/unittests/foreman.mp4 b/third_party/libwebrtc/sdk/objc/unittests/foreman.mp4
new file mode 100644
index 0000000000..ccffbf4722
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/foreman.mp4
Binary files differ
diff --git a/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.h b/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.h
new file mode 100644
index 0000000000..76c0d15c7e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#include "api/video/i420_buffer.h"
+
+void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer);
+
+rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width,
+ int height);
+
+void CopyI420BufferToCVPixelBuffer(
+ rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
+ CVPixelBufferRef pixelBuffer);
diff --git a/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.mm b/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.mm
new file mode 100644
index 0000000000..98b86c54c0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.mm
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/unittests/frame_buffer_helpers.h"
+
+#include "third_party/libyuv/include/libyuv.h"
+
+void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer) {
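+  // Draws a diagonal white -> red -> green -> blue gradient into the locked
+  // buffer via CoreGraphics.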
+ CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ void* baseAddr = CVPixelBufferGetBaseAddress(pixelBuffer);
+ size_t width = CVPixelBufferGetWidth(pixelBuffer);
+ size_t height = CVPixelBufferGetHeight(pixelBuffer);
+ CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+ int byteOrder = CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32ARGB ?
+ kCGBitmapByteOrder32Little :
+ 0;
+ CGContextRef cgContext = CGBitmapContextCreate(baseAddr,
+ width,
+ height,
+ 8,
+ CVPixelBufferGetBytesPerRow(pixelBuffer),
+ colorSpace,
+ byteOrder | kCGImageAlphaNoneSkipLast);
+
+ // Create a gradient
+ CGFloat colors[] = {
+ 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0,
+ };
+ CGGradientRef gradient = CGGradientCreateWithColorComponents(colorSpace, colors, NULL, 4);
+
+ CGContextDrawLinearGradient(
+ cgContext, gradient, CGPointMake(0, 0), CGPointMake(width, height), 0);
+ CGGradientRelease(gradient);
+
+ CGImageRef cgImage = CGBitmapContextCreateImage(cgContext);
+ CGContextRelease(cgContext);
+ CGImageRelease(cgImage);
+ CGColorSpaceRelease(colorSpace);
+
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+}
+
+rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width, int height) {
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer(webrtc::I420Buffer::Create(width, height));
+  // Initialize with a gradient: Y = 128 * (x/width + y/height), while U ramps
+  // 0..255 horizontally and V ramps 0..255 vertically.
+ for (int x = 0; x < width; x++) {
+ for (int y = 0; y < height; y++) {
+ buffer->MutableDataY()[x + y * width] = 128 * (x * height + y * width) / (width * height);
+ }
+ }
+ int chroma_width = buffer->ChromaWidth();
+ int chroma_height = buffer->ChromaHeight();
+ for (int x = 0; x < chroma_width; x++) {
+ for (int y = 0; y < chroma_height; y++) {
+ buffer->MutableDataU()[x + y * chroma_width] = 255 * x / (chroma_width - 1);
+ buffer->MutableDataV()[x + y * chroma_width] = 255 * y / (chroma_height - 1);
+ }
+ }
+ return buffer;
+}
+
+void CopyI420BufferToCVPixelBuffer(rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
+ CVPixelBufferRef pixelBuffer) {
+ CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+
+ const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+ if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||
+ pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
+ // NV12
+ uint8_t* dstY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
+ const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
+ uint8_t* dstUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
+ const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
+
+ libyuv::I420ToNV12(i420Buffer->DataY(),
+ i420Buffer->StrideY(),
+ i420Buffer->DataU(),
+ i420Buffer->StrideU(),
+ i420Buffer->DataV(),
+ i420Buffer->StrideV(),
+ dstY,
+ dstYStride,
+ dstUV,
+ dstUVStride,
+ i420Buffer->width(),
+ i420Buffer->height());
+ } else {
+ uint8_t* dst = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(pixelBuffer));
+ const int bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
+
+ if (pixelFormat == kCVPixelFormatType_32BGRA) {
+ // Corresponds to libyuv::FOURCC_ARGB
+ libyuv::I420ToARGB(i420Buffer->DataY(),
+ i420Buffer->StrideY(),
+ i420Buffer->DataU(),
+ i420Buffer->StrideU(),
+ i420Buffer->DataV(),
+ i420Buffer->StrideV(),
+ dst,
+ bytesPerRow,
+ i420Buffer->width(),
+ i420Buffer->height());
+ } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
+ // Corresponds to libyuv::FOURCC_BGRA
+ libyuv::I420ToBGRA(i420Buffer->DataY(),
+ i420Buffer->StrideY(),
+ i420Buffer->DataU(),
+ i420Buffer->StrideU(),
+ i420Buffer->DataV(),
+ i420Buffer->StrideV(),
+ dst,
+ bytesPerRow,
+ i420Buffer->width(),
+ i420Buffer->height());
+ }
+ }
+
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+}
diff --git a/third_party/libwebrtc/sdk/objc/unittests/main.mm b/third_party/libwebrtc/sdk/objc/unittests/main.mm
new file mode 100644
index 0000000000..9c513762c1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/main.mm
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+#include "rtc_base/thread.h"
+#include "test/ios/coverage_util_ios.h"
+
+int main(int argc, char* argv[]) {
+ rtc::test::ConfigureCoverageReportPath();
+
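+  // Keep an rtc::AutoThread alive so WebRTC code that requires a current
+  // rtc::Thread can run on the main thread.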
+ rtc::AutoThread main_thread;
+
+ @autoreleasepool {
+ return UIApplicationMain(argc, argv, nil, nil);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/objc/unittests/nalu_rewriter_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/nalu_rewriter_xctest.mm
new file mode 100644
index 0000000000..82da549bb6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/nalu_rewriter_xctest.mm
@@ -0,0 +1,374 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/h264/h264_common.h"
+#include "components/video_codec/nalu_rewriter.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/gunit.h"
+
+#import <XCTest/XCTest.h>
+
+#if TARGET_OS_IPHONE
+#import <AVFoundation/AVFoundation.h>
+#import <UIKit/UIKit.h>
+#endif
+
+@interface NaluRewriterTests : XCTestCase
+
+@end
+
+static const uint8_t NALU_TEST_DATA_0[] = {0xAA, 0xBB, 0xCC};
+static const uint8_t NALU_TEST_DATA_1[] = {0xDE, 0xAD, 0xBE, 0xEF};
+
+// clang-format off
+static const uint8_t SPS_PPS_BUFFER[] = {
+ // SPS nalu.
+ 0x00, 0x00, 0x00, 0x01, 0x27, 0x42, 0x00, 0x1E, 0xAB, 0x40, 0xF0, 0x28,
+ 0xD3, 0x70, 0x20, 0x20, 0x20, 0x20,
+ // PPS nalu.
+ 0x00, 0x00, 0x00, 0x01, 0x28, 0xCE, 0x3C, 0x30};
+// clang-format on
+
+@implementation NaluRewriterTests
+
+- (void)testCreateVideoFormatDescription {
+ CMVideoFormatDescriptionRef description =
+ webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, arraysize(SPS_PPS_BUFFER));
+ XCTAssertTrue(description);
+ if (description) {
+ CFRelease(description);
+ description = nullptr;
+ }
+
+ // clang-format off
+ const uint8_t sps_pps_not_at_start_buffer[] = {
+ // Add some non-SPS/PPS NALUs at the beginning
+ 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x01, 0xFF, 0x00, 0x00, 0x00, 0x01,
+ 0xAB, 0x33, 0x21,
+ // SPS nalu.
+ 0x00, 0x00, 0x01, 0x27, 0x42, 0x00, 0x1E, 0xAB, 0x40, 0xF0, 0x28, 0xD3,
+ 0x70, 0x20, 0x20, 0x20, 0x20,
+ // PPS nalu.
+ 0x00, 0x00, 0x01, 0x28, 0xCE, 0x3C, 0x30};
+ // clang-format on
+ description = webrtc::CreateVideoFormatDescription(sps_pps_not_at_start_buffer,
+ arraysize(sps_pps_not_at_start_buffer));
+
+ XCTAssertTrue(description);
+
+ if (description) {
+ CFRelease(description);
+ description = nullptr;
+ }
+
+ const uint8_t other_buffer[] = {0x00, 0x00, 0x00, 0x01, 0x28};
+ XCTAssertFalse(webrtc::CreateVideoFormatDescription(other_buffer, arraysize(other_buffer)));
+}
+
+- (void)testReadEmptyInput {
+ const uint8_t annex_b_test_data[] = {0x00};
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, 0);
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testReadSingleNalu {
+ const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x00, 0x01, 0xAA};
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
+ XCTAssertTrue(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(annex_b_test_data + 4, nalu);
+ XCTAssertEqual(1u, nalu_length);
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testReadSingleNalu3ByteHeader {
+ const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x01, 0xAA};
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
+ XCTAssertTrue(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(annex_b_test_data + 3, nalu);
+ XCTAssertEqual(1u, nalu_length);
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testReadMissingNalu {
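+  // The buffer contains no valid 3- or 4-byte Annex B start code, so the
+  // reader reports zero readable bytes.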
+ // clang-format off
+ const uint8_t annex_b_test_data[] = {0x01,
+ 0x00, 0x01,
+ 0x00, 0x00, 0x00, 0xFF};
+ // clang-format on
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testReadMultipleNalus {
+ // clang-format off
+ const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x00, 0x01, 0xFF,
+ 0x01,
+ 0x00, 0x01,
+ 0x00, 0x00, 0x00, 0xFF,
+ 0x00, 0x00, 0x01, 0xAA, 0xBB};
+ // clang-format on
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
+ XCTAssertTrue(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(annex_b_test_data + 4, nalu);
+ XCTAssertEqual(8u, nalu_length);
+ XCTAssertEqual(5u, reader.BytesRemaining());
+ XCTAssertTrue(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(annex_b_test_data + 15, nalu);
+ XCTAssertEqual(2u, nalu_length);
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testEmptyOutputBuffer {
+ const uint8_t expected_buffer[] = {0x00};
+ const size_t buffer_size = 1;
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ memset(buffer.get(), 0, buffer_size);
+ webrtc::AvccBufferWriter writer(buffer.get(), 0);
+ XCTAssertEqual(0u, writer.BytesRemaining());
+ XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+ XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+}
+
+- (void)testWriteSingleNalu {
+ const uint8_t expected_buffer[] = {
+ 0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC,
+ };
+ const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + 4;
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
+ XCTAssertEqual(buffer_size, writer.BytesRemaining());
+ XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+ XCTAssertEqual(0u, writer.BytesRemaining());
+ XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
+ XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+}
+
+- (void)testWriteMultipleNalus {
+ // clang-format off
+ const uint8_t expected_buffer[] = {
+ 0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC,
+ 0x00, 0x00, 0x00, 0x04, 0xDE, 0xAD, 0xBE, 0xEF
+ };
+ // clang-format on
+ const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + arraysize(NALU_TEST_DATA_1) + 8;
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
+ XCTAssertEqual(buffer_size, writer.BytesRemaining());
+ XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+ XCTAssertEqual(buffer_size - (arraysize(NALU_TEST_DATA_0) + 4), writer.BytesRemaining());
+ XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
+ XCTAssertEqual(0u, writer.BytesRemaining());
+ XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+}
+
+- (void)testOverflow {
+ const uint8_t expected_buffer[] = {0x00, 0x00, 0x00};
+ const size_t buffer_size = arraysize(NALU_TEST_DATA_0);
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ memset(buffer.get(), 0, buffer_size);
+ webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
+ XCTAssertEqual(buffer_size, writer.BytesRemaining());
+ XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+ XCTAssertEqual(buffer_size, writer.BytesRemaining());
+ XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+}
+
+- (void)testH264AnnexBBufferToCMSampleBuffer {
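+  // The conversion rewrites Annex B start codes into 4-byte big-endian AVCC
+  // length prefixes; the third chunk does not fit and is omitted.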
+ // clang-format off
+ const uint8_t annex_b_test_data[] = {
+ 0x00,
+ 0x00, 0x00, 0x01,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x01,
+ 0xAA, 0xFF, // second chunk, 2 bytes
+ 0x00, 0x00, 0x01,
+ 0xBB}; // third chunk, 1 byte, will not fit into output array
+
+ const uint8_t expected_cmsample_data[] = {
+ 0x00, 0x00, 0x00, 0x04,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x02,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+ // clang-format on
+
+ CMMemoryPoolRef memory_pool = CMMemoryPoolCreate(nil);
+ CMSampleBufferRef out_sample_buffer = nil;
+ CMVideoFormatDescriptionRef description = [self createDescription];
+
+ Boolean result = webrtc::H264AnnexBBufferToCMSampleBuffer(annex_b_test_data,
+ arraysize(annex_b_test_data),
+ description,
+ &out_sample_buffer,
+ memory_pool);
+
+ XCTAssertTrue(result);
+
+ XCTAssertEqual(description, CMSampleBufferGetFormatDescription(out_sample_buffer));
+
+ char* data_ptr = nullptr;
+ CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(out_sample_buffer);
+ size_t block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
+ CMBlockBufferGetDataPointer(block_buffer, 0, nullptr, nullptr, &data_ptr);
+ XCTAssertEqual(block_buffer_size, arraysize(annex_b_test_data));
+
+ int data_comparison_result =
+ memcmp(expected_cmsample_data, data_ptr, arraysize(expected_cmsample_data));
+
+ XCTAssertEqual(0, data_comparison_result);
+
+ if (description) {
+ CFRelease(description);
+ description = nullptr;
+ }
+
+ CMMemoryPoolInvalidate(memory_pool);
+ CFRelease(memory_pool);
+}
+
+- (void)testH264CMSampleBufferToAnnexBBuffer {
+ // clang-format off
+ const uint8_t cmsample_data[] = {
+ 0x00, 0x00, 0x00, 0x04,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x02,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+
+ const uint8_t expected_annex_b_data[] = {
+ 0x00, 0x00, 0x00, 0x01,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x01,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+ // clang-format on
+
+ rtc::Buffer annexb_buffer(arraysize(cmsample_data));
+ CMSampleBufferRef sample_buffer =
+ [self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)];
+
+ Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
+ /* is_keyframe */ false,
+ &annexb_buffer);
+
+ XCTAssertTrue(result);
+
+ XCTAssertEqual(arraysize(expected_annex_b_data), annexb_buffer.size());
+
+ int data_comparison_result =
+ memcmp(expected_annex_b_data, annexb_buffer.data(), arraysize(expected_annex_b_data));
+
+ XCTAssertEqual(0, data_comparison_result);
+}
+
+- (void)testH264CMSampleBufferToAnnexBBufferWithKeyframe {
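+  // For keyframes the SPS and PPS from the format description are prepended
+  // to the Annex B output ahead of the frame data.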
+ // clang-format off
+ const uint8_t cmsample_data[] = {
+ 0x00, 0x00, 0x00, 0x04,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x02,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+
+ const uint8_t expected_annex_b_data[] = {
+ 0x00, 0x00, 0x00, 0x01,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x01,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+ // clang-format on
+
+ rtc::Buffer annexb_buffer(arraysize(cmsample_data));
+ CMSampleBufferRef sample_buffer =
+ [self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)];
+
+ Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
+ /* is_keyframe */ true,
+ &annexb_buffer);
+
+ XCTAssertTrue(result);
+
+ XCTAssertEqual(arraysize(SPS_PPS_BUFFER) + arraysize(expected_annex_b_data),
+ annexb_buffer.size());
+
+ XCTAssertEqual(0, memcmp(SPS_PPS_BUFFER, annexb_buffer.data(), arraysize(SPS_PPS_BUFFER)));
+
+ XCTAssertEqual(0,
+ memcmp(expected_annex_b_data,
+ annexb_buffer.data() + arraysize(SPS_PPS_BUFFER),
+ arraysize(expected_annex_b_data)));
+}
+
+- (CMVideoFormatDescriptionRef)createDescription {
+ CMVideoFormatDescriptionRef description =
+ webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, arraysize(SPS_PPS_BUFFER));
+ XCTAssertTrue(description);
+ return description;
+}
+
+- (CMSampleBufferRef)createCMSampleBufferRef:(void*)cmsampleData cmsampleSize:(size_t)cmsampleSize {
+ CMSampleBufferRef sample_buffer = nil;
+ OSStatus status;
+
+ CMVideoFormatDescriptionRef description = [self createDescription];
+ CMBlockBufferRef block_buffer = nullptr;
+
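+  // Wrap the raw test bytes in a CMBlockBuffer; the flag asks CoreMedia to
+  // make the backing memory available immediately.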
+ status = CMBlockBufferCreateWithMemoryBlock(nullptr,
+ cmsampleData,
+ cmsampleSize,
+ nullptr,
+ nullptr,
+ 0,
+ cmsampleSize,
+ kCMBlockBufferAssureMemoryNowFlag,
+ &block_buffer);
+ XCTAssertEqual(kCMBlockBufferNoErr, status);
+
+ status = CMSampleBufferCreate(nullptr,
+ block_buffer,
+ true,
+ nullptr,
+ nullptr,
+ description,
+ 1,
+ 0,
+ nullptr,
+ 0,
+ nullptr,
+ &sample_buffer);
+ XCTAssertEqual(noErr, status);
+
+ return sample_buffer;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/objc_video_decoder_factory_tests.mm b/third_party/libwebrtc/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
new file mode 100644
index 0000000000..f44d831d29
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#include "sdk/objc/native/src/objc_video_decoder_factory.h"
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoDecoder.h"
+#import "base/RTCVideoDecoderFactory.h"
+#include "media/base/codec.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/gunit.h"
+
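+// Builds a mock RTCVideoDecoderFactory whose decoder returns `return_code`
+// from start, decode, and release, and which advertises H264 as its only
+// supported codec.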
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(int return_code) {
+ id decoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoder)));
+ OCMStub([decoderMock startDecodeWithNumberOfCores:1]).andReturn(return_code);
+ OCMStub([decoderMock decode:[OCMArg any]
+ missingFrames:NO
+ codecSpecificInfo:[OCMArg any]
+ renderTimeMs:0])
+ .andReturn(return_code);
+ OCMStub([decoderMock releaseDecoder]).andReturn(return_code);
+
+ id decoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory)));
+ RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
+ OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
+ OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock);
+ return decoderFactoryMock;
+}
+
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateOKDecoderFactory() {
+ return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
+}
+
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateErrorDecoderFactory() {
+ return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> factory) {
+ webrtc::ObjCVideoDecoderFactory decoder_factory(factory);
+ return decoder_factory.CreateVideoDecoder(webrtc::SdpVideoFormat(cricket::kH264CodecName));
+}
+
+#pragma mark -
+
+@interface ObjCVideoDecoderFactoryTests : XCTestCase
+@end
+
+@implementation ObjCVideoDecoderFactoryTests
+
+- (void)testConfigureReturnsTrueOnSuccess {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+
+ webrtc::VideoDecoder::Settings settings;
+ EXPECT_TRUE(decoder->Configure(settings));
+}
+
+- (void)testConfigureReturnsFalseOnFail {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+
+ webrtc::VideoDecoder::Settings settings;
+ EXPECT_FALSE(decoder->Configure(settings));
+}
+
+- (void)testDecodeReturnsOKOnSuccess {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+
+ webrtc::EncodedImage encoded_image;
+ encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create());
+
+ EXPECT_EQ(decoder->Decode(encoded_image, false, 0), WEBRTC_VIDEO_CODEC_OK);
+}
+
+- (void)testDecodeReturnsErrorOnFail {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+
+ webrtc::EncodedImage encoded_image;
+ encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create());
+
+ EXPECT_EQ(decoder->Decode(encoded_image, false, 0), WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+- (void)testReleaseDecodeReturnsOKOnSuccess {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+
+ EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_OK);
+}
+
+- (void)testReleaseDecodeReturnsErrorOnFail {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+
+ EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/third_party/libwebrtc/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
new file mode 100644
index 0000000000..9a4fee2e95
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#include "sdk/objc/native/src/objc_video_encoder_factory.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder.h"
+#import "base/RTCVideoEncoder.h"
+#import "base/RTCVideoEncoderFactory.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/gunit.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
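+// Builds an RTCVideoEncoderFactory mock whose encoder answers every entry
+// point (init, encode, release, rate update) with `return_code`, so both the
+// success and the error path of the C++ wrapper can be exercised.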
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateEncoderFactoryReturning(int return_code) {
+ id encoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoder)));
+ OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1])
+ .andReturn(return_code);
+ OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]])
+ .andReturn(return_code);
+ OCMStub([encoderMock releaseEncoder]).andReturn(return_code);
+ OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code);
+
+ id encoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory)));
+ RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
+ OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
+ OCMStub([encoderFactoryMock implementations]).andReturn(@[ supported ]);
+ OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock);
+ return encoderFactoryMock;
+}
+
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateOKEncoderFactory() {
+ return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
+}
+
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateErrorEncoderFactory() {
+ return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
+}
+
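+// Wraps the mocked Obj-C factory in the native ObjCVideoEncoderFactory
+// adapter and requests an H264 encoder, the codec the mock advertises.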
+std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> factory) {
+ webrtc::ObjCVideoEncoderFactory encoder_factory(factory);
+ webrtc::SdpVideoFormat format("H264");
+ return encoder_factory.CreateVideoEncoder(format);
+}
+
+#pragma mark -
+
+@interface ObjCVideoEncoderFactoryTests : XCTestCase
+@end
+
+@implementation ObjCVideoEncoderFactoryTests
+
+- (void)testInitEncodeReturnsOKOnSuccess {
+ std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+
+  webrtc::VideoCodec settings;
+  const webrtc::VideoEncoder::Capabilities kCapabilities(false);
+  EXPECT_EQ(encoder->InitEncode(&settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
+            WEBRTC_VIDEO_CODEC_OK);
+}
+
+- (void)testInitEncodeReturnsErrorOnFail {
+ std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+
+  webrtc::VideoCodec settings;
+  const webrtc::VideoEncoder::Capabilities kCapabilities(false);
+  EXPECT_EQ(encoder->InitEncode(&settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
+            WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+- (void)testEncodeReturnsOKOnSuccess {
+ std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+
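+  // Wrap a CVPixelBuffer in an ObjCFrameBuffer so the frame handed to the
+  // mocked encoder matches what the iOS capture pipeline produces.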
+ CVPixelBufferRef pixel_buffer;
+ CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
+ webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_rotation(webrtc::kVideoRotation_0)
+ .set_timestamp_us(0)
+ .build();
+ std::vector<webrtc::VideoFrameType> frame_types;
+
+  EXPECT_EQ(encoder->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK);
+  CVPixelBufferRelease(pixel_buffer);
+}
+
+- (void)testEncodeReturnsErrorOnFail {
+ std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+
+ CVPixelBufferRef pixel_buffer;
+ CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
+ webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_rotation(webrtc::kVideoRotation_0)
+ .set_timestamp_us(0)
+ .build();
+ std::vector<webrtc::VideoFrameType> frame_types;
+
+  EXPECT_EQ(encoder->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_ERROR);
+  CVPixelBufferRelease(pixel_buffer);
+}
+
+- (void)testReleaseEncodeReturnsOKOnSuccess {
+ std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+
+ EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_OK);
+}
+
+- (void)testReleaseEncodeReturnsErrorOnFail {
+ std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+
+ EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+- (void)testGetSupportedFormats {
+ webrtc::ObjCVideoEncoderFactory encoder_factory(CreateOKEncoderFactory());
+ std::vector<webrtc::SdpVideoFormat> supportedFormats = encoder_factory.GetSupportedFormats();
+ EXPECT_EQ(supportedFormats.size(), 1u);
+ EXPECT_EQ(supportedFormats[0].name, "H264");
+}
+
+- (void)testGetImplementations {
+ webrtc::ObjCVideoEncoderFactory encoder_factory(CreateOKEncoderFactory());
+ std::vector<webrtc::SdpVideoFormat> supportedFormats = encoder_factory.GetImplementations();
+ EXPECT_EQ(supportedFormats.size(), 1u);
+ EXPECT_EQ(supportedFormats[0].name, "H264");
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/scoped_cftyperef_tests.mm b/third_party/libwebrtc/sdk/objc/unittests/scoped_cftyperef_tests.mm
new file mode 100644
index 0000000000..be26720b95
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/scoped_cftyperef_tests.mm
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#include "sdk/objc/helpers/scoped_cftyperef.h"
+
+#include "test/gtest.h"
+
+namespace {
+struct TestType {
+ TestType() : has_value(true) {}
+ TestType(bool b) : has_value(b) {}
+ explicit operator bool() { return has_value; }
+ bool has_value;
+ int retain_count = 0;
+};
+
+typedef TestType* TestTypeRef;
+
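+// Traits that bump a plain retain_count field instead of calling into
+// CoreFoundation, letting the tests observe every Retain/Release performed.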
+struct TestTypeTraits {
+ static TestTypeRef InvalidValue() { return TestTypeRef(false); }
+ static void Release(TestTypeRef t) { t->retain_count--; }
+ static TestTypeRef Retain(TestTypeRef t) {
+ t->retain_count++;
+ return t;
+ }
+};
+} // namespace
+
+using ScopedTestType = rtc::internal::ScopedTypeRef<TestTypeRef, TestTypeTraits>;
+
+// In these tests we sometimes introduce variables just to
+// observe side effects. Ignore the compiler's complaints.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunused-variable"
+
+@interface ScopedTypeRefTests : XCTestCase
+@end
+
+@implementation ScopedTypeRefTests
+
+- (void)testShouldNotRetainByDefault {
+ TestType a;
+ ScopedTestType ref(&a);
+ EXPECT_EQ(0, a.retain_count);
+}
+
+- (void)testShouldRetainWithPolicy {
+ TestType a;
+ ScopedTestType ref(&a, rtc::RetainPolicy::RETAIN);
+ EXPECT_EQ(1, a.retain_count);
+}
+
+- (void)testShouldReleaseWhenLeavingScope {
+ TestType a;
+ EXPECT_EQ(0, a.retain_count);
+ {
+ ScopedTestType ref(&a, rtc::RetainPolicy::RETAIN);
+ EXPECT_EQ(1, a.retain_count);
+ }
+ EXPECT_EQ(0, a.retain_count);
+}
+
+- (void)testShouldBeCopyable {
+ TestType a;
+ EXPECT_EQ(0, a.retain_count);
+ {
+ ScopedTestType ref1(&a, rtc::RetainPolicy::RETAIN);
+ EXPECT_EQ(1, a.retain_count);
+ ScopedTestType ref2 = ref1;
+ EXPECT_EQ(2, a.retain_count);
+ }
+ EXPECT_EQ(0, a.retain_count);
+}
+
+- (void)testCanReleaseOwnership {
+ TestType a;
+ EXPECT_EQ(0, a.retain_count);
+ {
+ ScopedTestType ref(&a, rtc::RetainPolicy::RETAIN);
+ EXPECT_EQ(1, a.retain_count);
+ TestTypeRef b = ref.release();
+ }
+ EXPECT_EQ(1, a.retain_count);
+}
+
+- (void)testShouldBeTestableForTruthiness {
+ ScopedTestType ref;
+ EXPECT_FALSE(ref);
+ TestType a;
+ ref = &a;
+ EXPECT_TRUE(ref);
+ ref.release();
+ EXPECT_FALSE(ref);
+}
+
+- (void)testShouldProvideAccessToWrappedType {
+ TestType a;
+ ScopedTestType ref(&a);
+ EXPECT_EQ(&(a.retain_count), &(ref->retain_count));
+}
+
+@end
+
+#pragma clang diagnostic pop
diff --git a/third_party/libwebrtc/sdk/videocapture_objc_gn/moz.build b/third_party/libwebrtc/sdk/videocapture_objc_gn/moz.build
new file mode 100644
index 0000000000..063ec72ed8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/videocapture_objc_gn/moz.build
@@ -0,0 +1,66 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
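+# The capturer sources below are Objective-C (.m), built with ARC enabled.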
+CMFLAGS += [
+ "-fobjc-arc"
+]
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_AVX2"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MAC"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_POSIX"] = True
+DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+DEFINES["__STDC_CONSTANT_MACROS"] = True
+DEFINES["__STDC_FORMAT_MACROS"] = True
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/sdk/objc/",
+ "/third_party/libwebrtc/sdk/objc/base/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m",
+ "/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+Library("videocapture_objc_gn")
diff --git a/third_party/libwebrtc/sdk/videoframebuffer_objc_gn/moz.build b/third_party/libwebrtc/sdk/videoframebuffer_objc_gn/moz.build
new file mode 100644
index 0000000000..ea8b12fff2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/videoframebuffer_objc_gn/moz.build
@@ -0,0 +1,69 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
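+# The frame-buffer sources below are Objective-C++ (.mm), built with ARC enabled.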
+CMMFLAGS += [
+ "-fobjc-arc"
+]
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_AVX2"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MAC"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_POSIX"] = True
+DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+DEFINES["__STDC_CONSTANT_MACROS"] = True
+DEFINES["__STDC_FORMAT_MACROS"] = True
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ "/media/libyuv/",
+ "/media/libyuv/libyuv/include/",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/sdk/objc/",
+ "/third_party/libwebrtc/sdk/objc/base/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm",
+ "/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm",
+ "/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+Library("videoframebuffer_objc_gn")