author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 17:32:43 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 17:32:43 +0000
commit    6bf0a5cb5034a7e684dcc3500e841785237ce2dd (patch)
tree      a68f146d7fa01f0134297619fbe7e33db084e0aa /third_party/libwebrtc/sdk/android
parent    Initial commit. (diff)
Adding upstream version 1:115.7.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/sdk/android')
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/AddIceObserver.java | 20
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java | 21
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java | 21
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java | 20
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/AudioSource.java | 26
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/AudioTrack.java | 32
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java | 23
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java | 23
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java | 41
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Capturer.java | 33
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java | 190
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Capturer.java | 36
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java | 251
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java | 206
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerator.java | 26
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java | 172
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/CapturerObserver.java | 27
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/CryptoOptions.java | 145
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/DataChannel.java | 196
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/Dav1dDecoder.java | 20
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java | 69
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/DtmfSender.java | 96
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase.java | 255
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase10.java | 20
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase14.java | 20
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/EglRenderer.java | 787
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/EncodedImage.java | 183
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java | 22
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java | 201
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/FrameDecryptor.java | 26
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/FrameEncryptor.java | 26
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/GlRectDrawer.java | 31
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/GlShader.java | 131
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java | 122
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/GlUtil.java | 66
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java | 57
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/IceCandidateErrorEvent.java | 43
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java | 200
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Encoder.java | 25
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java | 20
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java | 25
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java | 22
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java | 27
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/MediaConstraints.java | 99
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/MediaSource.java | 74
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java | 129
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/Metrics.java | 81
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java | 24
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java | 20
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/NetEqFactoryFactory.java | 21
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java | 39
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/Predicate.java | 73
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/RefCounted.java | 28
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/RendererCommon.java | 259
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/SSLCertificateVerifier.java | 27
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java | 212
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/SdpObserver.java | 26
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/SessionDescription.java | 56
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java | 53
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java | 58
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/StatsObserver.java | 17
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/StatsReport.java | 63
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java | 160
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java | 390
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java | 300
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/TextureBufferImpl.java | 203
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/TimestampAligner.java | 59
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/TurnCustomizer.java | 41
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCapturer.java | 53
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java | 86
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java | 42
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoder.java | 94
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java | 30
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java | 31
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoder.java | 385
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java | 72
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java | 36
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java | 162
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrame.java | 218
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameBufferType.java | 33
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java | 241
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoProcessor.java | 76
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSink.java | 23
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSource.java | 162
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/VideoTrack.java | 76
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoDecoder.java | 38
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoEncoder.java | 49
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/YuvConverter.java | 252
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/YuvHelper.java | 200
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java | 38
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java | 436
-rw-r--r-- third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java | 46
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java | 684
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java | 38
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java | 31
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java | 29
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java | 33
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java | 340
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java | 428
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java | 458
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java | 72
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java | 98
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java | 365
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java | 271
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java | 17
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java | 26
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java | 281
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java | 52
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java | 763
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java | 39
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java | 28
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java | 23
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java | 129
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java | 139
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java | 55
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java | 22
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java | 115
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java | 73
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java | 69
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java | 99
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java | 53
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java | 51
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java | 63
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java | 29
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java | 27
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java | 46
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java | 27
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java | 110
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java | 81
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java | 83
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java | 227
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java | 122
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java | 743
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java | 585
-rw-r--r-- third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java | 308
135 files changed, 16726 insertions(+), 0 deletions(-)
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AddIceObserver.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AddIceObserver.java
new file mode 100644
index 0000000000..ff2c690029
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AddIceObserver.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface to handle completion of addIceCandidate */
+public interface AddIceObserver {
+ /** Called when the ICE candidate has been added successfully. */
+ @CalledByNative public void onAddSuccess();
+
+ /** Called when the ICE candidate addition failed. */
+ @CalledByNative public void onAddFailure(String error);
+}
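
A minimal usage sketch (not part of this commit), assuming the two-argument PeerConnection.addIceCandidate(IceCandidate, AddIceObserver) overload that this interface accompanies; `pc`, `candidate`, and the log tag are illustrative:

    // Report the outcome of adding a trickled remote candidate.
    pc.addIceCandidate(candidate, new AddIceObserver() {
      @Override
      public void onAddSuccess() {
        Logging.d("IceDemo", "Candidate added.");
      }
      @Override
      public void onAddFailure(String error) {
        Logging.e("IceDemo", "Candidate rejected: " + error);
      }
    });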
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java
new file mode 100644
index 0000000000..dd3e262896
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioDecoderFactory}.
+ */
+public interface AudioDecoderFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::AudioDecoderFactory}. The caller takes ownership.
+ */
+ long createNativeAudioDecoderFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java
new file mode 100644
index 0000000000..814b71aba1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioEncoderFactory}.
+ */
+public interface AudioEncoderFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::AudioEncoderFactory}. The caller takes ownership.
+ */
+ long createNativeAudioEncoderFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java
new file mode 100644
index 0000000000..bd8fdb8989
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioProcessingFactory.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::AudioProcessing instances. */
+public interface AudioProcessingFactory {
+ /**
+ * Dynamically allocates a webrtc::AudioProcessing instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ public long createNative();
+}
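
A sketch of how such a factory is typically wired in, assuming the setAudioProcessingFactory hook on PeerConnectionFactory.Builder; `myApmFactory` stands for any AudioProcessingFactory implementation:

    // The long returned by createNative() is a raw pointer to a native
    // webrtc::AudioProcessing; the native factory-construction code takes
    // ownership of it, so the Java side must not retain or reuse the value.
    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setAudioProcessingFactory(myApmFactory) // assumed Builder hook
        .createPeerConnectionFactory();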
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioSource.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioSource.java
new file mode 100644
index 0000000000..f8104e5904
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioSource.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+ public AudioSource(long nativeSource) {
+ super(nativeSource);
+ }
+
+ /** Returns a pointer to webrtc::AudioSourceInterface. */
+ long getNativeAudioSource() {
+ return getNativeMediaSource();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioTrack.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioTrack.java
new file mode 100644
index 0000000000..ca745db634
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/AudioTrack.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+ public AudioTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+
+ /** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
+ * 0 to 10.
+ */
+ public void setVolume(double volume) {
+ nativeSetVolume(getNativeAudioTrack(), volume);
+ }
+
+ /** Returns a pointer to webrtc::AudioTrackInterface. */
+ long getNativeAudioTrack() {
+ return getNativeMediaStreamTrack();
+ }
+
+ private static native void nativeSetVolume(long track, double volume);
+}
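
A short sketch of the volume API in context, assuming an already-initialized PeerConnectionFactory `factory`; the track id is arbitrary:

    // Create an audio track and raise its gain.
    AudioSource source = factory.createAudioSource(new MediaConstraints());
    AudioTrack track = factory.createAudioTrack("audio0", source);
    track.setVolume(2.0); // gain in the range 0 to 10 (1.0 presumably unity)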
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java
new file mode 100644
index 0000000000..5ebc19f25d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Creates a native {@code webrtc::AudioDecoderFactory} with the builtin audio decoders.
+ */
+public class BuiltinAudioDecoderFactoryFactory implements AudioDecoderFactoryFactory {
+ @Override
+ public long createNativeAudioDecoderFactory() {
+ return nativeCreateBuiltinAudioDecoderFactory();
+ }
+
+ private static native long nativeCreateBuiltinAudioDecoderFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java
new file mode 100644
index 0000000000..e884d4c3b9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * This class creates a native {@code webrtc::AudioEncoderFactory} with the builtin audio encoders.
+ */
+public class BuiltinAudioEncoderFactoryFactory implements AudioEncoderFactoryFactory {
+ @Override
+ public long createNativeAudioEncoderFactory() {
+ return nativeCreateBuiltinAudioEncoderFactory();
+ }
+
+ private static native long nativeCreateBuiltinAudioEncoderFactory();
+}
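
Both builtin factory factories are normally handed to the factory builder; a sketch, assuming the standard PeerConnectionFactory.Builder setters:

    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setAudioDecoderFactoryFactory(new BuiltinAudioDecoderFactoryFactory())
        .setAudioEncoderFactoryFactory(new BuiltinAudioEncoderFactoryFactory())
        .createPeerConnectionFactory();
    // The native webrtc::AudioDecoderFactory/AudioEncoderFactory are created
    // during the build and owned by the native PeerConnectionFactory.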
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java
new file mode 100644
index 0000000000..f4edb58847
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class CallSessionFileRotatingLogSink {
+ private long nativeSink;
+
+ public static byte[] getLogData(String dirPath) {
+ if (dirPath == null) {
+ throw new IllegalArgumentException("dirPath may not be null.");
+ }
+ return nativeGetLogData(dirPath);
+ }
+
+ public CallSessionFileRotatingLogSink(
+ String dirPath, int maxFileSize, Logging.Severity severity) {
+ if (dirPath == null) {
+ throw new IllegalArgumentException("dirPath may not be null.");
+ }
+ nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+ }
+
+ public void dispose() {
+ if (nativeSink != 0) {
+ nativeDeleteSink(nativeSink);
+ nativeSink = 0;
+ }
+ }
+
+ private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
+ private static native void nativeDeleteSink(long sink);
+ private static native byte[] nativeGetLogData(String dirPath);
+}
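
A lifecycle sketch, assuming an Android Context `context`; the 10 MB cap and the severity are arbitrary choices:

    // Write rotated native logs under the app's files dir, then read them back.
    String dir = context.getFilesDir().getAbsolutePath();
    CallSessionFileRotatingLogSink sink =
        new CallSessionFileRotatingLogSink(dir, 10 * 1024 * 1024, Logging.Severity.LS_INFO);
    // ... run the call ...
    sink.dispose(); // deletes the native sink; guarded, so safe to call twice
    byte[] logData = CallSessionFileRotatingLogSink.getLogData(dir);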
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Capturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Capturer.java
new file mode 100644
index 0000000000..de172aa1d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Capturer.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+public class Camera1Capturer extends CameraCapturer {
+ private final boolean captureToTexture;
+
+ public Camera1Capturer(
+ String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
+ super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
+
+ this.captureToTexture = captureToTexture;
+ }
+
+ @Override
+ protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+ CameraSession.Events events, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+ int framerate) {
+ Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
+ surfaceTextureHelper, cameraName, width, height, framerate);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java
new file mode 100644
index 0000000000..4a1aacdb05
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera1Enumerator.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.SystemClock;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+@SuppressWarnings("deprecation")
+public class Camera1Enumerator implements CameraEnumerator {
+ private final static String TAG = "Camera1Enumerator";
+ // Each entry contains the supported formats for the corresponding camera index. The formats for all
+ // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
+ // reference.
+ private static List<List<CaptureFormat>> cachedSupportedFormats;
+
+ private final boolean captureToTexture;
+
+ public Camera1Enumerator() {
+ this(true /* captureToTexture */);
+ }
+
+ public Camera1Enumerator(boolean captureToTexture) {
+ this.captureToTexture = captureToTexture;
+ }
+
+ // Returns device names that can be used to create a new VideoCapturerAndroid.
+ @Override
+ public String[] getDeviceNames() {
+ ArrayList<String> namesList = new ArrayList<>();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ String name = getDeviceName(i);
+ if (name != null) {
+ namesList.add(name);
+ Logging.d(TAG, "Index: " + i + ". " + name);
+ } else {
+ Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
+ }
+ }
+ String[] namesArray = new String[namesList.size()];
+ return namesList.toArray(namesArray);
+ }
+
+ @Override
+ public boolean isFrontFacing(String deviceName) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+ return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
+ }
+
+ @Override
+ public boolean isBackFacing(String deviceName) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+ return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
+ }
+
+ @Override
+ public boolean isInfrared(String deviceName) {
+ return false;
+ }
+
+ @Override
+ public List<CaptureFormat> getSupportedFormats(String deviceName) {
+ return getSupportedFormats(getCameraIndex(deviceName));
+ }
+
+ @Override
+ public CameraVideoCapturer createCapturer(
+ String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
+ }
+
+ private static @Nullable android.hardware.Camera.CameraInfo getCameraInfo(int index) {
+ android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ try {
+ android.hardware.Camera.getCameraInfo(index, info);
+ } catch (Exception e) {
+ Logging.e(TAG, "getCameraInfo failed on index " + index, e);
+ return null;
+ }
+ return info;
+ }
+
+ static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+ if (cachedSupportedFormats == null) {
+ cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ cachedSupportedFormats.add(enumerateFormats(i));
+ }
+ }
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ private static List<CaptureFormat> enumerateFormats(int cameraId) {
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ final android.hardware.Camera.Parameters parameters;
+ android.hardware.Camera camera = null;
+ try {
+ Logging.d(TAG, "Opening camera with index " + cameraId);
+ camera = android.hardware.Camera.open(cameraId);
+ parameters = camera.getParameters();
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+ return new ArrayList<CaptureFormat>();
+ } finally {
+ if (camera != null) {
+ camera.release();
+ }
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ try {
+ int minFps = 0;
+ int maxFps = 0;
+ final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+ if (listFpsRange != null) {
+ // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+ // corresponding to the highest fps.
+ final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+ minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ }
+ for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
+ formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+ }
+
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+
+ // Convert from android.hardware.Camera.Size to Size.
+ static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
+ final List<Size> sizes = new ArrayList<Size>();
+ for (android.hardware.Camera.Size size : cameraSizes) {
+ sizes.add(new Size(size.width, size.height));
+ }
+ return sizes;
+ }
+
+ // Convert from int[2] to CaptureFormat.FramerateRange.
+ static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
+ final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+ for (int[] range : arrayRanges) {
+ ranges.add(new CaptureFormat.FramerateRange(
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
+ }
+ return ranges;
+ }
+
+ // Returns the camera index for the camera with name `deviceName`, or throws IllegalArgumentException
+ // if no such camera can be found.
+ static int getCameraIndex(String deviceName) {
+ Logging.d(TAG, "getCameraIndex: " + deviceName);
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ if (deviceName.equals(getDeviceName(i))) {
+ return i;
+ }
+ }
+ throw new IllegalArgumentException("No such camera: " + deviceName);
+ }
+
+ // Returns the name of the camera with the given camera index. Returns null if the
+ // camera cannot be used.
+ static @Nullable String getDeviceName(int index) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(index);
+ if (info == null) {
+ return null;
+ }
+
+ String facing =
+ (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+ return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
+ }
+}
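
A sketch of the intended enumeration flow; the events handler is omitted for brevity:

    // Open the first front-facing Camera1 device.
    CameraEnumerator enumerator = new Camera1Enumerator(/* captureToTexture= */ true);
    for (String name : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(name)) {
        CameraVideoCapturer capturer = enumerator.createCapturer(name, /* eventsHandler= */ null);
        // hand `capturer` to VideoCapturer.initialize()/startCapture() as usual
        break;
      }
    }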
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Capturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Capturer.java
new file mode 100644
index 0000000000..c4becf4819
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Capturer.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.camera2.CameraManager;
+import androidx.annotation.Nullable;
+
+public class Camera2Capturer extends CameraCapturer {
+ private final Context context;
+ @Nullable private final CameraManager cameraManager;
+
+ public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
+ super(cameraName, eventsHandler, new Camera2Enumerator(context));
+
+ this.context = context;
+ cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ @Override
+ protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+ CameraSession.Events events, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+ int framerate) {
+ Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
+ surfaceTextureHelper, cameraName, width, height, framerate);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java
new file mode 100644
index 0000000000..44e239ad8e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Camera2Enumerator.java
@@ -0,0 +1,251 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.Range;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+public class Camera2Enumerator implements CameraEnumerator {
+ private final static String TAG = "Camera2Enumerator";
+ private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+ // Each entry contains the supported formats for a given camera index. The formats are enumerated
+ // lazily in getSupportedFormats(), and cached for future reference.
+ private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
+ new HashMap<String, List<CaptureFormat>>();
+
+ final Context context;
+ @Nullable final CameraManager cameraManager;
+
+ public Camera2Enumerator(Context context) {
+ this.context = context;
+ this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ @Override
+ public String[] getDeviceNames() {
+ try {
+ return cameraManager.getCameraIdList();
+ } catch (CameraAccessException e) {
+ Logging.e(TAG, "Camera access exception", e);
+ return new String[] {};
+ }
+ }
+
+ @Override
+ public boolean isFrontFacing(String deviceName) {
+ CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+ return characteristics != null
+ && characteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_FRONT;
+ }
+
+ @Override
+ public boolean isBackFacing(String deviceName) {
+ CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+ return characteristics != null
+ && characteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_BACK;
+ }
+
+ @Override
+ public boolean isInfrared(String deviceName) {
+ CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+ if (characteristics != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ Integer colors = characteristics.get(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
+ return colors != null && colors.equals(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR);
+ }
+
+ return false;
+ }
+
+ @Nullable
+ @Override
+ public List<CaptureFormat> getSupportedFormats(String deviceName) {
+ return getSupportedFormats(context, deviceName);
+ }
+
+ @Override
+ public CameraVideoCapturer createCapturer(
+ String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return new Camera2Capturer(context, deviceName, eventsHandler);
+ }
+
+ private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) {
+ try {
+ return cameraManager.getCameraCharacteristics(deviceName);
+ } catch (CameraAccessException | RuntimeException e) {
+ Logging.e(TAG, "Camera access exception", e);
+ return null;
+ }
+ }
+
+ /**
+ * Checks if API is supported and all cameras have better than legacy support.
+ */
+ public static boolean isSupported(Context context) {
+ CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ try {
+ String[] cameraIds = cameraManager.getCameraIdList();
+ for (String id : cameraIds) {
+ CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
+ if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
+ == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+ return false;
+ }
+ }
+ } catch (CameraAccessException | RuntimeException e) {
+ Logging.e(TAG, "Failed to check if camera2 is supported", e);
+ return false;
+ }
+ return true;
+ }
+
+ static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
+ if (fpsRanges.length == 0) {
+ return 1000;
+ }
+ return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
+ }
+
+ static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
+ final StreamConfigurationMap streamMap =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ final int supportLevel =
+ cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+
+ final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
+ final List<Size> sizes = convertSizes(nativeSizes);
+
+ // Video may be stretched pre LMR1 on legacy implementations.
+ // Filter out formats that have different aspect ratio than the sensor array.
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
+ && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+ final Rect activeArraySize =
+ cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ final ArrayList<Size> filteredSizes = new ArrayList<Size>();
+
+ for (Size size : sizes) {
+ if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
+ filteredSizes.add(size);
+ }
+ }
+
+ return filteredSizes;
+ } else {
+ return sizes;
+ }
+ }
+
+ @Nullable
+ static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
+ return getSupportedFormats(
+ (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
+ }
+
+ @Nullable
+ static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
+ synchronized (cachedSupportedFormats) {
+ if (cachedSupportedFormats.containsKey(cameraId)) {
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+
+ final CameraCharacteristics cameraCharacteristics;
+ try {
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+ } catch (Exception ex) {
+ Logging.e(TAG, "getCameraCharacteristics()", ex);
+ return new ArrayList<CaptureFormat>();
+ }
+
+ final StreamConfigurationMap streamMap =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ Range<Integer>[] fpsRanges =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ List<CaptureFormat.FramerateRange> framerateRanges =
+ convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
+ List<Size> sizes = getSupportedSizes(cameraCharacteristics);
+
+ int defaultMaxFps = 0;
+ for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
+ defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ for (Size size : sizes) {
+ long minFrameDurationNs = 0;
+ try {
+ minFrameDurationNs = streamMap.getOutputMinFrameDuration(
+ SurfaceTexture.class, new android.util.Size(size.width, size.height));
+ } catch (Exception e) {
+ // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+ }
+ final int maxFps = (minFrameDurationNs == 0)
+ ? defaultMaxFps
+ : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
+ formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
+ Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
+ }
+
+ cachedSupportedFormats.put(cameraId, formatList);
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+ }
+
+ // Convert from android.util.Size to Size.
+ private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
+ if (cameraSizes == null || cameraSizes.length == 0) {
+ return Collections.emptyList();
+ }
+ final List<Size> sizes = new ArrayList<>(cameraSizes.length);
+ for (android.util.Size size : cameraSizes) {
+ sizes.add(new Size(size.getWidth(), size.getHeight()));
+ }
+ return sizes;
+ }
+
+ // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
+ static List<CaptureFormat.FramerateRange> convertFramerates(
+ Range<Integer>[] arrayRanges, int unitFactor) {
+ final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+ for (Range<Integer> range : arrayRanges) {
+ ranges.add(new CaptureFormat.FramerateRange(
+ range.getLower() * unitFactor, range.getUpper() * unitFactor));
+ }
+ return ranges;
+ }
+}
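
isSupported() enables the usual selection pattern; a sketch, assuming an Android Context `context`:

    // Prefer Camera2 only when every device reports better-than-legacy support,
    // since LEGACY-level HALs emulate the Camera2 API poorly.
    CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
        ? new Camera2Enumerator(context)
        : new Camera1Enumerator(/* captureToTexture= */ true);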
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
new file mode 100644
index 0000000000..0c3188fffe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+
+import android.graphics.ImageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+ private final static String TAG = "CameraEnumerationAndroid";
+
+ static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
+ // 0, Unknown resolution
+ new Size(160, 120), // 1, QQVGA
+ new Size(240, 160), // 2, HQVGA
+ new Size(320, 240), // 3, QVGA
+ new Size(400, 240), // 4, WQVGA
+ new Size(480, 320), // 5, HVGA
+ new Size(640, 360), // 6, nHD
+ new Size(640, 480), // 7, VGA
+ new Size(768, 480), // 8, WVGA
+ new Size(854, 480), // 9, FWVGA
+ new Size(800, 600), // 10, SVGA
+ new Size(960, 540), // 11, qHD
+ new Size(960, 640), // 12, DVGA
+ new Size(1024, 576), // 13, WSVGA
+ new Size(1024, 600), // 14, WSVGA
+ new Size(1280, 720), // 15, HD
+ new Size(1280, 1024), // 16, SXGA
+ new Size(1920, 1080), // 17, Full HD
+ new Size(1920, 1440), // 18, Full HD 4:3
+ new Size(2560, 1440), // 19, QHD
+ new Size(3840, 2160) // 20, UHD
+ ));
+
+ public static class CaptureFormat {
+ // Class to represent a framerate range. The framerate varies because of lighting conditions.
+ // The values are multiplied by 1000, so 1000 represents one frame per second.
+ public static class FramerateRange {
+ public int min;
+ public int max;
+
+ public FramerateRange(int min, int max) {
+ this.min = min;
+ this.max = max;
+ }
+
+ @Override
+ public String toString() {
+ return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof FramerateRange)) {
+ return false;
+ }
+ final FramerateRange otherFramerate = (FramerateRange) other;
+ return min == otherFramerate.min && max == otherFramerate.max;
+ }
+
+ @Override
+ public int hashCode() {
+ // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+ return 1 + 65537 * min + max;
+ }
+ }
+
+ public final int width;
+ public final int height;
+ public final FramerateRange framerate;
+
+ // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
+ // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
+ // all imageFormats.
+ public final int imageFormat = ImageFormat.NV21;
+
+ public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
+ this.width = width;
+ this.height = height;
+ this.framerate = new FramerateRange(minFramerate, maxFramerate);
+ }
+
+ public CaptureFormat(int width, int height, FramerateRange framerate) {
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+ }
+
+ // Calculates the frame size of this capture format.
+ public int frameSize() {
+ return frameSize(width, height, imageFormat);
+ }
+
+ // Calculates the frame size of the specified image format. Currently only
+ // supporting ImageFormat.NV21.
+ // The size is width * height * number of bytes per pixel.
+ // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
+ public static int frameSize(int width, int height, int imageFormat) {
+ if (imageFormat != ImageFormat.NV21) {
+ throw new UnsupportedOperationException("Don't know how to calculate "
+ + "the frame size of non-NV21 image formats.");
+ }
+ return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
+ }
+
+ @Override
+ public String toString() {
+ return width + "x" + height + "@" + framerate;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof CaptureFormat)) {
+ return false;
+ }
+ final CaptureFormat otherFormat = (CaptureFormat) other;
+ return width == otherFormat.width && height == otherFormat.height
+ && framerate.equals(otherFormat.framerate);
+ }
+
+ @Override
+ public int hashCode() {
+ return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
+ }
+ }
+
+ // Helper class for finding the closest supported format for the two functions below. It creates a
+ // comparator based on the difference to some requested parameters, where the element with the
+ // minimum difference is the element that is closest to the requested parameters.
+ private static abstract class ClosestComparator<T> implements Comparator<T> {
+ // Difference between supported and requested parameter.
+ abstract int diff(T supportedParameter);
+
+ @Override
+ public int compare(T t1, T t2) {
+ return diff(t1) - diff(t2);
+ }
+ }
+
+ // Prefer an fps range with an upper bound close to `framerate`. Also prefer an fps range with a low
+ // lower bound, to allow the framerate to fluctuate based on lighting conditions.
+ public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
+ List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
+ return Collections.min(
+ supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
+ // Progressive penalty if the upper bound is further away than `MAX_FPS_DIFF_THRESHOLD`
+ // from requested.
+ private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
+ private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
+ private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
+
+ // Progressive penalty if the lower bound is bigger than `MIN_FPS_THRESHOLD`.
+ private static final int MIN_FPS_THRESHOLD = 8000;
+ private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
+ private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
+
+ // Use one weight for small `value` less than `threshold`, and another weight above.
+ private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
+ return (value < threshold) ? value * lowWeight
+ : threshold * lowWeight + (value - threshold) * highWeight;
+ }
+
+ @Override
+ int diff(CaptureFormat.FramerateRange range) {
+ final int minFpsError = progressivePenalty(
+ range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
+ final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
+ MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
+ return minFpsError + maxFpsError;
+ }
+ });
+ }
+
+ public static Size getClosestSupportedSize(
+ List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+ return Collections.min(supportedSizes, new ClosestComparator<Size>() {
+ @Override
+ int diff(Size size) {
+ return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+ }
+ });
+ }
+
+ // Helper method for camera classes.
+ static void reportCameraResolution(Histogram histogram, Size resolution) {
+ int index = COMMON_RESOLUTIONS.indexOf(resolution);
+ // 0 is reserved for unknown resolution, so add 1.
+ // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
+ histogram.addSample(index + 1);
+ }
+}
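
A worked sketch of the matching helpers, assuming an enumerator and device name from the previous classes. Note the unit convention: stored framerates are in thousandths of fps, while getClosestSupportedFramerateRange() takes a plain fps request and scales it internally:

    List<CaptureFormat> formats = enumerator.getSupportedFormats(deviceName);
    List<CaptureFormat.FramerateRange> fpsRanges = new ArrayList<>();
    List<Size> sizes = new ArrayList<>();
    for (CaptureFormat format : formats) {
      fpsRanges.add(format.framerate);
      sizes.add(new Size(format.width, format.height));
    }
    // E.g. an advertised [15, 30] fps range is stored as [15000, 30000].
    CaptureFormat.FramerateRange fps =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(fpsRanges, /* requestedFps= */ 30);
    Size size = CameraEnumerationAndroid.getClosestSupportedSize(sizes, 1280, 720);
    // NV21 frame size check: 1280 * 720 * 12 bits / 8 = 1382400 bytes.
    int frameBytes = CaptureFormat.frameSize(1280, 720, ImageFormat.NV21);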
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerator.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerator.java
new file mode 100644
index 0000000000..db34d542c8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraEnumerator.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.List;
+
+public interface CameraEnumerator {
+ public String[] getDeviceNames();
+ public boolean isFrontFacing(String deviceName);
+ public boolean isBackFacing(String deviceName);
+ public boolean isInfrared(String deviceName);
+ public List<CaptureFormat> getSupportedFormats(String deviceName);
+
+ public CameraVideoCapturer createCapturer(
+ String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java
new file mode 100644
index 0000000000..ec26868b5c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CameraVideoCapturer.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaRecorder;
+
+/**
+ * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
+ * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
+ * class for detecting camera freezes.
+ */
+public interface CameraVideoCapturer extends VideoCapturer {
+ /**
+ * Camera events handler - can be used to be notified about camera events. The callbacks are
+ * executed from an arbitrary thread.
+ */
+ public interface CameraEventsHandler {
+ // Camera error handler - invoked when the camera cannot be opened
+ // or any camera exception happens on the camera thread.
+ void onCameraError(String errorDescription);
+
+ // Called when camera is disconnected.
+ void onCameraDisconnected();
+
+ // Invoked when camera stops receiving frames.
+ void onCameraFreezed(String errorDescription);
+
+ // Callback invoked when camera is opening.
+ void onCameraOpening(String cameraName);
+
+ // Callback invoked when first camera frame is available after camera is started.
+ void onFirstFrameAvailable();
+
+ // Callback invoked when camera is closed.
+ void onCameraClosed();
+ }
+
+ /**
+ * Camera switch handler - one of these functions is invoked with the result of switchCamera().
+ * The callback may be called on an arbitrary thread.
+ */
+ public interface CameraSwitchHandler {
+ // Invoked on success. `isFrontCamera` is true if the new camera is front facing.
+ void onCameraSwitchDone(boolean isFrontCamera);
+
+ // Invoked on failure, e.g. camera is stopped or only one camera available.
+ void onCameraSwitchError(String errorDescription);
+ }
+
+ /**
+ * Switch camera to the next valid camera id. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ void switchCamera(CameraSwitchHandler switchEventsHandler);
+
+ /**
+ * Switch camera to the specified camera id. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ void switchCamera(CameraSwitchHandler switchEventsHandler, String cameraName);
+
+ /**
+ * MediaRecorder add/remove handler - one of these functions is invoked with the result of
+ * addMediaRecorderToCamera() or removeMediaRecorderFromCamera() calls.
+ * The callback may be called on an arbitrary thread.
+ */
+ @Deprecated
+ public interface MediaRecorderHandler {
+ // Invoked on success.
+ void onMediaRecorderSuccess();
+
+ // Invoked on failure, e.g. camera is stopped or any exception happens.
+ void onMediaRecorderError(String errorDescription);
+ }
+
+ /**
+ * Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
+ * Once a MediaRecorder is added to the camera pipeline, switching cameras is not allowed.
+ * This function can be called from any thread.
+ */
+ @Deprecated
+ default void addMediaRecorderToCamera(
+ MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) {
+ throw new UnsupportedOperationException("Deprecated and not implemented.");
+ }
+
+ /**
+ * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ @Deprecated
+ default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) {
+ throw new UnsupportedOperationException("Deprecated and not implemented.");
+ }
+
+ /**
+ * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
+ * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
+ * thread.
+ */
+ public static class CameraStatistics {
+ private final static String TAG = "CameraStatistics";
+ private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+ private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;
+
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final CameraEventsHandler eventsHandler;
+ private int frameCount;
+ private int freezePeriodCount;
+ // Camera observer - monitors camera framerate. Observer is executed on camera thread.
+ private final Runnable cameraObserver = new Runnable() {
+ @Override
+ public void run() {
+ final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
+ Logging.d(TAG, "Camera fps: " + cameraFps + ".");
+ if (frameCount == 0) {
+ ++freezePeriodCount;
+ if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMEOUT_MS
+ && eventsHandler != null) {
+ Logging.e(TAG, "Camera froze.");
+ if (surfaceTextureHelper.isTextureInUse()) {
+ // This can only happen if we are capturing to textures.
+ eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+ } else {
+ eventsHandler.onCameraFreezed("Camera failure.");
+ }
+ return;
+ }
+ } else {
+ freezePeriodCount = 0;
+ }
+ frameCount = 0;
+ surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+ }
+ };
+
+ public CameraStatistics(
+ SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
+ if (surfaceTextureHelper == null) {
+ throw new IllegalArgumentException("SurfaceTextureHelper is null");
+ }
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.eventsHandler = eventsHandler;
+ this.frameCount = 0;
+ this.freezePeriodCount = 0;
+ surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+ }
+
+ private void checkThread() {
+ if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+
+ public void addFrame() {
+ checkThread();
+ ++frameCount;
+ }
+
+ public void release() {
+ surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
+ }
+ }
+}
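For context, a minimal usage sketch of the switch API above. This is illustrative only, not part of the patch; `capturer` is assumed to be a running Camera1Capturer or Camera2Capturer created elsewhere, with org.webrtc imports omitted.

    // Hypothetical caller; the handler may be invoked on an arbitrary thread.
    capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
      @Override
      public void onCameraSwitchDone(boolean isFrontCamera) {
        Logging.d("Demo", "Switched camera, front facing: " + isFrontCamera);
      }

      @Override
      public void onCameraSwitchError(String errorDescription) {
        Logging.e("Demo", "Camera switch failed: " + errorDescription);
      }
    });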
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CapturerObserver.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CapturerObserver.java
new file mode 100644
index 0000000000..382dc15b3a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CapturerObserver.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for observing a capturer. Passed to {@link VideoCapturer#initialize}. Provided by
+ * {@link VideoSource#getCapturerObserver}.
+ *
+ * All callbacks must be executed on a single thread.
+ */
+public interface CapturerObserver {
+ /** Notifies whether the capturer has been started successfully or not. */
+ void onCapturerStarted(boolean success);
+ /** Notify that the capturer has been stopped. */
+ void onCapturerStopped();
+
+ /** Delivers a captured frame. */
+ void onFrameCaptured(VideoFrame frame);
+}
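Illustrative wiring for this interface (not part of the patch): applications normally do not implement CapturerObserver themselves but hand a capturer the observer provided by VideoSource. Here `factory`, `eglBase`, `appContext`, and `capturer` are assumed to exist from earlier setup.

    // Hypothetical setup code; forwards captured frames into a VideoSource.
    VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
    SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
    capturer.initialize(helper, appContext, videoSource.getCapturerObserver());
    capturer.startCapture(/* width= */ 640, /* height= */ 480, /* framerate= */ 30);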
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/CryptoOptions.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/CryptoOptions.java
new file mode 100644
index 0000000000..6e06bc6426
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/CryptoOptions.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * CryptoOptions defines advanced cryptographic settings for native WebRTC.
+ * These settings must be passed into RTCConfiguration. WebRTC is secure by
+ * default and you should not need to set any of these options unless you are
+ * specifically looking for an additional crypto feature such as AES_GCM
+ * support. This class is the Java binding of native api/crypto/cryptooptions.h
+ */
+public final class CryptoOptions {
+ /**
+ * SRTP Related Peer Connection Options.
+ */
+ public final class Srtp {
+ /**
+ * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
+ * if both sides enable it.
+ */
+ private final boolean enableGcmCryptoSuites;
+ /**
+ * If set to true, the (potentially insecure) crypto cipher
+ * kSrtpAes128CmSha1_32 will be included in the list of supported ciphers
+ * during negotiation. It will only be used if both peers support it and no
+ * other ciphers get preferred.
+ */
+ private final boolean enableAes128Sha1_32CryptoCipher;
+ /**
+ * If set to true, encrypted RTP header extensions as defined in RFC 6904
+ * will be negotiated. They will only be used if both peers support them.
+ */
+ private final boolean enableEncryptedRtpHeaderExtensions;
+
+ private Srtp(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+ boolean enableEncryptedRtpHeaderExtensions) {
+ this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+ this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+ this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableGcmCryptoSuites() {
+ return enableGcmCryptoSuites;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableAes128Sha1_32CryptoCipher() {
+ return enableAes128Sha1_32CryptoCipher;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableEncryptedRtpHeaderExtensions() {
+ return enableEncryptedRtpHeaderExtensions;
+ }
+ }
+
+ /**
+ * Options to be used when the FrameEncryptor / FrameDecryptor APIs are used.
+ */
+ public final class SFrame {
+ /**
+ * If set, all RtpSenders must have a FrameEncryptor attached to them before
+ * they are allowed to send packets. All RtpReceivers must have a
+ * FrameDecryptor attached to them before they are able to receive packets.
+ */
+ private final boolean requireFrameEncryption;
+
+ private SFrame(boolean requireFrameEncryption) {
+ this.requireFrameEncryption = requireFrameEncryption;
+ }
+
+ @CalledByNative("SFrame")
+ public boolean getRequireFrameEncryption() {
+ return requireFrameEncryption;
+ }
+ }
+
+ private final Srtp srtp;
+ private final SFrame sframe;
+
+ private CryptoOptions(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+ boolean enableEncryptedRtpHeaderExtensions, boolean requireFrameEncryption) {
+ this.srtp = new Srtp(
+ enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher, enableEncryptedRtpHeaderExtensions);
+ this.sframe = new SFrame(requireFrameEncryption);
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ @CalledByNative
+ public Srtp getSrtp() {
+ return srtp;
+ }
+
+ @CalledByNative
+ public SFrame getSFrame() {
+ return sframe;
+ }
+
+ public static class Builder {
+ private boolean enableGcmCryptoSuites;
+ private boolean enableAes128Sha1_32CryptoCipher;
+ private boolean enableEncryptedRtpHeaderExtensions;
+ private boolean requireFrameEncryption;
+
+ private Builder() {}
+
+ public Builder setEnableGcmCryptoSuites(boolean enableGcmCryptoSuites) {
+ this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+ return this;
+ }
+
+ public Builder setEnableAes128Sha1_32CryptoCipher(boolean enableAes128Sha1_32CryptoCipher) {
+ this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+ return this;
+ }
+
+ public Builder setEnableEncryptedRtpHeaderExtensions(
+ boolean enableEncryptedRtpHeaderExtensions) {
+ this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+ return this;
+ }
+
+ public Builder setRequireFrameEncryption(boolean requireFrameEncryption) {
+ this.requireFrameEncryption = requireFrameEncryption;
+ return this;
+ }
+
+ public CryptoOptions createCryptoOptions() {
+ return new CryptoOptions(enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher,
+ enableEncryptedRtpHeaderExtensions, requireFrameEncryption);
+ }
+ }
+}
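A hedged configuration sketch (not part of the patch): building CryptoOptions via the Builder and attaching it to a peer connection, assuming RTCConfiguration exposes a public cryptoOptions field as in upstream WebRTC. `iceServers` is a placeholder list from earlier setup.

    // Hypothetical setup; enables GCM suites while keeping the other defaults off.
    CryptoOptions cryptoOptions =
        CryptoOptions.builder().setEnableGcmCryptoSuites(true).createCryptoOptions();
    PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(iceServers);
    config.cryptoOptions = cryptoOptions;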
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/DataChannel.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/DataChannel.java
new file mode 100644
index 0000000000..b9301f1faa
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/DataChannel.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+ /** Java wrapper for WebIDL RTCDataChannelInit. */
+ public static class Init {
+ public boolean ordered = true;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmitTimeMs = -1;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmits = -1;
+ public String protocol = "";
+ public boolean negotiated;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int id = -1;
+
+ @CalledByNative("Init")
+ boolean getOrdered() {
+ return ordered;
+ }
+
+ @CalledByNative("Init")
+ int getMaxRetransmitTimeMs() {
+ return maxRetransmitTimeMs;
+ }
+
+ @CalledByNative("Init")
+ int getMaxRetransmits() {
+ return maxRetransmits;
+ }
+
+ @CalledByNative("Init")
+ String getProtocol() {
+ return protocol;
+ }
+
+ @CalledByNative("Init")
+ boolean getNegotiated() {
+ return negotiated;
+ }
+
+ @CalledByNative("Init")
+ int getId() {
+ return id;
+ }
+ }
+
+ /** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
+ public static class Buffer {
+ /** The underlying data. */
+ public final ByteBuffer data;
+
+ /**
+ * Indicates whether `data` contains UTF-8 text or "binary data"
+ * (i.e. anything else).
+ */
+ public final boolean binary;
+
+ @CalledByNative("Buffer")
+ public Buffer(ByteBuffer data, boolean binary) {
+ this.data = data;
+ this.binary = binary;
+ }
+ }
+
+ /** Java version of C++ DataChannelObserver. */
+ public interface Observer {
+ /** The data channel's bufferedAmount has changed. */
+ @CalledByNative("Observer") public void onBufferedAmountChange(long previousAmount);
+ /** The data channel state has changed. */
+ @CalledByNative("Observer") public void onStateChange();
+ /**
+ * A data buffer was successfully received. NOTE: `buffer.data` will be
+ * freed once this function returns so callers who want to use the data
+ * asynchronously must make sure to copy it first.
+ */
+ @CalledByNative("Observer") public void onMessage(Buffer buffer);
+ }
+
+ /** Keep in sync with DataChannelInterface::DataState. */
+ public enum State {
+ CONNECTING,
+ OPEN,
+ CLOSING,
+ CLOSED;
+
+ @CalledByNative("State")
+ static State fromNativeIndex(int nativeIndex) {
+ return values()[nativeIndex];
+ }
+ }
+
+ private long nativeDataChannel;
+ private long nativeObserver;
+
+ @CalledByNative
+ public DataChannel(long nativeDataChannel) {
+ this.nativeDataChannel = nativeDataChannel;
+ }
+
+ /** Register `observer`, replacing any previously-registered observer. */
+ public void registerObserver(Observer observer) {
+ checkDataChannelExists();
+ if (nativeObserver != 0) {
+ nativeUnregisterObserver(nativeObserver);
+ }
+ nativeObserver = nativeRegisterObserver(observer);
+ }
+
+ /** Unregister the (only) observer. */
+ public void unregisterObserver() {
+ checkDataChannelExists();
+ nativeUnregisterObserver(nativeObserver);
+ nativeObserver = 0;
+ }
+
+ public String label() {
+ checkDataChannelExists();
+ return nativeLabel();
+ }
+
+ public int id() {
+ checkDataChannelExists();
+ return nativeId();
+ }
+
+ public State state() {
+ checkDataChannelExists();
+ return nativeState();
+ }
+
+ /**
+ * Return the number of bytes of application data (UTF-8 text and binary data)
+ * that have been queued using send() but have not yet been transmitted
+ * to the network.
+ */
+ public long bufferedAmount() {
+ checkDataChannelExists();
+ return nativeBufferedAmount();
+ }
+
+ /** Close the channel. */
+ public void close() {
+ checkDataChannelExists();
+ nativeClose();
+ }
+
+ /** Send `data` to the remote peer; return success. */
+ public boolean send(Buffer buffer) {
+ checkDataChannelExists();
+ // TODO(fischman): this could be cleverer about avoiding copies if the
+ // ByteBuffer is direct and/or is backed by an array.
+ byte[] data = new byte[buffer.data.remaining()];
+ buffer.data.get(data);
+ return nativeSend(data, buffer.binary);
+ }
+
+ /** Dispose of native resources attached to this channel. */
+ public void dispose() {
+ checkDataChannelExists();
+ JniCommon.nativeReleaseRef(nativeDataChannel);
+ nativeDataChannel = 0;
+ }
+
+ @CalledByNative
+ long getNativeDataChannel() {
+ return nativeDataChannel;
+ }
+
+ private void checkDataChannelExists() {
+ if (nativeDataChannel == 0) {
+ throw new IllegalStateException("DataChannel has been disposed.");
+ }
+ }
+
+ private native long nativeRegisterObserver(Observer observer);
+ private native void nativeUnregisterObserver(long observer);
+ private native String nativeLabel();
+ private native int nativeId();
+ private native State nativeState();
+ private native long nativeBufferedAmount();
+ private native void nativeClose();
+ private native boolean nativeSend(byte[] data, boolean binary);
+}
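Illustrative usage (not part of the patch): observing and sending on a channel, where `dataChannel` is assumed to come from PeerConnection.createDataChannel() elsewhere; org.webrtc and java.nio imports are omitted. Note the copy in onMessage(), since buffer.data is freed when the callback returns.

    // Hypothetical sketch of the observer/send API.
    dataChannel.registerObserver(new DataChannel.Observer() {
      @Override
      public void onBufferedAmountChange(long previousAmount) {}

      @Override
      public void onStateChange() {
        Logging.d("Demo", "DataChannel state: " + dataChannel.state());
      }

      @Override
      public void onMessage(DataChannel.Buffer buffer) {
        // buffer.data is freed after this returns; copy before using it asynchronously.
        ByteBuffer copy = ByteBuffer.allocate(buffer.data.remaining());
        copy.put(buffer.data).flip();
      }
    });
    ByteBuffer payload =
        ByteBuffer.wrap("hello".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    dataChannel.send(new DataChannel.Buffer(payload, /* binary= */ false));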
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Dav1dDecoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Dav1dDecoder.java
new file mode 100644
index 0000000000..ecb16bc3a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Dav1dDecoder.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class Dav1dDecoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
new file mode 100644
index 0000000000..d7a8694d3d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+/**
+ * Helper class that combines HW and SW decoders.
+ */
+public class DefaultVideoDecoderFactory implements VideoDecoderFactory {
+ private final VideoDecoderFactory hardwareVideoDecoderFactory;
+ private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory();
+ private final @Nullable VideoDecoderFactory platformSoftwareVideoDecoderFactory;
+
+ /**
+ * Create decoder factory using default hardware decoder factory.
+ */
+ public DefaultVideoDecoderFactory(@Nullable EglBase.Context eglContext) {
+ this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext);
+ this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext);
+ }
+
+ /**
+ * Create decoder factory using explicit hardware decoder factory.
+ */
+ DefaultVideoDecoderFactory(VideoDecoderFactory hardwareVideoDecoderFactory) {
+ this.hardwareVideoDecoderFactory = hardwareVideoDecoderFactory;
+ this.platformSoftwareVideoDecoderFactory = null;
+ }
+
+ @Override
+ public @Nullable VideoDecoder createDecoder(VideoCodecInfo codecType) {
+ VideoDecoder softwareDecoder = softwareVideoDecoderFactory.createDecoder(codecType);
+ final VideoDecoder hardwareDecoder = hardwareVideoDecoderFactory.createDecoder(codecType);
+ if (softwareDecoder == null && platformSoftwareVideoDecoderFactory != null) {
+ softwareDecoder = platformSoftwareVideoDecoderFactory.createDecoder(codecType);
+ }
+ if (hardwareDecoder != null && softwareDecoder != null) {
+ // Both hardware and software decoders are supported; wrap the hardware decoder in a
+ // software fallback.
+ return new VideoDecoderFallback(
+ /* fallback= */ softwareDecoder, /* primary= */ hardwareDecoder);
+ }
+ return hardwareDecoder != null ? hardwareDecoder : softwareDecoder;
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<VideoCodecInfo>();
+
+ supportedCodecInfos.addAll(Arrays.asList(softwareVideoDecoderFactory.getSupportedCodecs()));
+ supportedCodecInfos.addAll(Arrays.asList(hardwareVideoDecoderFactory.getSupportedCodecs()));
+ if (platformSoftwareVideoDecoderFactory != null) {
+ supportedCodecInfos.addAll(
+ Arrays.asList(platformSoftwareVideoDecoderFactory.getSupportedCodecs()));
+ }
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+}
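A short hedged example (not part of the patch): plugging this factory into a PeerConnectionFactory, assuming PeerConnectionFactory.initialize(...) has already run and `eglBase` exists from earlier setup.

    // Hypothetical setup; HW decoding with SW fallback, sharing the EGL context.
    VideoDecoderFactory decoderFactory =
        new DefaultVideoDecoderFactory(eglBase.getEglBaseContext());
    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setVideoDecoderFactory(decoderFactory)
        .createPeerConnectionFactory();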
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/DtmfSender.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/DtmfSender.java
new file mode 100644
index 0000000000..6549823089
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/DtmfSender.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ DtmfSenderInterface. */
+public class DtmfSender {
+ private long nativeDtmfSender;
+
+ public DtmfSender(long nativeDtmfSender) {
+ this.nativeDtmfSender = nativeDtmfSender;
+ }
+
+ /**
+ * @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
+ */
+ public boolean canInsertDtmf() {
+ checkDtmfSenderExists();
+ return nativeCanInsertDtmf(nativeDtmfSender);
+ }
+
+ /**
+ * Queues a task that sends the provided DTMF tones.
+ * <p>
+ * If insertDtmf is called on the same object while an existing task for this
+ * object to generate DTMF is still running, the previous task is canceled.
+ *
+ * @param tones This parameter is treated as a series of characters. The characters 0
+ * through 9, A through D, #, and * generate the associated DTMF tones. The
+ * characters a to d are equivalent to A to D. The character ',' indicates a
+ * delay of 2 seconds before processing the next character in the tones
+ * parameter. Unrecognized characters are ignored.
+ * @param duration Indicates the duration in ms to use for each character passed in the tones
+ * parameter. The duration cannot be more than 6000 or less than 70.
+ * @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
+ * as short as possible.
+ * @return true on success and false on failure.
+ */
+ public boolean insertDtmf(String tones, int duration, int interToneGap) {
+ checkDtmfSenderExists();
+ return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
+ }
+
+ /**
+ * @return The tones remaining to be played out.
+ */
+ public String tones() {
+ checkDtmfSenderExists();
+ return nativeTones(nativeDtmfSender);
+ }
+
+ /**
+ * @return The current tone duration value in ms. This value will be the value last set via the
+ * insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
+ */
+ public int duration() {
+ checkDtmfSenderExists();
+ return nativeDuration(nativeDtmfSender);
+ }
+
+ /**
+ * @return The current value of the between-tone gap in ms. This value will be the value last set
+ * via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
+ * called.
+ */
+ public int interToneGap() {
+ checkDtmfSenderExists();
+ return nativeInterToneGap(nativeDtmfSender);
+ }
+
+ public void dispose() {
+ checkDtmfSenderExists();
+ JniCommon.nativeReleaseRef(nativeDtmfSender);
+ nativeDtmfSender = 0;
+ }
+
+ private void checkDtmfSenderExists() {
+ if (nativeDtmfSender == 0) {
+ throw new IllegalStateException("DtmfSender has been disposed.");
+ }
+ }
+
+ private static native boolean nativeCanInsertDtmf(long dtmfSender);
+ private static native boolean nativeInsertDtmf(
+ long dtmfSender, String tones, int duration, int interToneGap);
+ private static native String nativeTones(long dtmfSender);
+ private static native int nativeDuration(long dtmfSender);
+ private static native int nativeInterToneGap(long dtmfSender);
+}
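Illustrative usage (not part of the patch): `audioSender` is assumed to be an RtpSender for an audio track, whose dtmf() accessor returns the wrapper defined above.

    // Hypothetical sketch; always gate on canInsertDtmf() first.
    DtmfSender dtmf = audioSender.dtmf();
    if (dtmf != null && dtmf.canInsertDtmf()) {
      // 100 ms per tone and a 70 ms gap, both within the documented limits.
      dtmf.insertDtmf("1234#", /* duration= */ 100, /* interToneGap= */ 70);
    }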
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase.java
new file mode 100644
index 0000000000..64771d004a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase.java
@@ -0,0 +1,255 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import javax.microedition.khronos.egl.EGL10;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+public interface EglBase {
+ // EGL wrapper for an actual EGLContext.
+ public interface Context {
+ public final static long NO_CONTEXT = 0;
+
+ /**
+ * Returns an EGL context that can be used by native code. Returns NO_CONTEXT if the method is
+ * unsupported.
+ *
+ * @note This is currently only supported for EGL 1.4 and not for EGL 1.0.
+ */
+ long getNativeEglContext();
+ }
+
+ // According to the documentation, EGL can be used from multiple threads at the same time if each
+ // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
+ // Therefore, synchronize on this global lock before calling dangerous EGL functions that might
+ // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+ public static final Object lock = new Object();
+
+ // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+ // This is similar to how GLSurfaceView does it:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+ public static final int EGL_OPENGL_ES2_BIT = 4;
+ public static final int EGL_OPENGL_ES3_BIT = 0x40;
+ // Android-specific extension.
+ public static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+ public static ConfigBuilder configBuilder() {
+ return new ConfigBuilder();
+ }
+
+ public static class ConfigBuilder {
+ private int openGlesVersion = 2;
+ private boolean hasAlphaChannel;
+ private boolean supportsPixelBuffer;
+ private boolean isRecordable;
+
+ public ConfigBuilder setOpenGlesVersion(int version) {
+ if (version < 1 || version > 3) {
+ throw new IllegalArgumentException("OpenGL ES version " + version + " not supported");
+ }
+ this.openGlesVersion = version;
+ return this;
+ }
+
+ public ConfigBuilder setHasAlphaChannel(boolean hasAlphaChannel) {
+ this.hasAlphaChannel = hasAlphaChannel;
+ return this;
+ }
+
+ public ConfigBuilder setSupportsPixelBuffer(boolean supportsPixelBuffer) {
+ this.supportsPixelBuffer = supportsPixelBuffer;
+ return this;
+ }
+
+ public ConfigBuilder setIsRecordable(boolean isRecordable) {
+ this.isRecordable = isRecordable;
+ return this;
+ }
+
+ public int[] createConfigAttributes() {
+ ArrayList<Integer> list = new ArrayList<>();
+ list.add(EGL10.EGL_RED_SIZE);
+ list.add(8);
+ list.add(EGL10.EGL_GREEN_SIZE);
+ list.add(8);
+ list.add(EGL10.EGL_BLUE_SIZE);
+ list.add(8);
+ if (hasAlphaChannel) {
+ list.add(EGL10.EGL_ALPHA_SIZE);
+ list.add(8);
+ }
+ if (openGlesVersion == 2 || openGlesVersion == 3) {
+ list.add(EGL10.EGL_RENDERABLE_TYPE);
+ list.add(openGlesVersion == 3 ? EGL_OPENGL_ES3_BIT : EGL_OPENGL_ES2_BIT);
+ }
+ if (supportsPixelBuffer) {
+ list.add(EGL10.EGL_SURFACE_TYPE);
+ list.add(EGL10.EGL_PBUFFER_BIT);
+ }
+ if (isRecordable) {
+ list.add(EGL_RECORDABLE_ANDROID);
+ list.add(1);
+ }
+ list.add(EGL10.EGL_NONE);
+
+ final int[] res = new int[list.size()];
+ for (int i = 0; i < list.size(); ++i) {
+ res[i] = list.get(i);
+ }
+ return res;
+ }
+ }
+
+ public static final int[] CONFIG_PLAIN = configBuilder().createConfigAttributes();
+ public static final int[] CONFIG_RGBA =
+ configBuilder().setHasAlphaChannel(true).createConfigAttributes();
+ public static final int[] CONFIG_PIXEL_BUFFER =
+ configBuilder().setSupportsPixelBuffer(true).createConfigAttributes();
+ public static final int[] CONFIG_PIXEL_RGBA_BUFFER = configBuilder()
+ .setHasAlphaChannel(true)
+ .setSupportsPixelBuffer(true)
+ .createConfigAttributes();
+ public static final int[] CONFIG_RECORDABLE =
+ configBuilder().setIsRecordable(true).createConfigAttributes();
+
+ static int getOpenGlesVersionFromConfig(int[] configAttributes) {
+ for (int i = 0; i < configAttributes.length - 1; ++i) {
+ if (configAttributes[i] == EGL10.EGL_RENDERABLE_TYPE) {
+ switch (configAttributes[i + 1]) {
+ case EGL_OPENGL_ES2_BIT:
+ return 2;
+ case EGL_OPENGL_ES3_BIT:
+ return 3;
+ default:
+ return 1;
+ }
+ }
+ }
+ // Default to V1 if no renderable type is specified.
+ return 1;
+ }
+
+ /**
+ * Create a new context with the specified config attributes, sharing data with `sharedContext`.
+ * If `sharedContext` is null, a root EGL 1.4 context is created.
+ */
+ public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
+ if (sharedContext == null) {
+ return createEgl14(configAttributes);
+ } else if (sharedContext instanceof EglBase14.Context) {
+ return createEgl14((EglBase14.Context) sharedContext, configAttributes);
+ } else if (sharedContext instanceof EglBase10.Context) {
+ return createEgl10((EglBase10.Context) sharedContext, configAttributes);
+ }
+ throw new IllegalArgumentException("Unrecognized Context");
+ }
+
+ /**
+ * Helper function for creating a plain root context. This function will try to create an EGL 1.4
+ * context if possible, and an EGL 1.0 context otherwise.
+ */
+ public static EglBase create() {
+ return create(null /* sharedContext */, CONFIG_PLAIN);
+ }
+
+ /**
+ * Helper function for creating a plain context, sharing data with `sharedContext`. This function
+ * will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
+ */
+ public static EglBase create(Context sharedContext) {
+ return create(sharedContext, CONFIG_PLAIN);
+ }
+
+ /** Explicitly create a root EGL 1.0 context with the specified config attributes. */
+ public static EglBase10 createEgl10(int[] configAttributes) {
+ return new EglBase10Impl(/* sharedContext= */ null, configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.0 context with the specified config attributes and shared
+ * context.
+ */
+ public static EglBase10 createEgl10(EglBase10.Context sharedContext, int[] configAttributes) {
+ return new EglBase10Impl(
+ sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.0 context with the specified config attributes
+ * and shared context.
+ */
+ public static EglBase10 createEgl10(
+ javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
+ return new EglBase10Impl(sharedContext, configAttributes);
+ }
+
+ /** Explicitly create a root EGL 1.4 context with the specified config attributes. */
+ public static EglBase14 createEgl14(int[] configAttributes) {
+ return new EglBase14Impl(/* sharedContext= */ null, configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.4 context with the specified config attributes and shared
+ * context.
+ */
+ public static EglBase14 createEgl14(EglBase14.Context sharedContext, int[] configAttributes) {
+ return new EglBase14Impl(
+ sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.4 context with the specified config attributes
+ * and shared context.
+ */
+ public static EglBase14 createEgl14(
+ android.opengl.EGLContext sharedContext, int[] configAttributes) {
+ return new EglBase14Impl(sharedContext, configAttributes);
+ }
+
+ void createSurface(Surface surface);
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ void createSurface(SurfaceTexture surfaceTexture);
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ void createDummyPbufferSurface();
+
+ void createPbufferSurface(int width, int height);
+
+ Context getEglBaseContext();
+
+ boolean hasSurface();
+
+ int surfaceWidth();
+
+ int surfaceHeight();
+
+ void releaseSurface();
+
+ void release();
+
+ void makeCurrent();
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ void detachCurrent();
+
+ void swapBuffers();
+
+ void swapBuffers(long presentationTimeStampNs);
+}
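A minimal offscreen sketch of this interface (not part of the patch), using only methods declared above:

    // Hypothetical usage; release() must be called to avoid leaking EGL resources.
    EglBase eglBase = EglBase.create();
    eglBase.createDummyPbufferSurface();
    eglBase.makeCurrent();
    // ... issue GL calls on this thread ...
    eglBase.detachCurrent();
    eglBase.release();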
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase10.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase10.java
new file mode 100644
index 0000000000..f8b0a3c0d0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase10.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/** EGL 1.0 implementation of EglBase. */
+public interface EglBase10 extends EglBase {
+ interface Context extends EglBase.Context {
+ EGLContext getRawContext();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase14.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase14.java
new file mode 100644
index 0000000000..69c89c44dc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglBase14.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.EGLContext;
+
+/** EGL 1.4 implementation of EglBase. */
+public interface EglBase14 extends EglBase {
+ interface Context extends EglBase.Context {
+ EGLContext getRawContext();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EglRenderer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglRenderer.java
new file mode 100644
index 0000000000..5ab0868ef3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EglRenderer.java
@@ -0,0 +1,787 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.os.Message;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
+ * be used as a helper class for rendering on SurfaceViews and TextureViews.
+ */
+public class EglRenderer implements VideoSink {
+ private static final String TAG = "EglRenderer";
+ private static final long LOG_INTERVAL_SEC = 4;
+
+ public interface FrameListener { void onFrame(Bitmap frame); }
+
+ /** Callback for clients to be notified about errors encountered during rendering. */
+ public static interface ErrorCallback {
+ /** Called if GLES20.GL_OUT_OF_MEMORY is encountered during rendering. */
+ void onGlOutOfMemory();
+ }
+
+ private static class FrameListenerAndParams {
+ public final FrameListener listener;
+ public final float scale;
+ public final RendererCommon.GlDrawer drawer;
+ public final boolean applyFpsReduction;
+
+ public FrameListenerAndParams(FrameListener listener, float scale,
+ RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
+ this.listener = listener;
+ this.scale = scale;
+ this.drawer = drawer;
+ this.applyFpsReduction = applyFpsReduction;
+ }
+ }
+
+ private class EglSurfaceCreation implements Runnable {
+ private Object surface;
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void setSurface(Object surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void run() {
+ if (surface != null && eglBase != null && !eglBase.hasSurface()) {
+ if (surface instanceof Surface) {
+ eglBase.createSurface((Surface) surface);
+ } else if (surface instanceof SurfaceTexture) {
+ eglBase.createSurface((SurfaceTexture) surface);
+ } else {
+ throw new IllegalStateException("Invalid surface: " + surface);
+ }
+ eglBase.makeCurrent();
+ // Necessary for YUV frames with odd width.
+ GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+ }
+ }
+ }
+
+ /**
+ * Handler that triggers a callback when an uncaught exception happens when handling a message.
+ */
+ private static class HandlerWithExceptionCallback extends Handler {
+ private final Runnable exceptionCallback;
+
+ public HandlerWithExceptionCallback(Looper looper, Runnable exceptionCallback) {
+ super(looper);
+ this.exceptionCallback = exceptionCallback;
+ }
+
+ @Override
+ public void dispatchMessage(Message msg) {
+ try {
+ super.dispatchMessage(msg);
+ } catch (Exception e) {
+ Logging.e(TAG, "Exception on EglRenderer thread", e);
+ exceptionCallback.run();
+ throw e;
+ }
+ }
+ }
+
+ protected final String name;
+
+ // `renderThreadHandler` is a handler for communicating with `renderThread`, and is synchronized
+ // on `handlerLock`.
+ private final Object handlerLock = new Object();
+ @Nullable private Handler renderThreadHandler;
+
+ private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
+
+ private volatile ErrorCallback errorCallback;
+
+ // Variables for fps reduction.
+ private final Object fpsReductionLock = new Object();
+ // Time for when next frame should be rendered.
+ private long nextFrameTimeNs;
+ // Minimum duration between frames when fps reduction is active, or -1 if video is completely
+ // paused.
+ private long minRenderPeriodNs;
+
+ // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only
+ // accessed from the render thread.
+ @Nullable private EglBase eglBase;
+ private final VideoFrameDrawer frameDrawer;
+ @Nullable private RendererCommon.GlDrawer drawer;
+ private boolean usePresentationTimeStamp;
+ private final Matrix drawMatrix = new Matrix();
+
+ // Pending frame to render. Serves as a queue with size 1. Synchronized on `frameLock`.
+ private final Object frameLock = new Object();
+ @Nullable private VideoFrame pendingFrame;
+
+ // These variables are synchronized on `layoutLock`.
+ private final Object layoutLock = new Object();
+ private float layoutAspectRatio;
+ // If true, mirrors the video stream horizontally.
+ private boolean mirrorHorizontally;
+ // If true, mirrors the video stream vertically.
+ private boolean mirrorVertically;
+
+ // These variables are synchronized on `statisticsLock`.
+ private final Object statisticsLock = new Object();
+ // Total number of video frames received via onFrame().
+ private int framesReceived;
+ // Number of video frames dropped by onFrame() because the previous frame has not been
+ // rendered yet.
+ private int framesDropped;
+ // Number of rendered video frames.
+ private int framesRendered;
+ // Start time for counting these statistics, or 0 if we haven't started measuring yet.
+ private long statisticsStartTimeNs;
+ // Time in ns spent in renderFrameOnRenderThread() function.
+ private long renderTimeNs;
+ // Time in ns spent by the render thread in the swapBuffers() function.
+ private long renderSwapBufferTimeNs;
+
+ // Used for bitmap capturing.
+ private final GlTextureFrameBuffer bitmapTextureFramebuffer =
+ new GlTextureFrameBuffer(GLES20.GL_RGBA);
+
+ private final Runnable logStatisticsRunnable = new Runnable() {
+ @Override
+ public void run() {
+ logStatistics();
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.removeCallbacks(logStatisticsRunnable);
+ renderThreadHandler.postDelayed(
+ logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+ }
+ }
+ }
+ };
+
+ private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
+
+ /**
+ * Standard constructor. The name will be used for the render thread name and included when
+ * logging. In order to render something, you must first call init() and createEglSurface.
+ */
+ public EglRenderer(String name) {
+ this(name, new VideoFrameDrawer());
+ }
+
+ public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
+ this.name = name;
+ this.frameDrawer = videoFrameDrawer;
+ }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
+ * set with the frame timestamps, which specifies desired presentation time and might be useful
+ * for e.g. syncing audio and video.
+ */
+ public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ throw new IllegalStateException(name + "Already initialized");
+ }
+ logD("Initializing EglRenderer");
+ this.drawer = drawer;
+ this.usePresentationTimeStamp = usePresentationTimeStamp;
+
+ final HandlerThread renderThread = new HandlerThread(name + "EglRenderer");
+ renderThread.start();
+ renderThreadHandler =
+ new HandlerWithExceptionCallback(renderThread.getLooper(), new Runnable() {
+ @Override
+ public void run() {
+ synchronized (handlerLock) {
+ renderThreadHandler = null;
+ }
+ }
+ });
+ // Create EGL context on the newly created render thread. It should be possible to create the
+ // context on this thread and make it current on the render thread, but this causes failure on
+ // some Marvell-based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350.
+ ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
+ // If sharedContext is null, then texture frames are disabled. This is typically for old
+ // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
+ // caused trouble on some weird devices.
+ if (sharedContext == null) {
+ logD("EglBase10.create context");
+ eglBase = EglBase.createEgl10(configAttributes);
+ } else {
+ logD("EglBase.create shared context");
+ eglBase = EglBase.create(sharedContext, configAttributes);
+ }
+ });
+ renderThreadHandler.post(eglSurfaceCreationRunnable);
+ final long currentTimeNs = System.nanoTime();
+ resetStatistics(currentTimeNs);
+ renderThreadHandler.postDelayed(
+ logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+ }
+ }
+
+ /**
+ * Same as above with usePresentationTimeStamp set to false.
+ *
+ * @see #init(EglBase.Context, int[], RendererCommon.GlDrawer, boolean)
+ */
+ public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ init(sharedContext, configAttributes, drawer, /* usePresentationTimeStamp= */ false);
+ }
+
+ public void createEglSurface(Surface surface) {
+ createEglSurfaceInternal(surface);
+ }
+
+ public void createEglSurface(SurfaceTexture surfaceTexture) {
+ createEglSurfaceInternal(surfaceTexture);
+ }
+
+ private void createEglSurfaceInternal(Object surface) {
+ eglSurfaceCreationRunnable.setSurface(surface);
+ postToRenderThread(eglSurfaceCreationRunnable);
+ }
+
+ /**
+ * Block until any pending frame is returned and all GL resources released, even if an interrupt
+ * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+ * should be called before the Activity is destroyed, while the EGLContext is still valid. If you
+ * don't call this function, the GL resources might leak.
+ */
+ public void release() {
+ logD("Releasing.");
+ final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ logD("Already released");
+ return;
+ }
+ renderThreadHandler.removeCallbacks(logStatisticsRunnable);
+ // Release EGL and GL resources on render thread.
+ renderThreadHandler.postAtFrontOfQueue(() -> {
+ // Detach current shader program.
+ synchronized (EglBase.lock) {
+ GLES20.glUseProgram(/* program= */ 0);
+ }
+ if (drawer != null) {
+ drawer.release();
+ drawer = null;
+ }
+ frameDrawer.release();
+ bitmapTextureFramebuffer.release();
+ if (eglBase != null) {
+ logD("eglBase detach and release.");
+ eglBase.detachCurrent();
+ eglBase.release();
+ eglBase = null;
+ }
+ frameListeners.clear();
+ eglCleanupBarrier.countDown();
+ });
+ final Looper renderLooper = renderThreadHandler.getLooper();
+ // TODO(magjed): Replace this post() with renderLooper.quitSafely() when API support >= 18.
+ renderThreadHandler.post(() -> {
+ logD("Quitting render thread.");
+ renderLooper.quit();
+ });
+ // Don't accept any more frames or messages to the render thread.
+ renderThreadHandler = null;
+ }
+ // Make sure the EGL/GL cleanup posted above is executed.
+ ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
+ synchronized (frameLock) {
+ if (pendingFrame != null) {
+ pendingFrame.release();
+ pendingFrame = null;
+ }
+ }
+ logD("Releasing done.");
+ }
+
+ /**
+ * Reset the statistics logged in logStatistics().
+ */
+ private void resetStatistics(long currentTimeNs) {
+ synchronized (statisticsLock) {
+ statisticsStartTimeNs = currentTimeNs;
+ framesReceived = 0;
+ framesDropped = 0;
+ framesRendered = 0;
+ renderTimeNs = 0;
+ renderSwapBufferTimeNs = 0;
+ }
+ }
+
+ public void printStackTrace() {
+ synchronized (handlerLock) {
+ final Thread renderThread =
+ (renderThreadHandler == null) ? null : renderThreadHandler.getLooper().getThread();
+ if (renderThread != null) {
+ final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
+ if (renderStackTrace.length > 0) {
+ logW("EglRenderer stack trace:");
+ for (StackTraceElement traceElem : renderStackTrace) {
+ logW(traceElem.toString());
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Set if the video stream should be mirrored horizontally or not.
+ */
+ public void setMirror(final boolean mirror) {
+ logD("setMirrorHorizontally: " + mirror);
+ synchronized (layoutLock) {
+ this.mirrorHorizontally = mirror;
+ }
+ }
+
+ /**
+ * Set if the video stream should be mirrored vertically or not.
+ */
+ public void setMirrorVertically(final boolean mirrorVertically) {
+ logD("setMirrorVertically: " + mirrorVertically);
+ synchronized (layoutLock) {
+ this.mirrorVertically = mirrorVertically;
+ }
+ }
+
+ /**
+ * Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
+ * Set this to 0 to disable cropping.
+ */
+ public void setLayoutAspectRatio(float layoutAspectRatio) {
+ logD("setLayoutAspectRatio: " + layoutAspectRatio);
+ synchronized (layoutLock) {
+ this.layoutAspectRatio = layoutAspectRatio;
+ }
+ }
+
+ /**
+ * Limit render framerate.
+ *
+ * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+ * reduction.
+ */
+ public void setFpsReduction(float fps) {
+ logD("setFpsReduction: " + fps);
+ synchronized (fpsReductionLock) {
+ final long previousRenderPeriodNs = minRenderPeriodNs;
+ if (fps <= 0) {
+ minRenderPeriodNs = Long.MAX_VALUE;
+ } else {
+ minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
+ }
+ if (minRenderPeriodNs != previousRenderPeriodNs) {
+ // Fps reduction changed - reset frame time.
+ nextFrameTimeNs = System.nanoTime();
+ }
+ }
+ }
+
+ public void disableFpsReduction() {
+ setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
+ }
+
+ public void pauseVideo() {
+ setFpsReduction(0 /* fps */);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received. This version uses
+ * the drawer of the EglRenderer that was passed in init.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ */
+ public void addFrameListener(final FrameListener listener, final float scale) {
+ addFrameListener(listener, scale, null, false /* applyFpsReduction */);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ * @param drawerParam Custom drawer to use for this frame listener or null to use the default one.
+ */
+ public void addFrameListener(
+ final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
+ addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ * @param drawerParam Custom drawer to use for this frame listener or null to use the default one.
+ * @param applyFpsReduction This callback will not be called for frames that have been dropped by
+ * FPS reduction.
+ */
+ public void addFrameListener(final FrameListener listener, final float scale,
+ @Nullable final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
+ postToRenderThread(() -> {
+ final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
+ frameListeners.add(
+ new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
+ });
+ }
+
+ /**
+ * Remove any pending callback that was added with addFrameListener. If the callback is not in
+ * the queue, nothing happens. It is ensured that the callback won't be called after this method
+ * returns.
+ *
+ * @param listener The callback to remove.
+ */
+ public void removeFrameListener(final FrameListener listener) {
+ final CountDownLatch latch = new CountDownLatch(1);
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ return;
+ }
+ if (Thread.currentThread() == renderThreadHandler.getLooper().getThread()) {
+ throw new RuntimeException("removeFrameListener must not be called on the render thread.");
+ }
+ postToRenderThread(() -> {
+ latch.countDown();
+ final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
+ while (iter.hasNext()) {
+ if (iter.next().listener == listener) {
+ iter.remove();
+ }
+ }
+ });
+ }
+ ThreadUtils.awaitUninterruptibly(latch);
+ }
+
+ /** Can be set in order to be notified about errors encountered during rendering. */
+ public void setErrorCallback(ErrorCallback errorCallback) {
+ this.errorCallback = errorCallback;
+ }
+
+ // VideoSink interface.
+ @Override
+ public void onFrame(VideoFrame frame) {
+ synchronized (statisticsLock) {
+ ++framesReceived;
+ }
+ final boolean dropOldFrame;
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ logD("Dropping frame - Not initialized or already released.");
+ return;
+ }
+ synchronized (frameLock) {
+ dropOldFrame = (pendingFrame != null);
+ if (dropOldFrame) {
+ pendingFrame.release();
+ }
+ pendingFrame = frame;
+ pendingFrame.retain();
+ renderThreadHandler.post(this::renderFrameOnRenderThread);
+ }
+ }
+ if (dropOldFrame) {
+ synchronized (statisticsLock) {
+ ++framesDropped;
+ }
+ }
+ }
+
+ /**
+ * Release EGL surface. This function will block until the EGL surface is released.
+ */
+ public void releaseEglSurface(final Runnable completionCallback) {
+ // Ensure that the render thread is no longer touching the Surface before returning from this
+ // function.
+ eglSurfaceCreationRunnable.setSurface(null /* surface */);
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.removeCallbacks(eglSurfaceCreationRunnable);
+ renderThreadHandler.postAtFrontOfQueue(() -> {
+ if (eglBase != null) {
+ eglBase.detachCurrent();
+ eglBase.releaseSurface();
+ }
+ completionCallback.run();
+ });
+ return;
+ }
+ }
+ completionCallback.run();
+ }
+
+ /**
+ * Private helper function to post tasks safely.
+ */
+ private void postToRenderThread(Runnable runnable) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.post(runnable);
+ }
+ }
+ }
+
+ private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
+ if (eglBase != null && eglBase.hasSurface()) {
+ logD("clearSurface");
+ GLES20.glClearColor(r, g, b, a);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+ }
+ }
+
+ /**
+ * Post a task to clear the surface to a transparent uniform color.
+ */
+ public void clearImage() {
+ clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+ }
+
+ /**
+ * Post a task to clear the surface to a specific color.
+ */
+ public void clearImage(final float r, final float g, final float b, final float a) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ return;
+ }
+ renderThreadHandler.postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
+ }
+ }
+
+ /**
+ * Renders and releases `pendingFrame`.
+ */
+ private void renderFrameOnRenderThread() {
+ // Fetch and render `pendingFrame`.
+ final VideoFrame frame;
+ synchronized (frameLock) {
+ if (pendingFrame == null) {
+ return;
+ }
+ frame = pendingFrame;
+ pendingFrame = null;
+ }
+ if (eglBase == null || !eglBase.hasSurface()) {
+ logD("Dropping frame - No surface");
+ frame.release();
+ return;
+ }
+ // Check if fps reduction is active.
+ final boolean shouldRenderFrame;
+ synchronized (fpsReductionLock) {
+ if (minRenderPeriodNs == Long.MAX_VALUE) {
+ // Rendering is paused.
+ shouldRenderFrame = false;
+ } else if (minRenderPeriodNs <= 0) {
+ // FPS reduction is disabled.
+ shouldRenderFrame = true;
+ } else {
+ final long currentTimeNs = System.nanoTime();
+ if (currentTimeNs < nextFrameTimeNs) {
+ logD("Skipping frame rendering - fps reduction is active.");
+ shouldRenderFrame = false;
+ } else {
+ nextFrameTimeNs += minRenderPeriodNs;
+ // The time for the next frame should always be in the future.
+ nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
+ shouldRenderFrame = true;
+ }
+ }
+ }
+
+ final long startTimeNs = System.nanoTime();
+
+ final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
+ final float drawnAspectRatio;
+ synchronized (layoutLock) {
+ drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
+ }
+
+ final float scaleX;
+ final float scaleY;
+
+ if (frameAspectRatio > drawnAspectRatio) {
+ scaleX = drawnAspectRatio / frameAspectRatio;
+ scaleY = 1f;
+ } else {
+ scaleX = 1f;
+ scaleY = frameAspectRatio / drawnAspectRatio;
+ }
+
+ drawMatrix.reset();
+ drawMatrix.preTranslate(0.5f, 0.5f);
+ drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+ drawMatrix.preScale(scaleX, scaleY);
+ drawMatrix.preTranslate(-0.5f, -0.5f);
+
+ try {
+ if (shouldRenderFrame) {
+ GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
+ eglBase.surfaceWidth(), eglBase.surfaceHeight());
+
+ final long swapBuffersStartTimeNs = System.nanoTime();
+ if (usePresentationTimeStamp) {
+ eglBase.swapBuffers(frame.getTimestampNs());
+ } else {
+ eglBase.swapBuffers();
+ }
+
+ final long currentTimeNs = System.nanoTime();
+ synchronized (statisticsLock) {
+ ++framesRendered;
+ renderTimeNs += (currentTimeNs - startTimeNs);
+ renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
+ }
+ }
+
+ notifyCallbacks(frame, shouldRenderFrame);
+ } catch (GlUtil.GlOutOfMemoryException e) {
+ logE("Error while drawing frame", e);
+ final ErrorCallback errorCallback = this.errorCallback;
+ if (errorCallback != null) {
+ errorCallback.onGlOutOfMemory();
+ }
+ // Attempt to free up some resources.
+ drawer.release();
+ frameDrawer.release();
+ bitmapTextureFramebuffer.release();
+ // Continue here on purpose and retry on the next frame. In the worst case, this is a
+ // continuous problem and no more frames will be drawn.
+ } finally {
+ frame.release();
+ }
+ }
+
+ private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
+ if (frameListeners.isEmpty())
+ return;
+
+ drawMatrix.reset();
+ drawMatrix.preTranslate(0.5f, 0.5f);
+ drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+ drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
+ drawMatrix.preTranslate(-0.5f, -0.5f);
+
+ Iterator<FrameListenerAndParams> it = frameListeners.iterator();
+ while (it.hasNext()) {
+ FrameListenerAndParams listenerAndParams = it.next();
+ if (!wasRendered && listenerAndParams.applyFpsReduction) {
+ continue;
+ }
+ it.remove();
+
+ final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
+ final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
+
+ if (scaledWidth == 0 || scaledHeight == 0) {
+ listenerAndParams.listener.onFrame(null);
+ continue;
+ }
+
+ bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+ GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
+
+ GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
+ 0 /* viewportY */, scaledWidth, scaledHeight);
+
+ final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
+ GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
+ GLES20.glReadPixels(
+ 0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
+
+ final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(bitmapBuffer);
+ listenerAndParams.listener.onFrame(bitmap);
+ }
+ }
+
+ private String averageTimeAsString(long sumTimeNs, int count) {
+ return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " us";
+ }
+
+ private void logStatistics() {
+ final DecimalFormat fpsFormat = new DecimalFormat("#.0");
+ final long currentTimeNs = System.nanoTime();
+ synchronized (statisticsLock) {
+ final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
+ if (elapsedTimeNs <= 0 || (minRenderPeriodNs == Long.MAX_VALUE && framesReceived == 0)) {
+ return;
+ }
+ final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
+ logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
+ + " Frames received: " + framesReceived + "."
+ + " Dropped: " + framesDropped + "."
+ + " Rendered: " + framesRendered + "."
+ + " Render fps: " + fpsFormat.format(renderFps) + "."
+ + " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
+ + " Average swapBuffer time: "
+ + averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
+ resetStatistics(currentTimeNs);
+ }
+ }
+
+ private void logE(String string, Throwable e) {
+ Logging.e(TAG, name + string, e);
+ }
+
+ private void logD(String string) {
+ Logging.d(TAG, name + string);
+ }
+
+ private void logW(String string) {
+ Logging.w(TAG, name + string);
+ }
+}
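
For reference, the matrix set-up in the render path above is easiest to read in isolation: every operation is applied around the texture-coordinate center (0.5, 0.5), mirroring first and then shrinking one axis to crop the frame to the layout's aspect ratio. A minimal standalone sketch with illustrative names that are not part of the SDK:

    import android.graphics.Matrix;

    final class DrawMatrixSketch {
      static Matrix buildDrawMatrix(
          float frameAspect, float layoutAspect, boolean mirrorX, boolean mirrorY) {
        final float scaleX = frameAspect > layoutAspect ? layoutAspect / frameAspect : 1f;
        final float scaleY = frameAspect > layoutAspect ? 1f : frameAspect / layoutAspect;
        final Matrix m = new Matrix();
        m.preTranslate(0.5f, 0.5f); // move origin to the texture center
        m.preScale(mirrorX ? -1f : 1f, mirrorY ? -1f : 1f);
        m.preScale(scaleX, scaleY); // shrink one axis to crop to the layout aspect ratio
        m.preTranslate(-0.5f, -0.5f); // move origin back
        return m;
      }
    }
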
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/EncodedImage.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/EncodedImage.java
new file mode 100644
index 0000000000..a6eef67da8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/EncodedImage.java
@@ -0,0 +1,183 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * An encoded frame from a video stream. Used as an input for decoders and as an output for
+ * encoders.
+ */
+public class EncodedImage implements RefCounted {
+ // Must be kept in sync with common_types.h FrameType.
+ public enum FrameType {
+ EmptyFrame(0),
+ VideoFrameKey(3),
+ VideoFrameDelta(4);
+
+ private final int nativeIndex;
+
+ private FrameType(int nativeIndex) {
+ this.nativeIndex = nativeIndex;
+ }
+
+ public int getNative() {
+ return nativeIndex;
+ }
+
+ @CalledByNative("FrameType")
+ static FrameType fromNativeIndex(int nativeIndex) {
+ for (FrameType type : FrameType.values()) {
+ if (type.getNative() == nativeIndex) {
+ return type;
+ }
+ }
+ throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
+ }
+ }
+
+ private final RefCountDelegate refCountDelegate;
+ public final ByteBuffer buffer;
+ public final int encodedWidth;
+ public final int encodedHeight;
+ public final long captureTimeMs; // Deprecated
+ public final long captureTimeNs;
+ public final FrameType frameType;
+ public final int rotation;
+ public final @Nullable Integer qp;
+
+ // TODO(bugs.webrtc.org/9378): Use retain and release from jni code.
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @CalledByNative
+ private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int encodedWidth,
+ int encodedHeight, long captureTimeNs, FrameType frameType, int rotation,
+ @Nullable Integer qp) {
+ this.buffer = buffer;
+ this.encodedWidth = encodedWidth;
+ this.encodedHeight = encodedHeight;
+ this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
+ this.captureTimeNs = captureTimeNs;
+ this.frameType = frameType;
+ this.rotation = rotation;
+ this.qp = qp;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @CalledByNative
+ private ByteBuffer getBuffer() {
+ return buffer;
+ }
+
+ @CalledByNative
+ private int getEncodedWidth() {
+ return encodedWidth;
+ }
+
+ @CalledByNative
+ private int getEncodedHeight() {
+ return encodedHeight;
+ }
+
+ @CalledByNative
+ private long getCaptureTimeNs() {
+ return captureTimeNs;
+ }
+
+ @CalledByNative
+ private int getFrameType() {
+ return frameType.getNative();
+ }
+
+ @CalledByNative
+ private int getRotation() {
+ return rotation;
+ }
+
+ @CalledByNative
+ private @Nullable Integer getQp() {
+ return qp;
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ public static class Builder {
+ private ByteBuffer buffer;
+ private @Nullable Runnable releaseCallback;
+ private int encodedWidth;
+ private int encodedHeight;
+ private long captureTimeNs;
+ private EncodedImage.FrameType frameType;
+ private int rotation;
+ private @Nullable Integer qp;
+
+ private Builder() {}
+
+ public Builder setBuffer(ByteBuffer buffer, @Nullable Runnable releaseCallback) {
+ this.buffer = buffer;
+ this.releaseCallback = releaseCallback;
+ return this;
+ }
+
+ public Builder setEncodedWidth(int encodedWidth) {
+ this.encodedWidth = encodedWidth;
+ return this;
+ }
+
+ public Builder setEncodedHeight(int encodedHeight) {
+ this.encodedHeight = encodedHeight;
+ return this;
+ }
+
+ @Deprecated
+ public Builder setCaptureTimeMs(long captureTimeMs) {
+ this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
+ return this;
+ }
+
+ public Builder setCaptureTimeNs(long captureTimeNs) {
+ this.captureTimeNs = captureTimeNs;
+ return this;
+ }
+
+ public Builder setFrameType(EncodedImage.FrameType frameType) {
+ this.frameType = frameType;
+ return this;
+ }
+
+ public Builder setRotation(int rotation) {
+ this.rotation = rotation;
+ return this;
+ }
+
+ public Builder setQp(@Nullable Integer qp) {
+ this.qp = qp;
+ return this;
+ }
+
+ public EncodedImage createEncodedImage() {
+ return new EncodedImage(buffer, releaseCallback, encodedWidth, encodedHeight, captureTimeNs,
+ frameType, rotation, qp);
+ }
+ }
+}
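
Application code constructs instances through the builder shown above; a minimal sketch, where the dimensions and the null QP are illustrative placeholders:

    import java.nio.ByteBuffer;

    static EncodedImage buildKeyFrame(ByteBuffer encodedPayload, long captureTimeNs) {
      return EncodedImage.builder()
          .setBuffer(encodedPayload, /* releaseCallback= */ null)
          .setEncodedWidth(640) // illustrative dimensions
          .setEncodedHeight(480)
          .setCaptureTimeNs(captureTimeNs)
          .setFrameType(EncodedImage.FrameType.VideoFrameKey)
          .setRotation(0)
          .setQp(null) // quantization parameter unknown
          .createEncodedImage();
    }
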
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java
new file mode 100644
index 0000000000..6d39390f72
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Factory for creating webrtc::FecControllerFactory instances.
+ */
+public interface FecControllerFactoryFactoryInterface {
+ /**
+ * Dynamically allocates a webrtc::FecControllerFactory instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ public long createNative();
+}
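
An implementation is just a bridge to JNI; a sketch, where the native method is hypothetical and must be provided by the application's own JNI code. The same shape applies to the FrameDecryptor and FrameEncryptor interfaces below:

    class MyFecControllerFactoryFactory implements FecControllerFactoryFactoryInterface {
      @Override
      public long createNative() {
        // Ownership of the returned webrtc::FecControllerFactory passes to the caller.
        return nativeCreateMyFecControllerFactory();
      }

      // Hypothetical JNI entry point, not part of the SDK.
      private static native long nativeCreateMyFecControllerFactory();
    }
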
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java
new file mode 100644
index 0000000000..8270367970
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/FileVideoCapturer.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.Charset;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.TimeUnit;
+
+public class FileVideoCapturer implements VideoCapturer {
+ private interface VideoReader {
+ VideoFrame getNextFrame();
+ void close();
+ }
+
+ /**
+ * Read video data from file for the .y4m container.
+ */
+ @SuppressWarnings("StringSplitter")
+ private static class VideoReaderY4M implements VideoReader {
+ private static final String TAG = "VideoReaderY4M";
+    private static final String Y4M_FRAME_DELIMITER = "FRAME";
+    private static final int FRAME_DELIMITER_LENGTH = Y4M_FRAME_DELIMITER.length() + 1;
+
+ private final int frameWidth;
+ private final int frameHeight;
+    // Position of the first byte after the Y4M header.
+ private final long videoStart;
+ private final RandomAccessFile mediaFile;
+ private final FileChannel mediaFileChannel;
+
+ public VideoReaderY4M(String file) throws IOException {
+ mediaFile = new RandomAccessFile(file, "r");
+ mediaFileChannel = mediaFile.getChannel();
+ StringBuilder builder = new StringBuilder();
+ for (;;) {
+ int c = mediaFile.read();
+ if (c == -1) {
+ // End of file reached.
+ throw new RuntimeException("Found end of file before end of header for file: " + file);
+ }
+ if (c == '\n') {
+ // End of header found.
+ break;
+ }
+ builder.append((char) c);
+ }
+ videoStart = mediaFileChannel.position();
+ String header = builder.toString();
+ String[] headerTokens = header.split("[ ]");
+ int w = 0;
+ int h = 0;
+ String colorSpace = "";
+ for (String tok : headerTokens) {
+ char c = tok.charAt(0);
+ switch (c) {
+ case 'W':
+ w = Integer.parseInt(tok.substring(1));
+ break;
+ case 'H':
+ h = Integer.parseInt(tok.substring(1));
+ break;
+ case 'C':
+ colorSpace = tok.substring(1);
+ break;
+ }
+ }
+ Logging.d(TAG, "Color space: " + colorSpace);
+ if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
+ throw new IllegalArgumentException(
+ "Does not support any other color space than I420 or I420mpeg2");
+ }
+ if ((w % 2) == 1 || (h % 2) == 1) {
+ throw new IllegalArgumentException("Does not support odd width or height");
+ }
+ frameWidth = w;
+ frameHeight = h;
+ Logging.d(TAG, "frame dim: (" + w + ", " + h + ")");
+ }
+
+ @Override
+ public VideoFrame getNextFrame() {
+ final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+ final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
+ final ByteBuffer dataY = buffer.getDataY();
+ final ByteBuffer dataU = buffer.getDataU();
+ final ByteBuffer dataV = buffer.getDataV();
+ final int chromaHeight = (frameHeight + 1) / 2;
+ final int sizeY = frameHeight * buffer.getStrideY();
+ final int sizeU = chromaHeight * buffer.getStrideU();
+ final int sizeV = chromaHeight * buffer.getStrideV();
+
+ try {
+        ByteBuffer frameDelim = ByteBuffer.allocate(FRAME_DELIMITER_LENGTH);
+        if (mediaFileChannel.read(frameDelim) < FRAME_DELIMITER_LENGTH) {
+          // We reached the end of the file; loop back to the start.
+          mediaFileChannel.position(videoStart);
+          if (mediaFileChannel.read(frameDelim) < FRAME_DELIMITER_LENGTH) {
+            throw new RuntimeException("Error looping video");
+          }
+        }
+        String frameDelimStr = new String(frameDelim.array(), Charset.forName("US-ASCII"));
+        if (!frameDelimStr.equals(Y4M_FRAME_DELIMITER + "\n")) {
+          throw new RuntimeException(
+              "Frames should be delimited by FRAME plus newline, found delimiter was: '"
+                  + frameDelimStr + "'");
+ }
+
+ mediaFileChannel.read(dataY);
+ mediaFileChannel.read(dataU);
+ mediaFileChannel.read(dataV);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+
+ return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs);
+ }
+
+ @Override
+ public void close() {
+ try {
+ // Closing a file also closes the channel.
+ mediaFile.close();
+ } catch (IOException e) {
+ Logging.e(TAG, "Problem closing file", e);
+ }
+ }
+ }
+
+ private final static String TAG = "FileVideoCapturer";
+ private final VideoReader videoReader;
+ private CapturerObserver capturerObserver;
+ private final Timer timer = new Timer();
+
+ private final TimerTask tickTask = new TimerTask() {
+ @Override
+ public void run() {
+ tick();
+ }
+ };
+
+ public FileVideoCapturer(String inputFile) throws IOException {
+ try {
+ videoReader = new VideoReaderY4M(inputFile);
+ } catch (IOException e) {
+ Logging.d(TAG, "Could not open video file: " + inputFile);
+ throw e;
+ }
+ }
+
+ public void tick() {
+ VideoFrame videoFrame = videoReader.getNextFrame();
+ capturerObserver.onFrameCaptured(videoFrame);
+ videoFrame.release();
+ }
+
+ @Override
+ public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ CapturerObserver capturerObserver) {
+ this.capturerObserver = capturerObserver;
+ }
+
+ @Override
+ public void startCapture(int width, int height, int framerate) {
+ timer.schedule(tickTask, 0, 1000 / framerate);
+ }
+
+ @Override
+ public void stopCapture() throws InterruptedException {
+ timer.cancel();
+ }
+
+ @Override
+ public void changeCaptureFormat(int width, int height, int framerate) {
+ // Empty on purpose
+ }
+
+ @Override
+ public void dispose() {
+ videoReader.close();
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return false;
+ }
+}
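
Typical usage, assuming an application Context and a CapturerObserver (for example the one backing a VideoSource) are available. Note that this implementation only stores the observer in initialize() and only uses the framerate argument of startCapture():

    FileVideoCapturer capturer = new FileVideoCapturer("/sdcard/test.y4m"); // throws IOException
    capturer.initialize(/* surfaceTextureHelper= */ null, applicationContext, capturerObserver);
    capturer.startCapture(/* width= */ 640, /* height= */ 480, /* framerate= */ 30);
    // ... later ...
    capturer.stopCapture(); // declared to throw InterruptedException
    capturer.dispose();
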
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameDecryptor.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameDecryptor.java
new file mode 100644
index 0000000000..2932f3d94a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameDecryptor.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameDecryptor interface allows Java API users to provide a
+ * pointer to their native implementation of the FrameDecryptorInterface.
+ * FrameDecryptors are extremely performance sensitive as they must process all
+ * incoming video and audio frames. For this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameDecryptor {
+ /**
+ * @return A FrameDecryptorInterface pointer.
+ */
+ long getNativeFrameDecryptor();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameEncryptor.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameEncryptor.java
new file mode 100644
index 0000000000..bc81223f21
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/FrameEncryptor.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameEncryptor interface allows Java API users to provide a pointer to
+ * their native implementation of the FrameEncryptorInterface.
+ * FrameEncryptors are extremely performance sensitive as they must process all
+ * outgoing video and audio frames. For this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameEncryptor {
+ /**
+ * @return A FrameEncryptorInterface pointer.
+ */
+ long getNativeFrameEncryptor();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/GlRectDrawer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlRectDrawer.java
new file mode 100644
index 0000000000..d1fbd1b7bc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlRectDrawer.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Simplest possible GL shader that just draws frames as opaque quads. */
+public class GlRectDrawer extends GlGenericDrawer {
+ private static final String FRAGMENT_SHADER = "void main() {\n"
+ + " gl_FragColor = sample(tc);\n"
+ + "}\n";
+
+ private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
+ @Override
+ public void onNewShader(GlShader shader) {}
+
+ @Override
+ public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight) {}
+ }
+
+ public GlRectDrawer() {
+ super(FRAGMENT_SHADER, new ShaderCallbacks());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/GlShader.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlShader.java
new file mode 100644
index 0000000000..7efd8d3a95
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlShader.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.FloatBuffer;
+
+// Helper class for handling OpenGL shaders and shader programs.
+public class GlShader {
+ private static final String TAG = "GlShader";
+
+ private static int compileShader(int shaderType, String source) {
+ final int shader = GLES20.glCreateShader(shaderType);
+ if (shader == 0) {
+ throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
+ }
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compileStatus = new int[] {GLES20.GL_FALSE};
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
+ if (compileStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(
+ TAG, "Compile error " + GLES20.glGetShaderInfoLog(shader) + " in shader:\n" + source);
+ throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+ }
+ GlUtil.checkNoGLES2Error("compileShader");
+ return shader;
+ }
+
+ private int program;
+
+ public GlShader(String vertexSource, String fragmentSource) {
+ final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ program = GLES20.glCreateProgram();
+ if (program == 0) {
+ throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[] {GLES20.GL_FALSE};
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
+ throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+ }
+ // According to the documentation of glLinkProgram():
+ // "After the link operation, applications are free to modify attached shader objects, compile
+ // attached shader objects, detach shader objects, delete shader objects, and attach additional
+ // shader objects. None of these operations affects the information log or the program that is
+ // part of the program object."
+    // But in practice, detaching shaders from the program seems to break some devices. Deleting the
+    // shaders is fine, however; they are deleted automatically once no longer attached to a program.
+ GLES20.glDeleteShader(vertexShader);
+ GLES20.glDeleteShader(fragmentShader);
+ GlUtil.checkNoGLES2Error("Creating GlShader");
+ }
+
+ public int getAttribLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetAttribLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate '" + label + "' in program");
+ }
+ return location;
+ }
+
+ /**
+ * Enable and upload a vertex array for attribute `label`. The vertex data is specified in
+ * `buffer` with `dimension` number of components per vertex.
+ */
+ public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
+ setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
+ }
+
+ /**
+ * Enable and upload a vertex array for attribute `label`. The vertex data is specified in
+ * `buffer` with `dimension` number of components per vertex and specified `stride`.
+ */
+ public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = getAttribLocation(label);
+ GLES20.glEnableVertexAttribArray(location);
+ GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer);
+ GlUtil.checkNoGLES2Error("setVertexAttribArray");
+ }
+
+ public int getUniformLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetUniformLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate uniform '" + label + "' in program");
+ }
+ return location;
+ }
+
+ public void useProgram() {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ synchronized (EglBase.lock) {
+ GLES20.glUseProgram(program);
+ }
+ GlUtil.checkNoGLES2Error("glUseProgram");
+ }
+
+ public void release() {
+ Logging.d(TAG, "Deleting shader.");
+ // Delete program, automatically detaching any shaders from it.
+ if (program != -1) {
+ GLES20.glDeleteProgram(program);
+ program = -1;
+ }
+ }
+}
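
A usage sketch with illustrative shader sources and attribute names; an EGL context must be current on the calling thread, and GlUtil.createFloatBuffer() (below) builds the vertex buffer:

    String vertexSrc = "attribute vec4 in_pos;\n"
        + "void main() { gl_Position = in_pos; }\n";
    String fragmentSrc = "precision mediump float;\n"
        + "void main() { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); }\n";

    GlShader shader = new GlShader(vertexSrc, fragmentSrc); // compiles and links, throws on failure
    shader.useProgram();
    java.nio.FloatBuffer fullRect = GlUtil.createFloatBuffer(
        new float[] {-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f}); // triangle strip covering the viewport
    shader.setVertexAttribArray("in_pos", /* dimension= */ 2, fullRect);
    // ... issue draw calls here ...
    shader.release();
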
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java
new file mode 100644
index 0000000000..b906fe56e0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+/**
+ * Helper class for handling an OpenGL framebuffer with only a color attachment and no depth or
+ * stencil buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture
+ * color conversion. This class is not thread safe and must be used by a thread with an active GL
+ * context.
+ */
+// TODO(magjed): Add unittests for this class.
+public class GlTextureFrameBuffer {
+ private final int pixelFormat;
+ private int frameBufferId;
+ private int textureId;
+ private int width;
+ private int height;
+
+ /**
+ * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
+ * when calling this function. The framebuffer is not complete until setSize() is called.
+ */
+ public GlTextureFrameBuffer(int pixelFormat) {
+ switch (pixelFormat) {
+ case GLES20.GL_LUMINANCE:
+ case GLES20.GL_RGB:
+ case GLES20.GL_RGBA:
+ this.pixelFormat = pixelFormat;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
+ }
+ this.width = 0;
+ this.height = 0;
+ }
+
+ /**
+ * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
+ * EGLContext must be bound on the current thread when calling this function. Must be called at
+ * least once before using the framebuffer. May be called multiple times to change size.
+ */
+ public void setSize(int width, int height) {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
+ }
+ if (width == this.width && height == this.height) {
+ return;
+ }
+ this.width = width;
+ this.height = height;
+    // Lazily allocate the texture and framebuffer the first time setSize() is called.
+ if (textureId == 0) {
+ textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ if (frameBufferId == 0) {
+ final int frameBuffers[] = new int[1];
+ GLES20.glGenFramebuffers(1, frameBuffers, 0);
+ frameBufferId = frameBuffers[0];
+ }
+
+ // Allocate texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
+ GLES20.GL_UNSIGNED_BYTE, null);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");
+
+ // Attach the texture to the framebuffer as color attachment.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+ GLES20.glFramebufferTexture2D(
+ GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
+
+ // Check that the framebuffer is in a good state.
+ final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+ if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ throw new IllegalStateException("Framebuffer not complete, status: " + status);
+ }
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ }
+
+ public int getWidth() {
+ return width;
+ }
+
+ public int getHeight() {
+ return height;
+ }
+
+ /** Gets the OpenGL frame buffer id. This value is only valid after setSize() has been called. */
+ public int getFrameBufferId() {
+ return frameBufferId;
+ }
+
+ /** Gets the OpenGL texture id. This value is only valid after setSize() has been called. */
+ public int getTextureId() {
+ return textureId;
+ }
+
+ /**
+ * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
+ * this function. This object should not be used after this call.
+ */
+ public void release() {
+ GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
+ textureId = 0;
+ GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+ frameBufferId = 0;
+ width = 0;
+ height = 0;
+ }
+}
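
The offscreen render-and-read-back pattern in EglRenderer.notifyCallbacks() above is the intended use; condensed into a sketch (the draw call is elided, and android.opengl.GLES20 and java.nio.ByteBuffer are assumed imported):

    GlTextureFrameBuffer fbo = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    fbo.setSize(320, 240); // first call allocates the texture and framebuffer
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo.getFrameBufferId());
    // ... draw into the framebuffer here ...
    ByteBuffer pixels = ByteBuffer.allocateDirect(320 * 240 * 4);
    GLES20.glViewport(0, 0, 320, 240);
    GLES20.glReadPixels(0, 0, 320, 240, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    fbo.release();
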
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/GlUtil.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlUtil.java
new file mode 100644
index 0000000000..e2dd0c56d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/GlUtil.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+import android.opengl.GLException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL static utility functions.
+ */
+public class GlUtil {
+ private GlUtil() {}
+
+ public static class GlOutOfMemoryException extends GLException {
+ public GlOutOfMemoryException(int error, String msg) {
+ super(error, msg);
+ }
+ }
+
+ // Assert that no OpenGL ES 2.0 error has been raised.
+ public static void checkNoGLES2Error(String msg) {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ throw error == GLES20.GL_OUT_OF_MEMORY
+ ? new GlOutOfMemoryException(error, msg)
+ : new GLException(error, msg + ": GLES20 error: " + error);
+ }
+ }
+
+ public static FloatBuffer createFloatBuffer(float[] coords) {
+ // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+ ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
+ bb.order(ByteOrder.nativeOrder());
+ FloatBuffer fb = bb.asFloatBuffer();
+ fb.put(coords);
+ fb.position(0);
+ return fb;
+ }
+
+ /**
+ * Generate texture with standard parameters.
+ */
+ public static int generateTexture(int target) {
+ final int textureArray[] = new int[1];
+ GLES20.glGenTextures(1, textureArray, 0);
+ final int textureId = textureArray[0];
+ GLES20.glBindTexture(target, textureId);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ checkNoGLES2Error("generateTexture");
+ return textureId;
+ }
+}
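
A short sketch combining these helpers, again requiring a current EGL context:

    int texId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D); // linear filtering, clamp to edge
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texId);
    // ... upload pixels or render to the texture ...
    GlUtil.checkNoGLES2Error("texture setup"); // throws GlOutOfMemoryException on GL_OUT_OF_MEMORY
    GLES20.glDeleteTextures(1, new int[] {texId}, 0);
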
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
new file mode 100644
index 0000000000..215598a85d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+
+/** Factory for Android hardware VideoDecoders. */
+public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
+ private final static Predicate<MediaCodecInfo> defaultAllowedPredicate =
+ new Predicate<MediaCodecInfo>() {
+ @Override
+ public boolean test(MediaCodecInfo arg) {
+ return MediaCodecUtils.isHardwareAccelerated(arg);
+ }
+ };
+
+ /** Creates a HardwareVideoDecoderFactory that does not use surface textures. */
+ @Deprecated // Not removed yet to avoid breaking callers.
+ public HardwareVideoDecoderFactory() {
+ this(null);
+ }
+
+ /**
+ * Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
+ *
+   * @param sharedContext The textures generated will be accessible from this context. May be null;
+   *                      if so, texture support is disabled.
+ */
+ public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) {
+ this(sharedContext, /* codecAllowedPredicate= */ null);
+ }
+
+ /**
+ * Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
+ *
+   * @param sharedContext The textures generated will be accessible from this context. May be null;
+   *                      if so, texture support is disabled.
+ * @param codecAllowedPredicate predicate to filter codecs. It is combined with the default
+ * predicate that only allows hardware codecs.
+ */
+ public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
+ @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) {
+ super(sharedContext,
+ (codecAllowedPredicate == null ? defaultAllowedPredicate
+ : codecAllowedPredicate.and(defaultAllowedPredicate)));
+ }
+}
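
A construction sketch: the optional predicate narrows the set of hardware decoders and is AND-ed with the hardware-only default above. The EglBase instance and the codec-name filter are illustrative:

    HardwareVideoDecoderFactory factory = new HardwareVideoDecoderFactory(
        eglBase.getEglBaseContext(), new Predicate<MediaCodecInfo>() {
          @Override
          public boolean test(MediaCodecInfo codecInfo) {
            return codecInfo.getName().startsWith("c2."); // keep Codec2 implementations only
          }
        });
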
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/IceCandidateErrorEvent.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/IceCandidateErrorEvent.java
new file mode 100644
index 0000000000..aae9da7061
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/IceCandidateErrorEvent.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public final class IceCandidateErrorEvent {
+ /** The local IP address used to communicate with the STUN or TURN server. */
+ public final String address;
+ /** The port used to communicate with the STUN or TURN server. */
+ public final int port;
+ /**
+ * The STUN or TURN URL that identifies the STUN or TURN server for which the failure occurred.
+ */
+ public final String url;
+ /**
+ * The numeric STUN error code returned by the STUN or TURN server. If no host candidate can reach
+   * the server, errorCode will be set to the value 701, which is outside the STUN error code range.
+ * This error is only fired once per server URL while in the RTCIceGatheringState of "gathering".
+ */
+ public final int errorCode;
+ /**
+ * The STUN reason text returned by the STUN or TURN server. If the server could not be reached,
+ * errorText will be set to an implementation-specific value providing details about the error.
+ */
+ public final String errorText;
+
+ @CalledByNative
+ public IceCandidateErrorEvent(
+ String address, int port, String url, int errorCode, String errorText) {
+ this.address = address;
+ this.port = port;
+ this.url = url;
+ this.errorCode = errorCode;
+ this.errorText = errorText;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java
new file mode 100644
index 0000000000..322b8f38c9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/JavaI420Buffer.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.VideoFrame.I420Buffer;
+
+/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. */
+public class JavaI420Buffer implements VideoFrame.I420Buffer {
+ private final int width;
+ private final int height;
+ private final ByteBuffer dataY;
+ private final ByteBuffer dataU;
+ private final ByteBuffer dataV;
+ private final int strideY;
+ private final int strideU;
+ private final int strideV;
+ private final RefCountDelegate refCountDelegate;
+
+ private JavaI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
+ int strideU, ByteBuffer dataV, int strideV, @Nullable Runnable releaseCallback) {
+ this.width = width;
+ this.height = height;
+ this.dataY = dataY;
+ this.dataU = dataU;
+ this.dataV = dataV;
+ this.strideY = strideY;
+ this.strideU = strideU;
+ this.strideV = strideV;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ private static void checkCapacity(ByteBuffer data, int width, int height, int stride) {
+ // The last row does not necessarily need padding.
+ final int minCapacity = stride * (height - 1) + width;
+ if (data.capacity() < minCapacity) {
+ throw new IllegalArgumentException(
+ "Buffer must be at least " + minCapacity + " bytes, but was " + data.capacity());
+ }
+ }
+
+ /** Wraps existing ByteBuffers into JavaI420Buffer object without copying the contents. */
+ public static JavaI420Buffer wrap(int width, int height, ByteBuffer dataY, int strideY,
+ ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV,
+ @Nullable Runnable releaseCallback) {
+ if (dataY == null || dataU == null || dataV == null) {
+ throw new IllegalArgumentException("Data buffers cannot be null.");
+ }
+ if (!dataY.isDirect() || !dataU.isDirect() || !dataV.isDirect()) {
+ throw new IllegalArgumentException("Data buffers must be direct byte buffers.");
+ }
+
+ // Slice the buffers to prevent external modifications to the position / limit of the buffer.
+ // Note that this doesn't protect the contents of the buffers from modifications.
+ dataY = dataY.slice();
+ dataU = dataU.slice();
+ dataV = dataV.slice();
+
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (height + 1) / 2;
+ checkCapacity(dataY, width, height, strideY);
+ checkCapacity(dataU, chromaWidth, chromaHeight, strideU);
+ checkCapacity(dataV, chromaWidth, chromaHeight, strideV);
+
+ return new JavaI420Buffer(
+ width, height, dataY, strideY, dataU, strideU, dataV, strideV, releaseCallback);
+ }
+
+ /** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
+ public static JavaI420Buffer allocate(int width, int height) {
+ int chromaHeight = (height + 1) / 2;
+ int strideUV = (width + 1) / 2;
+ int yPos = 0;
+ int uPos = yPos + width * height;
+ int vPos = uPos + strideUV * chromaHeight;
+
+ ByteBuffer buffer =
+ JniCommon.nativeAllocateByteBuffer(width * height + 2 * strideUV * chromaHeight);
+
+ buffer.position(yPos);
+ buffer.limit(uPos);
+ ByteBuffer dataY = buffer.slice();
+
+ buffer.position(uPos);
+ buffer.limit(vPos);
+ ByteBuffer dataU = buffer.slice();
+
+ buffer.position(vPos);
+ buffer.limit(vPos + strideUV * chromaHeight);
+ ByteBuffer dataV = buffer.slice();
+
+ return new JavaI420Buffer(width, height, dataY, width, dataU, strideUV, dataV, strideUV,
+ () -> { JniCommon.nativeFreeByteBuffer(buffer); });
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public ByteBuffer getDataY() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataY.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataU() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataU.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataV() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataV.slice();
+ }
+
+ @Override
+ public int getStrideY() {
+ return strideY;
+ }
+
+ @Override
+ public int getStrideU() {
+ return strideU;
+ }
+
+ @Override
+ public int getStrideV() {
+ return strideV;
+ }
+
+ @Override
+ public I420Buffer toI420() {
+ retain();
+ return this;
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ return cropAndScaleI420(this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ }
+
+ public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
+ int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
+ // No scaling.
+ ByteBuffer dataY = buffer.getDataY();
+ ByteBuffer dataU = buffer.getDataU();
+ ByteBuffer dataV = buffer.getDataV();
+
+ dataY.position(cropX + cropY * buffer.getStrideY());
+ dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
+ dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());
+
+ buffer.retain();
+ return JavaI420Buffer.wrap(scaleWidth, scaleHeight, dataY.slice(), buffer.getStrideY(),
+ dataU.slice(), buffer.getStrideU(), dataV.slice(), buffer.getStrideV(), buffer::release);
+ }
+
+ JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+ nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
+ buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
+ cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
+ newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
+ scaleHeight);
+ return newBuffer;
+ }
+
+ private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
+ ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
+ int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+ int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
+}
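
A sketch that allocates a buffer, fills it with a solid mid-gray, and wraps it in a VideoFrame using the same constructor FileVideoCapturer uses above. The caller must eventually release the frame; fillPlane is a local helper, not SDK API:

    static VideoFrame createGrayFrame() {
      JavaI420Buffer buffer = JavaI420Buffer.allocate(640, 480);
      fillPlane(buffer.getDataY(), (byte) 126); // mid-gray luma
      fillPlane(buffer.getDataU(), (byte) 128); // neutral chroma
      fillPlane(buffer.getDataV(), (byte) 128);
      return new VideoFrame(buffer, /* rotation= */ 0, /* timestampNs= */ 0);
    }

    static void fillPlane(java.nio.ByteBuffer plane, byte value) {
      while (plane.hasRemaining()) {
        plane.put(value);
      }
    }
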
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Encoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
new file mode 100644
index 0000000000..569a719f44
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibaomAv1Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java
new file mode 100644
index 0000000000..54ad0aa137
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp8Decoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java
new file mode 100644
index 0000000000..4be9e52c14
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp8Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java
new file mode 100644
index 0000000000..90a24433a3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp9Decoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+
+ static native boolean nativeIsSupported();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java
new file mode 100644
index 0000000000..1211ae93fb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+
+ static native boolean nativeIsSupported();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaConstraints.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaConstraints.java
new file mode 100644
index 0000000000..bae04e532c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaConstraints.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Description of media constraints for {@code MediaStream} and
+ * {@code PeerConnection}.
+ */
+public class MediaConstraints {
+ /** Simple String key/value pair. */
+ public static class KeyValuePair {
+ private final String key;
+ private final String value;
+
+ public KeyValuePair(String key, String value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ @CalledByNative("KeyValuePair")
+ public String getKey() {
+ return key;
+ }
+
+ @CalledByNative("KeyValuePair")
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public String toString() {
+ return key + ": " + value;
+ }
+
+ @Override
+ public boolean equals(@Nullable Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+ KeyValuePair that = (KeyValuePair) other;
+ return key.equals(that.key) && value.equals(that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return key.hashCode() + value.hashCode();
+ }
+ }
+
+ public final List<KeyValuePair> mandatory;
+ public final List<KeyValuePair> optional;
+
+ public MediaConstraints() {
+ mandatory = new ArrayList<KeyValuePair>();
+ optional = new ArrayList<KeyValuePair>();
+ }
+
+ private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
+ StringBuilder builder = new StringBuilder("[");
+ for (KeyValuePair pair : list) {
+ if (builder.length() > 1) {
+ builder.append(", ");
+ }
+ builder.append(pair.toString());
+ }
+ return builder.append("]").toString();
+ }
+
+ @Override
+ public String toString() {
+ return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
+ + stringifyKeyValuePairList(optional);
+ }
+
+ @CalledByNative
+ List<KeyValuePair> getMandatory() {
+ return mandatory;
+ }
+
+ @CalledByNative
+ List<KeyValuePair> getOptional() {
+ return optional;
+ }
+}
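
Usage is plain list manipulation; the keys shown are conventional WebRTC constraint names:

    MediaConstraints constraints = new MediaConstraints();
    constraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    constraints.optional.add(
        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    // constraints.toString() ->
    //   "mandatory: [OfferToReceiveAudio: true], optional: [DtlsSrtpKeyAgreement: true]"
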
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaSource.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaSource.java
new file mode 100644
index 0000000000..9245e3e2eb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaSource.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaSourceInterface. */
+public class MediaSource {
+ /** Tracks MediaSourceInterface.SourceState */
+ public enum State {
+ INITIALIZING,
+ LIVE,
+ ENDED,
+ MUTED;
+
+ @CalledByNative("State")
+ static State fromNativeIndex(int nativeIndex) {
+ return values()[nativeIndex];
+ }
+ }
+
+ private final RefCountDelegate refCountDelegate;
+ private long nativeSource;
+
+ public MediaSource(long nativeSource) {
+ refCountDelegate = new RefCountDelegate(() -> JniCommon.nativeReleaseRef(nativeSource));
+ this.nativeSource = nativeSource;
+ }
+
+ public State state() {
+ checkMediaSourceExists();
+ return nativeGetState(nativeSource);
+ }
+
+ public void dispose() {
+ checkMediaSourceExists();
+ refCountDelegate.release();
+ nativeSource = 0;
+ }
+
+ /** Returns a pointer to webrtc::MediaSourceInterface. */
+ protected long getNativeMediaSource() {
+ checkMediaSourceExists();
+ return nativeSource;
+ }
+
+ /**
+ * Runs code in {@code runnable} holding a reference to the media source. If the object has
+ * already been released, does nothing.
+ */
+ void runWithReference(Runnable runnable) {
+ if (refCountDelegate.safeRetain()) {
+ try {
+ runnable.run();
+ } finally {
+ refCountDelegate.release();
+ }
+ }
+ }
+
+ private void checkMediaSourceExists() {
+ if (nativeSource == 0) {
+ throw new IllegalStateException("MediaSource has been disposed.");
+ }
+ }
+
+ private static native State nativeGetState(long pointer);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java
new file mode 100644
index 0000000000..2e4c3e18f7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/MediaStreamTrack.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/** Java wrapper for a C++ MediaStreamTrackInterface. */
+public class MediaStreamTrack {
+ public static final String AUDIO_TRACK_KIND = "audio";
+ public static final String VIDEO_TRACK_KIND = "video";
+
+ /** Tracks MediaStreamTrackInterface.TrackState */
+ public enum State {
+ LIVE,
+ ENDED;
+
+ @CalledByNative("State")
+ static State fromNativeIndex(int nativeIndex) {
+ return values()[nativeIndex];
+ }
+ }
+
+ // Must be kept in sync with cricket::MediaType.
+ public enum MediaType {
+ MEDIA_TYPE_AUDIO(0),
+ MEDIA_TYPE_VIDEO(1);
+
+ private final int nativeIndex;
+
+ private MediaType(int nativeIndex) {
+ this.nativeIndex = nativeIndex;
+ }
+
+ @CalledByNative("MediaType")
+ int getNative() {
+ return nativeIndex;
+ }
+
+ @CalledByNative("MediaType")
+ static MediaType fromNativeIndex(int nativeIndex) {
+ for (MediaType type : MediaType.values()) {
+ if (type.getNative() == nativeIndex) {
+ return type;
+ }
+ }
+ throw new IllegalArgumentException("Unknown native media type: " + nativeIndex);
+ }
+ }
+
+ /** Factory method to create an AudioTrack or VideoTrack subclass. */
+ static @Nullable MediaStreamTrack createMediaStreamTrack(long nativeTrack) {
+ if (nativeTrack == 0) {
+ return null;
+ }
+ String trackKind = nativeGetKind(nativeTrack);
+ if (trackKind.equals(AUDIO_TRACK_KIND)) {
+ return new AudioTrack(nativeTrack);
+ } else if (trackKind.equals(VIDEO_TRACK_KIND)) {
+ return new VideoTrack(nativeTrack);
+ } else {
+ return null;
+ }
+ }
+
+ private long nativeTrack;
+
+ public MediaStreamTrack(long nativeTrack) {
+ if (nativeTrack == 0) {
+ throw new IllegalArgumentException("nativeTrack may not be null");
+ }
+ this.nativeTrack = nativeTrack;
+ }
+
+ public String id() {
+ checkMediaStreamTrackExists();
+ return nativeGetId(nativeTrack);
+ }
+
+ public String kind() {
+ checkMediaStreamTrackExists();
+ return nativeGetKind(nativeTrack);
+ }
+
+ public boolean enabled() {
+ checkMediaStreamTrackExists();
+ return nativeGetEnabled(nativeTrack);
+ }
+
+ public boolean setEnabled(boolean enable) {
+ checkMediaStreamTrackExists();
+ return nativeSetEnabled(nativeTrack, enable);
+ }
+
+ public State state() {
+ checkMediaStreamTrackExists();
+ return nativeGetState(nativeTrack);
+ }
+
+ public void dispose() {
+ checkMediaStreamTrackExists();
+ JniCommon.nativeReleaseRef(nativeTrack);
+ nativeTrack = 0;
+ }
+
+ long getNativeMediaStreamTrack() {
+ checkMediaStreamTrackExists();
+ return nativeTrack;
+ }
+
+ private void checkMediaStreamTrackExists() {
+ if (nativeTrack == 0) {
+ throw new IllegalStateException("MediaStreamTrack has been disposed.");
+ }
+ }
+
+ private static native String nativeGetId(long track);
+ private static native String nativeGetKind(long track);
+ private static native boolean nativeGetEnabled(long track);
+ private static native boolean nativeSetEnabled(long track, boolean enabled);
+ private static native State nativeGetState(long track);
+}
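
A lifecycle sketch for a track obtained elsewhere in the API; note that every accessor throws IllegalStateException after dispose():

    if (track.kind().equals(MediaStreamTrack.AUDIO_TRACK_KIND)) {
      track.setEnabled(false); // mute without removing the track
    }
    if (track.state() == MediaStreamTrack.State.ENDED) {
      track.dispose(); // releases the native reference; the wrapper is unusable afterwards
    }
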
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Metrics.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Metrics.java
new file mode 100644
index 0000000000..253376831c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Metrics.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+// Java-side of androidmetrics.cc
+//
+// RTC histograms can be queried through the API method getAndReset().
+// The returned map holds the name of a histogram and its samples.
+//
+// Example of `map` with one histogram:
+// `name`: "WebRTC.Video.InputFramesPerSecond"
+// `min`: 1
+// `max`: 100
+// `bucketCount`: 50
+// `samples`: [30]:1
+//
+// Most histograms are not updated frequently (e.g. most video metrics are an
+// average over the call and recorded when a stream is removed).
+// The metrics can for example be retrieved when a peer connection is closed.
+public class Metrics {
+ private static final String TAG = "Metrics";
+
+ public final Map<String, HistogramInfo> map =
+ new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
+
+ @CalledByNative
+ Metrics() {}
+
+ /**
+ * Class holding histogram information.
+ */
+ public static class HistogramInfo {
+ public final int min;
+ public final int max;
+ public final int bucketCount;
+ public final Map<Integer, Integer> samples =
+ new HashMap<Integer, Integer>(); // <value, # of events>
+
+ @CalledByNative("HistogramInfo")
+ public HistogramInfo(int min, int max, int bucketCount) {
+ this.min = min;
+ this.max = max;
+ this.bucketCount = bucketCount;
+ }
+
+ @CalledByNative("HistogramInfo")
+ public void addSample(int value, int numEvents) {
+ samples.put(value, numEvents);
+ }
+ }
+
+ @CalledByNative
+ private void add(String name, HistogramInfo info) {
+ map.put(name, info);
+ }
+
+ // Enables gathering of metrics (which can be fetched with getAndReset()).
+ // Must be called before PeerConnectionFactory is created.
+ public static void enable() {
+ nativeEnable();
+ }
+
+ // Gets and clears native histograms.
+ public static Metrics getAndReset() {
+ return nativeGetAndReset();
+ }
+
+ private static native void nativeEnable();
+ private static native Metrics nativeGetAndReset();
+}
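
A sketch of the flow the comments above describe: enable collection before the factory exists, then drain the histograms when the call ends. `Map` here is java.util.Map, and the tag string is illustrative:

    Metrics.enable(); // Must happen before PeerConnectionFactory is created.
    // ... run the call ...
    Metrics metrics = Metrics.getAndReset();
    for (Map.Entry<String, Metrics.HistogramInfo> entry : metrics.map.entrySet()) {
      Metrics.HistogramInfo info = entry.getValue();
      Logging.d("MetricsDemo", entry.getKey() + " min=" + info.min + " max=" + info.max
          + " samples=" + info.samples);
    }
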
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java
new file mode 100644
index 0000000000..8bd7b3b250
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/NativeLibraryLoader.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for loading native libraries. A custom loader can be passed to
+ * PeerConnectionFactory.initialize.
+ */
+public interface NativeLibraryLoader {
+ /**
+ * Loads a native library with the given name.
+ *
+ * @return True on success
+ */
+ boolean load(String name);
+}
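
As a sketch, a loader reproducing the default behavior might look like the following; the class name is illustrative, and a real loader could instead load libraries from a custom location:

    class SystemLibraryLoader implements NativeLibraryLoader {
      @Override
      public boolean load(String name) {
        try {
          System.loadLibrary(name);
          return true;
        } catch (UnsatisfiedLinkError e) {
          return false; // Report failure instead of crashing the process.
        }
      }
    }
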
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java
new file mode 100644
index 0000000000..aeb91e1750
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::jni::OwnedPeerConnection instances. */
+public interface NativePeerConnectionFactory {
+ /**
+ * Creates a new webrtc::jni::OwnedPeerConnection instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ long createNativePeerConnection();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/NetEqFactoryFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/NetEqFactoryFactory.java
new file mode 100644
index 0000000000..8464324cbc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/NetEqFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::NetEqFactory}.
+ */
+public interface NetEqFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::NetEqFactory}. The caller takes ownership.
+ */
+ long createNativeNetEqFactory();
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
new file mode 100644
index 0000000000..caca5e5889
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+
+/** Factory for Android platform software VideoDecoders. */
+public class PlatformSoftwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
+ /**
+ * Default allowed predicate.
+ */
+ private static final Predicate<MediaCodecInfo> defaultAllowedPredicate =
+ new Predicate<MediaCodecInfo>() {
+ @Override
+ public boolean test(MediaCodecInfo arg) {
+ return MediaCodecUtils.isSoftwareOnly(arg);
+ }
+ };
+
+ /**
+ * Creates a PlatformSoftwareVideoDecoderFactory that supports surface texture rendering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null,
+ * this disables texture support.
+ */
+ public PlatformSoftwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) {
+ super(sharedContext, defaultAllowedPredicate);
+ }
+}
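
Construction is a one-liner; this sketch assumes an EglBase instance created elsewhere via EglBase.create():

    VideoDecoderFactory decoderFactory =
        new PlatformSoftwareVideoDecoderFactory(eglBase.getEglBaseContext());
    // Passing null instead disables surface texture rendering.
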
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/Predicate.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/Predicate.java
new file mode 100644
index 0000000000..50e6975000
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/Predicate.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Represents a predicate (boolean-valued function) of one argument.
+ */
+public interface Predicate<T> {
+ /**
+ * Evaluates this predicate on the given argument.
+ *
+ * @param arg the input argument
+ * @return true if the input argument matches the predicate, otherwise false
+ */
+ boolean test(T arg);
+
+ /**
+ * Returns a composed predicate that represents a short-circuiting logical OR of this predicate
+ * and another. When evaluating the composed predicate, if this predicate is true, then the other
+ * predicate is not evaluated.
+ *
+ * @param other a predicate that will be logically-ORed with this predicate
+ * @return a composed predicate that represents the short-circuiting logical OR of this predicate
+ * and the other predicate
+ */
+ default Predicate<T> or(Predicate<? super T> other) {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return Predicate.this.test(arg) || other.test(arg);
+ }
+ };
+ }
+
+ /**
+ * Returns a composed predicate that represents a short-circuiting logical AND of this predicate
+ * and another.
+ *
+ * @param other a predicate that will be logically-ANDed with this predicate
+ * @return a composed predicate that represents the short-circuiting logical AND of this predicate
+ * and the other predicate
+ */
+ default Predicate<T> and(Predicate<? super T> other) {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return Predicate.this.test(arg) && other.test(arg);
+ }
+ };
+ }
+
+ /**
+ * Returns a predicate that represents the logical negation of this predicate.
+ *
+ * @return a predicate that represents the logical negation of this predicate
+ */
+ default Predicate<T> negate() {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return !Predicate.this.test(arg);
+ }
+ };
+ }
+}
\ No newline at end of file
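
Because the interface has a single abstract method, lambdas compose naturally with the default methods above; a small illustrative sketch:

    Predicate<Integer> positive = arg -> arg > 0;
    Predicate<Integer> even = arg -> arg % 2 == 0;
    positive.or(even).test(-4);  // true: not positive, but even.
    positive.and(even).test(3);  // false: positive, but odd.
    positive.negate().test(3);   // false: 3 is positive.
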
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/RefCounted.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/RefCounted.java
new file mode 100644
index 0000000000..0c1c3bf1f9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/RefCounted.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for ref counted objects in WebRTC. These objects have significant resources that need
+ * to be freed when they are no longer in use. Each object starts with a ref count of one when
+ * created. If a reference is passed as a parameter to a method, the caller has ownership of the
+ * object by default - calling release is not necessary unless retain is called.
+ */
+public interface RefCounted {
+ /** Increases ref count by one. */
+ @CalledByNative void retain();
+
+ /**
+ * Decreases ref count by one. When the ref count reaches zero, resources related to the object
+ * will be freed.
+ */
+ @CalledByNative void release();
+}
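
A sketch of the retain/release contract, using VideoFrame (which implements this interface elsewhere in this SDK); the worker handler and process() are illustrative:

    void handleFrame(VideoFrame frame, android.os.Handler worker) {
      frame.retain(); // Take an extra reference so the frame outlives this callback.
      worker.post(() -> {
        process(frame);
        frame.release(); // Balance the retain once the asynchronous work is done.
      });
    }
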
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/RendererCommon.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/RendererCommon.java
new file mode 100644
index 0000000000..b97901c634
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/RendererCommon.java
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.opengl.Matrix;
+import android.view.View;
+
+/**
+ * Static helper functions for renderer implementations.
+ */
+public class RendererCommon {
+ /** Interface for reporting rendering events. */
+ public static interface RendererEvents {
+ /**
+ * Callback fired once first frame is rendered.
+ */
+ public void onFirstFrameRendered();
+
+ /**
+ * Callback fired when rendered frame resolution or rotation has changed.
+ */
+ public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
+ }
+
+ /**
+ * Interface for rendering frames on an EGLSurface with specified viewport location. Rotation,
+ * mirror, and cropping are specified using a 4x4 texture coordinate transform matrix. The frame
+ * input can either be an OES texture, RGB texture, or YUV textures in I420 format. The function
+ * release() must be called manually to free the resources held by this object.
+ */
+ public static interface GlDrawer {
+ /**
+ * Functions for drawing frames with different sources. The rendering surface target is
+ * implied by the current EGL context of the calling thread and requires no explicit argument.
+ * The coordinates specify the viewport location on the surface target.
+ */
+ void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+ void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
+ int viewportY, int viewportWidth, int viewportHeight);
+ void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+
+ /**
+ * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+ */
+ void release();
+ }
+
+ /**
+ * Helper class for determining layout size based on layout requirements, scaling type, and video
+ * aspect ratio.
+ */
+ public static class VideoLayoutMeasure {
+ // The scaling type determines how the video will fill the allowed layout area in measure(). It
+ // can be specified separately for the case when video has matched orientation with layout size
+ // and when there is an orientation mismatch.
+ private float visibleFractionMatchOrientation =
+ convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED);
+ private float visibleFractionMismatchOrientation =
+ convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED);
+
+ public void setScalingType(ScalingType scalingType) {
+ setScalingType(/* scalingTypeMatchOrientation= */ scalingType,
+ /* scalingTypeMismatchOrientation= */ scalingType);
+ }
+
+ public void setScalingType(
+ ScalingType scalingTypeMatchOrientation, ScalingType scalingTypeMismatchOrientation) {
+ this.visibleFractionMatchOrientation =
+ convertScalingTypeToVisibleFraction(scalingTypeMatchOrientation);
+ this.visibleFractionMismatchOrientation =
+ convertScalingTypeToVisibleFraction(scalingTypeMismatchOrientation);
+ }
+
+ public void setVisibleFraction(
+ float visibleFractionMatchOrientation, float visibleFractionMismatchOrientation) {
+ this.visibleFractionMatchOrientation = visibleFractionMatchOrientation;
+ this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation;
+ }
+
+ public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
+ // Calculate max allowed layout size.
+ final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
+ final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
+ if (frameWidth == 0 || frameHeight == 0 || maxWidth == 0 || maxHeight == 0) {
+ return new Point(maxWidth, maxHeight);
+ }
+ // Calculate desired display size based on scaling type, video aspect ratio,
+ // and maximum layout size.
+ final float frameAspect = frameWidth / (float) frameHeight;
+ final float displayAspect = maxWidth / (float) maxHeight;
+ final float visibleFraction = (frameAspect > 1.0f) == (displayAspect > 1.0f)
+ ? visibleFractionMatchOrientation
+ : visibleFractionMismatchOrientation;
+ final Point layoutSize = getDisplaySize(visibleFraction, frameAspect, maxWidth, maxHeight);
+
+ // If the measure specification is forcing a specific size - yield.
+ if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
+ layoutSize.x = maxWidth;
+ }
+ if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
+ layoutSize.y = maxHeight;
+ }
+ return layoutSize;
+ }
+ }
+
+ // Types of video scaling:
+ // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+ // maintaining the aspect ratio (black borders may be displayed).
+ // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+ // maintaining the aspect ratio. Some portion of the video frame may be
+ // clipped.
+ // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
+ // possible of the view while maintaining aspect ratio, under the constraint that at least
+ // `BALANCED_VISIBLE_FRACTION` of the frame content will be shown.
+ public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
+ // The minimum fraction of the frame content that will be shown for `SCALE_ASPECT_BALANCED`.
+ // This limits excessive cropping when adjusting display size.
+ private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
+
+ /**
+ * Returns layout transformation matrix that applies an optional mirror effect and compensates
+ * for video vs display aspect ratio.
+ */
+ public static float[] getLayoutMatrix(
+ boolean mirror, float videoAspectRatio, float displayAspectRatio) {
+ float scaleX = 1;
+ float scaleY = 1;
+ // Scale X or Y dimension so that video and display size have same aspect ratio.
+ if (displayAspectRatio > videoAspectRatio) {
+ scaleY = videoAspectRatio / displayAspectRatio;
+ } else {
+ scaleX = displayAspectRatio / videoAspectRatio;
+ }
+ // Apply optional horizontal flip.
+ if (mirror) {
+ scaleX *= -1;
+ }
+ final float matrix[] = new float[16];
+ Matrix.setIdentityM(matrix, 0);
+ Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
+ adjustOrigin(matrix);
+ return matrix;
+ }
+
+ /** Converts a float[16] matrix array to android.graphics.Matrix. */
+ public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) {
+ // clang-format off
+ float[] values = {
+ matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0],
+ matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1],
+ matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3],
+ };
+ // clang-format on
+
+ android.graphics.Matrix matrix = new android.graphics.Matrix();
+ matrix.setValues(values);
+ return matrix;
+ }
+
+ /** Converts android.graphics.Matrix to a float[16] matrix array. */
+ public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) {
+ float[] values = new float[9];
+ matrix.getValues(values);
+
+ // The android.graphics.Matrix looks like this:
+ // [x1 y1 w1]
+ // [x2 y2 w2]
+ // [x3 y3 w3]
+ // We want to construct a matrix that looks like this:
+ // [x1 y1 0 w1]
+ // [x2 y2 0 w2]
+ // [ 0 0 1 0]
+ // [x3 y3 0 w3]
+ // Since it is stored in column-major order, it looks like this:
+ // [x1 x2 0 x3
+ // y1 y2 0 y3
+ // 0 0 1 0
+ // w1 w2 0 w3]
+ // clang-format off
+ float[] matrix4x4 = {
+ values[0 * 3 + 0], values[1 * 3 + 0], 0, values[2 * 3 + 0],
+ values[0 * 3 + 1], values[1 * 3 + 1], 0, values[2 * 3 + 1],
+ 0, 0, 1, 0,
+ values[0 * 3 + 2], values[1 * 3 + 2], 0, values[2 * 3 + 2],
+ };
+ // clang-format on
+ return matrix4x4;
+ }
+
+ /**
+ * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
+ */
+ public static Point getDisplaySize(
+ ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
+ return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
+ maxDisplayWidth, maxDisplayHeight);
+ }
+
+ /**
+ * Move `matrix` transformation origin to (0.5, 0.5). This is the origin for texture coordinates
+ * that are in the range 0 to 1.
+ */
+ private static void adjustOrigin(float[] matrix) {
+ // Note that OpenGL is using column-major order.
+ // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
+ matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
+ matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
+ // Post translate with 0.5 to move coordinates to range [0, 1].
+ matrix[12] += 0.5f;
+ matrix[13] += 0.5f;
+ }
+
+ /**
+ * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
+ * that must remain visible.
+ */
+ private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
+ switch (scalingType) {
+ case SCALE_ASPECT_FIT:
+ return 1.0f;
+ case SCALE_ASPECT_FILL:
+ return 0.0f;
+ case SCALE_ASPECT_BALANCED:
+ return BALANCED_VISIBLE_FRACTION;
+ default:
+ throw new IllegalArgumentException();
+ }
+ }
+
+ /**
+ * Calculate display size based on minimum fraction of the video that must remain visible,
+ * video aspect ratio, and maximum display size.
+ */
+ public static Point getDisplaySize(
+ float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
+ // If there is no constraint on the amount of cropping, fill the allowed display area.
+ if (minVisibleFraction == 0 || videoAspectRatio == 0) {
+ return new Point(maxDisplayWidth, maxDisplayHeight);
+ }
+ // Each dimension is constrained on max display size and how much we are allowed to crop.
+ final int width = Math.min(
+ maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+ final int height = Math.min(
+ maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+ return new Point(width, height);
+ }
+}
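
A worked example of the getDisplaySize() arithmetic above: SCALE_ASPECT_FIT converts to a minimum visible fraction of 1.0, so a 2:1 frame measured into a 1000x1000 area letterboxes:

    Point size = RendererCommon.getDisplaySize(
        RendererCommon.ScalingType.SCALE_ASPECT_FIT, 2.0f, 1000, 1000);
    // size.x = min(1000, round(1000 / 1.0 * 2.0)) = 1000
    // size.y = min(1000, round(1000 / 1.0 / 2.0)) = 500
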
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SSLCertificateVerifier.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SSLCertificateVerifier.java
new file mode 100644
index 0000000000..461cd3b143
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SSLCertificateVerifier.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The SSLCertificateVerifier interface allows API users to provide custom
+ * logic to verify certificates.
+ */
+public interface SSLCertificateVerifier {
+ /**
+ * Implementations of verify allow applications to provide custom logic for
+ * verifying certificates. This is not required by default and should be used
+ * with care.
+ *
+ * @param certificate A byte array containing a DER encoded X509 certificate.
+ * @return True if the certificate is verified and trusted, otherwise false.
+ */
+ @CalledByNative boolean verify(byte[] certificate);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
new file mode 100644
index 0000000000..231d507155
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
@@ -0,0 +1,212 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.hardware.display.DisplayManager;
+import android.hardware.display.VirtualDisplay;
+import android.media.projection.MediaProjection;
+import android.media.projection.MediaProjectionManager;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+
+/**
+ * An implementation of VideoCapturer to capture the screen content as a video stream.
+ * Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this
+ * {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
+ * The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
+ * {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
+ * as a texture to the native code via {@code CapturerObserver.onFrameCaptured()}. This takes
+ * place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
+ * the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
+ * frames. At any time, at most one frame is being processed.
+ */
+public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
+ private static final int DISPLAY_FLAGS =
+ DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
+ // DPI for VirtualDisplay, does not seem to matter for us.
+ private static final int VIRTUAL_DISPLAY_DPI = 400;
+
+ private final Intent mediaProjectionPermissionResultData;
+ private final MediaProjection.Callback mediaProjectionCallback;
+
+ private int width;
+ private int height;
+ @Nullable private VirtualDisplay virtualDisplay;
+ @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+ @Nullable private CapturerObserver capturerObserver;
+ private long numCapturedFrames;
+ @Nullable private MediaProjection mediaProjection;
+ private boolean isDisposed;
+ @Nullable private MediaProjectionManager mediaProjectionManager;
+
+ /**
+ * Constructs a new Screen Capturer.
+ *
+ * @param mediaProjectionPermissionResultData the result data of MediaProjection permission
+ * activity; the calling app must validate that the result code is Activity.RESULT_OK before
+ * calling this method.
+ * @param mediaProjectionCallback MediaProjection callback to implement application specific
+ * logic in events such as when the user revokes a previously granted capture permission.
+ **/
+ public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
+ MediaProjection.Callback mediaProjectionCallback) {
+ this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
+ this.mediaProjectionCallback = mediaProjectionCallback;
+ }
+
+ private void checkNotDisposed() {
+ if (isDisposed) {
+ throw new RuntimeException("capturer is disposed.");
+ }
+ }
+
+ @Nullable
+ public MediaProjection getMediaProjection() {
+ return mediaProjection;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
+ final Context applicationContext, final CapturerObserver capturerObserver) {
+ checkNotDisposed();
+
+ if (capturerObserver == null) {
+ throw new RuntimeException("capturerObserver not set.");
+ }
+ this.capturerObserver = capturerObserver;
+
+ if (surfaceTextureHelper == null) {
+ throw new RuntimeException("surfaceTextureHelper not set.");
+ }
+ this.surfaceTextureHelper = surfaceTextureHelper;
+
+ mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
+ Context.MEDIA_PROJECTION_SERVICE);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void startCapture(
+ final int width, final int height, final int ignoredFramerate) {
+ checkNotDisposed();
+
+ this.width = width;
+ this.height = height;
+
+ mediaProjection = mediaProjectionManager.getMediaProjection(
+ Activity.RESULT_OK, mediaProjectionPermissionResultData);
+
+ // Let MediaProjection callback use the SurfaceTextureHelper thread.
+ mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler());
+
+ createVirtualDisplay();
+ capturerObserver.onCapturerStarted(true);
+ surfaceTextureHelper.startListening(ScreenCapturerAndroid.this);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void stopCapture() {
+ checkNotDisposed();
+ ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
+ @Override
+ public void run() {
+ surfaceTextureHelper.stopListening();
+ capturerObserver.onCapturerStopped();
+
+ if (virtualDisplay != null) {
+ virtualDisplay.release();
+ virtualDisplay = null;
+ }
+
+ if (mediaProjection != null) {
+ // Unregister the callback before stopping, otherwise the callback recursively
+ // calls this method.
+ mediaProjection.unregisterCallback(mediaProjectionCallback);
+ mediaProjection.stop();
+ mediaProjection = null;
+ }
+ }
+ });
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void dispose() {
+ isDisposed = true;
+ }
+
+ /**
+ * Changes output video format. This method can be used to scale the output
+ * video, or to change orientation when the captured screen is rotated for example.
+ *
+ * @param width new output video width
+ * @param height new output video height
+ * @param ignoredFramerate ignored
+ */
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void changeCaptureFormat(
+ final int width, final int height, final int ignoredFramerate) {
+ checkNotDisposed();
+
+ this.width = width;
+ this.height = height;
+
+ if (virtualDisplay == null) {
+ // Capturer is stopped, the virtual display will be created in startCapture().
+ return;
+ }
+
+ // Create a new virtual display on the surfaceTextureHelper thread to avoid interference
+ // with frame processing, which happens on the same thread (we serialize events by running
+ // them on the same thread).
+ ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
+ @Override
+ public void run() {
+ virtualDisplay.release();
+ createVirtualDisplay();
+ }
+ });
+ }
+
+ private void createVirtualDisplay() {
+ surfaceTextureHelper.setTextureSize(width, height);
+ virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
+ VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
+ null /* callback */, null /* callback handler */);
+ }
+
+ // This is called on the internal looper thread of {@code SurfaceTextureHelper}.
+ @Override
+ public void onFrame(VideoFrame frame) {
+ numCapturedFrames++;
+ capturerObserver.onFrameCaptured(frame);
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return true;
+ }
+
+ public long getNumCapturedFrames() {
+ return numCapturedFrames;
+ }
+}
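
The constructor expects the result of the standard MediaProjection permission flow; a sketch of that handshake, with the request code and Activity wiring illustrative:

    MediaProjectionManager manager = (MediaProjectionManager)
        activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    activity.startActivityForResult(
        manager.createScreenCaptureIntent(), SCREEN_CAPTURE_REQUEST_CODE);

    // In onActivityResult(), after verifying resultCode == Activity.RESULT_OK:
    VideoCapturer capturer = new ScreenCapturerAndroid(
        data, new MediaProjection.Callback() {
          @Override
          public void onStop() {
            // The user revoked the capture permission; stop the capturer here.
          }
        });
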
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SdpObserver.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SdpObserver.java
new file mode 100644
index 0000000000..afa99bc552
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SdpObserver.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for observing SDP-related events. */
+public interface SdpObserver {
+ /** Called on success of Create{Offer,Answer}(). */
+ @CalledByNative void onCreateSuccess(SessionDescription sdp);
+
+ /** Called on success of Set{Local,Remote}Description(). */
+ @CalledByNative void onSetSuccess();
+
+ /** Called on error of Create{Offer,Answer}(). */
+ @CalledByNative void onCreateFailure(String error);
+
+ /** Called on error of Set{Local,Remote}Description(). */
+ @CalledByNative void onSetFailure(String error);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SessionDescription.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SessionDescription.java
new file mode 100644
index 0000000000..be89599a5f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SessionDescription.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Locale;
+
+/**
+ * Description of an RFC 4566 Session.
+ * SDPs are passed as serialized Strings in Java-land and are materialized
+ * to SessionDescriptionInterface as appropriate in the JNI layer.
+ */
+public class SessionDescription {
+ /** Java-land enum version of SessionDescriptionInterface's type() string. */
+ public static enum Type {
+ OFFER,
+ PRANSWER,
+ ANSWER,
+ ROLLBACK;
+
+ public String canonicalForm() {
+ return name().toLowerCase(Locale.US);
+ }
+
+ @CalledByNative("Type")
+ public static Type fromCanonicalForm(String canonical) {
+ return Type.valueOf(Type.class, canonical.toUpperCase(Locale.US));
+ }
+ }
+
+ public final Type type;
+ public final String description;
+
+ @CalledByNative
+ public SessionDescription(Type type, String description) {
+ this.type = type;
+ this.description = description;
+ }
+
+ @CalledByNative
+ String getDescription() {
+ return description;
+ }
+
+ @CalledByNative
+ String getTypeInCanonicalForm() {
+ return type.canonicalForm();
+ }
+}
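
A sketch of the round trip between the enum and the lowercase SDP type string, assuming `sdp` holds a serialized session description:

    SessionDescription offer =
        new SessionDescription(SessionDescription.Type.OFFER, sdp);
    String canonical = offer.type.canonicalForm();            // "offer"
    SessionDescription.Type type =
        SessionDescription.Type.fromCanonicalForm(canonical); // Type.OFFER
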
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
new file mode 100644
index 0000000000..2ac42e834e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+
+public class SoftwareVideoDecoderFactory implements VideoDecoderFactory {
+ private static final String TAG = "SoftwareVideoDecoderFactory";
+
+ private final long nativeFactory;
+
+ public SoftwareVideoDecoderFactory() {
+ this.nativeFactory = nativeCreateFactory();
+ }
+
+ @Nullable
+ @Override
+ public VideoDecoder createDecoder(VideoCodecInfo info) {
+ long nativeDecoder = nativeCreateDecoder(nativeFactory, info);
+ if (nativeDecoder == 0) {
+ Logging.w(TAG, "Trying to create decoder for unsupported format. " + info);
+ return null;
+ }
+
+ return new WrappedNativeVideoDecoder() {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeDecoder;
+ }
+ };
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ return nativeGetSupportedCodecs(nativeFactory).toArray(new VideoCodecInfo[0]);
+ }
+
+ private static native long nativeCreateFactory();
+
+ private static native long nativeCreateDecoder(long factory, VideoCodecInfo videoCodecInfo);
+
+ private static native List<VideoCodecInfo> nativeGetSupportedCodecs(long factory);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
new file mode 100644
index 0000000000..7f4c457b97
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+
+public class SoftwareVideoEncoderFactory implements VideoEncoderFactory {
+ private static final String TAG = "SoftwareVideoEncoderFactory";
+
+ private final long nativeFactory;
+
+ public SoftwareVideoEncoderFactory() {
+ this.nativeFactory = nativeCreateFactory();
+ }
+
+ @Nullable
+ @Override
+ public VideoEncoder createEncoder(VideoCodecInfo info) {
+ long nativeEncoder = nativeCreateEncoder(nativeFactory, info);
+ if (nativeEncoder == 0) {
+ Logging.w(TAG, "Trying to create encoder for unsupported format. " + info);
+ return null;
+ }
+
+ return new WrappedNativeVideoEncoder() {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeEncoder;
+ }
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+ };
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ return nativeGetSupportedCodecs(nativeFactory).toArray(new VideoCodecInfo[0]);
+ }
+
+ private static native long nativeCreateFactory();
+
+ private static native long nativeCreateEncoder(long factory, VideoCodecInfo videoCodecInfo);
+
+ private static native List<VideoCodecInfo> nativeGetSupportedCodecs(long factory);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsObserver.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsObserver.java
new file mode 100644
index 0000000000..b9984c18db
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsObserver.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for observing Stats reports (see webrtc::StatsObservers). */
+public interface StatsObserver {
+ /** Called when the reports are ready. */
+ @CalledByNative public void onComplete(StatsReport[] reports);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsReport.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsReport.java
new file mode 100644
index 0000000000..b8f1cf87fe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/StatsReport.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java version of webrtc::StatsReport. */
+public class StatsReport {
+ /** Java version of webrtc::StatsReport::Value. */
+ public static class Value {
+ public final String name;
+ public final String value;
+
+ @CalledByNative("Value")
+ public Value(String name, String value) {
+ this.name = name;
+ this.value = value;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("[").append(name).append(": ").append(value).append("]");
+ return builder.toString();
+ }
+ }
+
+ public final String id;
+ public final String type;
+ // Time since 1970-01-01T00:00:00Z in milliseconds.
+ public final double timestamp;
+ public final Value[] values;
+
+ @CalledByNative
+ public StatsReport(String id, String type, double timestamp, Value[] values) {
+ this.id = id;
+ this.type = type;
+ this.timestamp = timestamp;
+ this.values = values;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("id: ")
+ .append(id)
+ .append(", type: ")
+ .append(type)
+ .append(", timestamp: ")
+ .append(timestamp)
+ .append(", values: ");
+ for (int i = 0; i < values.length; ++i) {
+ builder.append(values[i].toString()).append(", ");
+ }
+ return builder.toString();
+ }
+}
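
A sketch of consuming these reports via the StatsObserver interface above, assuming `peerConnection` exposes the legacy getStats(StatsObserver, MediaStreamTrack) entry point as in this SDK:

    peerConnection.getStats(reports -> {
      for (StatsReport report : reports) {
        Logging.d("StatsDemo", report.toString());
      }
    }, null /* track: null requests stats for the whole connection */);
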
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
new file mode 100644
index 0000000000..6cba3f473b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.view.SurfaceHolder;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Display the video stream on a Surface.
+ * onFrame() is asynchronous to avoid blocking the calling thread.
+ * This class is thread safe and handles access from potentially three different threads:
+ * Interaction from the main app in init, release and setMirror.
+ * Interaction from C++ rtc::VideoSinkInterface in onFrame.
+ * Interaction from SurfaceHolder lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
+ */
+public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Callback {
+ private static final String TAG = "SurfaceEglRenderer";
+
+ // Callback for reporting renderer events. Read-only after initialization so no lock required.
+ private RendererCommon.RendererEvents rendererEvents;
+
+ private final Object layoutLock = new Object();
+ private boolean isRenderingPaused;
+ private boolean isFirstFrameRendered;
+ private int rotatedFrameWidth;
+ private int rotatedFrameHeight;
+ private int frameRotation;
+
+ /**
+ * In order to render something, you must first call init().
+ */
+ public SurfaceEglRenderer(String name) {
+ super(name);
+ }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle.
+ */
+ public void init(final EglBase.Context sharedContext,
+ RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ ThreadUtils.checkIsOnMainThread();
+ this.rendererEvents = rendererEvents;
+ synchronized (layoutLock) {
+ isFirstFrameRendered = false;
+ rotatedFrameWidth = 0;
+ rotatedFrameHeight = 0;
+ frameRotation = 0;
+ }
+ super.init(sharedContext, configAttributes, drawer);
+ }
+
+ @Override
+ public void init(final EglBase.Context sharedContext, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ init(sharedContext, null /* rendererEvents */, configAttributes, drawer);
+ }
+
+ /**
+ * Limit render framerate.
+ *
+ * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+ * reduction.
+ */
+ @Override
+ public void setFpsReduction(float fps) {
+ synchronized (layoutLock) {
+ isRenderingPaused = fps == 0f;
+ }
+ super.setFpsReduction(fps);
+ }
+
+ @Override
+ public void disableFpsReduction() {
+ synchronized (layoutLock) {
+ isRenderingPaused = false;
+ }
+ super.disableFpsReduction();
+ }
+
+ @Override
+ public void pauseVideo() {
+ synchronized (layoutLock) {
+ isRenderingPaused = true;
+ }
+ super.pauseVideo();
+ }
+
+ // VideoSink interface.
+ @Override
+ public void onFrame(VideoFrame frame) {
+ updateFrameDimensionsAndReportEvents(frame);
+ super.onFrame(frame);
+ }
+
+ // SurfaceHolder.Callback interface.
+ @Override
+ public void surfaceCreated(final SurfaceHolder holder) {
+ ThreadUtils.checkIsOnMainThread();
+ createEglSurface(holder.getSurface());
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ ThreadUtils.checkIsOnMainThread();
+ final CountDownLatch completionLatch = new CountDownLatch(1);
+ releaseEglSurface(completionLatch::countDown);
+ ThreadUtils.awaitUninterruptibly(completionLatch);
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ ThreadUtils.checkIsOnMainThread();
+ logD("surfaceChanged: format: " + format + " size: " + width + "x" + height);
+ }
+
+ // Update frame dimensions and report any changes to `rendererEvents`.
+ private void updateFrameDimensionsAndReportEvents(VideoFrame frame) {
+ synchronized (layoutLock) {
+ if (isRenderingPaused) {
+ return;
+ }
+ if (!isFirstFrameRendered) {
+ isFirstFrameRendered = true;
+ logD("Reporting first rendered frame.");
+ if (rendererEvents != null) {
+ rendererEvents.onFirstFrameRendered();
+ }
+ }
+ if (rotatedFrameWidth != frame.getRotatedWidth()
+ || rotatedFrameHeight != frame.getRotatedHeight()
+ || frameRotation != frame.getRotation()) {
+ logD("Reporting frame resolution changed to " + frame.getBuffer().getWidth() + "x"
+ + frame.getBuffer().getHeight() + " with rotation " + frame.getRotation());
+ if (rendererEvents != null) {
+ rendererEvents.onFrameResolutionChanged(
+ frame.getBuffer().getWidth(), frame.getBuffer().getHeight(), frame.getRotation());
+ }
+ rotatedFrameWidth = frame.getRotatedWidth();
+ rotatedFrameHeight = frame.getRotatedHeight();
+ frameRotation = frame.getRotation();
+ }
+ }
+ }
+
+ private void logD(String string) {
+ Logging.d(TAG, name + ": " + string);
+ }
+}
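
A sketch of wiring the renderer to a SurfaceView; EglBase.CONFIG_PLAIN and GlRectDrawer come from elsewhere in this SDK, and `eglBase`, `surfaceView` and `videoTrack` are assumed to exist:

    SurfaceEglRenderer renderer = new SurfaceEglRenderer("demoRenderer");
    renderer.init(eglBase.getEglBaseContext(), null /* rendererEvents */,
        EglBase.CONFIG_PLAIN, new GlRectDrawer());
    surfaceView.getHolder().addCallback(renderer); // Surface lifecycle callbacks.
    videoTrack.addSink(renderer);                  // Start receiving frames.
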
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
new file mode 100644
index 0000000000..3ea22736ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -0,0 +1,390 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import androidx.annotation.Nullable;
+import java.util.concurrent.Callable;
+import org.webrtc.EglBase.Context;
+import org.webrtc.TextureBufferImpl.RefCountMonitor;
+import org.webrtc.VideoFrame.TextureBuffer;
+
+/**
+ * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
+ * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
+ * one texture frame can be in flight at once, so the frame must be released in order to receive a
+ * new frame. Call stopListening() to stop receiving new frames. Call dispose() to release all
+ * resources once the texture frame is released.
+ */
+public class SurfaceTextureHelper {
+ /**
+ * Interface for monitoring texture buffers created from this SurfaceTexture. Since only one
+ * texture buffer can exist at a time, this can be used to monitor for stuck frames.
+ */
+ public interface FrameRefMonitor {
+ /** A new frame was created. New frames start with ref count of 1. */
+ void onNewBuffer(TextureBuffer textureBuffer);
+ /** Ref count of the frame was incremented by the calling thread. */
+ void onRetainBuffer(TextureBuffer textureBuffer);
+ /** Ref count of the frame was decremented by the calling thread. */
+ void onReleaseBuffer(TextureBuffer textureBuffer);
+ /** Frame was destroyed (ref count reached 0). */
+ void onDestroyBuffer(TextureBuffer textureBuffer);
+ }
+
+ private static final String TAG = "SurfaceTextureHelper";
+ /**
+ * Construct a new SurfaceTextureHelper sharing OpenGL resources with `sharedContext`. A dedicated
+ * thread and handler are created for handling the SurfaceTexture. May return null if EGL fails to
+ * initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
+ * timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
+ * rtc::TimeNanos() there is no need for aligning timestamps again in
+ * PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
+ * closer to actual creation time.
+ */
+ public static SurfaceTextureHelper create(final String threadName,
+ final EglBase.Context sharedContext, boolean alignTimestamps, final YuvConverter yuvConverter,
+ FrameRefMonitor frameRefMonitor) {
+ final HandlerThread thread = new HandlerThread(threadName);
+ thread.start();
+ final Handler handler = new Handler(thread.getLooper());
+
+ // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
+ // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
+ // is constructed on the `handler` thread.
+ return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
+ @Nullable
+ @Override
+ public SurfaceTextureHelper call() {
+ try {
+ return new SurfaceTextureHelper(
+ sharedContext, handler, alignTimestamps, yuvConverter, frameRefMonitor);
+ } catch (RuntimeException e) {
+ Logging.e(TAG, threadName + " create failure", e);
+ return null;
+ }
+ }
+ });
+ }
+
+ /**
+ * Same as above with alignTimestamps set to false and yuvConverter set to new YuvConverter.
+ *
+ * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
+ */
+ public static SurfaceTextureHelper create(
+ final String threadName, final EglBase.Context sharedContext) {
+ return create(threadName, sharedContext, /* alignTimestamps= */ false, new YuvConverter(),
+ /*frameRefMonitor=*/null);
+ }
+
+ /**
+ * Same as above with yuvConverter set to new YuvConverter.
+ *
+ * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
+ */
+ public static SurfaceTextureHelper create(
+ final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
+ return create(
+ threadName, sharedContext, alignTimestamps, new YuvConverter(), /*frameRefMonitor=*/null);
+ }
+
+ /**
+ * Create a SurfaceTextureHelper without frame ref monitor.
+ *
+ * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
+ */
+ public static SurfaceTextureHelper create(final String threadName,
+ final EglBase.Context sharedContext, boolean alignTimestamps, YuvConverter yuvConverter) {
+ return create(
+ threadName, sharedContext, alignTimestamps, yuvConverter, /*frameRefMonitor=*/null);
+ }
+
+ private final RefCountMonitor textureRefCountMonitor = new RefCountMonitor() {
+ @Override
+ public void onRetain(TextureBufferImpl textureBuffer) {
+ if (frameRefMonitor != null) {
+ frameRefMonitor.onRetainBuffer(textureBuffer);
+ }
+ }
+
+ @Override
+ public void onRelease(TextureBufferImpl textureBuffer) {
+ if (frameRefMonitor != null) {
+ frameRefMonitor.onReleaseBuffer(textureBuffer);
+ }
+ }
+
+ @Override
+ public void onDestroy(TextureBufferImpl textureBuffer) {
+ returnTextureFrame();
+ if (frameRefMonitor != null) {
+ frameRefMonitor.onDestroyBuffer(textureBuffer);
+ }
+ }
+ };
+
+ private final Handler handler;
+ private final EglBase eglBase;
+ private final SurfaceTexture surfaceTexture;
+ private final int oesTextureId;
+ private final YuvConverter yuvConverter;
+ @Nullable private final TimestampAligner timestampAligner;
+ private final FrameRefMonitor frameRefMonitor;
+
+ // These variables are only accessed from the `handler` thread.
+ @Nullable private VideoSink listener;
+ // The possible states of this class.
+ private boolean hasPendingTexture;
+ private volatile boolean isTextureInUse;
+ private boolean isQuitting;
+ private int frameRotation;
+ private int textureWidth;
+ private int textureHeight;
+ // `pendingListener` is set in startListening() and the runnable is posted to the handler
+ // thread. startListening() is not allowed to be called again before stopListening(), so this is
+ // thread safe.
+ @Nullable private VideoSink pendingListener;
+ final Runnable setListenerRunnable = new Runnable() {
+ @Override
+ public void run() {
+ Logging.d(TAG, "Setting listener to " + pendingListener);
+ listener = pendingListener;
+ pendingListener = null;
+ // May have a pending frame from the previous capture session - drop it.
+ if (hasPendingTexture) {
+ // Calling updateTexImage() is necessary in order to receive new frames.
+ updateTexImage();
+ hasPendingTexture = false;
+ }
+ }
+ };
+
+ private SurfaceTextureHelper(Context sharedContext, Handler handler, boolean alignTimestamps,
+ YuvConverter yuvConverter, FrameRefMonitor frameRefMonitor) {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
+ }
+ this.handler = handler;
+ this.timestampAligner = alignTimestamps ? new TimestampAligner() : null;
+ this.yuvConverter = yuvConverter;
+ this.frameRefMonitor = frameRefMonitor;
+
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+ try {
+ // Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+ } catch (RuntimeException e) {
+ // Clean up before rethrowing the exception.
+ eglBase.release();
+ handler.getLooper().quit();
+ throw e;
+ }
+
+ oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ surfaceTexture = new SurfaceTexture(oesTextureId);
+ surfaceTexture.setOnFrameAvailableListener(st -> {
+ if (hasPendingTexture) {
+ Logging.d(TAG, "A frame is already pending, dropping frame.");
+ }
+
+ hasPendingTexture = true;
+ tryDeliverTextureFrame();
+ }, handler);
+ }
+
+ /**
+ * Start streaming textures to the given `listener`. If you need to change the listener, call
+ * stopListening() first.
+ */
+ public void startListening(final VideoSink listener) {
+ if (this.listener != null || this.pendingListener != null) {
+ throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
+ }
+ this.pendingListener = listener;
+ handler.post(setListenerRunnable);
+ }
+
+ /**
+ * Stop listening. The listener set in startListening() is guaranteed to not receive any more
+ * onFrame() callbacks after this function returns.
+ */
+ public void stopListening() {
+ Logging.d(TAG, "stopListening()");
+ handler.removeCallbacks(setListenerRunnable);
+ ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+ listener = null;
+ pendingListener = null;
+ });
+ }
+
+ /**
+ * Use this function to set the texture size. Note, do not call setDefaultBufferSize() yourself
+ * since this class needs to be aware of the texture size.
+ */
+ public void setTextureSize(int textureWidth, int textureHeight) {
+ if (textureWidth <= 0) {
+ throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
+ }
+ if (textureHeight <= 0) {
+ throw new IllegalArgumentException(
+ "Texture height must be positive, but was " + textureHeight);
+ }
+ surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
+ handler.post(() -> {
+ this.textureWidth = textureWidth;
+ this.textureHeight = textureHeight;
+ tryDeliverTextureFrame();
+ });
+ }
+
+ /**
+ * Forces a frame to be produced. If no new frame is available, the last frame is sent to the
+ * listener again.
+ */
+ public void forceFrame() {
+ handler.post(() -> {
+ hasPendingTexture = true;
+ tryDeliverTextureFrame();
+ });
+ }
+
+ /** Set the rotation of the delivered frames. */
+ public void setFrameRotation(int rotation) {
+ handler.post(() -> this.frameRotation = rotation);
+ }
+
+ /**
+ * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
+ * producer such as a camera or decoder.
+ */
+ public SurfaceTexture getSurfaceTexture() {
+ return surfaceTexture;
+ }
+
+ /** Retrieve the handler that calls onFrame(). This handler is valid until dispose() is called. */
+ public Handler getHandler() {
+ return handler;
+ }
+
+ /**
+ * This function is called when the texture frame is released. Only one texture frame can be in
+ * flight at once, so this function must be called before a new frame is delivered.
+ */
+ private void returnTextureFrame() {
+ handler.post(() -> {
+ isTextureInUse = false;
+ if (isQuitting) {
+ release();
+ } else {
+ tryDeliverTextureFrame();
+ }
+ });
+ }
+
+ public boolean isTextureInUse() {
+ return isTextureInUse;
+ }
+
+ /**
+ * Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
+ * stopped when the texture frame has been released. You are guaranteed to not receive any more
+ * onFrame() after this function returns.
+ */
+ public void dispose() {
+ Logging.d(TAG, "dispose()");
+ ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+ isQuitting = true;
+ if (!isTextureInUse) {
+ release();
+ }
+ });
+ }
+
+ /**
+ * Posts to the correct thread to convert `textureBuffer` to I420.
+ *
+ * @deprecated Use toI420() instead.
+ */
+ @Deprecated
+ public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
+ return textureBuffer.toI420();
+ }
+
+ private void updateTexImage() {
+ // SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
+ // as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
+ // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+ synchronized (EglBase.lock) {
+ surfaceTexture.updateTexImage();
+ }
+ }
+
+ private void tryDeliverTextureFrame() {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("Wrong thread.");
+ }
+ if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
+ return;
+ }
+ if (textureWidth == 0 || textureHeight == 0) {
+ // Information about the resolution needs to be provided by a call to setTextureSize() before
+ // frames are produced.
+ Logging.w(TAG, "Texture size has not been set.");
+ return;
+ }
+ isTextureInUse = true;
+ hasPendingTexture = false;
+
+ updateTexImage();
+
+ final float[] transformMatrix = new float[16];
+ surfaceTexture.getTransformMatrix(transformMatrix);
+ long timestampNs = surfaceTexture.getTimestamp();
+ if (timestampAligner != null) {
+ timestampNs = timestampAligner.translateTimestamp(timestampNs);
+ }
+ final VideoFrame.TextureBuffer buffer =
+ new TextureBufferImpl(textureWidth, textureHeight, TextureBuffer.Type.OES, oesTextureId,
+ RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix), handler,
+ yuvConverter, textureRefCountMonitor);
+ if (frameRefMonitor != null) {
+ frameRefMonitor.onNewBuffer(buffer);
+ }
+ final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
+ listener.onFrame(frame);
+ frame.release();
+ }
+
+ private void release() {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("Wrong thread.");
+ }
+ if (isTextureInUse || !isQuitting) {
+ throw new IllegalStateException("Unexpected release.");
+ }
+ yuvConverter.release();
+ GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+ surfaceTexture.release();
+ eglBase.release();
+ handler.getLooper().quit();
+ if (timestampAligner != null) {
+ timestampAligner.dispose();
+ }
+ }
+}
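A minimal usage sketch for the class above, assuming the static create() factory declared earlier in this file and an application-owned EglBase; the thread name and texture size are illustrative only:

    EglBase eglBase = EglBase.create();
    SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
    helper.setTextureSize(1280, 720); // Frames are only delivered once a size is set.
    helper.startListening(frame -> {
      // Runs on the helper's handler thread; keep this lightweight.
      Logging.d("Demo", "Frame " + frame.getRotatedWidth() + "x" + frame.getRotatedHeight());
    });
    // Pass helper.getSurfaceTexture() to a camera or decoder here.
    helper.stopListening();
    helper.dispose();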
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java
new file mode 100644
index 0000000000..6c9140abbd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/SurfaceViewRenderer.java
@@ -0,0 +1,300 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.content.res.Resources.NotFoundException;
+import android.graphics.Point;
+import android.os.Looper;
+import android.util.AttributeSet;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+/**
+ * Display the video stream on a SurfaceView.
+ */
+public class SurfaceViewRenderer extends SurfaceView
+ implements SurfaceHolder.Callback, VideoSink, RendererCommon.RendererEvents {
+ private static final String TAG = "SurfaceViewRenderer";
+
+ // Cached resource name.
+ private final String resourceName;
+ private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure =
+ new RendererCommon.VideoLayoutMeasure();
+ private final SurfaceEglRenderer eglRenderer;
+
+ // Callback for reporting renderer events. Read-only after initialization so no lock required.
+ private RendererCommon.RendererEvents rendererEvents;
+
+ // Accessed only on the main thread.
+ private int rotatedFrameWidth;
+ private int rotatedFrameHeight;
+ private boolean enableFixedSize;
+ private int surfaceWidth;
+ private int surfaceHeight;
+
+ /**
+ * Standard View constructor. In order to render something, you must first call init().
+ */
+ public SurfaceViewRenderer(Context context) {
+ super(context);
+ this.resourceName = getResourceName();
+ eglRenderer = new SurfaceEglRenderer(resourceName);
+ getHolder().addCallback(this);
+ getHolder().addCallback(eglRenderer);
+ }
+
+ /**
+ * Standard View constructor. In order to render something, you must first call init().
+ */
+ public SurfaceViewRenderer(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ this.resourceName = getResourceName();
+ eglRenderer = new SurfaceEglRenderer(resourceName);
+ getHolder().addCallback(this);
+ getHolder().addCallback(eglRenderer);
+ }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. It is allowed to call init() to
+ * reinitialize the renderer after a previous init()/release() cycle.
+ */
+ public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+ init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+ }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle.
+ */
+ public void init(final EglBase.Context sharedContext,
+ RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ ThreadUtils.checkIsOnMainThread();
+ this.rendererEvents = rendererEvents;
+ rotatedFrameWidth = 0;
+ rotatedFrameHeight = 0;
+ eglRenderer.init(sharedContext, this /* rendererEvents */, configAttributes, drawer);
+ }
+
+ /**
+ * Block until any pending frame is returned and all GL resources released, even if an interrupt
+ * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+ * should be called before the Activity is destroyed and the EGLContext is still valid. If you
+ * don't call this function, the GL resources might leak.
+ */
+ public void release() {
+ eglRenderer.release();
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ * @param drawerParam Custom drawer to use for this frame listener.
+ */
+ public void addFrameListener(
+ EglRenderer.FrameListener listener, float scale, RendererCommon.GlDrawer drawerParam) {
+ eglRenderer.addFrameListener(listener, scale, drawerParam);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received. This version uses
+ * the drawer of the EglRenderer that was passed in init.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ */
+ public void addFrameListener(EglRenderer.FrameListener listener, float scale) {
+ eglRenderer.addFrameListener(listener, scale);
+ }
+
+ public void removeFrameListener(EglRenderer.FrameListener listener) {
+ eglRenderer.removeFrameListener(listener);
+ }
+
+ /**
+ * Enables fixed size for the surface. This provides better performance but might be buggy on some
+ * devices. By default this is turned off.
+ */
+ public void setEnableHardwareScaler(boolean enabled) {
+ ThreadUtils.checkIsOnMainThread();
+ enableFixedSize = enabled;
+ updateSurfaceSize();
+ }
+
+ /**
+ * Set if the video stream should be mirrored or not.
+ */
+ public void setMirror(final boolean mirror) {
+ eglRenderer.setMirror(mirror);
+ }
+
+ /**
+ * Set how the video will fill the allowed layout area.
+ */
+ public void setScalingType(RendererCommon.ScalingType scalingType) {
+ ThreadUtils.checkIsOnMainThread();
+ videoLayoutMeasure.setScalingType(scalingType);
+ requestLayout();
+ }
+
+ public void setScalingType(RendererCommon.ScalingType scalingTypeMatchOrientation,
+ RendererCommon.ScalingType scalingTypeMismatchOrientation) {
+ ThreadUtils.checkIsOnMainThread();
+ videoLayoutMeasure.setScalingType(scalingTypeMatchOrientation, scalingTypeMismatchOrientation);
+ requestLayout();
+ }
+
+ /**
+ * Limit render framerate.
+ *
+ * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+ * reduction.
+ */
+ public void setFpsReduction(float fps) {
+ eglRenderer.setFpsReduction(fps);
+ }
+
+ public void disableFpsReduction() {
+ eglRenderer.disableFpsReduction();
+ }
+
+ public void pauseVideo() {
+ eglRenderer.pauseVideo();
+ }
+
+ // VideoSink interface.
+ @Override
+ public void onFrame(VideoFrame frame) {
+ eglRenderer.onFrame(frame);
+ }
+
+ // View layout interface.
+ @Override
+ protected void onMeasure(int widthSpec, int heightSpec) {
+ ThreadUtils.checkIsOnMainThread();
+ Point size =
+ videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
+ setMeasuredDimension(size.x, size.y);
+ logD("onMeasure(). New size: " + size.x + "x" + size.y);
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+ ThreadUtils.checkIsOnMainThread();
+ eglRenderer.setLayoutAspectRatio((right - left) / (float) (bottom - top));
+ updateSurfaceSize();
+ }
+
+ private void updateSurfaceSize() {
+ ThreadUtils.checkIsOnMainThread();
+ if (enableFixedSize && rotatedFrameWidth != 0 && rotatedFrameHeight != 0 && getWidth() != 0
+ && getHeight() != 0) {
+ final float layoutAspectRatio = getWidth() / (float) getHeight();
+ final float frameAspectRatio = rotatedFrameWidth / (float) rotatedFrameHeight;
+ final int drawnFrameWidth;
+ final int drawnFrameHeight;
+ if (frameAspectRatio > layoutAspectRatio) {
+ drawnFrameWidth = (int) (rotatedFrameHeight * layoutAspectRatio);
+ drawnFrameHeight = rotatedFrameHeight;
+ } else {
+ drawnFrameWidth = rotatedFrameWidth;
+ drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio);
+ }
+ // Aspect ratio of the drawn frame and the view is the same.
+ final int width = Math.min(getWidth(), drawnFrameWidth);
+ final int height = Math.min(getHeight(), drawnFrameHeight);
+ logD("updateSurfaceSize. Layout size: " + getWidth() + "x" + getHeight() + ", frame size: "
+ + rotatedFrameWidth + "x" + rotatedFrameHeight + ", requested surface size: " + width
+ + "x" + height + ", old surface size: " + surfaceWidth + "x" + surfaceHeight);
+ if (width != surfaceWidth || height != surfaceHeight) {
+ surfaceWidth = width;
+ surfaceHeight = height;
+ getHolder().setFixedSize(width, height);
+ }
+ } else {
+ surfaceWidth = surfaceHeight = 0;
+ getHolder().setSizeFromLayout();
+ }
+ }
+
+ // SurfaceHolder.Callback interface.
+ @Override
+ public void surfaceCreated(final SurfaceHolder holder) {
+ ThreadUtils.checkIsOnMainThread();
+ surfaceWidth = surfaceHeight = 0;
+ updateSurfaceSize();
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {}
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
+
+ private String getResourceName() {
+ try {
+ return getResources().getResourceEntryName(getId());
+ } catch (NotFoundException e) {
+ return "";
+ }
+ }
+
+ /**
+ * Post a task to clear the SurfaceView to a transparent uniform color.
+ */
+ public void clearImage() {
+ eglRenderer.clearImage();
+ }
+
+ @Override
+ public void onFirstFrameRendered() {
+ if (rendererEvents != null) {
+ rendererEvents.onFirstFrameRendered();
+ }
+ }
+
+ @Override
+ public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
+ if (rendererEvents != null) {
+ rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
+ }
+ int rotatedWidth = rotation == 0 || rotation == 180 ? videoWidth : videoHeight;
+ int rotatedHeight = rotation == 0 || rotation == 180 ? videoHeight : videoWidth;
+ // Run immediately if possible, for UI-thread tests.
+ postOrRun(() -> {
+ rotatedFrameWidth = rotatedWidth;
+ rotatedFrameHeight = rotatedHeight;
+ updateSurfaceSize();
+ requestLayout();
+ });
+ }
+
+ private void postOrRun(Runnable r) {
+ if (Thread.currentThread() == Looper.getMainLooper().getThread()) {
+ r.run();
+ } else {
+ post(r);
+ }
+ }
+
+ private void logD(String string) {
+ Logging.d(TAG, resourceName + ": " + string);
+ }
+}
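A typical lifecycle sketch for SurfaceViewRenderer; `eglBase` and `videoTrack` are assumed to exist in the surrounding application code:

    SurfaceViewRenderer renderer = new SurfaceViewRenderer(context);
    renderer.init(eglBase.getEglBaseContext(), null /* rendererEvents */);
    renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    renderer.setMirror(true); // Mirror for a front-facing camera preview.
    videoTrack.addSink(renderer); // Frames arrive through the VideoSink interface.
    // On teardown:
    videoTrack.removeSink(renderer);
    renderer.release();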
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/TextureBufferImpl.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/TextureBufferImpl.java
new file mode 100644
index 0000000000..8e0e40ef70
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/TextureBufferImpl.java
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.os.Handler;
+import androidx.annotation.Nullable;
+
+/**
+ * Android texture buffer that glues the necessary information together with a generic
+ * release callback. ToI420() is implemented by providing a Handler and a YuvConverter.
+ */
+public class TextureBufferImpl implements VideoFrame.TextureBuffer {
+ interface RefCountMonitor {
+ void onRetain(TextureBufferImpl textureBuffer);
+ void onRelease(TextureBufferImpl textureBuffer);
+ void onDestroy(TextureBufferImpl textureBuffer);
+ }
+
+ // This is the full resolution the texture has in memory after applying the transformation matrix
+ // that might include cropping. This resolution is useful to know when sampling the texture to
+ // avoid downscaling artifacts.
+ private final int unscaledWidth;
+ private final int unscaledHeight;
+ // This is the resolution that has been applied after cropAndScale().
+ private final int width;
+ private final int height;
+ private final Type type;
+ private final int id;
+ private final Matrix transformMatrix;
+ private final Handler toI420Handler;
+ private final YuvConverter yuvConverter;
+ private final RefCountDelegate refCountDelegate;
+ private final RefCountMonitor refCountMonitor;
+
+ public TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
+ Handler toI420Handler, YuvConverter yuvConverter, @Nullable Runnable releaseCallback) {
+ this(width, height, width, height, type, id, transformMatrix, toI420Handler, yuvConverter,
+ new RefCountMonitor() {
+ @Override
+ public void onRetain(TextureBufferImpl textureBuffer) {}
+
+ @Override
+ public void onRelease(TextureBufferImpl textureBuffer) {}
+
+ @Override
+ public void onDestroy(TextureBufferImpl textureBuffer) {
+ if (releaseCallback != null) {
+ releaseCallback.run();
+ }
+ }
+ });
+ }
+
+ TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
+ Handler toI420Handler, YuvConverter yuvConverter, RefCountMonitor refCountMonitor) {
+ this(width, height, width, height, type, id, transformMatrix, toI420Handler, yuvConverter,
+ refCountMonitor);
+ }
+
+ private TextureBufferImpl(int unscaledWidth, int unscaledHeight, int width, int height, Type type,
+ int id, Matrix transformMatrix, Handler toI420Handler, YuvConverter yuvConverter,
+ RefCountMonitor refCountMonitor) {
+ this.unscaledWidth = unscaledWidth;
+ this.unscaledHeight = unscaledHeight;
+ this.width = width;
+ this.height = height;
+ this.type = type;
+ this.id = id;
+ this.transformMatrix = transformMatrix;
+ this.toI420Handler = toI420Handler;
+ this.yuvConverter = yuvConverter;
+ this.refCountDelegate = new RefCountDelegate(() -> refCountMonitor.onDestroy(this));
+ this.refCountMonitor = refCountMonitor;
+ }
+
+ @Override
+ public VideoFrame.TextureBuffer.Type getType() {
+ return type;
+ }
+
+ @Override
+ public int getTextureId() {
+ return id;
+ }
+
+ @Override
+ public Matrix getTransformMatrix() {
+ return transformMatrix;
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ return ThreadUtils.invokeAtFrontUninterruptibly(
+ toI420Handler, () -> yuvConverter.convert(this));
+ }
+
+ @Override
+ public void retain() {
+ refCountMonitor.onRetain(this);
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountMonitor.onRelease(this);
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ final Matrix cropAndScaleMatrix = new Matrix();
+ // In WebRTC, Y=0 is the top row, while in OpenGL Y=0 is the bottom row. This means that the Y
+ // direction is effectively reversed.
+ final int cropYFromBottom = height - (cropY + cropHeight);
+ cropAndScaleMatrix.preTranslate(cropX / (float) width, cropYFromBottom / (float) height);
+ cropAndScaleMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height);
+
+ return applyTransformMatrix(cropAndScaleMatrix,
+ Math.round(unscaledWidth * cropWidth / (float) width),
+ Math.round(unscaledHeight * cropHeight / (float) height), scaleWidth, scaleHeight);
+ }
+
+ /**
+ * Returns the width of the texture in memory. This should only be used for downscaling, and you
+ * should still respect the width from getWidth().
+ */
+ public int getUnscaledWidth() {
+ return unscaledWidth;
+ }
+
+ /**
+ * Returns the height of the texture in memory. This should only be used for downscaling, and you
+ * should still respect the height from getHeight().
+ */
+ public int getUnscaledHeight() {
+ return unscaledHeight;
+ }
+
+ public Handler getToI420Handler() {
+ return toI420Handler;
+ }
+
+ public YuvConverter getYuvConverter() {
+ return yuvConverter;
+ }
+
+ /**
+ * Create a new TextureBufferImpl with an applied transform matrix and a new size. The
+ * existing buffer is unchanged. The given transform matrix is applied first when texture
+ * coordinates are still in the unmodified [0, 1] range.
+ */
+ @Override
+ public TextureBufferImpl applyTransformMatrix(
+ Matrix transformMatrix, int newWidth, int newHeight) {
+ return applyTransformMatrix(transformMatrix, /* unscaledWidth= */ newWidth,
+ /* unscaledHeight= */ newHeight, /* scaledWidth= */ newWidth,
+ /* scaledHeight= */ newHeight);
+ }
+
+ private TextureBufferImpl applyTransformMatrix(Matrix transformMatrix, int unscaledWidth,
+ int unscaledHeight, int scaledWidth, int scaledHeight) {
+ final Matrix newMatrix = new Matrix(this.transformMatrix);
+ newMatrix.preConcat(transformMatrix);
+ retain();
+ return new TextureBufferImpl(unscaledWidth, unscaledHeight, scaledWidth, scaledHeight, type, id,
+ newMatrix, toI420Handler, yuvConverter, new RefCountMonitor() {
+ @Override
+ public void onRetain(TextureBufferImpl textureBuffer) {
+ refCountMonitor.onRetain(TextureBufferImpl.this);
+ }
+
+ @Override
+ public void onRelease(TextureBufferImpl textureBuffer) {
+ refCountMonitor.onRelease(TextureBufferImpl.this);
+ }
+
+ @Override
+ public void onDestroy(TextureBufferImpl textureBuffer) {
+ release();
+ }
+ });
+ }
+}
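A sketch of wrapping an existing OES texture with the public constructor above; `oesTextureId`, `handler`, and `yuvConverter` are assumed to be set up already:

    TextureBufferImpl buffer = new TextureBufferImpl(
        /* width= */ 640, /* height= */ 480, VideoFrame.TextureBuffer.Type.OES, oesTextureId,
        new Matrix(), handler, yuvConverter,
        () -> Logging.d("Demo", "Last reference released; texture may be reused."));
    VideoFrame frame = new VideoFrame(buffer, /* rotation= */ 0, /* timestampNs= */ 0);
    // When the final retain()/release() pair balances out, the callback above runs.
    frame.release();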
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/TimestampAligner.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/TimestampAligner.java
new file mode 100644
index 0000000000..d96c939595
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/TimestampAligner.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The TimestampAligner class helps translate camera timestamps into the same timescale as is
+ * used by rtc::TimeNanos(). Some cameras have built-in timestamping that is more accurate than
+ * reading the system clock, but uses a different epoch and has unknown clock drift. Frame
+ * timestamps in webrtc should use rtc::TimeNanos (system monotonic time), and this class provides
+ * a filter that lets us use the rtc::TimeNanos timescale while still taking advantage of the
+ * higher accuracy of the camera clock. This class is a wrapper on top of rtc::TimestampAligner.
+ */
+public class TimestampAligner {
+ /**
+ * Wrapper around rtc::TimeNanos(). This is normally the same as System.nanoTime(), but call this
+ * function to be safe.
+ */
+ public static long getRtcTimeNanos() {
+ return nativeRtcTimeNanos();
+ }
+
+ private volatile long nativeTimestampAligner = nativeCreateTimestampAligner();
+
+ /**
+ * Translates camera timestamps to the same timescale as is used by rtc::TimeNanos().
+ * `cameraTimeNs` is assumed to be accurate, but with an unknown epoch and clock drift. Returns
+ * the translated timestamp.
+ */
+ public long translateTimestamp(long cameraTimeNs) {
+ checkNativeAlignerExists();
+ return nativeTranslateTimestamp(nativeTimestampAligner, cameraTimeNs);
+ }
+
+ /** Dispose native timestamp aligner. */
+ public void dispose() {
+ checkNativeAlignerExists();
+ nativeReleaseTimestampAligner(nativeTimestampAligner);
+ nativeTimestampAligner = 0;
+ }
+
+ private void checkNativeAlignerExists() {
+ if (nativeTimestampAligner == 0) {
+ throw new IllegalStateException("TimestampAligner has been disposed.");
+ }
+ }
+
+ private static native long nativeRtcTimeNanos();
+ private static native long nativeCreateTimestampAligner();
+ private static native void nativeReleaseTimestampAligner(long timestampAligner);
+ private static native long nativeTranslateTimestamp(long timestampAligner, long cameraTimeNs);
+}
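A short sketch of the intended call pattern; the camera timestamp source is an assumption (here, a SurfaceTexture):

    TimestampAligner aligner = new TimestampAligner();
    long cameraTimeNs = surfaceTexture.getTimestamp();
    long rtcTimeNs = aligner.translateTimestamp(cameraTimeNs);
    // When the capture session ends:
    aligner.dispose();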
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/TurnCustomizer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/TurnCustomizer.java
new file mode 100644
index 0000000000..41bedb7dcb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/TurnCustomizer.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ TurnCustomizer. */
+public class TurnCustomizer {
+ private long nativeTurnCustomizer;
+
+ public TurnCustomizer(long nativeTurnCustomizer) {
+ this.nativeTurnCustomizer = nativeTurnCustomizer;
+ }
+
+ public void dispose() {
+ checkTurnCustomizerExists();
+ nativeFreeTurnCustomizer(nativeTurnCustomizer);
+ nativeTurnCustomizer = 0;
+ }
+
+ private static native void nativeFreeTurnCustomizer(long turnCustomizer);
+
+ /** Return a pointer to webrtc::TurnCustomizer. */
+ @CalledByNative
+ long getNativeTurnCustomizer() {
+ checkTurnCustomizerExists();
+ return nativeTurnCustomizer;
+ }
+
+ private void checkTurnCustomizerExists() {
+ if (nativeTurnCustomizer == 0) {
+ throw new IllegalStateException("TurnCustomizer has been disposed.");
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCapturer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCapturer.java
new file mode 100644
index 0000000000..67eb7ab086
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCapturer.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+// Base interface for all VideoCapturers to implement.
+public interface VideoCapturer {
+ /**
+ * This function is used to initialize the camera thread, the android application context, and the
+ * capture observer. It will be called only once and before any startCapture() request. The
+ * camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
+ * to deliver texture frames, it should do this by rendering on the SurfaceTexture in
+ * {@code surfaceTextureHelper}, register itself as a listener, and forward the frames to
+ * CapturerObserver.onFrameCaptured(). The caller still has ownership of {@code
+ * surfaceTextureHelper} and is responsible for making sure surfaceTextureHelper.dispose() is
+ * called. This also means that the caller can reuse the SurfaceTextureHelper to initialize a new
+ * VideoCapturer once the previous VideoCapturer has been disposed.
+ */
+ void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ CapturerObserver capturerObserver);
+
+ /**
+ * Start capturing frames in a format that is as close as possible to {@code width x height} and
+ * {@code framerate}.
+ */
+ void startCapture(int width, int height, int framerate);
+
+ /**
+ * Stop capturing. This function should block until capture is actually stopped.
+ */
+ void stopCapture() throws InterruptedException;
+
+ void changeCaptureFormat(int width, int height, int framerate);
+
+ /**
+ * Perform any final cleanup here. No more capturing will be done after this call.
+ */
+ void dispose();
+
+ /**
+ * @return true if-and-only-if this is a screen capturer.
+ */
+ boolean isScreencast();
+}
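A skeletal implementation showing the shape the interface expects; this hypothetical NullCapturer produces no frames and exists only to illustrate the callback handshake:

    class NullCapturer implements VideoCapturer {
      private CapturerObserver observer;

      @Override
      public void initialize(SurfaceTextureHelper surfaceTextureHelper,
          Context applicationContext, CapturerObserver capturerObserver) {
        this.observer = capturerObserver;
      }

      @Override
      public void startCapture(int width, int height, int framerate) {
        observer.onCapturerStarted(/* success= */ true);
        // A real capturer would now deliver frames via observer.onFrameCaptured().
      }

      @Override
      public void stopCapture() {
        observer.onCapturerStopped();
      }

      @Override
      public void changeCaptureFormat(int width, int height, int framerate) {}

      @Override
      public void dispose() {}

      @Override
      public boolean isScreencast() {
        return false;
      }
    }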
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java
new file mode 100644
index 0000000000..363be347b5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecInfo.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.Locale;
+import java.util.Map;
+
+/**
+ * Represents a video codec as encoded in SDP.
+ */
+public class VideoCodecInfo {
+ // Keys for H264 VideoCodecInfo properties.
+ public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
+ public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
+ public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
+
+ public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0";
+ public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
+ public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
+ public static final String H264_CONSTRAINED_HIGH_3_1 =
+ H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
+ public static final String H264_CONSTRAINED_BASELINE_3_1 =
+ H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
+
+ public final String name;
+ public final Map<String, String> params;
+ @Deprecated public final int payload;
+
+ @CalledByNative
+ public VideoCodecInfo(String name, Map<String, String> params) {
+ this.payload = 0;
+ this.name = name;
+ this.params = params;
+ }
+
+ @Deprecated
+ public VideoCodecInfo(int payload, String name, Map<String, String> params) {
+ this.payload = payload;
+ this.name = name;
+ this.params = params;
+ }
+
+ @Override
+ public boolean equals(@Nullable Object obj) {
+ if (obj == null)
+ return false;
+ if (obj == this)
+ return true;
+ if (!(obj instanceof VideoCodecInfo))
+ return false;
+
+ VideoCodecInfo otherInfo = (VideoCodecInfo) obj;
+ return name.equalsIgnoreCase(otherInfo.name) && params.equals(otherInfo.params);
+ }
+
+ @Override
+ public int hashCode() {
+ Object[] values = {name.toUpperCase(Locale.ROOT), params};
+ return Arrays.hashCode(values);
+ }
+
+ @Override
+ public String toString() {
+ return "VideoCodec{" + name + " " + params + "}";
+ }
+
+ @CalledByNative
+ String getName() {
+ return name;
+ }
+
+ @CalledByNative
+ Map<String, String> getParams() {
+ return params;
+ }
+}
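A sketch of building an H264 entry from the fmtp keys and profile constants defined above (an import of java.util.HashMap is assumed):

    Map<String, String> params = new HashMap<>();
    params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
        VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
    params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
    params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
    VideoCodecInfo h264 = new VideoCodecInfo("H264", params);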
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java
new file mode 100644
index 0000000000..670d255880
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoCodecStatus.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Status codes reported by video encoding/decoding components. This should be kept in sync with
+ * video_error_codes.h.
+ */
+public enum VideoCodecStatus {
+ TARGET_BITRATE_OVERSHOOT(5),
+ REQUEST_SLI(2),
+ NO_OUTPUT(1),
+ OK(0),
+ ERROR(-1),
+ LEVEL_EXCEEDED(-2),
+ MEMORY(-3),
+ ERR_PARAMETER(-4),
+ ERR_SIZE(-5),
+ TIMEOUT(-6),
+ UNINITIALIZED(-7),
+ ERR_REQUEST_SLI(-12),
+ FALLBACK_SOFTWARE(-13);
+
+ private final int number;
+
+ private VideoCodecStatus(int number) {
+ this.number = number;
+ }
+
+ @CalledByNative
+ public int getNumber() {
+ return number;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoder.java
new file mode 100644
index 0000000000..a80fa4fef2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoder.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for a video decoder that can be used in WebRTC. All calls to the class will be made on
+ * a single decoding thread.
+ */
+public interface VideoDecoder {
+ /** Settings passed to the decoder by WebRTC. */
+ public class Settings {
+ public final int numberOfCores;
+ public final int width;
+ public final int height;
+
+ @CalledByNative("Settings")
+ public Settings(int numberOfCores, int width, int height) {
+ this.numberOfCores = numberOfCores;
+ this.width = width;
+ this.height = height;
+ }
+ }
+
+ /** Additional info for decoding. */
+ public class DecodeInfo {
+ public final boolean isMissingFrames;
+ public final long renderTimeMs;
+
+ public DecodeInfo(boolean isMissingFrames, long renderTimeMs) {
+ this.isMissingFrames = isMissingFrames;
+ this.renderTimeMs = renderTimeMs;
+ }
+ }
+
+ public interface Callback {
+ /**
+ * Call to return a decoded frame. Can be called on any thread.
+ *
+ * @param frame Decoded frame
+ * @param decodeTimeMs Time it took to decode the frame in milliseconds or null if not available
+ * @param qp QP value of the decoded frame or null if not available
+ */
+ void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp);
+ }
+
+ /**
+ * The decoder implementation backing this interface is either 1) a Java
+ * decoder (e.g., an Android platform decoder), or alternatively 2) a native
+ * decoder (e.g., a software decoder or a C++ decoder adapter).
+ *
+ * For case 1), createNativeVideoDecoder() should return zero.
+ * In this case, we expect the native library to call the decoder through
+ * JNI using the Java interface declared below.
+ *
+ * For case 2), createNativeVideoDecoder() should return a non-zero value.
+ * In this case, we expect the native library to treat the returned value as
+ * a raw pointer of type webrtc::VideoDecoder* (ownership is transferred to
+ * the caller). The native library should then directly call the
+ * webrtc::VideoDecoder interface without going through JNI. All calls to
+ * the Java interface methods declared below should thus throw an
+ * UnsupportedOperationException.
+ */
+ @CalledByNative
+ default long createNativeVideoDecoder() {
+ return 0;
+ }
+
+ /**
+ * Initializes the decoding process with specified settings. Will be called on the decoding thread
+ * before any decode calls.
+ */
+ @CalledByNative VideoCodecStatus initDecode(Settings settings, Callback decodeCallback);
+ /**
+ * Called when the decoder is no longer needed. Any more calls to decode will not be made.
+ */
+ @CalledByNative VideoCodecStatus release();
+ /**
+ * Request the decoder to decode a frame.
+ */
+ @CalledByNative VideoCodecStatus decode(EncodedImage frame, DecodeInfo info);
+ /**
+ * Should return a descriptive name for the implementation. Gets called once and cached. May be
+ * called from arbitrary thread.
+ */
+ @CalledByNative String getImplementationName();
+}
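A minimal Java-side decoder (case 1 above): createNativeVideoDecoder() keeps its default of zero, so the native library drives this hypothetical stub over JNI:

    class LoggingDecoder implements VideoDecoder {
      @Override
      public VideoCodecStatus initDecode(Settings settings, Callback decodeCallback) {
        Logging.d("LoggingDecoder", "initDecode " + settings.width + "x" + settings.height);
        return VideoCodecStatus.OK;
      }

      @Override
      public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
        // A real decoder would decode `frame` and report the result through the
        // Callback passed to initDecode().
        return VideoCodecStatus.OK;
      }

      @Override
      public VideoCodecStatus release() {
        return VideoCodecStatus.OK;
      }

      @Override
      public String getImplementationName() {
        return "LoggingDecoder";
      }
    }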
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java
new file mode 100644
index 0000000000..8b25516e99
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/** Factory for creating VideoDecoders. */
+public interface VideoDecoderFactory {
+ /**
+ * Creates a VideoDecoder for the given codec. Supports the same codecs supported by
+ * VideoEncoderFactory.
+ */
+ @Nullable @CalledByNative VideoDecoder createDecoder(VideoCodecInfo info);
+
+ /**
+ * Enumerates the list of supported video codecs.
+ */
+ @CalledByNative
+ default VideoCodecInfo[] getSupportedCodecs() {
+ return new VideoCodecInfo[0];
+ }
+}
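A sketch of a factory offering a single codec, reusing the hypothetical LoggingDecoder from the previous sketch (an import of java.util.HashMap is assumed):

    class SingleCodecDecoderFactory implements VideoDecoderFactory {
      @Override
      public VideoDecoder createDecoder(VideoCodecInfo info) {
        // Return null for codecs this factory does not handle.
        return info.name.equalsIgnoreCase("VP8") ? new LoggingDecoder() : null;
      }

      @Override
      public VideoCodecInfo[] getSupportedCodecs() {
        return new VideoCodecInfo[] {new VideoCodecInfo("VP8", new HashMap<>())};
      }
    }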
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java
new file mode 100644
index 0000000000..ddfa3ecd40
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoDecoderFallback.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * A combined video decoder that falls back on a secondary decoder if the primary decoder fails.
+ */
+public class VideoDecoderFallback extends WrappedNativeVideoDecoder {
+ private final VideoDecoder fallback;
+ private final VideoDecoder primary;
+
+ public VideoDecoderFallback(VideoDecoder fallback, VideoDecoder primary) {
+ this.fallback = fallback;
+ this.primary = primary;
+ }
+
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder(fallback, primary);
+ }
+
+ private static native long nativeCreateDecoder(VideoDecoder fallback, VideoDecoder primary);
+}
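A wiring sketch; `hardwareFactory`, `softwareFactory`, and `codecInfo` are assumptions standing in for any decoder-factory pair:

    // Note the argument order: fallback first, then primary.
    VideoDecoder primary = hardwareFactory.createDecoder(codecInfo);
    VideoDecoder fallback = softwareFactory.createDecoder(codecInfo);
    VideoDecoder decoder = new VideoDecoderFallback(fallback, primary);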
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoder.java
new file mode 100644
index 0000000000..0d8cf830ae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoder.java
@@ -0,0 +1,385 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import org.webrtc.EncodedImage;
+
+/**
+ * Interface for a video encoder that can be used with WebRTC. All calls will be made on the
+ * encoding thread. The encoder may be constructed on a different thread, and changing the thread
+ * after calling release() is allowed.
+ */
+public interface VideoEncoder {
+ /** Settings passed to the encoder by WebRTC. */
+ public class Settings {
+ public final int numberOfCores;
+ public final int width;
+ public final int height;
+ public final int startBitrate; // Kilobits per second.
+ public final int maxFramerate;
+ public final int numberOfSimulcastStreams;
+ public final boolean automaticResizeOn;
+ public final Capabilities capabilities;
+
+ // TODO(bugs.webrtc.org/10720): Remove.
+ @Deprecated
+ public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
+ int numberOfSimulcastStreams, boolean automaticResizeOn) {
+ this(numberOfCores, width, height, startBitrate, maxFramerate, numberOfSimulcastStreams,
+ automaticResizeOn, new VideoEncoder.Capabilities(false /* lossNotification */));
+ }
+
+ @CalledByNative("Settings")
+ public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
+ int numberOfSimulcastStreams, boolean automaticResizeOn, Capabilities capabilities) {
+ this.numberOfCores = numberOfCores;
+ this.width = width;
+ this.height = height;
+ this.startBitrate = startBitrate;
+ this.maxFramerate = maxFramerate;
+ this.numberOfSimulcastStreams = numberOfSimulcastStreams;
+ this.automaticResizeOn = automaticResizeOn;
+ this.capabilities = capabilities;
+ }
+ }
+
+ /** Capabilities (loss notification, etc.) passed to the encoder by WebRTC. */
+ public class Capabilities {
+ /**
+ * The remote side has support for the loss notification RTCP feedback message format, and will
+ * be sending these feedback messages if necessary.
+ */
+ public final boolean lossNotification;
+
+ @CalledByNative("Capabilities")
+ public Capabilities(boolean lossNotification) {
+ this.lossNotification = lossNotification;
+ }
+ }
+
+ /** Additional info for encoding. */
+ public class EncodeInfo {
+ public final EncodedImage.FrameType[] frameTypes;
+
+ @CalledByNative("EncodeInfo")
+ public EncodeInfo(EncodedImage.FrameType[] frameTypes) {
+ this.frameTypes = frameTypes;
+ }
+ }
+
+ // TODO(sakal): Add values to these classes as necessary.
+ /** Codec specific information about the encoded frame. */
+ public class CodecSpecificInfo {}
+
+ public class CodecSpecificInfoVP8 extends CodecSpecificInfo {}
+
+ public class CodecSpecificInfoVP9 extends CodecSpecificInfo {}
+
+ public class CodecSpecificInfoH264 extends CodecSpecificInfo {}
+
+ public class CodecSpecificInfoAV1 extends CodecSpecificInfo {}
+
+ /**
+ * Represents bitrate allocated for an encoder to produce frames. Bitrate can be divided between
+ * spatial and temporal layers.
+ */
+ public class BitrateAllocation {
+ // First index is the spatial layer and second the temporal layer.
+ public final int[][] bitratesBbs;
+
+ /**
+ * Initializes the allocation with a two-dimensional array of bitrates. The first index of the
+ * array is the spatial layer and the second index is the temporal layer.
+ */
+ @CalledByNative("BitrateAllocation")
+ public BitrateAllocation(int[][] bitratesBbs) {
+ this.bitratesBbs = bitratesBbs;
+ }
+
+ /**
+ * Gets the total bitrate allocated for all layers.
+ */
+ public int getSum() {
+ int sum = 0;
+ for (int[] spatialLayer : bitratesBbs) {
+ for (int bitrate : spatialLayer) {
+ sum += bitrate;
+ }
+ }
+ return sum;
+ }
+ }
+
+ /** Settings for WebRTC quality based scaling. */
+ public class ScalingSettings {
+ public final boolean on;
+ @Nullable public final Integer low;
+ @Nullable public final Integer high;
+
+ /**
+ * Settings to disable quality based scaling.
+ */
+ public static final ScalingSettings OFF = new ScalingSettings();
+
+ /**
+ * Creates settings to enable quality based scaling.
+ *
+ * @param low Average QP at which to scale up the resolution.
+ * @param high Average QP at which to scale down the resolution.
+ */
+ public ScalingSettings(int low, int high) {
+ this.on = true;
+ this.low = low;
+ this.high = high;
+ }
+
+ private ScalingSettings() {
+ this.on = false;
+ this.low = null;
+ this.high = null;
+ }
+
+ // TODO(bugs.webrtc.org/8830): Below constructors are deprecated.
+ // Default thresholds are going away, so thresholds have to be set
+ // when scaling is on.
+ /**
+ * Creates quality based scaling setting.
+ *
+ * @param on True if quality scaling is turned on.
+ */
+ @Deprecated
+ public ScalingSettings(boolean on) {
+ this.on = on;
+ this.low = null;
+ this.high = null;
+ }
+
+ /**
+ * Creates quality based scaling settings with custom thresholds.
+ *
+ * @param on True if quality scaling is turned on.
+ * @param low Average QP at which to scale up the resolution.
+ * @param high Average QP at which to scale down the resolution.
+ */
+ @Deprecated
+ public ScalingSettings(boolean on, int low, int high) {
+ this.on = on;
+ this.low = low;
+ this.high = high;
+ }
+
+ @Override
+ public String toString() {
+ return on ? "[ " + low + ", " + high + " ]" : "OFF";
+ }
+ }
+
+ /**
+ * Bitrate limits for resolution.
+ */
+ public class ResolutionBitrateLimits {
+ /**
+ * Maximum size of video frame, in pixels, the bitrate limits are intended for.
+ */
+ public final int frameSizePixels;
+
+ /**
+ * Recommended minimum bitrate to start encoding.
+ */
+ public final int minStartBitrateBps;
+
+ /**
+ * Recommended minimum bitrate.
+ */
+ public final int minBitrateBps;
+
+ /**
+ * Recommended maximum bitrate.
+ */
+ public final int maxBitrateBps;
+
+ public ResolutionBitrateLimits(
+ int frameSizePixels, int minStartBitrateBps, int minBitrateBps, int maxBitrateBps) {
+ this.frameSizePixels = frameSizePixels;
+ this.minStartBitrateBps = minStartBitrateBps;
+ this.minBitrateBps = minBitrateBps;
+ this.maxBitrateBps = maxBitrateBps;
+ }
+
+ @CalledByNative("ResolutionBitrateLimits")
+ public int getFrameSizePixels() {
+ return frameSizePixels;
+ }
+
+ @CalledByNative("ResolutionBitrateLimits")
+ public int getMinStartBitrateBps() {
+ return minStartBitrateBps;
+ }
+
+ @CalledByNative("ResolutionBitrateLimits")
+ public int getMinBitrateBps() {
+ return minBitrateBps;
+ }
+
+ @CalledByNative("ResolutionBitrateLimits")
+ public int getMaxBitrateBps() {
+ return maxBitrateBps;
+ }
+ }
+
+ /** Rate control parameters. */
+ public class RateControlParameters {
+ /**
+ * Adjusted target bitrate, per spatial/temporal layer. May be lower or higher than the target
+ * depending on encoder behaviour.
+ */
+ public final BitrateAllocation bitrate;
+
+ /**
+ * Target framerate, in fps. A value <= 0.0 is invalid and should be interpreted as framerate
+ * target not available. In this case the encoder should fall back to the max framerate
+ * specified in `codec_settings` of the last InitEncode() call.
+ */
+ public final double framerateFps;
+
+ @CalledByNative("RateControlParameters")
+ public RateControlParameters(BitrateAllocation bitrate, double framerateFps) {
+ this.bitrate = bitrate;
+ this.framerateFps = framerateFps;
+ }
+ }
+
+ /**
+ * Metadata about the Encoder.
+ */
+ public class EncoderInfo {
+ /**
+ * The width and height of the incoming video frames should be divisible by
+ * `requestedResolutionAlignment`.
+ */
+ public final int requestedResolutionAlignment;
+
+ /**
+ * Same as above, but if true, each simulcast layer should also be divisible by
+ * `requestedResolutionAlignment`.
+ */
+ public final boolean applyAlignmentToAllSimulcastLayers;
+
+ public EncoderInfo(
+ int requestedResolutionAlignment, boolean applyAlignmentToAllSimulcastLayers) {
+ this.requestedResolutionAlignment = requestedResolutionAlignment;
+ this.applyAlignmentToAllSimulcastLayers = applyAlignmentToAllSimulcastLayers;
+ }
+
+ @CalledByNative("EncoderInfo")
+ public int getRequestedResolutionAlignment() {
+ return requestedResolutionAlignment;
+ }
+
+ @CalledByNative("EncoderInfo")
+ public boolean getApplyAlignmentToAllSimulcastLayers() {
+ return applyAlignmentToAllSimulcastLayers;
+ }
+ }
+
+ public interface Callback {
+ /**
+ * Old encoders assume that the byte buffer held by `frame` is not accessed after the call to
+ * this method returns. If the pipeline downstream needs to hold on to the buffer, it then has
+ * to make its own copy. We want to move to a model where no copying is needed, and instead use
+ * retain()/release() to signal to the encoder when it is safe to reuse the buffer.
+ *
+ * Over the transition, implementations of this class should use the maybeRetain() method if
+ * they want to keep a reference to the buffer, and fall back to copying if that method returns
+ * false.
+ */
+ void onEncodedFrame(EncodedImage frame, CodecSpecificInfo info);
+ }
+
+ /**
+ * The encoder implementation backing this interface is either 1) a Java
+ * encoder (e.g., an Android platform encoder), or alternatively 2) a native
+ * encoder (e.g., a software encoder or a C++ encoder adapter).
+ *
+ * For case 1), createNativeVideoEncoder() should return zero.
+ * In this case, we expect the native library to call the encoder through
+ * JNI using the Java interface declared below.
+ *
+ * For case 2), createNativeVideoEncoder() should return a non-zero value.
+ * In this case, we expect the native library to treat the returned value as
+ * a raw pointer of type webrtc::VideoEncoder* (ownership is transferred to
+ * the caller). The native library should then directly call the
+ * webrtc::VideoEncoder interface without going through JNI. All calls to
+ * the Java interface methods declared below should thus throw an
+ * UnsupportedOperationException.
+ */
+ @CalledByNative
+ default long createNativeVideoEncoder() {
+ return 0;
+ }
+
+ /**
+ * Returns true if the encoder is backed by hardware.
+ */
+ @CalledByNative
+ default boolean isHardwareEncoder() {
+ return true;
+ }
+
+ /**
+ * Initializes the encoding process. Call before any calls to encode.
+ */
+ @CalledByNative VideoCodecStatus initEncode(Settings settings, Callback encodeCallback);
+
+ /**
+ * Releases the encoder. No more calls to encode will be made after this call.
+ */
+ @CalledByNative VideoCodecStatus release();
+
+ /**
+ * Requests the encoder to encode a frame.
+ */
+ @CalledByNative VideoCodecStatus encode(VideoFrame frame, EncodeInfo info);
+
+ /** Sets the bitrate allocation and the target framerate for the encoder. */
+ VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate);
+
+ /** Sets the bitrate allocation and the target framerate for the encoder. */
+ default @CalledByNative VideoCodecStatus setRates(RateControlParameters rcParameters) {
+ // Round frame rate up to avoid overshoots.
+ int framerateFps = (int) Math.ceil(rcParameters.framerateFps);
+ return setRateAllocation(rcParameters.bitrate, framerateFps);
+ }
+
+ /** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */
+ @CalledByNative ScalingSettings getScalingSettings();
+
+ /** Returns the list of bitrate limits. */
+ @CalledByNative
+ default ResolutionBitrateLimits[] getResolutionBitrateLimits() {
+ // TODO(ssilkin): Update downstream projects and remove default implementation.
+ ResolutionBitrateLimits[] bitrateLimits = {};
+ return bitrateLimits;
+ }
+
+ /**
+ * Should return a descriptive name for the implementation. Gets called once and cached. May be
+ * called from arbitrary thread.
+ */
+ @CalledByNative String getImplementationName();
+
+ @CalledByNative
+ default EncoderInfo getEncoderInfo() {
+ return new EncoderInfo(
+ /* requestedResolutionAlignment= */ 1, /* applyAlignmentToAllSimulcastLayers= */ false);
+ }
+}
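A sketch of the bitrate and scaling types above: a two-spatial-layer, two-temporal-layer allocation in bits per second, plus illustrative QP thresholds:

    VideoEncoder.BitrateAllocation allocation = new VideoEncoder.BitrateAllocation(
        new int[][] {{100_000, 50_000}, {300_000, 150_000}});
    int totalBps = allocation.getSum(); // 600_000: all layers summed.
    VideoEncoder.ScalingSettings scaling =
        new VideoEncoder.ScalingSettings(/* low= */ 24, /* high= */ 37);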
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java
new file mode 100644
index 0000000000..2a46662d14
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFactory.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/** Factory for creating VideoEncoders. */
+public interface VideoEncoderFactory {
+ public interface VideoEncoderSelector {
+ /** Called with the VideoCodecInfo of the currently used encoder. */
+ @CalledByNative("VideoEncoderSelector") void onCurrentEncoder(VideoCodecInfo info);
+
+ /**
+ * Called with the current available bitrate. Returns null if the encoder selector prefers to
+ * keep the current encoder or a VideoCodecInfo if a new encoder is preferred.
+ */
+ @Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onAvailableBitrate(int kbps);
+
+ /**
+ * Called every time the encoder input resolution changes. Returns null if the encoder selector
+ * prefers to keep the current encoder or a VideoCodecInfo if a new encoder is preferred.
+ */
+ @Nullable
+ @CalledByNative("VideoEncoderSelector")
+ default VideoCodecInfo onResolutionChange(int width, int height) {
+ return null;
+ }
+
+ /**
+ * Called when the currently used encoder signals itself as broken. Returns null if the encoder
+ * selector prefers to keep the current encoder or a VideoCodecInfo if a new encoder is
+ * preferred.
+ */
+ @Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onEncoderBroken();
+ }
+
+ /** Creates an encoder for the given video codec. */
+ @Nullable @CalledByNative VideoEncoder createEncoder(VideoCodecInfo info);
+
+ /**
+ * Enumerates the list of supported video codecs. This method will only be called once and the
+ * result will be cached.
+ */
+ @CalledByNative VideoCodecInfo[] getSupportedCodecs();
+
+ /**
+ * Enumerates the list of supported video codecs that can also be tagged with
+ * implementation information. This method will only be called once and the
+ * result will be cached.
+ */
+ @CalledByNative
+ default VideoCodecInfo[] getImplementations() {
+ return getSupportedCodecs();
+ }
+
+ /**
+ * Returns a VideoEncoderSelector if implemented by the VideoEncoderFactory,
+ * null otherwise.
+ */
+ @CalledByNative
+ default VideoEncoderSelector getEncoderSelector() {
+ return null;
+ }
+}
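A sketch of a filtering factory that wraps another one; the wrapped `inner` factory is an assumption (imports of java.util.ArrayList and java.util.List assumed):

    class Vp8OnlyEncoderFactory implements VideoEncoderFactory {
      private final VideoEncoderFactory inner;

      Vp8OnlyEncoderFactory(VideoEncoderFactory inner) {
        this.inner = inner;
      }

      @Override
      public VideoEncoder createEncoder(VideoCodecInfo info) {
        return info.name.equalsIgnoreCase("VP8") ? inner.createEncoder(info) : null;
      }

      @Override
      public VideoCodecInfo[] getSupportedCodecs() {
        List<VideoCodecInfo> vp8 = new ArrayList<>();
        for (VideoCodecInfo info : inner.getSupportedCodecs()) {
          if (info.name.equalsIgnoreCase("VP8")) {
            vp8.add(info);
          }
        }
        return vp8.toArray(new VideoCodecInfo[0]);
      }
    }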
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java
new file mode 100644
index 0000000000..fa36b7c989
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoEncoderFallback.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * A combined video encoder that falls back on a secondary encoder if the primary encoder fails.
+ */
+public class VideoEncoderFallback extends WrappedNativeVideoEncoder {
+ private final VideoEncoder fallback;
+ private final VideoEncoder primary;
+
+ public VideoEncoderFallback(VideoEncoder fallback, VideoEncoder primary) {
+ this.fallback = fallback;
+ this.primary = primary;
+ }
+
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder(fallback, primary);
+ }
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return primary.isHardwareEncoder();
+ }
+
+ private static native long nativeCreateEncoder(VideoEncoder fallback, VideoEncoder primary);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java
new file mode 100644
index 0000000000..aef8030459
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFileRenderer.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.Handler;
+import android.os.HandlerThread;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Can be used to save the video frames to file.
+ */
+public class VideoFileRenderer implements VideoSink {
+ private static final String TAG = "VideoFileRenderer";
+
+ private final HandlerThread renderThread;
+ private final Handler renderThreadHandler;
+ private final HandlerThread fileThread;
+ private final Handler fileThreadHandler;
+ private final FileOutputStream videoOutFile;
+ private final String outputFileName;
+ private final int outputFileWidth;
+ private final int outputFileHeight;
+ private final int outputFrameSize;
+ private final ByteBuffer outputFrameBuffer;
+ private EglBase eglBase;
+ private YuvConverter yuvConverter;
+ private int frameCount;
+
+ public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
+ final EglBase.Context sharedContext) throws IOException {
+ if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
+ throw new IllegalArgumentException("Does not support odd width or height");
+ }
+
+ this.outputFileName = outputFile;
+ this.outputFileWidth = outputFileWidth;
+ this.outputFileHeight = outputFileHeight;
+
+ outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
+ outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
+
+ videoOutFile = new FileOutputStream(outputFile);
+ videoOutFile.write(
+ ("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
+ .getBytes(Charset.forName("US-ASCII")));
+
+ renderThread = new HandlerThread(TAG + "RenderThread");
+ renderThread.start();
+ renderThreadHandler = new Handler(renderThread.getLooper());
+
+ fileThread = new HandlerThread(TAG + "FileThread");
+ fileThread.start();
+ fileThreadHandler = new Handler(fileThread.getLooper());
+
+ ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() {
+ @Override
+ public void run() {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+ yuvConverter = new YuvConverter();
+ }
+ });
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ frame.retain();
+ renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
+ }
+
+ private void renderFrameOnRenderThread(VideoFrame frame) {
+ final VideoFrame.Buffer buffer = frame.getBuffer();
+
+    // The frame rotation is applied after cropAndScale. Therefore, if the frame is
+    // rotated by 90 or 270 degrees, swap the target width and height.
+ final int targetWidth = frame.getRotation() % 180 == 0 ? outputFileWidth : outputFileHeight;
+ final int targetHeight = frame.getRotation() % 180 == 0 ? outputFileHeight : outputFileWidth;
+
+ final float frameAspectRatio = (float) buffer.getWidth() / (float) buffer.getHeight();
+ final float fileAspectRatio = (float) targetWidth / (float) targetHeight;
+
+ // Calculate cropping to equalize the aspect ratio.
+ int cropWidth = buffer.getWidth();
+ int cropHeight = buffer.getHeight();
+ if (fileAspectRatio > frameAspectRatio) {
+ cropHeight = (int) (cropHeight * (frameAspectRatio / fileAspectRatio));
+ } else {
+ cropWidth = (int) (cropWidth * (fileAspectRatio / frameAspectRatio));
+ }
+
+ final int cropX = (buffer.getWidth() - cropWidth) / 2;
+ final int cropY = (buffer.getHeight() - cropHeight) / 2;
+
+ final VideoFrame.Buffer scaledBuffer =
+ buffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, targetWidth, targetHeight);
+ frame.release();
+
+ final VideoFrame.I420Buffer i420 = scaledBuffer.toI420();
+ scaledBuffer.release();
+
+ fileThreadHandler.post(() -> {
+ YuvHelper.I420Rotate(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+ i420.getDataV(), i420.getStrideV(), outputFrameBuffer, i420.getWidth(), i420.getHeight(),
+ frame.getRotation());
+ i420.release();
+
+ try {
+ videoOutFile.write("FRAME\n".getBytes(Charset.forName("US-ASCII")));
+ videoOutFile.write(
+ outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
+ } catch (IOException e) {
+ throw new RuntimeException("Error writing video to disk", e);
+ }
+ frameCount++;
+ });
+ }
+
+ /**
+   * Release all resources. All frames posted before this call will be rendered first.
+ */
+ public void release() {
+ final CountDownLatch cleanupBarrier = new CountDownLatch(1);
+ renderThreadHandler.post(() -> {
+ yuvConverter.release();
+ eglBase.release();
+ renderThread.quit();
+ cleanupBarrier.countDown();
+ });
+ ThreadUtils.awaitUninterruptibly(cleanupBarrier);
+ fileThreadHandler.post(() -> {
+ try {
+ videoOutFile.close();
+ Logging.d(TAG,
+ "Video written to disk as " + outputFileName + ". The number of frames is " + frameCount
+ + " and the dimensions of the frames are " + outputFileWidth + "x"
+ + outputFileHeight + ".");
+ } catch (IOException e) {
+ throw new RuntimeException("Error closing output file", e);
+ }
+ fileThread.quit();
+ });
+ try {
+ fileThread.join();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ Logging.e(TAG, "Interrupted while waiting for the write to disk to complete.", e);
+ }
+ }
+}
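
A minimal usage sketch, assuming `videoTrack` and a shared `eglBase` already exist in the calling code. The renderer writes a Y4M (YUV4MPEG2) file whose header declares a fixed 30:1 frame rate, and both dimensions must be even:

    VideoFileRenderer fileRenderer = new VideoFileRenderer(
        "/sdcard/capture.y4m", /* outputFileWidth= */ 640, /* outputFileHeight= */ 480,
        eglBase.getEglBaseContext()); // constructor throws IOException on file errors
    videoTrack.addSink(fileRenderer);

    // ... record for a while, then stop:
    videoTrack.removeSink(fileRenderer);
    fileRenderer.release(); // renders already-posted frames, then closes the file
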
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrame.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrame.java
new file mode 100644
index 0000000000..52a378b8f8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrame.java
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+/**
+ * Java version of webrtc::VideoFrame and webrtc::VideoFrameBuffer. A difference from the C++
+ * version is that no explicit tag is used, and clients are expected to use 'instanceof' to find the
+ * right subclass of the buffer. This allows clients to create custom VideoFrame.Buffer in
+ * arbitrary format in their custom VideoSources, and then cast it back to the correct subclass in
+ * their custom VideoSinks. All implementations must also implement the toI420() function,
+ * converting from the underlying representation if necessary. I420 is the most widely accepted
+ * format and serves as a fallback for video sinks that can only handle I420, e.g. the internal
+ * WebRTC software encoders.
+ */
+public class VideoFrame implements RefCounted {
+ /**
+   * Represents the image storage medium. It might be, for example, an OpenGL texture or a memory
+   * region containing I420-data.
+ *
+ * <p>Reference counting is needed since a video buffer can be shared between multiple VideoSinks,
+ * and the buffer needs to be returned to the VideoSource as soon as all references are gone.
+ */
+ public interface Buffer extends RefCounted {
+ /**
+ * Representation of the underlying buffer. Currently, only NATIVE and I420 are supported.
+ */
+ @CalledByNative("Buffer")
+ @VideoFrameBufferType
+ default int getBufferType() {
+ return VideoFrameBufferType.NATIVE;
+ }
+
+ /**
+ * Resolution of the buffer in pixels.
+ */
+ @CalledByNative("Buffer") int getWidth();
+ @CalledByNative("Buffer") int getHeight();
+
+ /**
+ * Returns a memory-backed frame in I420 format. If the pixel data is in another format, a
+ * conversion will take place. All implementations must provide a fallback to I420 for
+ * compatibility with e.g. the internal WebRTC software encoders.
+ *
+ * <p> Conversion may fail, for example if reading the pixel data from a texture fails. If the
+ * conversion fails, null is returned.
+ */
+ @Nullable @CalledByNative("Buffer") I420Buffer toI420();
+
+ @Override @CalledByNative("Buffer") void retain();
+ @Override @CalledByNative("Buffer") void release();
+
+ /**
+     * Crops the region defined by `cropX`, `cropY`, `cropWidth` and `cropHeight`, and scales it to
+     * size `scaleWidth` x `scaleHeight`.
+ */
+ @CalledByNative("Buffer")
+ Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
+ }
+
+ /**
+ * Interface for I420 buffers.
+ */
+ public interface I420Buffer extends Buffer {
+ @Override
+ default int getBufferType() {
+ return VideoFrameBufferType.I420;
+ }
+
+ /**
+ * Returns a direct ByteBuffer containing Y-plane data. The buffer capacity is at least
+ * getStrideY() * getHeight() bytes. The position of the returned buffer is ignored and must
+     * be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
+ * implementations must return a new ByteBuffer or slice for each call.
+ */
+ @CalledByNative("I420Buffer") ByteBuffer getDataY();
+ /**
+ * Returns a direct ByteBuffer containing U-plane data. The buffer capacity is at least
+ * getStrideU() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
+     * and must be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
+ * implementations must return a new ByteBuffer or slice for each call.
+ */
+ @CalledByNative("I420Buffer") ByteBuffer getDataU();
+ /**
+ * Returns a direct ByteBuffer containing V-plane data. The buffer capacity is at least
+ * getStrideV() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
+     * and must be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
+ * implementations must return a new ByteBuffer or slice for each call.
+ */
+ @CalledByNative("I420Buffer") ByteBuffer getDataV();
+
+ @CalledByNative("I420Buffer") int getStrideY();
+ @CalledByNative("I420Buffer") int getStrideU();
+ @CalledByNative("I420Buffer") int getStrideV();
+ }
+
+ /**
+ * Interface for buffers that are stored as a single texture, either in OES or RGB format.
+ */
+ public interface TextureBuffer extends Buffer {
+ enum Type {
+ OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
+ RGB(GLES20.GL_TEXTURE_2D);
+
+ private final int glTarget;
+
+ private Type(final int glTarget) {
+ this.glTarget = glTarget;
+ }
+
+ public int getGlTarget() {
+ return glTarget;
+ }
+ }
+
+ Type getType();
+ int getTextureId();
+
+ /**
+ * Retrieve the transform matrix associated with the frame. This transform matrix maps 2D
+ * homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to
+ * the coordinate that should be used to sample that location from the buffer.
+ */
+ Matrix getTransformMatrix();
+
+ /**
+ * Create a new TextureBufferImpl with an applied transform matrix and a new size. The existing
+ * buffer is unchanged. The given transform matrix is applied first when texture coordinates are
+ * still in the unmodified [0, 1] range.
+ */
+ default TextureBuffer applyTransformMatrix(
+ Matrix transformMatrix, int newWidth, int newHeight) {
+ throw new UnsupportedOperationException("Not implemented");
+ }
+ }
+
+ private final Buffer buffer;
+ private final int rotation;
+ private final long timestampNs;
+
+ /**
+ * Constructs a new VideoFrame backed by the given {@code buffer}.
+ *
+   * @note Ownership of the buffer object is transferred to the new VideoFrame.
+ */
+ @CalledByNative
+ public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
+ if (buffer == null) {
+ throw new IllegalArgumentException("buffer not allowed to be null");
+ }
+ if (rotation % 90 != 0) {
+ throw new IllegalArgumentException("rotation must be a multiple of 90");
+ }
+ this.buffer = buffer;
+ this.rotation = rotation;
+ this.timestampNs = timestampNs;
+ }
+
+ @CalledByNative
+ public Buffer getBuffer() {
+ return buffer;
+ }
+
+ /**
+ * Rotation of the frame in degrees.
+ */
+ @CalledByNative
+ public int getRotation() {
+ return rotation;
+ }
+
+ /**
+   * Timestamp of the frame in nanoseconds.
+ */
+ @CalledByNative
+ public long getTimestampNs() {
+ return timestampNs;
+ }
+
+ public int getRotatedWidth() {
+ if (rotation % 180 == 0) {
+ return buffer.getWidth();
+ }
+ return buffer.getHeight();
+ }
+
+ public int getRotatedHeight() {
+ if (rotation % 180 == 0) {
+ return buffer.getHeight();
+ }
+ return buffer.getWidth();
+ }
+
+ @Override
+ public void retain() {
+ buffer.retain();
+ }
+
+ @Override
+ @CalledByNative
+ public void release() {
+ buffer.release();
+ }
+}
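
The `instanceof` dispatch described in the class comment looks like this in a sink; `processTexture` and `processI420` are hypothetical application callbacks, not part of this API:

    class BufferInspectingSink implements VideoSink {
      @Override
      public void onFrame(VideoFrame frame) {
        VideoFrame.Buffer buffer = frame.getBuffer();
        if (buffer instanceof VideoFrame.TextureBuffer) {
          VideoFrame.TextureBuffer texture = (VideoFrame.TextureBuffer) buffer;
          processTexture(texture.getType(), texture.getTextureId());
        } else {
          VideoFrame.I420Buffer i420 = buffer.toI420(); // may convert; may return null
          if (i420 != null) {
            processI420(i420.getDataY(), i420.getStrideY(), i420.getWidth(), i420.getHeight());
            i420.release(); // toI420() returned a new reference that we own
          }
        }
        // No retain() was called on `frame`, so no matching release() is needed here.
      }

      private void processTexture(VideoFrame.TextureBuffer.Type type, int textureId) {}
      private void processI420(java.nio.ByteBuffer dataY, int strideY, int width, int height) {}
    }
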
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameBufferType.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameBufferType.java
new file mode 100644
index 0000000000..7b05b88cba
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameBufferType.java
@@ -0,0 +1,33 @@
+
+// Copyright 2022 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// java_cpp_enum.py
+// From
+// ../../api/video/video_frame_buffer.h
+
+package org.webrtc;
+
+import androidx.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+ VideoFrameBufferType.NATIVE, VideoFrameBufferType.I420, VideoFrameBufferType.I420A,
+ VideoFrameBufferType.I422, VideoFrameBufferType.I444, VideoFrameBufferType.I010,
+ VideoFrameBufferType.I210, VideoFrameBufferType.NV12
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface VideoFrameBufferType {
+ int NATIVE = 0;
+ int I420 = 1;
+ int I420A = 2;
+ int I422 = 3;
+ int I444 = 4;
+ int I010 = 5;
+ int I210 = 6;
+ int NV12 = 7;
+}
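
Because this is a source-retention @IntDef rather than an enum, callers annotate plain int values and switch on the constants; a small sketch:

    static String describe(@VideoFrameBufferType int bufferType) {
      switch (bufferType) {
        case VideoFrameBufferType.NATIVE:
          return "opaque native buffer";
        case VideoFrameBufferType.I420:
          return "memory-backed I420 buffer";
        case VideoFrameBufferType.NV12:
          return "memory-backed NV12 buffer";
        default:
          return "other buffer type: " + bufferType;
      }
    }
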
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java
new file mode 100644
index 0000000000..af32587886
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.graphics.Point;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+/**
+ * Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
+ * drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
+ * taken into account. You can supply an additional render matrix for custom transformations.
+ */
+public class VideoFrameDrawer {
+ public static final String TAG = "VideoFrameDrawer";
+ /**
+ * Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
+   * depending on the type of the buffer. You can supply an additional render matrix, which is
+   * multiplied with the transformation matrix of the frame. (M = renderMatrix *
+ * transformationMatrix)
+ */
+ public static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
+ Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
+ int viewportWidth, int viewportHeight) {
+ Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
+ finalMatrix.preConcat(renderMatrix);
+ float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
+ switch (buffer.getType()) {
+ case OES:
+ drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
+ viewportY, viewportWidth, viewportHeight);
+ break;
+ case RGB:
+ drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
+ viewportY, viewportWidth, viewportHeight);
+ break;
+ default:
+ throw new RuntimeException("Unknown texture type.");
+ }
+ }
+
+ /**
+   * Helper class for uploading YUV ByteBuffer frames to textures, handling strides larger than
+   * the width. This class keeps an internal ByteBuffer to avoid unnecessary allocations for
+   * intermediate copies.
+ */
+ private static class YuvUploader {
+ // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
+ // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
+ // that handles stride and compare performance with intermediate copy.
+ @Nullable private ByteBuffer copyBuffer;
+ @Nullable private int[] yuvTextures;
+
+ /**
+ * Upload `planes` into OpenGL textures, taking stride into consideration.
+ *
+ * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
+ */
+ @Nullable
+ public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
+ final int[] planeWidths = new int[] {width, width / 2, width / 2};
+ final int[] planeHeights = new int[] {height, height / 2, height / 2};
+ // Make a first pass to see if we need a temporary copy buffer.
+ int copyCapacityNeeded = 0;
+ for (int i = 0; i < 3; ++i) {
+ if (strides[i] > planeWidths[i]) {
+ copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
+ }
+ }
+ // Allocate copy buffer if necessary.
+ if (copyCapacityNeeded > 0
+ && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+ copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+ }
+ // Make sure YUV textures are allocated.
+ if (yuvTextures == null) {
+ yuvTextures = new int[3];
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ }
+ // Upload each plane.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ // GLES only accepts packed data, i.e. stride == planeWidth.
+ final ByteBuffer packedByteBuffer;
+ if (strides[i] == planeWidths[i]) {
+ // Input is packed already.
+ packedByteBuffer = planes[i];
+ } else {
+ YuvHelper.copyPlane(
+ planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]);
+ packedByteBuffer = copyBuffer;
+ }
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
+ planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ }
+ return yuvTextures;
+ }
+
+ @Nullable
+ public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
+ int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
+ ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
+ return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
+ }
+
+ @Nullable
+ public int[] getYuvTextures() {
+ return yuvTextures;
+ }
+
+ /**
+ * Releases cached resources. Uploader can still be used and the resources will be reallocated
+ * on first use.
+ */
+ public void release() {
+ copyBuffer = null;
+ if (yuvTextures != null) {
+ GLES20.glDeleteTextures(3, yuvTextures, 0);
+ yuvTextures = null;
+ }
+ }
+ }
+
+ private static int distance(float x0, float y0, float x1, float y1) {
+ return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
+ }
+
+ // These points are used to calculate the size of the part of the frame we are rendering.
+  private static final float[] srcPoints =
+ new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
+ private final float[] dstPoints = new float[6];
+ private final Point renderSize = new Point();
+ private int renderWidth;
+ private int renderHeight;
+
+ // Calculate the frame size after `renderMatrix` is applied. Stores the output in member variables
+ // `renderWidth` and `renderHeight` to avoid allocations since this function is called for every
+ // frame.
+ private void calculateTransformedRenderSize(
+ int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) {
+ if (renderMatrix == null) {
+ renderWidth = frameWidth;
+ renderHeight = frameHeight;
+ return;
+ }
+ // Transform the texture coordinates (in the range [0, 1]) according to `renderMatrix`.
+ renderMatrix.mapPoints(dstPoints, srcPoints);
+
+ // Multiply with the width and height to get the positions in terms of pixels.
+ for (int i = 0; i < 3; ++i) {
+ dstPoints[i * 2 + 0] *= frameWidth;
+ dstPoints[i * 2 + 1] *= frameHeight;
+ }
+
+ // Get the length of the sides of the transformed rectangle in terms of pixels.
+ renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
+ renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
+ }
+
+ private final YuvUploader yuvUploader = new YuvUploader();
+ // This variable will only be used for checking reference equality and is used for caching I420
+ // textures.
+ @Nullable private VideoFrame lastI420Frame;
+ private final Matrix renderMatrix = new Matrix();
+
+ public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
+ drawFrame(frame, drawer, null /* additionalRenderMatrix */);
+ }
+
+ public void drawFrame(
+ VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
+ drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
+ frame.getRotatedWidth(), frame.getRotatedHeight());
+ }
+
+ public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
+ @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
+ int viewportHeight) {
+ final int width = frame.getRotatedWidth();
+ final int height = frame.getRotatedHeight();
+ calculateTransformedRenderSize(width, height, additionalRenderMatrix);
+ if (renderWidth <= 0 || renderHeight <= 0) {
+ Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight);
+ return;
+ }
+
+ final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
+ renderMatrix.reset();
+ renderMatrix.preTranslate(0.5f, 0.5f);
+ if (!isTextureFrame) {
+ renderMatrix.preScale(1f, -1f); // I420-frames are upside down
+ }
+ renderMatrix.preRotate(frame.getRotation());
+ renderMatrix.preTranslate(-0.5f, -0.5f);
+ if (additionalRenderMatrix != null) {
+ renderMatrix.preConcat(additionalRenderMatrix);
+ }
+
+ if (isTextureFrame) {
+ lastI420Frame = null;
+ drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
+ renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
+ } else {
+ // Only upload the I420 data to textures once per frame, if we are called multiple times
+ // with the same frame.
+ if (frame != lastI420Frame) {
+ lastI420Frame = frame;
+ final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
+ yuvUploader.uploadFromBuffer(i420Buffer);
+ i420Buffer.release();
+ }
+
+ drawer.drawYuv(yuvUploader.getYuvTextures(),
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
+ renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
+ }
+ }
+
+ public VideoFrame.Buffer prepareBufferForViewportSize(
+ VideoFrame.Buffer buffer, int width, int height) {
+ buffer.retain();
+ return buffer;
+ }
+
+ public void release() {
+ yuvUploader.release();
+ lastI420Frame = null;
+ }
+}
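
A sketch of drawing a frame horizontally mirrored via the additional render matrix. The matrix operates on texture coordinates in the [0, 1] range, and the code must run on a thread with a current EGL context; `frame` is assumed to arrive through a VideoSink:

    VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
    RendererCommon.GlDrawer glDrawer = new GlRectDrawer(); // stock drawer from this SDK

    Matrix mirror = new Matrix();
    mirror.preTranslate(0.5f, 0.5f);  // move the origin to the texture center
    mirror.preScale(-1f, 1f);         // mirror horizontally
    mirror.preTranslate(-0.5f, -0.5f);

    frameDrawer.drawFrame(frame, glDrawer, mirror);

    // Release GL resources once rendering is finished.
    frameDrawer.release();
    glDrawer.release();
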
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoProcessor.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoProcessor.java
new file mode 100644
index 0000000000..c39a55c27e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoProcessor.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/**
+ * Lightweight abstraction for an object that can receive video frames, process them, and pass them
+ * on to another object. This object is also allowed to observe capturer start/stop.
+ */
+public interface VideoProcessor extends CapturerObserver {
+ public static class FrameAdaptationParameters {
+ public final int cropX;
+ public final int cropY;
+ public final int cropWidth;
+ public final int cropHeight;
+ public final int scaleWidth;
+ public final int scaleHeight;
+ public final long timestampNs;
+ public final boolean drop;
+
+ public FrameAdaptationParameters(int cropX, int cropY, int cropWidth, int cropHeight,
+ int scaleWidth, int scaleHeight, long timestampNs, boolean drop) {
+ this.cropX = cropX;
+ this.cropY = cropY;
+ this.cropWidth = cropWidth;
+ this.cropHeight = cropHeight;
+ this.scaleWidth = scaleWidth;
+ this.scaleHeight = scaleHeight;
+ this.timestampNs = timestampNs;
+ this.drop = drop;
+ }
+ }
+
+ /**
+ * This is a chance to access an unadapted frame. The default implementation applies the
+ * adaptation and forwards the frame to {@link #onFrameCaptured(VideoFrame)}.
+ */
+ default void onFrameCaptured(VideoFrame frame, FrameAdaptationParameters parameters) {
+ VideoFrame adaptedFrame = applyFrameAdaptationParameters(frame, parameters);
+ if (adaptedFrame != null) {
+ onFrameCaptured(adaptedFrame);
+ adaptedFrame.release();
+ }
+ }
+
+ /**
+ * Set the sink that receives the output from this processor. Null can be passed in to unregister
+ * a sink.
+ */
+ void setSink(@Nullable VideoSink sink);
+
+ /**
+   * Applies the frame adaptation parameters to a frame. Returns null if the frame is meant to be
+   * dropped; otherwise returns a new frame. The caller is responsible for releasing the returned
+   * frame.
+ */
+ public static @Nullable VideoFrame applyFrameAdaptationParameters(
+ VideoFrame frame, FrameAdaptationParameters parameters) {
+ if (parameters.drop) {
+ return null;
+ }
+
+ final VideoFrame.Buffer adaptedBuffer =
+ frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
+ parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
+ return new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs);
+ }
+}
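
A minimal pass-through implementation: it leaves adaptation to the default onFrameCaptured(frame, parameters) overload above and simply forwards the adapted frames to the registered sink:

    class ForwardingVideoProcessor implements VideoProcessor {
      @Nullable private VideoSink sink;

      @Override
      public void setSink(@Nullable VideoSink sink) {
        this.sink = sink;
      }

      @Override
      public void onCapturerStarted(boolean success) {}

      @Override
      public void onCapturerStopped() {}

      @Override
      public void onFrameCaptured(VideoFrame frame) {
        VideoSink currentSink = this.sink;
        if (currentSink != null) {
          currentSink.onFrame(frame); // the sink retains the frame if it keeps it
        }
      }
    }
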
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSink.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSink.java
new file mode 100644
index 0000000000..5a0a6c719c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSink.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Java version of rtc::VideoSinkInterface.
+ */
+public interface VideoSink {
+ /**
+ * Implementations should call frame.retain() if they need to hold a reference to the frame after
+ * this function returns. Each call to retain() should be followed by a call to frame.release()
+ * when the reference is no longer needed.
+ */
+ @CalledByNative void onFrame(VideoFrame frame);
+}
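
A sketch of the retain()/release() contract: a sink that hands frames to another thread must retain them before onFrame() returns. The `workerHandler` is assumed to be an android.os.Handler owned by the application:

    class AsyncVideoSink implements VideoSink {
      private final android.os.Handler workerHandler;

      AsyncVideoSink(android.os.Handler workerHandler) {
        this.workerHandler = workerHandler;
      }

      @Override
      public void onFrame(VideoFrame frame) {
        frame.retain(); // keep the buffer alive beyond this call
        workerHandler.post(() -> {
          // ... process the frame on the worker thread ...
          frame.release(); // balances the retain() above
        });
      }
    }
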
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSource.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSource.java
new file mode 100644
index 0000000000..2e22d1a2db
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoSource.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/**
+ * Java wrapper of native AndroidVideoTrackSource.
+ */
+public class VideoSource extends MediaSource {
+  /** Simple aspect ratio class for use in constraining output format. */
+ public static class AspectRatio {
+ public static final AspectRatio UNDEFINED = new AspectRatio(/* width= */ 0, /* height= */ 0);
+
+ public final int width;
+ public final int height;
+
+ public AspectRatio(int width, int height) {
+ this.width = width;
+ this.height = height;
+ }
+ }
+
+ private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
+ private final Object videoProcessorLock = new Object();
+ @Nullable private VideoProcessor videoProcessor;
+ private boolean isCapturerRunning;
+
+ private final CapturerObserver capturerObserver = new CapturerObserver() {
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeAndroidVideoTrackSource.setState(success);
+ synchronized (videoProcessorLock) {
+ isCapturerRunning = success;
+ if (videoProcessor != null) {
+ videoProcessor.onCapturerStarted(success);
+ }
+ }
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
+ synchronized (videoProcessorLock) {
+ isCapturerRunning = false;
+ if (videoProcessor != null) {
+ videoProcessor.onCapturerStopped();
+ }
+ }
+ }
+
+ @Override
+ public void onFrameCaptured(VideoFrame frame) {
+ final VideoProcessor.FrameAdaptationParameters parameters =
+ nativeAndroidVideoTrackSource.adaptFrame(frame);
+ synchronized (videoProcessorLock) {
+ if (videoProcessor != null) {
+ videoProcessor.onFrameCaptured(frame, parameters);
+ return;
+ }
+ }
+
+ VideoFrame adaptedFrame = VideoProcessor.applyFrameAdaptationParameters(frame, parameters);
+ if (adaptedFrame != null) {
+ nativeAndroidVideoTrackSource.onFrameCaptured(adaptedFrame);
+ adaptedFrame.release();
+ }
+ }
+ };
+
+ public VideoSource(long nativeSource) {
+ super(nativeSource);
+ this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
+ }
+
+ /**
+ * Calling this function will cause frames to be scaled down to the requested resolution. Also,
+ * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
+ * the requested fps. The requested aspect ratio is orientation agnostic and will be adjusted to
+ * maintain the input orientation, so it doesn't matter if e.g. 1280x720 or 720x1280 is requested.
+ */
+ public void adaptOutputFormat(int width, int height, int fps) {
+ final int maxSide = Math.max(width, height);
+ final int minSide = Math.min(width, height);
+ adaptOutputFormat(maxSide, minSide, minSide, maxSide, fps);
+ }
+
+ /**
+ * Same as above, but allows setting two different target resolutions depending on incoming
+ * frame orientation. This gives more fine-grained control and can e.g. be used to force landscape
+ * video to be cropped to portrait video.
+ */
+ public void adaptOutputFormat(
+ int landscapeWidth, int landscapeHeight, int portraitWidth, int portraitHeight, int fps) {
+ adaptOutputFormat(new AspectRatio(landscapeWidth, landscapeHeight),
+ /* maxLandscapePixelCount= */ landscapeWidth * landscapeHeight,
+ new AspectRatio(portraitWidth, portraitHeight),
+ /* maxPortraitPixelCount= */ portraitWidth * portraitHeight, fps);
+ }
+
+ /** Same as above, with even more control as each constraint is optional. */
+ public void adaptOutputFormat(AspectRatio targetLandscapeAspectRatio,
+ @Nullable Integer maxLandscapePixelCount, AspectRatio targetPortraitAspectRatio,
+ @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+ nativeAndroidVideoTrackSource.adaptOutputFormat(targetLandscapeAspectRatio,
+ maxLandscapePixelCount, targetPortraitAspectRatio, maxPortraitPixelCount, maxFps);
+ }
+
+ public void setIsScreencast(boolean isScreencast) {
+ nativeAndroidVideoTrackSource.setIsScreencast(isScreencast);
+ }
+
+ /**
+ * Hook for injecting a custom video processor before frames are passed onto WebRTC. The frames
+ * will be cropped and scaled depending on CPU and network conditions before they are passed to
+ * the video processor. Frames will be delivered to the video processor on the same thread they
+ * are passed to this object. The video processor is allowed to deliver the processed frames
+ * back on any thread.
+ */
+ public void setVideoProcessor(@Nullable VideoProcessor newVideoProcessor) {
+ synchronized (videoProcessorLock) {
+ if (videoProcessor != null) {
+ videoProcessor.setSink(/* sink= */ null);
+ if (isCapturerRunning) {
+ videoProcessor.onCapturerStopped();
+ }
+ }
+ videoProcessor = newVideoProcessor;
+ if (newVideoProcessor != null) {
+ newVideoProcessor.setSink(
+ (frame)
+ -> runWithReference(() -> nativeAndroidVideoTrackSource.onFrameCaptured(frame)));
+ if (isCapturerRunning) {
+ newVideoProcessor.onCapturerStarted(/* success= */ true);
+ }
+ }
+ }
+ }
+
+ public CapturerObserver getCapturerObserver() {
+ return capturerObserver;
+ }
+
+ /** Returns a pointer to webrtc::VideoTrackSourceInterface. */
+ long getNativeVideoTrackSource() {
+ return getNativeMediaSource();
+ }
+
+ @Override
+ public void dispose() {
+ setVideoProcessor(/* newVideoProcessor= */ null);
+ super.dispose();
+ }
+}
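
A usage sketch, assuming `videoSource` was created through a PeerConnectionFactory, and that `capturer`, `surfaceTextureHelper` and `applicationContext` exist in the surrounding application code:

    // Feed captured frames into the source.
    capturer.initialize(surfaceTextureHelper, applicationContext,
        videoSource.getCapturerObserver());
    capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);

    // Request VGA output at 30 fps. The request is orientation agnostic:
    // portrait input is adapted towards 480x640 rather than stretched.
    videoSource.adaptOutputFormat(640, 480, 30);
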
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoTrack.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoTrack.java
new file mode 100644
index 0000000000..5593d424f3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/VideoTrack.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.IdentityHashMap;
+
+/** Java version of VideoTrackInterface. */
+public class VideoTrack extends MediaStreamTrack {
+ private final IdentityHashMap<VideoSink, Long> sinks = new IdentityHashMap<VideoSink, Long>();
+
+ public VideoTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+
+ /**
+ * Adds a VideoSink to the track.
+ *
+   * A track can have any number of VideoSinks. VideoSinks replace the older
+   * renderer interface. However, converting old-style texture frames involves a
+   * costly conversion to I420, so it is not recommended to upgrade before all
+   * your sources produce VideoFrames.
+ */
+ public void addSink(VideoSink sink) {
+ if (sink == null) {
+ throw new IllegalArgumentException("The VideoSink is not allowed to be null");
+ }
+ // We allow calling addSink() with the same sink multiple times. This is similar to the C++
+ // VideoTrack::AddOrUpdateSink().
+ if (!sinks.containsKey(sink)) {
+ final long nativeSink = nativeWrapSink(sink);
+ sinks.put(sink, nativeSink);
+ nativeAddSink(getNativeMediaStreamTrack(), nativeSink);
+ }
+ }
+
+ /**
+ * Removes a VideoSink from the track.
+ *
+ * If the VideoSink was not attached to the track, this is a no-op.
+ */
+ public void removeSink(VideoSink sink) {
+ final Long nativeSink = sinks.remove(sink);
+ if (nativeSink != null) {
+ nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink);
+ nativeFreeSink(nativeSink);
+ }
+ }
+
+ @Override
+ public void dispose() {
+ for (long nativeSink : sinks.values()) {
+ nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink);
+ nativeFreeSink(nativeSink);
+ }
+ sinks.clear();
+ super.dispose();
+ }
+
+ /** Returns a pointer to webrtc::VideoTrackInterface. */
+ long getNativeVideoTrack() {
+ return getNativeMediaStreamTrack();
+ }
+
+ private static native void nativeAddSink(long track, long nativeSink);
+ private static native void nativeRemoveSink(long track, long nativeSink);
+ private static native long nativeWrapSink(VideoSink sink);
+ private static native void nativeFreeSink(long sink);
+}
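
A sketch of attaching a renderer: SurfaceViewRenderer from this SDK implements VideoSink, and `remoteVideoTrack` is assumed to come from a received stream:

    remoteVideoTrack.addSink(surfaceViewRenderer);
    // addSink() ignores repeated registrations of the same instance, so this
    // second call is a no-op rather than a duplicate subscription.
    remoteVideoTrack.addSink(surfaceViewRenderer);

    // Detach before releasing the renderer; removing an unattached sink is a no-op.
    remoteVideoTrack.removeSink(surfaceViewRenderer);
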
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoDecoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoDecoder.java
new file mode 100644
index 0000000000..027120e48e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoDecoder.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Wraps a native webrtc::VideoDecoder.
+ */
+public abstract class WrappedNativeVideoDecoder implements VideoDecoder {
+ @Override public abstract long createNativeVideoDecoder();
+
+ @Override
+ public final VideoCodecStatus initDecode(Settings settings, Callback decodeCallback) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus release() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final String getImplementationName() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+}
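
Concrete wrappers in this SDK (for example Dav1dDecoder) only need to supply the native pointer; a sketch of the pattern, with a hypothetical nativeCreateDecoder() JNI entry point:

    public class MyNativeDecoder extends WrappedNativeVideoDecoder {
      @Override
      public long createNativeVideoDecoder() {
        // Hands a webrtc::VideoDecoder* to the SDK. The remaining methods of
        // this class intentionally throw, since decoding happens natively.
        return nativeCreateDecoder();
      }

      private static native long nativeCreateDecoder(); // hypothetical JNI function
    }
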
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoEncoder.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoEncoder.java
new file mode 100644
index 0000000000..7d0908a6ac
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/WrappedNativeVideoEncoder.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Wraps a native webrtc::VideoEncoder.
+ */
+public abstract class WrappedNativeVideoEncoder implements VideoEncoder {
+ @Override public abstract long createNativeVideoEncoder();
+ @Override public abstract boolean isHardwareEncoder();
+
+ @Override
+ public final VideoCodecStatus initEncode(Settings settings, Callback encodeCallback) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus release() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus encode(VideoFrame frame, EncodeInfo info) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final ScalingSettings getScalingSettings() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+
+ @Override
+ public final String getImplementationName() {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvConverter.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvConverter.java
new file mode 100644
index 0000000000..c855d4be41
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvConverter.java
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.opengl.GLES20;
+import android.opengl.GLException;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.VideoFrame.I420Buffer;
+import org.webrtc.VideoFrame.TextureBuffer;
+
+/**
+ * Class for converting OES textures to a YUV ByteBuffer. It can be constructed on any thread, but
+ * should only be operated from a single thread with an active EGL context.
+ */
+public final class YuvConverter {
+ private static final String TAG = "YuvConverter";
+
+ private static final String FRAGMENT_SHADER =
+ // Difference in texture coordinate corresponding to one
+ // sub-pixel in the x direction.
+ "uniform vec2 xUnit;\n"
+ // Color conversion coefficients, including constant term
+ + "uniform vec4 coeffs;\n"
+ + "\n"
+ + "void main() {\n"
+ // Since the alpha read from the texture is always 1, this could
+ // be written as a mat4 x vec4 multiply. However, that seems to
+ // give a worse framerate, possibly because the additional
+ // multiplies by 1.0 consume resources.
+ + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ + " sample(tc - 1.5 * xUnit).rgb);\n"
+ + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ + " sample(tc - 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ + " sample(tc + 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ + " sample(tc + 1.5 * xUnit).rgb);\n"
+ + "}\n";
+
+ private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
+ // Y'UV444 to RGB888, see https://en.wikipedia.org/wiki/YUV#Y%E2%80%B2UV444_to_RGB888_conversion
+ // We use the ITU-R BT.601 coefficients for Y, U and V.
+    // The values in Wikipedia are inaccurate; the accurate values derived from the spec are:
+ // Y = 0.299 * R + 0.587 * G + 0.114 * B
+ // U = -0.168736 * R - 0.331264 * G + 0.5 * B + 0.5
+ // V = 0.5 * R - 0.418688 * G - 0.0813124 * B + 0.5
+ // To map the Y-values to range [16-235] and U- and V-values to range [16-240], the matrix has
+ // been multiplied with matrix:
+ // {{219 / 255, 0, 0, 16 / 255},
+ // {0, 224 / 255, 0, 16 / 255},
+ // {0, 0, 224 / 255, 16 / 255},
+ // {0, 0, 0, 1}}
+ private static final float[] yCoeffs =
+ new float[] {0.256788f, 0.504129f, 0.0979059f, 0.0627451f};
+ private static final float[] uCoeffs =
+ new float[] {-0.148223f, -0.290993f, 0.439216f, 0.501961f};
+ private static final float[] vCoeffs =
+ new float[] {0.439216f, -0.367788f, -0.0714274f, 0.501961f};
+
+ private int xUnitLoc;
+ private int coeffsLoc;
+
+ private float[] coeffs;
+ private float stepSize;
+
+ public void setPlaneY() {
+ coeffs = yCoeffs;
+ stepSize = 1.0f;
+ }
+
+ public void setPlaneU() {
+ coeffs = uCoeffs;
+ stepSize = 2.0f;
+ }
+
+ public void setPlaneV() {
+ coeffs = vCoeffs;
+ stepSize = 2.0f;
+ }
+
+ @Override
+ public void onNewShader(GlShader shader) {
+ xUnitLoc = shader.getUniformLocation("xUnit");
+ coeffsLoc = shader.getUniformLocation("coeffs");
+ }
+
+ @Override
+ public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight) {
+ GLES20.glUniform4fv(coeffsLoc, /* count= */ 1, coeffs, /* offset= */ 0);
+ // Matrix * (1;0;0;0) / (width / stepSize). Note that OpenGL uses column major order.
+ GLES20.glUniform2f(
+ xUnitLoc, stepSize * texMatrix[0] / frameWidth, stepSize * texMatrix[1] / frameWidth);
+ }
+ }
+
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+ private final GlTextureFrameBuffer i420TextureFrameBuffer =
+ new GlTextureFrameBuffer(GLES20.GL_RGBA);
+ private final ShaderCallbacks shaderCallbacks = new ShaderCallbacks();
+ private final GlGenericDrawer drawer = new GlGenericDrawer(FRAGMENT_SHADER, shaderCallbacks);
+ private final VideoFrameDrawer videoFrameDrawer;
+
+ /**
+ * This class should be constructed on a thread that has an active EGL context.
+ */
+ public YuvConverter() {
+ this(new VideoFrameDrawer());
+ }
+
+ public YuvConverter(VideoFrameDrawer videoFrameDrawer) {
+ this.videoFrameDrawer = videoFrameDrawer;
+ threadChecker.detachThread();
+ }
+
+ /** Converts the texture buffer to I420. */
+ @Nullable
+ public I420Buffer convert(TextureBuffer inputTextureBuffer) {
+ try {
+ return convertInternal(inputTextureBuffer);
+ } catch (GLException e) {
+ Logging.w(TAG, "Failed to convert TextureBuffer", e);
+ }
+ return null;
+ }
+
+ private I420Buffer convertInternal(TextureBuffer inputTextureBuffer) {
+ TextureBuffer preparedBuffer = (TextureBuffer) videoFrameDrawer.prepareBufferForViewportSize(
+ inputTextureBuffer, inputTextureBuffer.getWidth(), inputTextureBuffer.getHeight());
+
+ // We draw into a buffer laid out like
+ //
+ // +---------+
+ // | |
+ // | Y |
+ // | |
+ // | |
+ // +----+----+
+ // | U | V |
+ // | | |
+ // +----+----+
+ //
+ // In memory, we use the same stride for all of Y, U and V. The
+ // U data starts at offset `height` * `stride` from the Y data,
+    // and the V data starts at offset `stride/2` from the U
+ // data, with rows of U and V data alternating.
+ //
+ // Now, it would have made sense to allocate a pixel buffer with
+ // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+    // EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
+    // unsupported by devices. So do the following hack: allocate an
+    // RGBA buffer of width `stride`/4. To render each of these
+ // large pixels, sample the texture at 4 different x coordinates
+ // and store the results in the four components.
+ //
+ // Since the V data needs to start on a boundary of such a
+    // larger pixel, it is not sufficient that `stride` is even; it
+ // has to be a multiple of 8 pixels.
+ final int frameWidth = preparedBuffer.getWidth();
+ final int frameHeight = preparedBuffer.getHeight();
+ final int stride = ((frameWidth + 7) / 8) * 8;
+ final int uvHeight = (frameHeight + 1) / 2;
+ // Total height of the combined memory layout.
+ final int totalHeight = frameHeight + uvHeight;
+ final ByteBuffer i420ByteBuffer = JniCommon.nativeAllocateByteBuffer(stride * totalHeight);
+ // Viewport width is divided by four since we are squeezing in four color bytes in each RGBA
+ // pixel.
+ final int viewportWidth = stride / 4;
+
+ // Produce a frame buffer starting at top-left corner, not bottom-left.
+ final Matrix renderMatrix = new Matrix();
+ renderMatrix.preTranslate(0.5f, 0.5f);
+ renderMatrix.preScale(1f, -1f);
+ renderMatrix.preTranslate(-0.5f, -0.5f);
+
+ i420TextureFrameBuffer.setSize(viewportWidth, totalHeight);
+
+ // Bind our framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, i420TextureFrameBuffer.getFrameBufferId());
+ GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+ // Draw Y.
+ shaderCallbacks.setPlaneY();
+ VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+ /* viewportX= */ 0, /* viewportY= */ 0, viewportWidth,
+ /* viewportHeight= */ frameHeight);
+
+ // Draw U.
+ shaderCallbacks.setPlaneU();
+ VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+ /* viewportX= */ 0, /* viewportY= */ frameHeight, viewportWidth / 2,
+ /* viewportHeight= */ uvHeight);
+
+ // Draw V.
+ shaderCallbacks.setPlaneV();
+ VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+ /* viewportX= */ viewportWidth / 2, /* viewportY= */ frameHeight, viewportWidth / 2,
+ /* viewportHeight= */ uvHeight);
+
+ GLES20.glReadPixels(0, 0, i420TextureFrameBuffer.getWidth(), i420TextureFrameBuffer.getHeight(),
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, i420ByteBuffer);
+
+ GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+ // Restore normal framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+
+ // Prepare Y, U, and V ByteBuffer slices.
+ final int yPos = 0;
+ final int uPos = yPos + stride * frameHeight;
+ // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
+ final int vPos = uPos + stride / 2;
+
+ i420ByteBuffer.position(yPos);
+ i420ByteBuffer.limit(yPos + stride * frameHeight);
+ final ByteBuffer dataY = i420ByteBuffer.slice();
+
+ i420ByteBuffer.position(uPos);
+ // The last row does not have padding.
+ final int uvSize = stride * (uvHeight - 1) + stride / 2;
+ i420ByteBuffer.limit(uPos + uvSize);
+ final ByteBuffer dataU = i420ByteBuffer.slice();
+
+ i420ByteBuffer.position(vPos);
+ i420ByteBuffer.limit(vPos + uvSize);
+ final ByteBuffer dataV = i420ByteBuffer.slice();
+
+ preparedBuffer.release();
+
+ return JavaI420Buffer.wrap(frameWidth, frameHeight, dataY, stride, dataU, stride, dataV, stride,
+ () -> { JniCommon.nativeFreeByteBuffer(i420ByteBuffer); });
+ }
+
+ public void release() {
+ threadChecker.checkIsOnValidThread();
+ drawer.release();
+ i420TextureFrameBuffer.release();
+ videoFrameDrawer.release();
+ // Allow this class to be reused.
+ threadChecker.detachThread();
+ }
+}
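
A worked instance of the layout arithmetic in convertInternal() for a 638x360 input texture, chosen so the rounding of the stride is visible:

    final int frameWidth = 638, frameHeight = 360;
    final int stride = ((frameWidth + 7) / 8) * 8;   // 640, rounded up to a multiple of 8
    final int uvHeight = (frameHeight + 1) / 2;      // 180
    final int totalHeight = frameHeight + uvHeight;  // 540 rows in the combined layout
    final int bufferSize = stride * totalHeight;     // 345600 bytes allocated
    final int viewportWidth = stride / 4;            // 160 RGBA pixels drawn per row
    final int uPos = stride * frameHeight;           // 230400: U data starts here
    final int vPos = uPos + stride / 2;              // 230720: V starts half a row later
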
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvHelper.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvHelper.java
new file mode 100644
index 0000000000..afb8e837d1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/YuvHelper.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrappers around libyuv methods. All passed byte buffers must be direct byte buffers. */
+public class YuvHelper {
+ /**
+ * Copy I420 Buffer to a contiguously allocated buffer.
+ * <p> In Android, MediaCodec can request a buffer of a specific layout with the stride and
+ * slice-height (or plane height), and this function is used in this case.
+ * <p> For more information, see
+ * https://cs.android.com/android/platform/superproject/+/64fea7e5726daebc40f46890100837c01091100d:frameworks/base/media/java/android/media/MediaFormat.java;l=568
+   * @param dstStrideY the stride of the output buffer's Y plane.
+   * @param dstSliceHeightY the slice-height of the output buffer's Y plane.
+   * @param dstStrideU the stride of the output buffer's U (and V) plane.
+   * @param dstSliceHeightU the slice-height of the output buffer's U (and V) plane.
+ */
+ public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStrideY,
+ int dstSliceHeightY, int dstStrideU, int dstSliceHeightU) {
+ final int chromaWidth = (dstWidth + 1) / 2;
+ final int chromaHeight = (dstHeight + 1) / 2;
+
+ final int dstStartY = 0;
+ final int dstEndY = dstStartY + dstStrideY * dstHeight;
+ final int dstStartU = dstStartY + dstStrideY * dstSliceHeightY;
+ final int dstEndU = dstStartU + dstStrideU * chromaHeight;
+ final int dstStartV = dstStartU + dstStrideU * dstSliceHeightU;
+ // The last line doesn't need any padding, so use chromaWidth
+ // to calculate the exact end position.
+ final int dstEndV = dstStartV + dstStrideU * (chromaHeight - 1) + chromaWidth;
+ if (dst.capacity() < dstEndV) {
+ throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ + dstEndV + " was " + dst.capacity());
+ }
+
+ dst.limit(dstEndY);
+ dst.position(dstStartY);
+ final ByteBuffer dstY = dst.slice();
+ dst.limit(dstEndU);
+ dst.position(dstStartU);
+ final ByteBuffer dstU = dst.slice();
+ dst.limit(dstEndV);
+ dst.position(dstStartV);
+ final ByteBuffer dstV = dst.slice();
+
+ I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+ dstStrideU, dstV, dstStrideU, dstWidth, dstHeight);
+ }
+
+ /** Helper method for copying I420 to tightly packed destination buffer. */
+ public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight) {
+ I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
+ dstWidth, dstHeight, (dstWidth + 1) / 2, (dstHeight + 1) / 2);
+ }
+
+ /**
+   * Converts an I420 buffer to a contiguously allocated NV12 buffer.
+   * @param dstStrideY the stride of the output buffer's Y plane.
+   * @param dstSliceHeightY the slice-height of the output buffer's Y plane.
+ */
+ public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStrideY,
+ int dstSliceHeightY) {
+ final int chromaHeight = (dstHeight + 1) / 2;
+ final int chromaWidth = (dstWidth + 1) / 2;
+
+ final int dstStartY = 0;
+ final int dstEndY = dstStartY + dstStrideY * dstHeight;
+ final int dstStartUV = dstStartY + dstStrideY * dstSliceHeightY;
+ final int dstEndUV = dstStartUV + chromaWidth * chromaHeight * 2;
+ if (dst.capacity() < dstEndUV) {
+ throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ + dstEndUV + " was " + dst.capacity());
+ }
+
+ dst.limit(dstEndY);
+ dst.position(dstStartY);
+ final ByteBuffer dstY = dst.slice();
+ dst.limit(dstEndUV);
+ dst.position(dstStartUV);
+ final ByteBuffer dstUV = dst.slice();
+
+ I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
+ chromaWidth * 2, dstWidth, dstHeight);
+ }
+
+ /** Helper method for copying I420 to tightly packed NV12 destination buffer. */
+ public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight) {
+ I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
+ dstWidth, dstHeight);
+ }
+
+ /** Helper method for rotating I420 to tightly packed destination buffer. */
+ public static void I420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int srcWidth, int srcHeight,
+ int rotationMode) {
+ final int dstWidth = rotationMode % 180 == 0 ? srcWidth : srcHeight;
+ final int dstHeight = rotationMode % 180 == 0 ? srcHeight : srcWidth;
+
+ final int dstChromaHeight = (dstHeight + 1) / 2;
+ final int dstChromaWidth = (dstWidth + 1) / 2;
+
+ final int minSize = dstWidth * dstHeight + dstChromaWidth * dstChromaHeight * 2;
+ if (dst.capacity() < minSize) {
+ throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+ + minSize + " was " + dst.capacity());
+ }
+
+ final int startY = 0;
+ final int startU = dstHeight * dstWidth;
+ final int startV = startU + dstChromaHeight * dstChromaWidth;
+
+ dst.position(startY);
+ final ByteBuffer dstY = dst.slice();
+ dst.position(startU);
+ final ByteBuffer dstU = dst.slice();
+ dst.position(startV);
+ final ByteBuffer dstV = dst.slice();
+
+ nativeI420Rotate(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstWidth, dstU,
+ dstChromaWidth, dstV, dstChromaWidth, srcWidth, srcHeight, rotationMode);
+ }
+
+  /** Helper method for copying a single color plane. */
+ public static void copyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+ nativeCopyPlane(src, srcStride, dst, dstStride, width, height);
+ }
+
+ /** Converts ABGR little endian (rgba in memory) to I420. */
+ public static void ABGRToI420(ByteBuffer src, int srcStride, ByteBuffer dstY, int dstStrideY,
+ ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
+ nativeABGRToI420(
+ src, srcStride, dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV, width, height);
+ }
+
+ /**
+ * Copies I420 to the I420 dst buffer.
+   * <p> Unlike `libyuv::I420Copy`, this function rejects height <= 0, so flipping via a negative
+   * height is not supported.
+ */
+ public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+ int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
+ if (srcY == null || srcU == null || srcV == null || dstY == null || dstU == null || dstV == null
+ || width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Invalid I420Copy input arguments");
+ }
+ nativeI420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+ dstStrideU, dstV, dstStrideV, width, height);
+ }
+
+ public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstUV,
+ int dstStrideUV, int width, int height) {
+ if (srcY == null || srcU == null || srcV == null || dstY == null || dstUV == null || width <= 0
+ || height <= 0) {
+ throw new IllegalArgumentException("Invalid I420ToNV12 input arguments");
+ }
+ nativeI420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
+ dstStrideUV, width, height);
+ }
+
+ public static void I420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+ ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+ int dstStrideU, ByteBuffer dstV, int dstStrideV, int srcWidth, int srcHeight,
+ int rotationMode) {
+ nativeI420Rotate(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+ dstStrideU, dstV, dstStrideV, srcWidth, srcHeight, rotationMode);
+ }
+
+ private static native void nativeCopyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height);
+ private static native void nativeI420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+ int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+ ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height);
+ private static native void nativeI420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+ int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+ ByteBuffer dstUV, int dstStrideUV, int width, int height);
+ private static native void nativeI420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+ int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+ ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int srcWidth, int srcHeight,
+ int rotationMode);
+ private static native void nativeABGRToI420(ByteBuffer src, int srcStride, ByteBuffer dstY,
+ int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width,
+ int height);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java
new file mode 100644
index 0000000000..502c68cc9a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+/**
+ * This interface is a thin wrapper on top of a native C++ webrtc::AudioDeviceModule (ADM). The
+ * reason for basing it on a native ADM instead of a pure Java interface is that we have two native
+ * Android implementations (OpenSLES and AAudio) that do not make sense to wrap through JNI.
+ *
+ * <p>Note: This class is still under development and may change without notice.
+ */
+public interface AudioDeviceModule {
+ /**
+ * Returns a C++ pointer to a webrtc::AudioDeviceModule. Caller does _not_ take ownership and
+ * lifetime is handled through the release() call.
+ */
+ long getNativeAudioDeviceModulePointer();
+
+ /**
+ * Release resources for this AudioDeviceModule, including native resources. The object should not
+ * be used after this call.
+ */
+ void release();
+
+ /** Control muting/unmuting the speaker. */
+ void setSpeakerMute(boolean mute);
+
+ /** Control muting/unmuting the microphone. */
+ void setMicrophoneMute(boolean mute);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
new file mode 100644
index 0000000000..d3d57602a8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
@@ -0,0 +1,436 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.os.Build;
+import androidx.annotation.RequiresApi;
+import java.util.concurrent.ScheduledExecutorService;
+import org.webrtc.JniCommon;
+import org.webrtc.Logging;
+
+/**
+ * AudioDeviceModule implemented using android.media.AudioRecord as input and
+ * android.media.AudioTrack as output.
+ */
+public class JavaAudioDeviceModule implements AudioDeviceModule {
+ private static final String TAG = "JavaAudioDeviceModule";
+
+ public static Builder builder(Context context) {
+ return new Builder(context);
+ }
+
+ public static class Builder {
+ private final Context context;
+ private ScheduledExecutorService scheduler;
+ private final AudioManager audioManager;
+ private int inputSampleRate;
+ private int outputSampleRate;
+ private int audioSource = WebRtcAudioRecord.DEFAULT_AUDIO_SOURCE;
+ private int audioFormat = WebRtcAudioRecord.DEFAULT_AUDIO_FORMAT;
+ private AudioTrackErrorCallback audioTrackErrorCallback;
+ private AudioRecordErrorCallback audioRecordErrorCallback;
+ private SamplesReadyCallback samplesReadyCallback;
+ private AudioTrackStateCallback audioTrackStateCallback;
+ private AudioRecordStateCallback audioRecordStateCallback;
+ private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported();
+ private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported();
+ private boolean useStereoInput;
+ private boolean useStereoOutput;
+ private AudioAttributes audioAttributes;
+ private boolean useLowLatency;
+ private boolean enableVolumeLogger;
+
+ private Builder(Context context) {
+ this.context = context;
+ this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+ this.inputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
+ this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
+ this.useLowLatency = false;
+ this.enableVolumeLogger = true;
+ }
+
+ public Builder setScheduler(ScheduledExecutorService scheduler) {
+ this.scheduler = scheduler;
+ return this;
+ }
+
+ /**
+ * Call this method to override the default handling of querying the native sample rate.
+ * This can be useful on some devices where the available Android APIs are known to return
+ * invalid results.
+ */
+ public Builder setSampleRate(int sampleRate) {
+ Logging.d(TAG, "Input/Output sample rate overridden to: " + sampleRate);
+ this.inputSampleRate = sampleRate;
+ this.outputSampleRate = sampleRate;
+ return this;
+ }
+
+ /**
+ * Call this method to specifically override input sample rate.
+ */
+ public Builder setInputSampleRate(int inputSampleRate) {
+ Logging.d(TAG, "Input sample rate overridden to: " + inputSampleRate);
+ this.inputSampleRate = inputSampleRate;
+ return this;
+ }
+
+ /**
+ * Call this method to specifically override output sample rate.
+ */
+ public Builder setOutputSampleRate(int outputSampleRate) {
+ Logging.d(TAG, "Output sample rate overridden to: " + outputSampleRate);
+ this.outputSampleRate = outputSampleRate;
+ return this;
+ }
+
+ /**
+ * Call this to change the audio source. The argument should be one of the values from
+ * android.media.MediaRecorder.AudioSource. The default is AudioSource.VOICE_COMMUNICATION.
+ */
+ public Builder setAudioSource(int audioSource) {
+ this.audioSource = audioSource;
+ return this;
+ }
+
+ /**
+ * Call this to change the audio format. The argument should be one of the values from
+ * android.media.AudioFormat: ENCODING_PCM_8BIT, ENCODING_PCM_16BIT or ENCODING_PCM_FLOAT.
+ * The default is ENCODING_PCM_16BIT (16 bits per sample), which is guaranteed to be
+ * supported by all devices.
+ */
+ public Builder setAudioFormat(int audioFormat) {
+ this.audioFormat = audioFormat;
+ return this;
+ }
+
+ /**
+ * Set a callback to retrieve errors from the AudioTrack.
+ */
+ public Builder setAudioTrackErrorCallback(AudioTrackErrorCallback audioTrackErrorCallback) {
+ this.audioTrackErrorCallback = audioTrackErrorCallback;
+ return this;
+ }
+
+ /**
+ * Set a callback to retrieve errors from the AudioRecord.
+ */
+ public Builder setAudioRecordErrorCallback(AudioRecordErrorCallback audioRecordErrorCallback) {
+ this.audioRecordErrorCallback = audioRecordErrorCallback;
+ return this;
+ }
+
+ /**
+ * Set a callback to listen to the raw audio input from the AudioRecord.
+ */
+ public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback) {
+ this.samplesReadyCallback = samplesReadyCallback;
+ return this;
+ }
+
+ /**
+ * Set a callback to retrieve information from the AudioTrack on when audio starts and stops.
+ */
+ public Builder setAudioTrackStateCallback(AudioTrackStateCallback audioTrackStateCallback) {
+ this.audioTrackStateCallback = audioTrackStateCallback;
+ return this;
+ }
+
+ /**
+ * Set a callback to retrieve information from the AudioRecord on when audio starts and stops.
+ */
+ public Builder setAudioRecordStateCallback(AudioRecordStateCallback audioRecordStateCallback) {
+ this.audioRecordStateCallback = audioRecordStateCallback;
+ return this;
+ }
+
+ /**
+ * Control if the built-in HW noise suppressor should be used or not. The default is on if it is
+ * supported. It is possible to query support by calling isBuiltInNoiseSuppressorSupported().
+ */
+ public Builder setUseHardwareNoiseSuppressor(boolean useHardwareNoiseSuppressor) {
+ if (useHardwareNoiseSuppressor && !isBuiltInNoiseSuppressorSupported()) {
+ Logging.e(TAG, "HW NS not supported");
+ useHardwareNoiseSuppressor = false;
+ }
+ this.useHardwareNoiseSuppressor = useHardwareNoiseSuppressor;
+ return this;
+ }
+
+ /**
+ * Control if the built-in HW acoustic echo canceler should be used or not. The default is on if
+ * it is supported. It is possible to query support by calling
+ * isBuiltInAcousticEchoCancelerSupported().
+ */
+ public Builder setUseHardwareAcousticEchoCanceler(boolean useHardwareAcousticEchoCanceler) {
+ if (useHardwareAcousticEchoCanceler && !isBuiltInAcousticEchoCancelerSupported()) {
+ Logging.e(TAG, "HW AEC not supported");
+ useHardwareAcousticEchoCanceler = false;
+ }
+ this.useHardwareAcousticEchoCanceler = useHardwareAcousticEchoCanceler;
+ return this;
+ }
+
+ /**
+ * Control if stereo input should be used or not. The default is mono.
+ */
+ public Builder setUseStereoInput(boolean useStereoInput) {
+ this.useStereoInput = useStereoInput;
+ return this;
+ }
+
+ /**
+ * Control if stereo output should be used or not. The default is mono.
+ */
+ public Builder setUseStereoOutput(boolean useStereoOutput) {
+ this.useStereoOutput = useStereoOutput;
+ return this;
+ }
+
+ /**
+ * Control if the low-latency mode should be used. The default is disabled.
+ */
+ public Builder setUseLowLatency(boolean useLowLatency) {
+ this.useLowLatency = useLowLatency;
+ return this;
+ }
+
+ /**
+ * Set custom {@link AudioAttributes} to use.
+ */
+ public Builder setAudioAttributes(AudioAttributes audioAttributes) {
+ this.audioAttributes = audioAttributes;
+ return this;
+ }
+
+ /** Controls whether the volume logger on the audio output track is enabled. On by default. */
+ public Builder setEnableVolumeLogger(boolean enableVolumeLogger) {
+ this.enableVolumeLogger = enableVolumeLogger;
+ return this;
+ }
+
+ /**
+ * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
+ * and is responsible for calling release().
+ */
+ public JavaAudioDeviceModule createAudioDeviceModule() {
+ Logging.d(TAG, "createAudioDeviceModule");
+ if (useHardwareNoiseSuppressor) {
+ Logging.d(TAG, "HW NS will be used.");
+ } else {
+ if (isBuiltInNoiseSuppressorSupported()) {
+ Logging.d(TAG, "Overriding default behavior; now using WebRTC NS!");
+ }
+ Logging.d(TAG, "HW NS will not be used.");
+ }
+ if (useHardwareAcousticEchoCanceler) {
+ Logging.d(TAG, "HW AEC will be used.");
+ } else {
+ if (isBuiltInAcousticEchoCancelerSupported()) {
+ Logging.d(TAG, "Overriding default behavior; now using WebRTC AEC!");
+ }
+ Logging.d(TAG, "HW AEC will not be used.");
+ }
+ // Low-latency mode was introduced in API version 26, see
+ // https://developer.android.com/reference/android/media/AudioTrack#PERFORMANCE_MODE_LOW_LATENCY
+ final int MIN_LOW_LATENCY_SDK_VERSION = 26;
+ if (useLowLatency && Build.VERSION.SDK_INT >= MIN_LOW_LATENCY_SDK_VERSION) {
+ Logging.d(TAG, "Low latency mode will be used.");
+ }
+ ScheduledExecutorService executor = this.scheduler;
+ if (executor == null) {
+ executor = WebRtcAudioRecord.newDefaultScheduler();
+ }
+ final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager,
+ audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
+ samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
+ final WebRtcAudioTrack audioOutput =
+ new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback,
+ audioTrackStateCallback, useLowLatency, enableVolumeLogger);
+ return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
+ inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
+ }
+ }
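+
+ // Illustrative usage sketch (hypothetical caller code, not part of this file): building an
+ // ADM with stereo output and the detected hardware-effect defaults. `appContext` is assumed
+ // to be an android.content.Context owned by the embedding application.
+ //
+ // JavaAudioDeviceModule adm = JavaAudioDeviceModule.builder(appContext)
+ //     .setUseStereoOutput(true)
+ //     .createAudioDeviceModule();
+ // // ... hand `adm` to the peer connection factory, then release it when done:
+ // adm.release();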
+
+ /* AudioRecord */
+ // Audio recording error handler functions.
+ public enum AudioRecordStartErrorCode {
+ AUDIO_RECORD_START_EXCEPTION,
+ AUDIO_RECORD_START_STATE_MISMATCH,
+ }
+
+ public static interface AudioRecordErrorCallback {
+ void onWebRtcAudioRecordInitError(String errorMessage);
+ void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage);
+ void onWebRtcAudioRecordError(String errorMessage);
+ }
+
+ /** Called when audio recording starts and stops. */
+ public static interface AudioRecordStateCallback {
+ void onWebRtcAudioRecordStart();
+ void onWebRtcAudioRecordStop();
+ }
+
+ /**
+ * Contains audio sample information.
+ */
+ public static class AudioSamples {
+ /** See {@link AudioRecord#getAudioFormat()} */
+ private final int audioFormat;
+ /** See {@link AudioRecord#getChannelCount()} */
+ private final int channelCount;
+ /** See {@link AudioRecord#getSampleRate()} */
+ private final int sampleRate;
+
+ private final byte[] data;
+
+ public AudioSamples(int audioFormat, int channelCount, int sampleRate, byte[] data) {
+ this.audioFormat = audioFormat;
+ this.channelCount = channelCount;
+ this.sampleRate = sampleRate;
+ this.data = data;
+ }
+
+ public int getAudioFormat() {
+ return audioFormat;
+ }
+
+ public int getChannelCount() {
+ return channelCount;
+ }
+
+ public int getSampleRate() {
+ return sampleRate;
+ }
+
+ public byte[] getData() {
+ return data;
+ }
+ }
+
+ /** Called when new audio samples are ready. This should only be set for debug purposes. */
+ public static interface SamplesReadyCallback {
+ void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
+ }
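+
+ // Illustrative sketch (hypothetical debug wiring; `builder` and `TAG` belong to the caller):
+ // a SamplesReadyCallback that derives the duration of each delivered chunk, assuming 16-bit
+ // PCM, i.e. two bytes per sample:
+ //
+ // builder.setSamplesReadyCallback(samples -> {
+ //   int bytesPerFrame = samples.getChannelCount() * 2;
+ //   int numFrames = samples.getData().length / bytesPerFrame;
+ //   Logging.d(TAG, "Got " + (1000.0 * numFrames / samples.getSampleRate()) + " ms of audio");
+ // });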
+
+ /* AudioTrack */
+ // Audio playout/track error handler functions.
+ public enum AudioTrackStartErrorCode {
+ AUDIO_TRACK_START_EXCEPTION,
+ AUDIO_TRACK_START_STATE_MISMATCH,
+ }
+
+ public static interface AudioTrackErrorCallback {
+ void onWebRtcAudioTrackInitError(String errorMessage);
+ void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
+ void onWebRtcAudioTrackError(String errorMessage);
+ }
+
+ /** Called when audio playout starts and stops. */
+ public static interface AudioTrackStateCallback {
+ void onWebRtcAudioTrackStart();
+ void onWebRtcAudioTrackStop();
+ }
+
+ /**
+ * Returns true if the device supports built-in HW AEC, and the UUID is approved (some UUIDs can
+ * be excluded).
+ */
+ public static boolean isBuiltInAcousticEchoCancelerSupported() {
+ return WebRtcAudioEffects.isAcousticEchoCancelerSupported();
+ }
+
+ /**
+ * Returns true if the device supports built-in HW NS, and the UUID is approved (some UUIDs can be
+ * excluded).
+ */
+ public static boolean isBuiltInNoiseSuppressorSupported() {
+ return WebRtcAudioEffects.isNoiseSuppressorSupported();
+ }
+
+ private final Context context;
+ private final AudioManager audioManager;
+ private final WebRtcAudioRecord audioInput;
+ private final WebRtcAudioTrack audioOutput;
+ private final int inputSampleRate;
+ private final int outputSampleRate;
+ private final boolean useStereoInput;
+ private final boolean useStereoOutput;
+
+ private final Object nativeLock = new Object();
+ private long nativeAudioDeviceModule;
+
+ private JavaAudioDeviceModule(Context context, AudioManager audioManager,
+ WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput, int inputSampleRate,
+ int outputSampleRate, boolean useStereoInput, boolean useStereoOutput) {
+ this.context = context;
+ this.audioManager = audioManager;
+ this.audioInput = audioInput;
+ this.audioOutput = audioOutput;
+ this.inputSampleRate = inputSampleRate;
+ this.outputSampleRate = outputSampleRate;
+ this.useStereoInput = useStereoInput;
+ this.useStereoOutput = useStereoOutput;
+ }
+
+ @Override
+ public long getNativeAudioDeviceModulePointer() {
+ synchronized (nativeLock) {
+ if (nativeAudioDeviceModule == 0) {
+ nativeAudioDeviceModule = nativeCreateAudioDeviceModule(context, audioManager, audioInput,
+ audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
+ }
+ return nativeAudioDeviceModule;
+ }
+ }
+
+ @Override
+ public void release() {
+ synchronized (nativeLock) {
+ if (nativeAudioDeviceModule != 0) {
+ JniCommon.nativeReleaseRef(nativeAudioDeviceModule);
+ nativeAudioDeviceModule = 0;
+ }
+ }
+ }
+
+ @Override
+ public void setSpeakerMute(boolean mute) {
+ Logging.d(TAG, "setSpeakerMute: " + mute);
+ audioOutput.setSpeakerMute(mute);
+ }
+
+ @Override
+ public void setMicrophoneMute(boolean mute) {
+ Logging.d(TAG, "setMicrophoneMute: " + mute);
+ audioInput.setMicrophoneMute(mute);
+ }
+
+ /**
+ * Start to prefer a specific {@link AudioDeviceInfo} device for recording. Typically this should
+ * only be used if a client gives an explicit option for choosing a physical device to record
+ * from. Otherwise the best-matching device for other parameters will be used. Calling after
+ * recording is started may cause a temporary interruption if the audio routing changes.
+ */
+ @RequiresApi(Build.VERSION_CODES.M)
+ public void setPreferredInputDevice(AudioDeviceInfo preferredInputDevice) {
+ Logging.d(TAG, "setPreferredInputDevice: " + preferredInputDevice);
+ audioInput.setPreferredDevice(preferredInputDevice);
+ }
+
+ private static native long nativeCreateAudioDeviceModule(Context context,
+ AudioManager audioManager, WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput,
+ int inputSampleRate, int outputSampleRate, boolean useStereoInput, boolean useStereoOutput);
+}
diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java
new file mode 100644
index 0000000000..de0d0d61f9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import org.webrtc.voiceengine.WebRtcAudioRecord;
+import org.webrtc.voiceengine.WebRtcAudioTrack;
+
+/**
+ * This class represents the legacy AudioDeviceModule that is currently hardcoded into C++ WebRTC.
+ * It will return a null native AudioDeviceModule pointer, leading to an internal object being
+ * created inside WebRTC that is controlled by static calls to the classes under the voiceengine
+ * package. Please use the new JavaAudioDeviceModule instead of this class.
+ */
+@Deprecated
+public class LegacyAudioDeviceModule implements AudioDeviceModule {
+ @Override
+ public long getNativeAudioDeviceModulePointer() {
+ // Returning a null pointer will make WebRTC construct the built-in legacy AudioDeviceModule for
+ // Android internally.
+ return 0;
+ }
+
+ @Override
+ public void release() {
+ // All control for this ADM goes through static global methods and the C++ object is owned
+ // internally by WebRTC.
+ }
+
+ @Override
+ public void setSpeakerMute(boolean mute) {
+ WebRtcAudioTrack.setSpeakerMute(mute);
+ }
+
+ @Override
+ public void setMicrophoneMute(boolean mute) {
+ WebRtcAudioRecord.setMicrophoneMute(mute);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java
new file mode 100644
index 0000000000..ad40898e4c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java
@@ -0,0 +1,684 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaFormat;
+import android.os.SystemClock;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.ThreadUtils.ThreadChecker;
+
+/**
+ * Android hardware video decoder.
+ */
+@SuppressWarnings("deprecation")
+// Cannot support API 16 without using deprecated methods.
+// TODO(sakal): Rename to MediaCodecVideoDecoder once the deprecated implementation is removed.
+class AndroidVideoDecoder implements VideoDecoder, VideoSink {
+ private static final String TAG = "AndroidVideoDecoder";
+
+ // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
+ private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
+ private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
+ private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
+ private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
+ private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
+ private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
+
+ // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
+ // this timeout.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+
+ // WebRTC queues input frames quickly at the beginning of the call. Wait for input buffers with a
+ // long timeout (500 ms) to prevent this from causing the codec to return an error.
+ private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
+
+ // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
+ // If this timeout is exceeded, the output thread will unblock and check if the decoder is still
+ // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
+ // MediaCodec.
+ private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
+
+ private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
+ private final String codecName;
+ private final VideoCodecMimeType codecType;
+
+ private static class FrameInfo {
+ final long decodeStartTimeMs;
+ final int rotation;
+
+ FrameInfo(long decodeStartTimeMs, int rotation) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.rotation = rotation;
+ }
+ }
+
+ private final BlockingDeque<FrameInfo> frameInfos;
+ private int colorFormat;
+
+ // Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats
+ // those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder
+ // thread and is immutable while the codec is running.
+ @Nullable private Thread outputThread;
+
+ // Checker that ensures work is run on the output thread.
+ private ThreadChecker outputThreadChecker;
+
+ // Checker that ensures work is run on the decoder thread. The decoder thread is owned by the
+ // caller and must be used to call initDecode, decode, and release.
+ private ThreadChecker decoderThreadChecker;
+
+ private volatile boolean running;
+ @Nullable private volatile Exception shutdownException;
+
+ // Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decoder thread
+ // or the output thread. Accesses should be protected with this lock.
+ private final Object dimensionLock = new Object();
+ private int width;
+ private int height;
+ private int stride;
+ private int sliceHeight;
+
+ // Whether the decoder has finished the first frame. The codec may not change output dimensions
+ // after delivering the first frame. Only accessed on the output thread while the decoder is
+ // running.
+ private boolean hasDecodedFirstFrame;
+ // Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed
+ // on the decoder thread.
+ private boolean keyFrameRequired;
+
+ private final @Nullable EglBase.Context sharedContext;
+ // Valid and immutable while the decoder is running.
+ @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+ @Nullable private Surface surface;
+
+ private static class DecodedTextureMetadata {
+ final long presentationTimestampUs;
+ final Integer decodeTimeMs;
+
+ DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) {
+ this.presentationTimestampUs = presentationTimestampUs;
+ this.decodeTimeMs = decodeTimeMs;
+ }
+ }
+
+ // Metadata for the last frame rendered to the texture.
+ private final Object renderedTextureMetadataLock = new Object();
+ @Nullable private DecodedTextureMetadata renderedTextureMetadata;
+
+ // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid
+ // and immutable while the decoder is running.
+ @Nullable private Callback callback;
+
+ // Valid and immutable while the decoder is running.
+ @Nullable private MediaCodecWrapper codec;
+
+ AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) {
+ if (!isSupportedColorFormat(colorFormat)) {
+ throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
+ }
+ Logging.d(TAG,
+ "ctor name: " + codecName + " type: " + codecType + " color format: " + colorFormat
+ + " context: " + sharedContext);
+ this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
+ this.codecName = codecName;
+ this.codecType = codecType;
+ this.colorFormat = colorFormat;
+ this.sharedContext = sharedContext;
+ this.frameInfos = new LinkedBlockingDeque<>();
+ }
+
+ @Override
+ public VideoCodecStatus initDecode(Settings settings, Callback callback) {
+ this.decoderThreadChecker = new ThreadChecker();
+
+ this.callback = callback;
+ if (sharedContext != null) {
+ surfaceTextureHelper = createSurfaceTextureHelper();
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ surfaceTextureHelper.startListening(this);
+ }
+ return initDecodeInternal(settings.width, settings.height);
+ }
+
+ // Internal variant is used when restarting the codec due to reconfiguration.
+ private VideoCodecStatus initDecodeInternal(int width, int height) {
+ decoderThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG,
+ "initDecodeInternal name: " + codecName + " type: " + codecType + " width: " + width
+ + " height: " + height);
+ if (outputThread != null) {
+ Logging.e(TAG, "initDecodeInternal called while the codec is already running");
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ // Note: it is not necessary to initialize dimensions under the lock, since the output thread
+ // is not running.
+ this.width = width;
+ this.height = height;
+
+ stride = width;
+ sliceHeight = height;
+ hasDecodedFirstFrame = false;
+ keyFrameRequired = true;
+
+ try {
+ codec = mediaCodecWrapperFactory.createByCodecName(codecName);
+ } catch (IOException | IllegalArgumentException | IllegalStateException e) {
+ Logging.e(TAG, "Cannot create media decoder " + codecName);
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
+ if (sharedContext == null) {
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ }
+ codec.configure(format, surface, null, 0);
+ codec.start();
+ } catch (IllegalStateException | IllegalArgumentException e) {
+ Logging.e(TAG, "initDecode failed", e);
+ release();
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+ running = true;
+ outputThread = createOutputThread();
+ outputThread.start();
+
+ Logging.d(TAG, "initDecodeInternal done");
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
+ decoderThreadChecker.checkIsOnValidThread();
+ if (codec == null || callback == null) {
+ Logging.d(TAG, "decode uninitalized, codec: " + (codec != null) + ", callback: " + callback);
+ return VideoCodecStatus.UNINITIALIZED;
+ }
+
+ if (frame.buffer == null) {
+ Logging.e(TAG, "decode() - no input data");
+ return VideoCodecStatus.ERR_PARAMETER;
+ }
+
+ int size = frame.buffer.remaining();
+ if (size == 0) {
+ Logging.e(TAG, "decode() - input buffer empty");
+ return VideoCodecStatus.ERR_PARAMETER;
+ }
+
+ // Load dimensions from shared memory under the dimension lock.
+ final int width;
+ final int height;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ }
+
+ // Check if the resolution changed and reset the codec if necessary.
+ if (frame.encodedWidth * frame.encodedHeight > 0
+ && (frame.encodedWidth != width || frame.encodedHeight != height)) {
+ VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight);
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ }
+
+ if (keyFrameRequired) {
+ // Need to process a key frame first.
+ if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
+ Logging.e(TAG, "decode() - key frame required first");
+ return VideoCodecStatus.NO_OUTPUT;
+ }
+ }
+
+ int index;
+ try {
+ index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueInputBuffer failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ if (index < 0) {
+ // Decoder is falling behind. No input buffers available.
+ // The decoder can't simply drop frames; it might lose a key frame.
+ Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind");
+ return VideoCodecStatus.ERROR;
+ }
+
+ ByteBuffer buffer;
+ try {
+ buffer = codec.getInputBuffer(index);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+
+ if (buffer.capacity() < size) {
+ Logging.e(TAG, "decode() - HW buffer too small");
+ return VideoCodecStatus.ERROR;
+ }
+ buffer.put(frame.buffer);
+
+ frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
+ try {
+ codec.queueInputBuffer(index, 0 /* offset */, size,
+ TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "queueInputBuffer failed", e);
+ frameInfos.pollLast();
+ return VideoCodecStatus.ERROR;
+ }
+ if (keyFrameRequired) {
+ keyFrameRequired = false;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return codecName;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ // TODO(sakal): This is not called on the correct thread but is still called synchronously.
+ // Re-enable the check once this is called on the correct thread.
+ // decoderThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "release");
+ VideoCodecStatus status = releaseInternal();
+ if (surface != null) {
+ releaseSurface();
+ surface = null;
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper.dispose();
+ surfaceTextureHelper = null;
+ }
+ synchronized (renderedTextureMetadataLock) {
+ renderedTextureMetadata = null;
+ }
+ callback = null;
+ frameInfos.clear();
+ return status;
+ }
+
+ // Internal variant is used when restarting the codec due to reconfiguration.
+ private VideoCodecStatus releaseInternal() {
+ if (!running) {
+ Logging.d(TAG, "release: Decoder is not running.");
+ return VideoCodecStatus.OK;
+ }
+ try {
+ // The outputThread actually stops and releases the codec once running is false.
+ running = false;
+ if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ // Log an exception to capture the stack trace and turn it into a TIMEOUT error.
+ Logging.e(TAG, "Media decoder release timeout", new RuntimeException());
+ return VideoCodecStatus.TIMEOUT;
+ }
+ if (shutdownException != null) {
+ // Log the exception and turn it into an error. Wrap the exception in a new exception to
+ // capture both the output thread's stack trace and this thread's stack trace.
+ Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException));
+ shutdownException = null;
+ return VideoCodecStatus.ERROR;
+ }
+ } finally {
+ codec = null;
+ outputThread = null;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
+ decoderThreadChecker.checkIsOnValidThread();
+ VideoCodecStatus status = releaseInternal();
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ return initDecodeInternal(newWidth, newHeight);
+ }
+
+ private Thread createOutputThread() {
+ return new Thread("AndroidVideoDecoder.outputThread") {
+ @Override
+ public void run() {
+ outputThreadChecker = new ThreadChecker();
+ while (running) {
+ deliverDecodedFrame();
+ }
+ releaseCodecOnOutputThread();
+ }
+ };
+ }
+
+ // Visible for testing.
+ protected void deliverDecodedFrame() {
+ outputThreadChecker.checkIsOnValidThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ // Block until an output buffer is available (up to 100 milliseconds). If the timeout is
+ // exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
+ // thread's loop. Blocking here prevents the output thread from busy-waiting while the codec
+ // is idle.
+ int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
+ if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ reformat(codec.getOutputFormat());
+ return;
+ }
+
+ if (index < 0) {
+ Logging.v(TAG, "dequeueOutputBuffer returned " + index);
+ return;
+ }
+
+ FrameInfo frameInfo = frameInfos.poll();
+ Integer decodeTimeMs = null;
+ int rotation = 0;
+ if (frameInfo != null) {
+ decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
+ rotation = frameInfo.rotation;
+ }
+
+ hasDecodedFirstFrame = true;
+
+ if (surfaceTextureHelper != null) {
+ deliverTextureFrame(index, info, rotation, decodeTimeMs);
+ } else {
+ deliverByteFrame(index, info, rotation, decodeTimeMs);
+ }
+
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "deliverDecodedFrame failed", e);
+ }
+ }
+
+ private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
+ final int rotation, final Integer decodeTimeMs) {
+ // Load dimensions from shared memory under the dimension lock.
+ final int width;
+ final int height;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ }
+
+ synchronized (renderedTextureMetadataLock) {
+ if (renderedTextureMetadata != null) {
+ codec.releaseOutputBuffer(index, false);
+ return; // We are still waiting for texture for the previous frame, drop this one.
+ }
+ surfaceTextureHelper.setTextureSize(width, height);
+ surfaceTextureHelper.setFrameRotation(rotation);
+ renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
+ codec.releaseOutputBuffer(index, /* render= */ true);
+ }
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ final VideoFrame newFrame;
+ final Integer decodeTimeMs;
+ final long timestampNs;
+ synchronized (renderedTextureMetadataLock) {
+ if (renderedTextureMetadata == null) {
+ throw new IllegalStateException(
+ "Rendered texture metadata was null in onTextureFrameAvailable.");
+ }
+ timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000;
+ decodeTimeMs = renderedTextureMetadata.decodeTimeMs;
+ renderedTextureMetadata = null;
+ }
+ // Change timestamp of frame.
+ final VideoFrame frameWithModifiedTimeStamp =
+ new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
+ callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);
+ }
+
+ private void deliverByteFrame(
+ int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
+ // Load dimensions from shared memory under the dimension lock.
+ int width;
+ int height;
+ int stride;
+ int sliceHeight;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ stride = this.stride;
+ sliceHeight = this.sliceHeight;
+ }
+
+ // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
+ // bytes for each of the U and V channels.
+ if (info.size < width * height * 3 / 2) {
+ Logging.e(TAG, "Insufficient output buffer size: " + info.size);
+ return;
+ }
+
+ if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
+ // Some codecs (Exynos) report an incorrect stride. Correct it here.
+ // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
+ // 2 * size / (3 * height).
+ stride = info.size * 2 / (height * 3);
+ }
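+ // (Illustrative numbers, not from the source: info.size = 3110400 with height = 1080
+ // yields the corrected stride 3110400 * 2 / (1080 * 3) = 1920.)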
+
+ ByteBuffer buffer = codec.getOutputBuffer(index);
+ buffer.position(info.offset);
+ buffer.limit(info.offset + info.size);
+ buffer = buffer.slice();
+
+ final VideoFrame.Buffer frameBuffer;
+ if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
+ frameBuffer = copyI420Buffer(buffer, stride, sliceHeight, width, height);
+ } else {
+ // All other supported color formats are NV12.
+ frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height);
+ }
+ codec.releaseOutputBuffer(index, /* render= */ false);
+
+ long presentationTimeNs = info.presentationTimeUs * 1000;
+ VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
+
+ // Note that qp is parsed on the C++ side.
+ callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
+ frame.release();
+ }
+
+ private VideoFrame.Buffer copyNV12ToI420Buffer(
+ ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+ // toI420 copies the buffer.
+ return new NV12Buffer(width, height, stride, sliceHeight, buffer, null /* releaseCallback */)
+ .toI420();
+ }
+
+ private VideoFrame.Buffer copyI420Buffer(
+ ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+ if (stride % 2 != 0) {
+ throw new AssertionError("Stride is not divisible by two: " + stride);
+ }
+
+ // Note that the case with odd `sliceHeight` is handled in a special way.
+ // The chroma height contained in the payload is rounded down instead of
+ // up, making it one row less than what we expect in WebRTC. Therefore, we
+ // have to duplicate the last chroma rows for this case. Also, the offset
+ // between the Y plane and the U plane is unintuitive for this case. See
+ // http://bugs.webrtc.org/6651 for more info.
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
+
+ final int uvStride = stride / 2;
+
+ final int yPos = 0;
+ final int yEnd = yPos + stride * height;
+ final int uPos = yPos + stride * sliceHeight;
+ final int uEnd = uPos + uvStride * chromaHeight;
+ final int vPos = uPos + uvStride * sliceHeight / 2;
+ final int vEnd = vPos + uvStride * chromaHeight;
+
+ VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height);
+
+ buffer.limit(yEnd);
+ buffer.position(yPos);
+ copyPlane(
+ buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height);
+
+ buffer.limit(uEnd);
+ buffer.position(uPos);
+ copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(),
+ chromaWidth, chromaHeight);
+ if (sliceHeight % 2 == 1) {
+ buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+ ByteBuffer dataU = frameBuffer.getDataU();
+ dataU.position(frameBuffer.getStrideU() * chromaHeight); // Seek to beginning of last row.
+ dataU.put(buffer); // Copy the last row.
+ }
+
+ buffer.limit(vEnd);
+ buffer.position(vPos);
+ copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(),
+ chromaWidth, chromaHeight);
+ if (sliceHeight % 2 == 1) {
+ buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+ ByteBuffer dataV = frameBuffer.getDataV();
+ dataV.position(frameBuffer.getStrideV() * chromaHeight); // Seek to beginning of last row.
+ dataV.put(buffer); // Copy the last row.
+ }
+
+ return frameBuffer;
+ }
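+
+ // Worked example of the layout math above (illustrative numbers, not from the source): for
+ // width = 64, height = 33, stride = 64, sliceHeight = 33, the payload rounds the chroma
+ // height down to chromaHeight = height / 2 = 16 rows, with uPos = 64 * 33 = 2112 and
+ // vPos = 2112 + 32 * 33 / 2 = 2640. The destination expects (height + 1) / 2 = 17 chroma
+ // rows, so the special case copies payload chroma row 15 again into destination row 16.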
+
+ private void reformat(MediaFormat format) {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Decoder format changed: " + format.toString());
+ final int newWidth;
+ final int newHeight;
+ if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) {
+ newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT)
+ - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT);
+ newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM)
+ - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP);
+ } else {
+ newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+ newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+ }
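+ // Note (illustrative): the MediaFormat crop keys are inclusive, so e.g. crop-left = 0 and
+ // crop-right = 639 describe a visible width of 640 pixels, hence the "1 +" above.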
+ // Compare to existing width, height, and save values under the dimension lock.
+ synchronized (dimensionLock) {
+ if (newWidth != width || newHeight != height) {
+ if (hasDecodedFirstFrame) {
+ stopOnOutputThread(new RuntimeException("Unexpected size change. "
+ + "Configured " + width + "*" + height + ". "
+ + "New " + newWidth + "*" + newHeight));
+ return;
+ } else if (newWidth <= 0 || newHeight <= 0) {
+ Logging.w(TAG,
+ "Unexpected format dimensions. Configured " + width + "*" + height + ". "
+ + "New " + newWidth + "*" + newHeight + ". Skip it");
+ return;
+ }
+ width = newWidth;
+ height = newHeight;
+ }
+ }
+
+ // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
+ // color format updates.
+ if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+ colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+ if (!isSupportedColorFormat(colorFormat)) {
+ stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
+ return;
+ }
+ }
+
+ // Save stride and sliceHeight under the dimension lock.
+ synchronized (dimensionLock) {
+ if (format.containsKey(MEDIA_FORMAT_KEY_STRIDE)) {
+ stride = format.getInteger(MEDIA_FORMAT_KEY_STRIDE);
+ }
+ if (format.containsKey(MEDIA_FORMAT_KEY_SLICE_HEIGHT)) {
+ sliceHeight = format.getInteger(MEDIA_FORMAT_KEY_SLICE_HEIGHT);
+ }
+ Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+ stride = Math.max(width, stride);
+ sliceHeight = Math.max(height, sliceHeight);
+ }
+ }
+
+ private void releaseCodecOnOutputThread() {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Releasing MediaCodec on output thread");
+ try {
+ codec.stop();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder stop failed", e);
+ }
+ try {
+ codec.release();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+ // Propagate exceptions caught during release back to the main thread.
+ shutdownException = e;
+ }
+ Logging.d(TAG, "Release on output thread done");
+ }
+
+ private void stopOnOutputThread(Exception e) {
+ outputThreadChecker.checkIsOnValidThread();
+ running = false;
+ shutdownException = e;
+ }
+
+ private boolean isSupportedColorFormat(int colorFormat) {
+ for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) {
+ if (supported == colorFormat) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ // Visible for testing.
+ protected SurfaceTextureHelper createSurfaceTextureHelper() {
+ return SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
+ }
+
+ // Visible for testing.
+ // TODO(sakal): Remove once Robolectric commit fa991a0 has been rolled to WebRTC.
+ protected void releaseSurface() {
+ surface.release();
+ }
+
+ // Visible for testing.
+ protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) {
+ return JavaI420Buffer.allocate(width, height);
+ }
+
+ // Visible for testing.
+ protected void copyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+ YuvHelper.copyPlane(src, srcStride, dst, dstStride, width, height);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java
new file mode 100644
index 0000000000..3b5f5d2931
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
+class BaseBitrateAdjuster implements BitrateAdjuster {
+ protected int targetBitrateBps;
+ protected double targetFramerateFps;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ this.targetBitrateBps = targetBitrateBps;
+ this.targetFramerateFps = targetFramerateFps;
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ // No op.
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return targetBitrateBps;
+ }
+
+ @Override
+ public double getAdjustedFramerateFps() {
+ return targetFramerateFps;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java
new file mode 100644
index 0000000000..bfa08bad89
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Object that adjusts the bitrate of a hardware codec. */
+interface BitrateAdjuster {
+ /**
+ * Sets the target bitrate in bits per second and framerate in frames per second.
+ */
+ void setTargets(int targetBitrateBps, double targetFramerateFps);
+
+ /**
+ * Should be used to report the size of an encoded frame to the bitrate adjuster. Use
+ * getAdjustedBitrateBps to get the updated bitrate after calling this method.
+ */
+ void reportEncodedFrame(int size);
+
+ /** Gets the current bitrate. */
+ int getAdjustedBitrateBps();
+
+ /** Gets the current framerate. */
+ double getAdjustedFramerateFps();
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java
new file mode 100644
index 0000000000..9b410ceaef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @CalledByNative is used by the JNI generator to create the necessary JNI
+ * bindings and expose this method to native code.
+ */
+@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
+@Retention(RetentionPolicy.CLASS)
+public @interface CalledByNative {
+ /*
+ * If present, tells which inner class the method belongs to.
+ */
+ public String value() default "";
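+
+ // Illustrative usage (hypothetical nested class `Outer.Inner`):
+ //
+ // @CalledByNative("Inner")
+ // static void onEvent() { /* invoked from C++ */ }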
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java
new file mode 100644
index 0000000000..8a00a7fadb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @CalledByNativeUnchecked is used to generate JNI bindings that do not check for exceptions.
+ * It only makes sense to use this annotation on methods that declare a throws... spec.
+ * However, note that the exception received on the native side may be an 'unchecked' exception
+ * (RuntimeException) such as NullPointerException, so the native code should differentiate these
+ * cases.
+ * Usage of this should be very rare; where possible, handle exceptions on the Java side and use a
+ * return value to indicate success / failure.
+ */
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.CLASS)
+public @interface CalledByNativeUnchecked {
+ /*
+ * If present, tells which inner class the method belongs to.
+ */
+ public String value() default "";
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java
new file mode 100644
index 0000000000..a54f7201b2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.os.Handler;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+@SuppressWarnings("deprecation")
+class Camera1Session implements CameraSession {
+ private static final String TAG = "Camera1Session";
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+
+ private static final Histogram camera1StartTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
+ private static final Histogram camera1StopTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
+ private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
+ "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
+
+ private static enum SessionState { RUNNING, STOPPED }
+
+ private final Handler cameraThreadHandler;
+ private final Events events;
+ private final boolean captureToTexture;
+ private final Context applicationContext;
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final int cameraId;
+ private final Camera camera;
+ private final Camera.CameraInfo info;
+ private final CaptureFormat captureFormat;
+ // Used only for stats. Only used on the camera thread.
+ private final long constructionTimeNs; // Construction time of this class.
+
+ private SessionState state;
+ private boolean firstFrameReported;
+
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ public static void create(final CreateSessionCallback callback, final Events events,
+ final boolean captureToTexture, final Context applicationContext,
+ final SurfaceTextureHelper surfaceTextureHelper, final String cameraName,
+ final int width, final int height, final int framerate) {
+ final long constructionTimeNs = System.nanoTime();
+ Logging.d(TAG, "Open camera " + cameraName);
+ events.onCameraOpening();
+
+ final int cameraId;
+ try {
+ cameraId = Camera1Enumerator.getCameraIndex(cameraName);
+ } catch (IllegalArgumentException e) {
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ final Camera camera;
+ try {
+ camera = Camera.open(cameraId);
+ } catch (RuntimeException e) {
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ if (camera == null) {
+ callback.onFailure(
+ FailureType.ERROR, "Camera.open returned null for camera id = " + cameraId);
+ return;
+ }
+
+ try {
+ camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
+ } catch (IOException | RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ final Camera.CameraInfo info = new Camera.CameraInfo();
+ Camera.getCameraInfo(cameraId, info);
+
+ final CaptureFormat captureFormat;
+ try {
+ final Camera.Parameters parameters = camera.getParameters();
+ captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
+ final Size pictureSize = findClosestPictureSize(parameters, width, height);
+ updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
+ } catch (RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ if (!captureToTexture) {
+ final int frameSize = captureFormat.frameSize();
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+ camera.addCallbackBuffer(buffer.array());
+ }
+ }
+
+ // Calculate orientation manually and send it as CVO instead.
+ try {
+ camera.setDisplayOrientation(0 /* degrees */);
+ } catch (RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
+ surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
+ }
+
+ private static void updateCameraParameters(Camera camera, Camera.Parameters parameters,
+ CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) {
+ final List<String> focusModes = parameters.getSupportedFocusModes();
+
+ parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
+ parameters.setPreviewSize(captureFormat.width, captureFormat.height);
+ parameters.setPictureSize(pictureSize.width, pictureSize.height);
+ if (!captureToTexture) {
+ parameters.setPreviewFormat(captureFormat.imageFormat);
+ }
+
+ if (parameters.isVideoStabilizationSupported()) {
+ parameters.setVideoStabilization(true);
+ }
+ if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ }
+ camera.setParameters(parameters);
+ }
+
+ private static CaptureFormat findClosestCaptureFormat(
+ Camera.Parameters parameters, int width, int height, int framerate) {
+ // Find closest supported format for `width` x `height` @ `framerate`.
+ final List<CaptureFormat.FramerateRange> supportedFramerates =
+ Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
+ Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
+
+ final CaptureFormat.FramerateRange fpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
+
+ final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
+ CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
+
+ return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
+ }
+
+ private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) {
+ return CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
+ }
+
+ private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera,
+ Camera.CameraInfo info, CaptureFormat captureFormat, long constructionTimeNs) {
+ Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
+
+ this.cameraThreadHandler = new Handler();
+ this.events = events;
+ this.captureToTexture = captureToTexture;
+ this.applicationContext = applicationContext;
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.cameraId = cameraId;
+ this.camera = camera;
+ this.info = info;
+ this.captureFormat = captureFormat;
+ this.constructionTimeNs = constructionTimeNs;
+
+ surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+
+ startCapturing();
+ }
+
+ @Override
+ public void stop() {
+ Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
+ checkIsOnCameraThread();
+ if (state != SessionState.STOPPED) {
+ final long stopStartTime = System.nanoTime();
+ stopInternal();
+ final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+ camera1StopTimeMsHistogram.addSample(stopTimeMs);
+ }
+ }
+
+ private void startCapturing() {
+ Logging.d(TAG, "Start capturing");
+ checkIsOnCameraThread();
+
+ state = SessionState.RUNNING;
+
+ camera.setErrorCallback(new Camera.ErrorCallback() {
+ @Override
+ public void onError(int error, Camera camera) {
+ String errorMessage;
+ if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
+ errorMessage = "Camera server died!";
+ } else {
+ errorMessage = "Camera error: " + error;
+ }
+ Logging.e(TAG, errorMessage);
+ stopInternal();
+ if (error == Camera.CAMERA_ERROR_EVICTED) {
+ events.onCameraDisconnected(Camera1Session.this);
+ } else {
+ events.onCameraError(Camera1Session.this, errorMessage);
+ }
+ }
+ });
+
+ if (captureToTexture) {
+ listenForTextureFrames();
+ } else {
+ listenForBytebufferFrames();
+ }
+ try {
+ camera.startPreview();
+ } catch (RuntimeException e) {
+ stopInternal();
+ events.onCameraError(this, e.getMessage());
+ }
+ }
+
+ private void stopInternal() {
+ Logging.d(TAG, "Stop internal");
+ checkIsOnCameraThread();
+ if (state == SessionState.STOPPED) {
+ Logging.d(TAG, "Camera is already stopped");
+ return;
+ }
+
+ state = SessionState.STOPPED;
+ surfaceTextureHelper.stopListening();
+ // Note: stopPreview or other driver code might deadlock. Deadlock in
+ // Camera._stopPreview(Native Method) has been observed on
+ // Nexus 5 (hammerhead), OS version LMY48I.
+ camera.stopPreview();
+ camera.release();
+ events.onCameraClosed(this);
+ Logging.d(TAG, "Stop done");
+ }
+
+ private void listenForTextureFrames() {
+ surfaceTextureHelper.startListening((VideoFrame frame) -> {
+ checkIsOnCameraThread();
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+ return;
+ }
+
+ if (!firstFrameReported) {
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera1StartTimeMsHistogram.addSample(startTimeMs);
+ firstFrameReported = true;
+ }
+
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ final VideoFrame modifiedFrame =
+ new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+ (TextureBufferImpl) frame.getBuffer(),
+ /* mirror= */ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT,
+ /* rotation= */ 0),
+ /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+ events.onFrameCaptured(Camera1Session.this, modifiedFrame);
+ modifiedFrame.release();
+ });
+ }
+
+ private void listenForBytebufferFrames() {
+ camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
+ @Override
+ public void onPreviewFrame(final byte[] data, Camera callbackCamera) {
+ checkIsOnCameraThread();
+
+ if (callbackCamera != camera) {
+ Logging.e(TAG, "Callback from a different camera. This should never happen.");
+ return;
+ }
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
+ return;
+ }
+
+ final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+
+ if (!firstFrameReported) {
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera1StartTimeMsHistogram.addSample(startTimeMs);
+ firstFrameReported = true;
+ }
+
+ VideoFrame.Buffer frameBuffer = new NV21Buffer(
+ data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
+ if (state == SessionState.RUNNING) {
+ camera.addCallbackBuffer(data);
+ }
+ }));
+ final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
+ events.onFrameCaptured(Camera1Session.this, frame);
+ frame.release();
+ }
+ });
+ }
+
+ private int getFrameOrientation() {
+ int rotation = CameraSession.getDeviceOrientation(applicationContext);
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+ rotation = 360 - rotation;
+ }
+ return (info.orientation + rotation) % 360;
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+}
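
A note on the byte-buffer capture path in Camera1Session above: when not capturing to a texture, create() preallocates NUMBER_OF_CAPTURE_BUFFERS direct buffers of captureFormat.frameSize() bytes each and recycles them through addCallbackBuffer(), avoiding a per-frame allocation in the preview callback. Assuming the NV21 preview format, that size works out to width * height * 3/2 bytes. A minimal sketch of the arithmetic (the class name below is illustrative, not part of the patch):

    import android.graphics.ImageFormat;

    // Sketch only: the arithmetic behind the callback-buffer size used above,
    // assuming the NV21 preview format.
    final class Nv21FrameSizeSketch {
      static int frameSize(int width, int height) {
        // NV21 is 12 bits per pixel: a full-resolution Y plane plus a
        // half-resolution interleaved VU plane.
        return width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
      }
      // frameSize(640, 480) == 460800 bytes per callback buffer.
    }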
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java
new file mode 100644
index 0000000000..d5ee80c73e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java
@@ -0,0 +1,428 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Handler;
+import android.util.Range;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+class Camera2Session implements CameraSession {
+ private static final String TAG = "Camera2Session";
+
+ private static final Histogram camera2StartTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
+ private static final Histogram camera2StopTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
+ private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
+ "WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
+
+ private enum SessionState { RUNNING, STOPPED }
+
+ private final Handler cameraThreadHandler;
+ private final CreateSessionCallback callback;
+ private final Events events;
+ private final Context applicationContext;
+ private final CameraManager cameraManager;
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final String cameraId;
+ private final int width;
+ private final int height;
+ private final int framerate;
+
+ // Initialized at start
+ private CameraCharacteristics cameraCharacteristics;
+ private int cameraOrientation;
+ private boolean isCameraFrontFacing;
+ private int fpsUnitFactor;
+ private CaptureFormat captureFormat;
+
+ // Initialized when camera opens
+ @Nullable private CameraDevice cameraDevice;
+ @Nullable private Surface surface;
+
+ // Initialized when capture session is created
+ @Nullable private CameraCaptureSession captureSession;
+
+ // State
+ private SessionState state = SessionState.RUNNING;
+ private boolean firstFrameReported;
+
+ // Used only for stats. Only used on the camera thread.
+ private final long constructionTimeNs; // Construction time of this class.
+
+ private class CameraStateCallback extends CameraDevice.StateCallback {
+ private String getErrorDescription(int errorCode) {
+ switch (errorCode) {
+ case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
+ return "Camera device has encountered a fatal error.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
+ return "Camera device could not be opened due to a device policy.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
+ return "Camera device is in use already.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
+ return "Camera service has encountered a fatal error.";
+ case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
+ return "Camera device could not be opened because"
+ + " there are too many other open camera devices.";
+ default:
+ return "Unknown camera error: " + errorCode;
+ }
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice camera) {
+ checkIsOnCameraThread();
+ final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
+ state = SessionState.STOPPED;
+ stopInternal();
+ if (startFailure) {
+ callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
+ } else {
+ events.onCameraDisconnected(Camera2Session.this);
+ }
+ }
+
+ @Override
+ public void onError(CameraDevice camera, int errorCode) {
+ checkIsOnCameraThread();
+ reportError(getErrorDescription(errorCode));
+ }
+
+ @Override
+ public void onOpened(CameraDevice camera) {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Camera opened.");
+ cameraDevice = camera;
+
+ surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ try {
+ camera.createCaptureSession(
+ Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to create capture session. " + e);
+ return;
+ }
+ }
+
+ @Override
+ public void onClosed(CameraDevice camera) {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Camera device closed.");
+ events.onCameraClosed(Camera2Session.this);
+ }
+ }
+
+ private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
+ @Override
+ public void onConfigureFailed(CameraCaptureSession session) {
+ checkIsOnCameraThread();
+ session.close();
+ reportError("Failed to configure capture session.");
+ }
+
+ @Override
+ public void onConfigured(CameraCaptureSession session) {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "Camera capture session configured.");
+ captureSession = session;
+ try {
+ /*
+ * The viable options for video capture requests are:
+ * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
+ * post-processing.
+ * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
+ * quality.
+ */
+ final CaptureRequest.Builder captureRequestBuilder =
+ cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+ // Set auto exposure fps range.
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
+ new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
+ captureFormat.framerate.max / fpsUnitFactor));
+ captureRequestBuilder.set(
+ CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
+ chooseStabilizationMode(captureRequestBuilder);
+ chooseFocusMode(captureRequestBuilder);
+
+ captureRequestBuilder.addTarget(surface);
+ session.setRepeatingRequest(
+ captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to start capture request. " + e);
+ return;
+ }
+
+ surfaceTextureHelper.startListening((VideoFrame frame) -> {
+ checkIsOnCameraThread();
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+ return;
+ }
+
+ if (!firstFrameReported) {
+ firstFrameReported = true;
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera2StartTimeMsHistogram.addSample(startTimeMs);
+ }
+
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ // Also, undo camera orientation, we report it as rotation instead.
+ final VideoFrame modifiedFrame =
+ new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+ (TextureBufferImpl) frame.getBuffer(),
+ /* mirror= */ isCameraFrontFacing,
+ /* rotation= */ -cameraOrientation),
+ /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+ events.onFrameCaptured(Camera2Session.this, modifiedFrame);
+ modifiedFrame.release();
+ });
+ Logging.d(TAG, "Camera device successfully started.");
+ callback.onDone(Camera2Session.this);
+ }
+
+ // Prefers optical stabilization over software stabilization if available. Only enables one of
+ // the stabilization modes at a time because having both enabled can cause strange results.
+ private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
+ final int[] availableOpticalStabilization = cameraCharacteristics.get(
+ CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
+ if (availableOpticalStabilization != null) {
+ for (int mode : availableOpticalStabilization) {
+ if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
+ captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+ CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+ CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
+ Logging.d(TAG, "Using optical stabilization.");
+ return;
+ }
+ }
+ }
+ // If no optical mode is available, try software.
+ final int[] availableVideoStabilization = cameraCharacteristics.get(
+ CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
+ if (availableVideoStabilization != null) {
+ for (int mode : availableVideoStabilization) {
+ if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
+ captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+ CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+ CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
+ Logging.d(TAG, "Using video stabilization.");
+ return;
+ }
+ }
+ }
+ Logging.d(TAG, "Stabilization not available.");
+ }
+
+ private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
+ final int[] availableFocusModes =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+ for (int mode : availableFocusModes) {
+ if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
+ captureRequestBuilder.set(
+ CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
+ Logging.d(TAG, "Using continuous video auto-focus.");
+ return;
+ }
+ }
+ Logging.d(TAG, "Auto-focus is not available.");
+ }
+ }
+
+ private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
+ @Override
+ public void onCaptureFailed(
+ CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
+ Logging.d(TAG, "Capture failed: " + failure);
+ }
+ }
+
+ public static void create(CreateSessionCallback callback, Events events,
+ Context applicationContext, CameraManager cameraManager,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
+ int framerate) {
+ new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
+ cameraId, width, height, framerate);
+ }
+
+ private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
+ CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
+ int width, int height, int framerate) {
+ Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
+
+ constructionTimeNs = System.nanoTime();
+
+ this.cameraThreadHandler = new Handler();
+ this.callback = callback;
+ this.events = events;
+ this.applicationContext = applicationContext;
+ this.cameraManager = cameraManager;
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.cameraId = cameraId;
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+
+ start();
+ }
+
+ private void start() {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "start");
+
+ try {
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+ } catch (CameraAccessException | IllegalArgumentException e) {
+ reportError("getCameraCharacteristics(): " + e.getMessage());
+ return;
+ }
+ cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_FRONT;
+
+ findCaptureFormat();
+
+ if (captureFormat == null) {
+ // findCaptureFormat reports an error already.
+ return;
+ }
+
+ openCamera();
+ }
+
+ private void findCaptureFormat() {
+ checkIsOnCameraThread();
+
+ Range<Integer>[] fpsRanges =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
+ List<CaptureFormat.FramerateRange> framerateRanges =
+ Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
+ List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
+ Logging.d(TAG, "Available preview sizes: " + sizes);
+ Logging.d(TAG, "Available fps ranges: " + framerateRanges);
+
+ if (framerateRanges.isEmpty() || sizes.isEmpty()) {
+ reportError("No supported capture formats.");
+ return;
+ }
+
+ final CaptureFormat.FramerateRange bestFpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
+
+ final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
+ CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
+
+ captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
+ Logging.d(TAG, "Using capture format: " + captureFormat);
+ }
+
+ @SuppressLint("MissingPermission")
+ private void openCamera() {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Opening camera " + cameraId);
+ events.onCameraOpening();
+
+ try {
+ cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
+ } catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
+ reportError("Failed to open camera: " + e);
+ return;
+ }
+ }
+
+ @Override
+ public void stop() {
+ Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
+ checkIsOnCameraThread();
+ if (state != SessionState.STOPPED) {
+ final long stopStartTime = System.nanoTime();
+ state = SessionState.STOPPED;
+ stopInternal();
+ final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+ camera2StopTimeMsHistogram.addSample(stopTimeMs);
+ }
+ }
+
+ private void stopInternal() {
+ Logging.d(TAG, "Stop internal");
+ checkIsOnCameraThread();
+
+ surfaceTextureHelper.stopListening();
+
+ if (captureSession != null) {
+ captureSession.close();
+ captureSession = null;
+ }
+ if (surface != null) {
+ surface.release();
+ surface = null;
+ }
+ if (cameraDevice != null) {
+ cameraDevice.close();
+ cameraDevice = null;
+ }
+
+ Logging.d(TAG, "Stop done");
+ }
+
+ private void reportError(String error) {
+ checkIsOnCameraThread();
+ Logging.e(TAG, "Error: " + error);
+
+ final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
+ state = SessionState.STOPPED;
+ stopInternal();
+ if (startFailure) {
+ callback.onFailure(FailureType.ERROR, error);
+ } else {
+ events.onCameraError(this, error);
+ }
+ }
+
+ private int getFrameOrientation() {
+ int rotation = CameraSession.getDeviceOrientation(applicationContext);
+ if (!isCameraFrontFacing) {
+ rotation = 360 - rotation;
+ }
+ return (cameraOrientation + rotation) % 360;
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+}
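
A note on fpsUnitFactor in Camera2Session above: some camera HALs report CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES scaled by 1000 (e.g. [15000, 30000] meaning 15-30 fps), which is why onConfigured() divides the chosen range by the unit factor obtained from Camera2Enumerator.getFpsUnitFactor(). A minimal sketch of the normalization, assuming that scaling convention (the helper below is illustrative):

    import android.util.Range;

    // Sketch only: recover a plain frames-per-second range from a possibly
    // scaled range, as CONTROL_AE_TARGET_FPS_RANGE expects.
    final class FpsUnitSketch {
      static Range<Integer> toFps(Range<Integer> reported, int fpsUnitFactor) {
        return new Range<>(
            reported.getLower() / fpsUnitFactor, reported.getUpper() / fpsUnitFactor);
      }
      // toFps(new Range<>(15000, 30000), 1000) yields [15, 30].
    }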
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java
new file mode 100644
index 0000000000..1922a529e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java
@@ -0,0 +1,458 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.Looper;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+abstract class CameraCapturer implements CameraVideoCapturer {
+ enum SwitchState {
+ IDLE, // No switch requested.
+ PENDING, // Waiting for previous capture session to open.
+ IN_PROGRESS, // Waiting for new switched capture session to start.
+ }
+
+ private static final String TAG = "CameraCapturer";
+ private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+ private static final int OPEN_CAMERA_DELAY_MS = 500;
+ private static final int OPEN_CAMERA_TIMEOUT = 10000;
+
+ private final CameraEnumerator cameraEnumerator;
+ private final CameraEventsHandler eventsHandler;
+ private final Handler uiThreadHandler;
+
+ @Nullable
+ private final CameraSession.CreateSessionCallback createSessionCallback =
+ new CameraSession.CreateSessionCallback() {
+ @Override
+ public void onDone(CameraSession session) {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "Create session done. Switch state: " + switchState);
+ uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
+ synchronized (stateLock) {
+ capturerObserver.onCapturerStarted(true /* success */);
+ sessionOpening = false;
+ currentSession = session;
+ cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
+ firstFrameObserved = false;
+ stateLock.notifyAll();
+
+ if (switchState == SwitchState.IN_PROGRESS) {
+ switchState = SwitchState.IDLE;
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
+ switchEventsHandler = null;
+ }
+ } else if (switchState == SwitchState.PENDING) {
+ String selectedCameraName = pendingCameraName;
+ pendingCameraName = null;
+ switchState = SwitchState.IDLE;
+ switchCameraInternal(switchEventsHandler, selectedCameraName);
+ }
+ }
+ }
+
+ @Override
+ public void onFailure(CameraSession.FailureType failureType, String error) {
+ checkIsOnCameraThread();
+ uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
+ synchronized (stateLock) {
+ capturerObserver.onCapturerStarted(false /* success */);
+ openAttemptsRemaining--;
+
+ if (openAttemptsRemaining <= 0) {
+ Logging.w(TAG, "Opening camera failed, passing: " + error);
+ sessionOpening = false;
+ stateLock.notifyAll();
+
+ if (switchState != SwitchState.IDLE) {
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(error);
+ switchEventsHandler = null;
+ }
+ switchState = SwitchState.IDLE;
+ }
+
+ if (failureType == CameraSession.FailureType.DISCONNECTED) {
+ eventsHandler.onCameraDisconnected();
+ } else {
+ eventsHandler.onCameraError(error);
+ }
+ } else {
+ Logging.w(TAG, "Opening camera failed, retry: " + error);
+ createSessionInternal(OPEN_CAMERA_DELAY_MS);
+ }
+ }
+ }
+ };
+
+ @Nullable
+ private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
+ @Override
+ public void onCameraOpening() {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (currentSession != null) {
+ Logging.w(TAG, "onCameraOpening while session was open.");
+ return;
+ }
+ eventsHandler.onCameraOpening(cameraName);
+ }
+ }
+
+ @Override
+ public void onCameraError(CameraSession session, String error) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onCameraError from another session: " + error);
+ return;
+ }
+ eventsHandler.onCameraError(error);
+ stopCapture();
+ }
+ }
+
+ @Override
+ public void onCameraDisconnected(CameraSession session) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onCameraDisconnected from another session.");
+ return;
+ }
+ eventsHandler.onCameraDisconnected();
+ stopCapture();
+ }
+ }
+
+ @Override
+ public void onCameraClosed(CameraSession session) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession && currentSession != null) {
+ Logging.d(TAG, "onCameraClosed from another session.");
+ return;
+ }
+ eventsHandler.onCameraClosed();
+ }
+ }
+
+ @Override
+ public void onFrameCaptured(CameraSession session, VideoFrame frame) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onFrameCaptured from another session.");
+ return;
+ }
+ if (!firstFrameObserved) {
+ eventsHandler.onFirstFrameAvailable();
+ firstFrameObserved = true;
+ }
+ cameraStatistics.addFrame();
+ capturerObserver.onFrameCaptured(frame);
+ }
+ }
+ };
+
+ private final Runnable openCameraTimeoutRunnable = new Runnable() {
+ @Override
+ public void run() {
+ eventsHandler.onCameraError("Camera failed to start within timeout.");
+ }
+ };
+
+ // Initialized on initialize
+ // -------------------------
+ private Handler cameraThreadHandler;
+ private Context applicationContext;
+ private org.webrtc.CapturerObserver capturerObserver;
+ private SurfaceTextureHelper surfaceHelper;
+
+ private final Object stateLock = new Object();
+ private boolean sessionOpening; /* guarded by stateLock */
+ @Nullable private CameraSession currentSession; /* guarded by stateLock */
+ private String cameraName; /* guarded by stateLock */
+ private String pendingCameraName; /* guarded by stateLock */
+ private int width; /* guarded by stateLock */
+ private int height; /* guarded by stateLock */
+ private int framerate; /* guarded by stateLock */
+ private int openAttemptsRemaining; /* guarded by stateLock */
+ private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
+ @Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
+ // Valid from onDone call until stopCapture, otherwise null.
+ @Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
+ private boolean firstFrameObserved; /* guarded by stateLock */
+
+ public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
+ CameraEnumerator cameraEnumerator) {
+ if (eventsHandler == null) {
+ eventsHandler = new CameraEventsHandler() {
+ @Override
+ public void onCameraError(String errorDescription) {}
+ @Override
+ public void onCameraDisconnected() {}
+ @Override
+ public void onCameraFreezed(String errorDescription) {}
+ @Override
+ public void onCameraOpening(String cameraName) {}
+ @Override
+ public void onFirstFrameAvailable() {}
+ @Override
+ public void onCameraClosed() {}
+ };
+ }
+
+ this.eventsHandler = eventsHandler;
+ this.cameraEnumerator = cameraEnumerator;
+ this.cameraName = cameraName;
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+ uiThreadHandler = new Handler(Looper.getMainLooper());
+
+ if (deviceNames.isEmpty()) {
+ throw new RuntimeException("No cameras attached.");
+ }
+ if (!deviceNames.contains(this.cameraName)) {
+ throw new IllegalArgumentException(
+ "Camera name " + this.cameraName + " does not match any known camera device.");
+ }
+ }
+
+ @Override
+ public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ org.webrtc.CapturerObserver capturerObserver) {
+ this.applicationContext = applicationContext;
+ this.capturerObserver = capturerObserver;
+ this.surfaceHelper = surfaceTextureHelper;
+ this.cameraThreadHandler = surfaceTextureHelper.getHandler();
+ }
+
+ @Override
+ public void startCapture(int width, int height, int framerate) {
+ Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
+ if (applicationContext == null) {
+ throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
+ }
+
+ synchronized (stateLock) {
+ if (sessionOpening || currentSession != null) {
+ Logging.w(TAG, "Session already open");
+ return;
+ }
+
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+
+ sessionOpening = true;
+ openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
+ createSessionInternal(0);
+ }
+ }
+
+ private void createSessionInternal(int delayMs) {
+ uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
+ cameraThreadHandler.postDelayed(new Runnable() {
+ @Override
+ public void run() {
+ createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
+ surfaceHelper, cameraName, width, height, framerate);
+ }
+ }, delayMs);
+ }
+
+ @Override
+ public void stopCapture() {
+ Logging.d(TAG, "Stop capture");
+
+ synchronized (stateLock) {
+ while (sessionOpening) {
+ Logging.d(TAG, "Stop capture: Waiting for session to open");
+ try {
+ stateLock.wait();
+ } catch (InterruptedException e) {
+ Logging.w(TAG, "Stop capture interrupted while waiting for the session to open.");
+ Thread.currentThread().interrupt();
+ return;
+ }
+ }
+
+ if (currentSession != null) {
+ Logging.d(TAG, "Stop capture: Nulling session");
+ cameraStatistics.release();
+ cameraStatistics = null;
+ final CameraSession oldSession = currentSession;
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ oldSession.stop();
+ }
+ });
+ currentSession = null;
+ capturerObserver.onCapturerStopped();
+ } else {
+ Logging.d(TAG, "Stop capture: No session open");
+ }
+ }
+
+ Logging.d(TAG, "Stop capture done");
+ }
+
+ @Override
+ public void changeCaptureFormat(int width, int height, int framerate) {
+ Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
+ synchronized (stateLock) {
+ stopCapture();
+ startCapture(width, height, framerate);
+ }
+ }
+
+ @Override
+ public void dispose() {
+ Logging.d(TAG, "dispose");
+ stopCapture();
+ }
+
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
+ Logging.d(TAG, "switchCamera");
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+
+ if (deviceNames.size() < 2) {
+ reportCameraSwitchError("No camera to switch to.", switchEventsHandler);
+ return;
+ }
+
+ int cameraNameIndex = deviceNames.indexOf(cameraName);
+ String cameraName = deviceNames.get((cameraNameIndex + 1) % deviceNames.size());
+ switchCameraInternal(switchEventsHandler, cameraName);
+ }
+ });
+ }
+
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler, final String cameraName) {
+ Logging.d(TAG, "switchCamera");
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ switchCameraInternal(switchEventsHandler, cameraName);
+ }
+ });
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return false;
+ }
+
+ public void printStackTrace() {
+ Thread cameraThread = null;
+ if (cameraThreadHandler != null) {
+ cameraThread = cameraThreadHandler.getLooper().getThread();
+ }
+ if (cameraThread != null) {
+ StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
+ if (cameraStackTrace.length > 0) {
+ Logging.d(TAG, "CameraCapturer stack trace:");
+ for (StackTraceElement traceElem : cameraStackTrace) {
+ Logging.d(TAG, traceElem.toString());
+ }
+ }
+ }
+ }
+
+ private void reportCameraSwitchError(
+ String error, @Nullable CameraSwitchHandler switchEventsHandler) {
+ Logging.e(TAG, error);
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(error);
+ }
+ }
+
+ private void switchCameraInternal(
+ @Nullable final CameraSwitchHandler switchEventsHandler, final String selectedCameraName) {
+ Logging.d(TAG, "switchCamera internal");
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+
+ if (!deviceNames.contains(selectedCameraName)) {
+ reportCameraSwitchError("Attempted to switch to unknown camera device " + selectedCameraName,
+ switchEventsHandler);
+ return;
+ }
+
+ synchronized (stateLock) {
+ if (switchState != SwitchState.IDLE) {
+ reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
+ return;
+ }
+ if (!sessionOpening && currentSession == null) {
+ reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
+ return;
+ }
+
+ this.switchEventsHandler = switchEventsHandler;
+ if (sessionOpening) {
+ switchState = SwitchState.PENDING;
+ pendingCameraName = selectedCameraName;
+ return;
+ } else {
+ switchState = SwitchState.IN_PROGRESS;
+ }
+
+ Logging.d(TAG, "switchCamera: Stopping session");
+ cameraStatistics.release();
+ cameraStatistics = null;
+ final CameraSession oldSession = currentSession;
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ oldSession.stop();
+ }
+ });
+ currentSession = null;
+
+ cameraName = selectedCameraName;
+
+ sessionOpening = true;
+ openAttemptsRemaining = 1;
+ createSessionInternal(0);
+ }
+ Logging.d(TAG, "switchCamera done");
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ Logging.e(TAG, "Check is on camera thread failed.");
+ throw new RuntimeException("Not on camera thread.");
+ }
+ }
+
+ protected String getCameraName() {
+ synchronized (stateLock) {
+ return cameraName;
+ }
+ }
+
+ protected abstract void createCameraSession(
+ CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
+ Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
+ int width, int height, int framerate);
+}
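
A note on the no-argument switchCamera() in CameraCapturer above: it advances one position through the enumerated device names and wraps around, so repeated calls cycle through every attached camera. A minimal sketch of that selection (illustrative helper, not part of the patch):

    import java.util.Arrays;
    import java.util.List;

    // Sketch only: pick the next device name after the current one, wrapping
    // at the end of the list, as the switchCamera() runnable does above.
    final class NextCameraSketch {
      static String nextCamera(List<String> deviceNames, String currentName) {
        final int index = deviceNames.indexOf(currentName);
        return deviceNames.get((index + 1) % deviceNames.size());
      }
      // nextCamera(Arrays.asList("front", "back"), "back") returns "front".
    }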
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java
new file mode 100644
index 0000000000..8d137854d8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Matrix;
+import android.view.Surface;
+import android.view.WindowManager;
+
+interface CameraSession {
+ enum FailureType { ERROR, DISCONNECTED }
+
+ // Callbacks are fired on the camera thread.
+ interface CreateSessionCallback {
+ void onDone(CameraSession session);
+ void onFailure(FailureType failureType, String error);
+ }
+
+ // Events are fired on the camera thread.
+ interface Events {
+ void onCameraOpening();
+ void onCameraError(CameraSession session, String error);
+ void onCameraDisconnected(CameraSession session);
+ void onCameraClosed(CameraSession session);
+ void onFrameCaptured(CameraSession session, VideoFrame frame);
+ }
+
+ /**
+ * Stops the capture. Waits until no more calls to the capture observer will be made.
+ */
+ void stop();
+
+ static int getDeviceOrientation(Context context) {
+ final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+ switch (wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ return 90;
+ case Surface.ROTATION_180:
+ return 180;
+ case Surface.ROTATION_270:
+ return 270;
+ case Surface.ROTATION_0:
+ default:
+ return 0;
+ }
+ }
+
+ static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
+ TextureBufferImpl buffer, boolean mirror, int rotation) {
+ final Matrix transformMatrix = new Matrix();
+ // Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
+ transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
+ if (mirror) {
+ transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
+ }
+ transformMatrix.preRotate(rotation);
+ transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
+
+ // The width and height are not affected by rotation since Camera2Session has set them to the
+ // value they should be after undoing the rotation.
+ return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
+ }
+}
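
A note on createTextureBufferWithModifiedTransformMatrix() above: texture coordinates run from (0, 0) to (1, 1), so mirroring or rotating about the center of the frame means translating to (0.5, 0.5), applying the operation, and translating back. A standalone sketch of the same matrix construction (illustrative helper):

    import android.graphics.Matrix;

    // Sketch only: build the center-of-texture transform used above.
    final class CenterTransformSketch {
      static Matrix centerTransform(boolean mirror, int rotationDegrees) {
        final Matrix m = new Matrix();
        m.preTranslate(0.5f, 0.5f);
        if (mirror) {
          m.preScale(-1f, 1f); // Horizontal flip around the center.
        }
        m.preRotate(rotationDegrees);
        m.preTranslate(-0.5f, -0.5f);
        return m;
      }
    }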
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
new file mode 100644
index 0000000000..96a15bbfe1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
+ * bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
+ * target bitrate by unacceptable margins.
+ */
+class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
+ // Change the bitrate at most once every three seconds.
+ private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
+ // Maximum bitrate adjustment scale - no more than 4 times.
+ private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
+ // Number of adjustment steps needed to reach the maximum scale.
+ private static final int BITRATE_ADJUSTMENT_STEPS = 20;
+
+ private static final double BITS_PER_BYTE = 8.0;
+
+ // How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
+ private double deviationBytes;
+ private double timeSinceLastAdjustmentMs;
+ private int bitrateAdjustmentScaleExp;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
+ // Rescale the accumulator level if the accumulator max decreases.
+ deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
+ }
+ super.setTargets(targetBitrateBps, targetFramerateFps);
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ if (targetFramerateFps == 0) {
+ return;
+ }
+
+ // Accumulate the difference between actual and expected frame sizes.
+ double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFramerateFps;
+ deviationBytes += (size - expectedBytesPerFrame);
+ timeSinceLastAdjustmentMs += 1000.0 / targetFramerateFps;
+
+ // Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
+ // shortfall of the target.
+ double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
+
+ // Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
+ // bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
+ double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
+ deviationBytes = Math.min(deviationBytes, deviationCap);
+ deviationBytes = Math.max(deviationBytes, -deviationCap);
+
+ // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
+ // from the target value.
+ if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
+ return;
+ }
+
+ if (deviationBytes > deviationThresholdBytes) {
+ // Encoder generates too high bitrate - need to reduce the scale.
+ int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
+ // Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
+ // This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = deviationThresholdBytes;
+ } else if (deviationBytes < -deviationThresholdBytes) {
+ // Encoder generates too low bitrate - need to increase the scale.
+ int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
+ // Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
+ // This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = -deviationThresholdBytes;
+ }
+ timeSinceLastAdjustmentMs = 0;
+ }
+
+ private double getBitrateAdjustmentScale() {
+ return Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
+ (double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS);
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return (int) (targetBitrateBps * getBitrateAdjustmentScale());
+ }
+}
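
Worked example for DynamicBitrateAdjuster above, with hypothetical targets of 1 Mbps at 30 fps: the deviation threshold is one second's worth of payload (125000 bytes), and a sustained 50% overshoot crosses it within the 3-second adjustment window, moving the scale exponent down two steps. The sketch below replays that arithmetic with the constants defined in the class:

    // Sketch only: the adjustment arithmetic above, worked through for a
    // hypothetical 1 Mbps target at 30 fps.
    final class BitrateAdjusterArithmeticSketch {
      public static void main(String[] args) {
        final double targetBitrateBps = 1_000_000;
        final double targetFramerateFps = 30;
        // ~4166.7 bytes expected per encoded frame.
        final double expectedBytesPerFrame = (targetBitrateBps / 8.0) / targetFramerateFps;
        // 125000 bytes: one second of payload, the adjustment threshold.
        final double deviationThresholdBytes = targetBitrateBps / 8.0;
        // A sustained 50% overshoot accumulates 62500 extra bytes per second;
        // after 3 seconds deviationBytes is ~187500, above the threshold, and
        // (int) (187500 / 125000 + 0.5) == 2 steps are taken downwards.
        final int stepsDown = (int) (187500.0 / deviationThresholdBytes + 0.5);
        // New scale: 4^(-2/20), roughly 0.87, so the encoder is asked for
        // about 870 kbps until the deviation drains back under the cap.
        final double scale = Math.pow(4, -stepsDown / 20.0);
        System.out.printf("frame=%.1fB threshold=%.0fB steps=%d scale=%.2f%n",
            expectedBytesPerFrame, deviationThresholdBytes, stepsDown, scale);
      }
    }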
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java
new file mode 100644
index 0000000000..254a17c750
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java
@@ -0,0 +1,365 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.GLException;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import androidx.annotation.Nullable;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+class EglBase10Impl implements EglBase10 {
+ private static final String TAG = "EglBase10Impl";
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ @Nullable private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGL wrapper for an actual EGLContext.
+ private static class Context implements EglBase10.Context {
+ private final EGL10 egl;
+ private final EGLContext eglContext;
+ private final EGLConfig eglContextConfig;
+
+ @Override
+ public EGLContext getRawContext() {
+ return eglContext;
+ }
+
+ @Override
+ public long getNativeEglContext() {
+ EGLContext previousContext = egl.eglGetCurrentContext();
+ EGLDisplay currentDisplay = egl.eglGetCurrentDisplay();
+ EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL10.EGL_DRAW);
+ EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL10.EGL_READ);
+ EGLSurface tempEglSurface = null;
+
+ if (currentDisplay == EGL10.EGL_NO_DISPLAY) {
+ currentDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ }
+
+ try {
+ if (previousContext != eglContext) {
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE};
+ tempEglSurface =
+ egl.eglCreatePbufferSurface(currentDisplay, eglContextConfig, surfaceAttribs);
+ if (!egl.eglMakeCurrent(currentDisplay, tempEglSurface, tempEglSurface, eglContext)) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to make temporary EGL surface active: " + egl.eglGetError());
+ }
+ }
+
+ return nativeGetCurrentNativeEGLContext();
+ } finally {
+ if (tempEglSurface != null) {
+ egl.eglMakeCurrent(
+ currentDisplay, previousDrawSurface, previousReadSurface, previousContext);
+ egl.eglDestroySurface(currentDisplay, tempEglSurface);
+ }
+ }
+ }
+
+ public Context(EGL10 egl, EGLContext eglContext, EGLConfig eglContextConfig) {
+ this.egl = egl;
+ this.eglContext = eglContext;
+ this.eglContextConfig = eglContextConfig;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) {
+ this.egl = (EGL10) EGLContext.getEGL();
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(egl, eglDisplay, configAttributes);
+ final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
+ Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
+ }
+
+ @Override
+ public void createSurface(Surface surface) {
+ /**
+ * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+ * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+ * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+ */
+ class FakeSurfaceHolder implements SurfaceHolder {
+ private final Surface surface;
+
+ FakeSurfaceHolder(Surface surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ public void addCallback(Callback callback) {}
+
+ @Override
+ public void removeCallback(Callback callback) {}
+
+ @Override
+ public boolean isCreating() {
+ return false;
+ }
+
+ @Deprecated
+ @Override
+ public void setType(int i) {}
+
+ @Override
+ public void setFixedSize(int i, int i2) {}
+
+ @Override
+ public void setSizeFromLayout() {}
+
+ @Override
+ public void setFormat(int i) {}
+
+ @Override
+ public void setKeepScreenOn(boolean b) {}
+
+ @Nullable
+ @Override
+ public Canvas lockCanvas() {
+ return null;
+ }
+
+ @Nullable
+ @Override
+ public Canvas lockCanvas(Rect rect) {
+ return null;
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas) {}
+
+ @Nullable
+ @Override
+ public Rect getSurfaceFrame() {
+ return null;
+ }
+
+ @Override
+ public Surface getSurface() {
+ return surface;
+ }
+ }
+
+ createSurfaceInternal(new FakeSurfaceHolder(surface));
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+
+ @Override
+ public org.webrtc.EglBase.Context getEglBaseContext() {
+ return new Context(egl, eglContext, eglConfig);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int[] widthArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int[] heightArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ egl.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new GLException(egl.eglGetError(),
+ "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new GLException(egl.eglGetError(),
+ "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ egl.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ @Override
+ public void swapBuffers(long timeStampNs) {
+ // Setting presentation time is not supported for EGL 1.0.
+ swapBuffers();
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGL10 egl, EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new GLException(
+ egl.eglGetError(), "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private EGLContext createEglContext(@Nullable EGLContext sharedContext, EGLDisplay eglDisplay,
+ EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ }
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglContext;
+ }
+
+ private static native long nativeGetCurrentNativeEGLContext();
+}
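
A note on getNativeEglContext() in EglBase10Impl above: the native handle can only be queried while the context is current on the calling thread, so a temporary 1x1 pbuffer surface is bound when needed and the previous bindings are restored afterwards. A minimal sketch of that save-bind-restore pattern (illustrative helper; the temporary-surface details are elided):

    import javax.microedition.khronos.egl.EGL10;
    import javax.microedition.khronos.egl.EGLContext;
    import javax.microedition.khronos.egl.EGLDisplay;
    import javax.microedition.khronos.egl.EGLSurface;

    // Sketch only: capture whatever is current on this thread, run work that
    // may rebind the context, and restore the original bindings in finally.
    final class CurrentContextGuardSketch {
      static void withTemporaryBinding(Runnable work) {
        final EGL10 egl = (EGL10) EGLContext.getEGL();
        final EGLDisplay display = egl.eglGetCurrentDisplay();
        final EGLContext prevContext = egl.eglGetCurrentContext();
        final EGLSurface prevDraw = egl.eglGetCurrentSurface(EGL10.EGL_DRAW);
        final EGLSurface prevRead = egl.eglGetCurrentSurface(EGL10.EGL_READ);
        try {
          work.run(); // e.g. bind a temporary pbuffer and query the context.
        } finally {
          egl.eglMakeCurrent(display, prevDraw, prevRead, prevContext);
        }
      }
    }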
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java
new file mode 100644
index 0000000000..caf45b091e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLException;
+import android.os.Build;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
+class EglBase14Impl implements EglBase14 {
+ private static final String TAG = "EglBase14Impl";
+ private EGLContext eglContext;
+ @Nullable private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
+ public static class Context implements EglBase14.Context {
+ private final EGLContext egl14Context;
+
+ @Override
+ public EGLContext getRawContext() {
+ return egl14Context;
+ }
+
+ @Override
+ public long getNativeEglContext() {
+ return egl14Context.getNativeHandle();
+ }
+
+ public Context(android.opengl.EGLContext eglContext) {
+ this.egl14Context = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ // `sharedContext` may be null.
+ public EglBase14Impl(EGLContext sharedContext, int[] configAttributes) {
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
+ Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
+ }
+
+ // Create EGLSurface from the Android Surface.
+ @Override
+ public void createSurface(Surface surface) {
+ createSurfaceInternal(surface);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either Surface or SurfaceTexture.
+ private void createSurfaceInternal(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+      // Capture the error once: querying eglGetError() again would reset it.
+      final int error = EGL14.eglGetError();
+      throw new GLException(
+          error, "Failed to create window surface: 0x" + Integer.toHexString(error));
+ }
+ }
+
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+      final int error = EGL14.eglGetError();
+      throw new GLException(error,
+          "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+              + Integer.toHexString(error));
+ }
+ }
+
+ @Override
+ public Context getEglBaseContext() {
+ return new Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL14.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+    final int[] widthArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+    final int[] heightArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ synchronized (EglBase.lock) {
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ }
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+        final int error = EGL14.eglGetError();
+        throw new GLException(
+            error, "eglMakeCurrent failed: 0x" + Integer.toHexString(error));
+ }
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+        final int error = EGL14.eglGetError();
+        throw new GLException(
+            error, "eglDetachCurrent failed: 0x" + Integer.toHexString(error));
+ }
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ @Override
+ public void swapBuffers(long timeStampNs) {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ // See
+ // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+ EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+      final int error = EGL14.eglGetError();
+      throw new GLException(
+          error, "Unable to get EGL14 display: 0x" + Integer.toHexString(error));
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+      final int error = EGL14.eglGetError();
+      throw new GLException(
+          error, "Unable to initialize EGL14: 0x" + Integer.toHexString(error));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+      final int error = EGL14.eglGetError();
+      throw new GLException(
+          error, "eglChooseConfig failed: 0x" + Integer.toHexString(error));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+  // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(@Nullable EGLContext sharedContext,
+ EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL14.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ }
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+      final int error = EGL14.eglGetError();
+      throw new GLException(
+          error, "Failed to create EGL context: 0x" + Integer.toHexString(error));
+ }
+ return eglContext;
+ }
+}
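
To make the lifecycle above concrete, here is a minimal off-screen usage sketch. It assumes package org.webrtc (the class is package-private), a null shared context, and an EglBase.CONFIG_PIXEL_BUFFER-style attribute array; any array accepted by getEglConfig() works.

    // Create -> surface -> current -> draw -> release.
    EglBase14Impl eglBase =
        new EglBase14Impl(/* sharedContext= */ null, EglBase.CONFIG_PIXEL_BUFFER);
    eglBase.createDummyPbufferSurface(); // 1x1 pbuffer; enough for GL setup work.
    eglBase.makeCurrent();               // Bind context and surface to this thread.
    // ... issue GLES calls here ...
    eglBase.release();                   // Destroys surface, context and display.
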
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java
new file mode 100644
index 0000000000..fe9481e182
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Empty class for use in libjingle_peerconnection_java because all targets require at least one
+ * Java file.
+ */
+class Empty {}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
new file mode 100644
index 0000000000..e28b7b5a26
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
+ * hardware codecs that assume the framerate never changes.
+ */
+class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
+ private static final int DEFAULT_FRAMERATE_FPS = 30;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+    // Report a fixed default framerate to the codec and scale the bitrate so the
+    // effective bits per second stay at the requested target.
+ this.targetFramerateFps = DEFAULT_FRAMERATE_FPS;
+ this.targetBitrateBps = (int) (targetBitrateBps * DEFAULT_FRAMERATE_FPS / targetFramerateFps);
+ }
+}
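
The compensation above is easiest to see with numbers. The codec is always told DEFAULT_FRAMERATE_FPS, so if the real framerate is lower, each frame would get too small a share of the bitrate; scaling the bitrate up restores the intended per-second budget. A small worked sketch, assuming only the getters inherited from BaseBitrateAdjuster:

    FramerateBitrateAdjuster adjuster = new FramerateBitrateAdjuster();
    // Caller wants 1 Mbps at 15 fps. The codec is told 30 fps, so each frame
    // gets half the intended budget; doubling the bitrate compensates:
    // 1_000_000 * 30 / 15 = 2_000_000 bps.
    adjuster.setTargets(/* targetBitrateBps= */ 1_000_000, /* targetFramerateFps= */ 15);
    // adjuster.getAdjustedBitrateBps()   -> 2_000_000
    // adjuster.getAdjustedFramerateFps() -> 30.0
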
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java
new file mode 100644
index 0000000000..34144e2f75
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.FloatBuffer;
+
+/**
+ * Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input
+ * sources (OES, RGB, or YUV) using a generic fragment shader as input. The generic fragment shader
+ * should sample pixel values through the function "sample", which this class provides as an
+ * abstraction over the input source type (OES, RGB, or YUV). The texture coordinate variable is
+ * named "tc" and the texture matrix in the vertex shader is named "tex_mat". The simplest
+ * possible generic shader that just draws pixels from the frame unmodified looks like:
+ * void main() {
+ *   gl_FragColor = sample(tc);
+ * }
+ * This class covers the cases for most simple shaders and generates the necessary boilerplate.
+ * Advanced shaders can always implement RendererCommon.GlDrawer directly.
+ */
+class GlGenericDrawer implements RendererCommon.GlDrawer {
+ /**
+ * The different shader types representing different input sources. YUV here represents three
+ * separate Y, U, V textures.
+ */
+  public enum ShaderType { OES, RGB, YUV }
+
+ /**
+   * These shader callbacks are used to customize behavior for a GlDrawer. They provide a hook
+   * to set uniform variables in the shader before a frame is drawn.
+ */
+  public interface ShaderCallbacks {
+ /**
+ * This callback is called when a new shader has been compiled and created. It will be called
+ * for the first frame as well as when the shader type is changed. This callback can be used to
+ * do custom initialization of the shader that only needs to happen once.
+ */
+ void onNewShader(GlShader shader);
+
+ /**
+ * This callback is called before rendering a frame. It can be used to do custom preparation of
+ * the shader that needs to happen every frame.
+ */
+ void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight);
+ }
+
+ private static final String INPUT_VERTEX_COORDINATE_NAME = "in_pos";
+ private static final String INPUT_TEXTURE_COORDINATE_NAME = "in_tc";
+ private static final String TEXTURE_MATRIX_NAME = "tex_mat";
+ private static final String DEFAULT_VERTEX_SHADER_STRING = "varying vec2 tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "uniform mat4 tex_mat;\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " tc = (tex_mat * in_tc).xy;\n"
+ + "}\n";
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1)
+ // is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) {
+ final StringBuilder stringBuilder = new StringBuilder();
+ if (shaderType == ShaderType.OES) {
+ stringBuilder.append("#extension GL_OES_EGL_image_external : require\n");
+ }
+ stringBuilder.append("precision mediump float;\n");
+ stringBuilder.append("varying vec2 tc;\n");
+
+ if (shaderType == ShaderType.YUV) {
+ stringBuilder.append("uniform sampler2D y_tex;\n");
+ stringBuilder.append("uniform sampler2D u_tex;\n");
+ stringBuilder.append("uniform sampler2D v_tex;\n");
+
+ // Add separate function for sampling texture.
+ // yuv_to_rgb_mat is inverse of the matrix defined in YuvConverter.
+ stringBuilder.append("vec4 sample(vec2 p) {\n");
+ stringBuilder.append(" float y = texture2D(y_tex, p).r * 1.16438;\n");
+ stringBuilder.append(" float u = texture2D(u_tex, p).r;\n");
+ stringBuilder.append(" float v = texture2D(v_tex, p).r;\n");
+ stringBuilder.append(" return vec4(y + 1.59603 * v - 0.874202,\n");
+ stringBuilder.append(" y - 0.391762 * u - 0.812968 * v + 0.531668,\n");
+ stringBuilder.append(" y + 2.01723 * u - 1.08563, 1);\n");
+ stringBuilder.append("}\n");
+ stringBuilder.append(genericFragmentSource);
+ } else {
+ final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D";
+ stringBuilder.append("uniform ").append(samplerName).append(" tex;\n");
+
+ // Update the sampling function in-place.
+ stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, "));
+ }
+
+ return stringBuilder.toString();
+ }
+
+ private final String genericFragmentSource;
+ private final String vertexShader;
+ private final ShaderCallbacks shaderCallbacks;
+ @Nullable private ShaderType currentShaderType;
+ @Nullable private GlShader currentShader;
+ private int inPosLocation;
+ private int inTcLocation;
+ private int texMatrixLocation;
+
+ public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks);
+ }
+
+ public GlGenericDrawer(
+ String vertexShader, String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this.vertexShader = vertexShader;
+ this.genericFragmentSource = genericFragmentSource;
+ this.shaderCallbacks = shaderCallbacks;
+ }
+
+ // Visible for testing.
+ GlShader createShader(ShaderType shaderType) {
+ return new GlShader(
+ vertexShader, createFragmentShaderString(genericFragmentSource, shaderType));
+ }
+
+ /**
+ * Draw an OES texture frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ /**
+ * Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
+ * are allocated at the first call to this function.
+ */
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ /**
+ * Draw a YUV frame with specified texture transformation matrix. Required resources are allocated
+ * at the first call to this function.
+ */
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ }
+ // Draw the textures.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the textures as a precaution.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+ }
+
+ private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth,
+ int frameHeight, int viewportWidth, int viewportHeight) {
+ final GlShader shader;
+ if (shaderType.equals(currentShaderType)) {
+      // Same shader type as before, reuse the existing shader.
+ shader = currentShader;
+ } else {
+ // Allocate new shader.
+ currentShaderType = null;
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ }
+
+ shader = createShader(shaderType);
+ currentShaderType = shaderType;
+ currentShader = shader;
+
+ shader.useProgram();
+ // Set input texture units.
+ if (shaderType == ShaderType.YUV) {
+ GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+ } else {
+ GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
+ }
+
+ GlUtil.checkNoGLES2Error("Create shader");
+ shaderCallbacks.onNewShader(shader);
+ texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME);
+ inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME);
+ inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME);
+ }
+
+ shader.useProgram();
+
+ // Upload the vertex coordinates.
+ GLES20.glEnableVertexAttribArray(inPosLocation);
+ GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_BUFFER);
+
+ // Upload the texture coordinates.
+ GLES20.glEnableVertexAttribArray(inTcLocation);
+ GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_TEXTURE_BUFFER);
+
+ // Upload the texture transformation matrix.
+    GLES20.glUniformMatrix4fv(
+        texMatrixLocation, /* count= */ 1, /* transpose= */ false, texMatrix, /* offset= */ 0);
+
+ // Do custom per-frame shader preparation.
+ shaderCallbacks.onPrepareShader(
+ shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ GlUtil.checkNoGLES2Error("Prepare shader");
+ }
+
+ /**
+ * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+ */
+ @Override
+ public void release() {
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ currentShaderType = null;
+ }
+ }
+}
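
As the class comment notes, a generic fragment shader only has to define main() in terms of sample(tc). A hypothetical grayscale drawer built this way (with no-op callbacks, since it needs no extra uniforms) might look like:

    // The shader uses only the provided "sample" function and "tc" varying, so
    // GlGenericDrawer can specialize it for OES, RGB and YUV inputs.
    private static final String GRAYSCALE_FRAGMENT_SOURCE = "void main() {\n"
        + "  vec4 c = sample(tc);\n"
        + "  float gray = dot(c.rgb, vec3(0.299, 0.587, 0.114));\n"
        + "  gl_FragColor = vec4(vec3(gray), c.a);\n"
        + "}\n";

    GlGenericDrawer grayscaleDrawer =
        new GlGenericDrawer(GRAYSCALE_FRAGMENT_SOURCE, new GlGenericDrawer.ShaderCallbacks() {
          @Override
          public void onNewShader(GlShader shader) {}

          @Override
          public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth,
              int frameHeight, int viewportWidth, int viewportHeight) {}
        });
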
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java
new file mode 100644
index 0000000000..abb79c6582
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/** Container for static helper functions related to dealing with H264 codecs. */
+class H264Utils {
+ public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
+ public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
+ public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
+
+ public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0";
+ public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
+ public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
+ public static final String H264_CONSTRAINED_HIGH_3_1 =
+ H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
+ public static final String H264_CONSTRAINED_BASELINE_3_1 =
+ H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
+
+ public static Map<String, String> getDefaultH264Params(boolean isHighProfile) {
+ final Map<String, String> params = new HashMap<>();
+ params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
+ params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
+ params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
+ isHighProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1
+ : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
+ return params;
+ }
+
+  public static final VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC =
+      new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false));
+  public static final VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC =
+      new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true));
+
+ public static boolean isSameH264Profile(
+ Map<String, String> params1, Map<String, String> params2) {
+ return nativeIsSameH264Profile(params1, params2);
+ }
+
+ private static native boolean nativeIsSameH264Profile(
+ Map<String, String> params1, Map<String, String> params2);
+}
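
The fmtp map built by getDefaultH264Params() is what ends up in SDP negotiation: it pins packetization-mode 1, allows level asymmetry, and selects the profile-level-id. A sketch of the resulting contents for the high-profile case, assuming the VideoCodecInfo constants mirror the strings defined above:

    Map<String, String> params = H264Utils.getDefaultH264Params(/* isHighProfile= */ true);
    // params now holds:
    //   "level-asymmetry-allowed" -> "1"
    //   "packetization-mode"      -> "1"
    //   "profile-level-id"        -> "640c1f" (constrained high, level 3.1)
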
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
new file mode 100644
index 0000000000..42a3ccfbfd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
@@ -0,0 +1,763 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Bundle;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.ThreadUtils.ThreadChecker;
+
+/**
+ * Android hardware video encoder.
+ */
+class HardwareVideoEncoder implements VideoEncoder {
+ private static final String TAG = "HardwareVideoEncoder";
+
+ // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
+ // in OMX_Video.h
+ private static final int VIDEO_ControlRateConstant = 2;
+ // Key associated with the bitrate control mode value (above). Not present as a MediaFormat
+ // constant until API level 21.
+ private static final String KEY_BITRATE_MODE = "bitrate-mode";
+
+ private static final int VIDEO_AVC_PROFILE_HIGH = 8;
+ private static final int VIDEO_AVC_LEVEL_3 = 0x100;
+
+ private static final int MAX_VIDEO_FRAMERATE = 30;
+
+ // See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc.
+ private static final int MAX_ENCODER_Q_SIZE = 2;
+
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+ private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
+
+  // The size of the input frames should be a multiple of 16 for the H/W encoder.
+ private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16;
+
+ /**
+ * Keeps track of the number of output buffers that have been passed down the pipeline and not yet
+   * released. We need to wait for this to reach zero before performing operations that invalidate
+   * the output buffers, i.e., stop() and getOutputBuffer().
+ */
+ private static class BusyCount {
+ private final Object countLock = new Object();
+ private int count;
+
+ public void increment() {
+ synchronized (countLock) {
+ count++;
+ }
+ }
+
+ // This method may be called on an arbitrary thread.
+ public void decrement() {
+ synchronized (countLock) {
+ count--;
+ if (count == 0) {
+ countLock.notifyAll();
+ }
+ }
+ }
+
+ // The increment and waitForZero methods are called on the same thread (deliverEncodedImage,
+ // running on the output thread). Hence, after waitForZero returns, the count will stay zero
+ // until the same thread calls increment.
+ public void waitForZero() {
+ boolean wasInterrupted = false;
+ synchronized (countLock) {
+ while (count > 0) {
+ try {
+ countLock.wait();
+ } catch (InterruptedException e) {
+ Logging.e(TAG, "Interrupted while waiting on busy count", e);
+ wasInterrupted = true;
+ }
+ }
+ }
+
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ }
+ }
+ // --- Initialized on construction.
+ private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
+ private final String codecName;
+ private final VideoCodecMimeType codecType;
+ private final Integer surfaceColorFormat;
+ private final Integer yuvColorFormat;
+ private final YuvFormat yuvFormat;
+ private final Map<String, String> params;
+ private final int keyFrameIntervalSec; // Base interval for generating key frames.
+ // Interval at which to force a key frame. Used to reduce color distortions caused by some
+ // Qualcomm video encoders.
+ private final long forcedKeyFrameNs;
+ private final BitrateAdjuster bitrateAdjuster;
+ // EGL context shared with the application. Used to access texture inputs.
+ private final EglBase14.Context sharedContext;
+
+ // Drawer used to draw input textures onto the codec's input surface.
+ private final GlRectDrawer textureDrawer = new GlRectDrawer();
+ private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
+ // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
+ // pre-populated with all the information that can't be sent through MediaCodec.
+ private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>();
+
+ private final ThreadChecker encodeThreadChecker = new ThreadChecker();
+ private final ThreadChecker outputThreadChecker = new ThreadChecker();
+ private final BusyCount outputBuffersBusyCount = new BusyCount();
+
+ // --- Set on initialize and immutable until release.
+ private Callback callback;
+ private boolean automaticResizeOn;
+
+ // --- Valid and immutable while an encoding session is running.
+ @Nullable private MediaCodecWrapper codec;
+ // Thread that delivers encoded frames to the user callback.
+ @Nullable private Thread outputThread;
+
+ // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
+ // input surface. Making this base current allows textures from the context to be drawn onto the
+ // surface.
+ @Nullable private EglBase14 textureEglBase;
+ // Input surface for the codec. The encoder will draw input textures onto this surface.
+ @Nullable private Surface textureInputSurface;
+
+ private int width;
+ private int height;
+  // Y-plane stride in the encoder's input.
+  private int stride;
+  // Y-plane slice height in the encoder's input.
+  private int sliceHeight;
+ private boolean useSurfaceMode;
+
+ // --- Only accessed from the encoding thread.
+ // Presentation timestamp of next frame to encode.
+ private long nextPresentationTimestampUs;
+ // Presentation timestamp of the last requested (or forced) key frame.
+ private long lastKeyFrameNs;
+
+ // --- Only accessed on the output thread.
+ // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
+ @Nullable private ByteBuffer configBuffer;
+ private int adjustedBitrate;
+
+ // Whether the encoder is running. Volatile so that the output thread can watch this value and
+ // exit when the encoder stops.
+ private volatile boolean running;
+ // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
+ // value to send exceptions thrown during release back to the encoder thread.
+ @Nullable private volatile Exception shutdownException;
+
+ /**
+ * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
+ * intervals, and bitrateAdjuster.
+ *
+ * @param codecName the hardware codec implementation to use
+   * @param codecType the type of the given video codec (e.g. VP8, VP9, H264 or AV1)
+ * @param surfaceColorFormat color format for surface mode or null if not available
+ * @param yuvColorFormat color format for bytebuffer mode
+ * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
+ * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
+ * used to reduce distortion caused by some codec implementations
+ * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
+ * desired bitrates
+ * @throws IllegalArgumentException if colorFormat is unsupported
+ */
+ public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
+ Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
+ BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
+ this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
+ this.codecName = codecName;
+ this.codecType = codecType;
+ this.surfaceColorFormat = surfaceColorFormat;
+ this.yuvColorFormat = yuvColorFormat;
+ this.yuvFormat = YuvFormat.valueOf(yuvColorFormat);
+ this.params = params;
+ this.keyFrameIntervalSec = keyFrameIntervalSec;
+ this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
+ this.bitrateAdjuster = bitrateAdjuster;
+ this.sharedContext = sharedContext;
+
+ // Allow construction on a different thread.
+ encodeThreadChecker.detachThread();
+ }
+
+ @Override
+ public VideoCodecStatus initEncode(Settings settings, Callback callback) {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ this.callback = callback;
+ automaticResizeOn = settings.automaticResizeOn;
+
+ if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0
+ || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) {
+ Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned.");
+ return VideoCodecStatus.ERR_SIZE;
+ }
+ this.width = settings.width;
+ this.height = settings.height;
+ useSurfaceMode = canUseSurface();
+
+ if (settings.startBitrate != 0 && settings.maxFramerate != 0) {
+ bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate);
+ }
+ adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
+
+    Logging.d(TAG,
+        "initEncode: " + width + " x " + height + " @ " + settings.startBitrate
+        + " kbps. Fps: " + settings.maxFramerate + ". Use surface mode: " + useSurfaceMode);
+ return initEncodeInternal();
+ }
+
+ private VideoCodecStatus initEncodeInternal() {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ nextPresentationTimestampUs = 0;
+ lastKeyFrameNs = -1;
+
+ try {
+ codec = mediaCodecWrapperFactory.createByCodecName(codecName);
+ } catch (IOException | IllegalArgumentException e) {
+ Logging.e(TAG, "Cannot create media encoder " + codecName);
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat;
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
+ format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ format.setFloat(
+ MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps());
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
+ if (codecType == VideoCodecMimeType.H264) {
+ String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
+ if (profileLevelId == null) {
+ profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1;
+ }
+ switch (profileLevelId) {
+ case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1:
+ format.setInteger("profile", VIDEO_AVC_PROFILE_HIGH);
+ format.setInteger("level", VIDEO_AVC_LEVEL_3);
+ break;
+ case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1:
+ break;
+ default:
+ Logging.w(TAG, "Unknown profile level id: " + profileLevelId);
+ }
+ }
+ Logging.d(TAG, "Format: " + format);
+ codec.configure(
+ format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (useSurfaceMode) {
+ textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE);
+ textureInputSurface = codec.createInputSurface();
+ textureEglBase.createSurface(textureInputSurface);
+ textureEglBase.makeCurrent();
+ }
+
+ MediaFormat inputFormat = codec.getInputFormat();
+ stride = getStride(inputFormat, width);
+ sliceHeight = getSliceHeight(inputFormat, height);
+
+ codec.start();
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "initEncodeInternal failed", e);
+ release();
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ running = true;
+ outputThreadChecker.detachThread();
+ outputThread = createOutputThread();
+ outputThread.start();
+
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ final VideoCodecStatus returnValue;
+ if (outputThread == null) {
+ returnValue = VideoCodecStatus.OK;
+ } else {
+ // The outputThread actually stops and releases the codec once running is false.
+ running = false;
+ if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media encoder release timeout");
+ returnValue = VideoCodecStatus.TIMEOUT;
+ } else if (shutdownException != null) {
+ // Log the exception and turn it into an error.
+ Logging.e(TAG, "Media encoder release exception", shutdownException);
+ returnValue = VideoCodecStatus.ERROR;
+ } else {
+ returnValue = VideoCodecStatus.OK;
+ }
+ }
+
+ textureDrawer.release();
+ videoFrameDrawer.release();
+ if (textureEglBase != null) {
+ textureEglBase.release();
+ textureEglBase = null;
+ }
+ if (textureInputSurface != null) {
+ textureInputSurface.release();
+ textureInputSurface = null;
+ }
+ outputBuilders.clear();
+
+ codec = null;
+ outputThread = null;
+
+ // Allow changing thread after release.
+ encodeThreadChecker.detachThread();
+
+ return returnValue;
+ }
+
+ @Override
+ public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (codec == null) {
+ return VideoCodecStatus.UNINITIALIZED;
+ }
+
+ final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
+ final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer;
+
+ // If input resolution changed, restart the codec with the new resolution.
+ final int frameWidth = videoFrame.getBuffer().getWidth();
+ final int frameHeight = videoFrame.getBuffer().getHeight();
+ final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer;
+ if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) {
+ VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ }
+
+ if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
+ // Too many frames in the encoder. Drop this frame.
+ Logging.e(TAG, "Dropped frame, encoder queue full");
+ return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
+ }
+
+ boolean requestedKeyFrame = false;
+ for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
+ if (frameType == EncodedImage.FrameType.VideoFrameKey) {
+ requestedKeyFrame = true;
+ }
+ }
+
+ if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
+ requestKeyFrame(videoFrame.getTimestampNs());
+ }
+
+ // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
+ // subsampled at one byte per four pixels.
+ int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
+ EncodedImage.Builder builder = EncodedImage.builder()
+ .setCaptureTimeNs(videoFrame.getTimestampNs())
+ .setEncodedWidth(videoFrame.getBuffer().getWidth())
+ .setEncodedHeight(videoFrame.getBuffer().getHeight())
+ .setRotation(videoFrame.getRotation());
+ outputBuilders.offer(builder);
+
+ long presentationTimestampUs = nextPresentationTimestampUs;
+ // Round frame duration down to avoid bitrate overshoot.
+ long frameDurationUs =
+ (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps());
+ nextPresentationTimestampUs += frameDurationUs;
+
+ final VideoCodecStatus returnValue;
+ if (useSurfaceMode) {
+ returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs);
+ } else {
+ returnValue =
+ encodeByteBuffer(videoFrame, presentationTimestampUs, videoFrameBuffer, bufferSize);
+ }
+
+ // Check if the queue was successful.
+ if (returnValue != VideoCodecStatus.OK) {
+ // Keep the output builders in sync with buffers in the codec.
+ outputBuilders.pollLast();
+ }
+
+ return returnValue;
+ }
+
+ private VideoCodecStatus encodeTextureBuffer(
+ VideoFrame videoFrame, long presentationTimestampUs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ try {
+ // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+ // but it's a workaround for bug webrtc:5147.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // It is not necessary to release this frame because it doesn't own the buffer.
+ VideoFrame derotatedFrame =
+ new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
+ videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
+ textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "encodeTexture failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs,
+ VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
+ encodeThreadChecker.checkIsOnValidThread();
+ // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
+ int index;
+ try {
+ index = codec.dequeueInputBuffer(0 /* timeout */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueInputBuffer failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+
+ if (index == -1) {
+ // Encoder is falling behind. No input buffers available. Drop the frame.
+ Logging.d(TAG, "Dropped frame, no input buffers available");
+ return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
+ }
+
+ ByteBuffer buffer;
+ try {
+ buffer = codec.getInputBuffer(index);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ fillInputBuffer(buffer, videoFrameBuffer);
+
+ try {
+ codec.queueInputBuffer(
+ index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "queueInputBuffer failed", e);
+ // IllegalStateException thrown when the codec is in the wrong state.
+ return VideoCodecStatus.ERROR;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (framerate > MAX_VIDEO_FRAMERATE) {
+ framerate = MAX_VIDEO_FRAMERATE;
+ }
+ bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRates(RateControlParameters rcParameters) {
+ encodeThreadChecker.checkIsOnValidThread();
+ bitrateAdjuster.setTargets(rcParameters.bitrate.getSum(), rcParameters.framerateFps);
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public ScalingSettings getScalingSettings() {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (automaticResizeOn) {
+ if (codecType == VideoCodecMimeType.VP8) {
+ final int kLowVp8QpThreshold = 29;
+ final int kHighVp8QpThreshold = 95;
+ return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold);
+ } else if (codecType == VideoCodecMimeType.H264) {
+ final int kLowH264QpThreshold = 24;
+ final int kHighH264QpThreshold = 37;
+ return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
+ }
+ }
+ return ScalingSettings.OFF;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return codecName;
+ }
+
+ @Override
+ public EncoderInfo getEncoderInfo() {
+    // Since our MediaCodec is guaranteed to encode 16-pixel-aligned frames only, we set the
+    // alignment value to 16. Additionally, this encoder produces a single stream, so it does not
+    // require alignment across all simulcast layers.
+ return new EncoderInfo(
+ /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT,
+ /* applyAlignmentToAllSimulcastLayers= */ false);
+ }
+
+ private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) {
+ encodeThreadChecker.checkIsOnValidThread();
+ VideoCodecStatus status = release();
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+
+ if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0
+ || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) {
+ Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned.");
+ return VideoCodecStatus.ERR_SIZE;
+ }
+ width = newWidth;
+ height = newHeight;
+ useSurfaceMode = newUseSurfaceMode;
+ return initEncodeInternal();
+ }
+
+ private boolean shouldForceKeyFrame(long presentationTimestampNs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
+ }
+
+ private void requestKeyFrame(long presentationTimestampNs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
+ // indicate this in queueInputBuffer() below and guarantee _this_ frame
+ // be encoded as a key frame, but sadly that flag is ignored. Instead,
+ // we request a key frame "soon".
+ try {
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ codec.setParameters(b);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "requestKeyFrame failed", e);
+ return;
+ }
+ lastKeyFrameNs = presentationTimestampNs;
+ }
+
+ private Thread createOutputThread() {
+ return new Thread() {
+ @Override
+ public void run() {
+ while (running) {
+ deliverEncodedImage();
+ }
+ releaseCodecOnOutputThread();
+ }
+ };
+ }
+
+ // Visible for testing.
+ protected void deliverEncodedImage() {
+ outputThreadChecker.checkIsOnValidThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
+ if (index < 0) {
+ if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ outputBuffersBusyCount.waitForZero();
+ }
+ return;
+ }
+
+ ByteBuffer codecOutputBuffer = codec.getOutputBuffer(index);
+ codecOutputBuffer.position(info.offset);
+ codecOutputBuffer.limit(info.offset + info.size);
+
+ if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
+ configBuffer = ByteBuffer.allocateDirect(info.size);
+ configBuffer.put(codecOutputBuffer);
+ } else {
+ bitrateAdjuster.reportEncodedFrame(info.size);
+ if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
+ updateBitrate();
+ }
+
+ final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+ if (isKeyFrame) {
+ Logging.d(TAG, "Sync frame generated");
+ }
+
+ final ByteBuffer frameBuffer;
+ if (isKeyFrame && codecType == VideoCodecMimeType.H264) {
+ Logging.d(TAG,
+ "Prepending config frame of size " + configBuffer.capacity()
+ + " to output buffer with offset " + info.offset + ", size " + info.size);
+          // For H.264 key frames, prepend the SPS and PPS NALs at the start.
+ frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
+ configBuffer.rewind();
+ frameBuffer.put(configBuffer);
+ frameBuffer.put(codecOutputBuffer);
+ frameBuffer.rewind();
+ } else {
+ frameBuffer = codecOutputBuffer.slice();
+ }
+
+ final EncodedImage.FrameType frameType = isKeyFrame
+ ? EncodedImage.FrameType.VideoFrameKey
+ : EncodedImage.FrameType.VideoFrameDelta;
+
+ outputBuffersBusyCount.increment();
+ EncodedImage.Builder builder = outputBuilders.poll();
+ EncodedImage encodedImage = builder
+ .setBuffer(frameBuffer,
+ () -> {
+ // This callback should not throw any exceptions since
+ // it may be called on an arbitrary thread.
+ // Check bug webrtc:11230 for more details.
+ try {
+ codec.releaseOutputBuffer(index, false);
+ } catch (Exception e) {
+ Logging.e(TAG, "releaseOutputBuffer failed", e);
+ }
+ outputBuffersBusyCount.decrement();
+ })
+ .setFrameType(frameType)
+ .createEncodedImage();
+ // TODO(mellem): Set codec-specific info.
+ callback.onEncodedFrame(encodedImage, new CodecSpecificInfo());
+ // Note that the callback may have retained the image.
+ encodedImage.release();
+ }
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "deliverOutput failed", e);
+ }
+ }
+
+ private void releaseCodecOnOutputThread() {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Releasing MediaCodec on output thread");
+ outputBuffersBusyCount.waitForZero();
+ try {
+ codec.stop();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder stop failed", e);
+ }
+ try {
+ codec.release();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder release failed", e);
+ // Propagate exceptions caught during release back to the main thread.
+ shutdownException = e;
+ }
+ configBuffer = null;
+ Logging.d(TAG, "Release on output thread done");
+ }
+
+ private VideoCodecStatus updateBitrate() {
+ outputThreadChecker.checkIsOnValidThread();
+ adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
+ try {
+ Bundle params = new Bundle();
+ params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
+ codec.setParameters(params);
+ return VideoCodecStatus.OK;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "updateBitrate failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ }
+
+ private boolean canUseSurface() {
+ return sharedContext != null && surfaceColorFormat != null;
+ }
+
+ private static int getStride(MediaFormat inputFormat, int width) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null
+ && inputFormat.containsKey(MediaFormat.KEY_STRIDE)) {
+ return inputFormat.getInteger(MediaFormat.KEY_STRIDE);
+ }
+ return width;
+ }
+
+ private static int getSliceHeight(MediaFormat inputFormat, int height) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null
+ && inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
+ return inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
+ }
+ return height;
+ }
+
+ // Visible for testing.
+ protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer videoFrameBuffer) {
+ yuvFormat.fillBuffer(buffer, videoFrameBuffer, stride, sliceHeight);
+ }
+
+ /**
+ * Enumeration of supported YUV color formats used for MediaCodec's input.
+ */
+ private enum YuvFormat {
+ I420 {
+ @Override
+ void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) {
+        /*
+         * According to the Android MediaCodec docs, the stride of the U and V planes can be
+         * calculated based on the color format, though it is generally undefined and depends on
+         * the device and release. Assuming the width, height, dstStrideY and dstSliceHeightY are
+         * even, it works fine to define the stride and slice height of the dst U/V planes as
+         * half of those of the dst Y plane.
+         */
+ int dstStrideU = dstStrideY / 2;
+ int dstSliceHeight = dstSliceHeightY / 2;
+ VideoFrame.I420Buffer i420 = srcBuffer.toI420();
+ YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+ i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(),
+ dstStrideY, dstSliceHeightY, dstStrideU, dstSliceHeight);
+ i420.release();
+ }
+ },
+ NV12 {
+ @Override
+ void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) {
+ VideoFrame.I420Buffer i420 = srcBuffer.toI420();
+ YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+ i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(),
+ dstStrideY, dstSliceHeightY);
+ i420.release();
+ }
+ };
+
+ abstract void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY);
+
+ static YuvFormat valueOf(int colorFormat) {
+ switch (colorFormat) {
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
+ return I420;
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
+ case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+ return NV12;
+ default:
+ throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
+ }
+ }
+ }
+}
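
One piece of HardwareVideoEncoder worth isolating is the BusyCount helper defined near its top: a count-up/wait-for-zero latch that keeps stop() from invalidating output buffers still held downstream. A self-contained sketch of the same pattern (illustrative only, not part of the WebRTC API):

    final Object lock = new Object();
    final int[] busy = {0};

    // Hand out a resource (called on the output thread).
    synchronized (lock) {
      busy[0]++;
    }

    // Return a resource (may run on any thread).
    Runnable release = () -> {
      synchronized (lock) {
        if (--busy[0] == 0) {
          lock.notifyAll();
        }
      }
    };

    // Wait until every handed-out resource is back before invalidating them.
    boolean wasInterrupted = false;
    synchronized (lock) {
      while (busy[0] > 0) {
        try {
          lock.wait();
        } catch (InterruptedException e) {
          wasInterrupted = true; // Keep waiting; restore the interrupt afterwards.
        }
      }
    }
    if (wasInterrupted) {
      Thread.currentThread().interrupt();
    }
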
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java
new file mode 100644
index 0000000000..c1d2d61a71
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Class for holding the native pointer of a histogram. Since there is no way to destroy a
+ * histogram, please don't create unnecessary instances of this object. This class is thread safe.
+ *
+ * Usage example:
+ * private static final Histogram someMetricHistogram =
+ * Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50);
+ * someMetricHistogram.addSample(someVariable);
+ */
+class Histogram {
+ private final long handle;
+
+ private Histogram(long handle) {
+ this.handle = handle;
+ }
+
+  public static Histogram createCounts(String name, int min, int max, int bucketCount) {
+ return new Histogram(0);
+ }
+
+  public static Histogram createEnumeration(String name, int max) {
+ return new Histogram(0);
+ }
+
+ public void addSample(int sample) {
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java
new file mode 100644
index 0000000000..f391db61a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CalledByNative;
+import org.webrtc.Loggable;
+import org.webrtc.Logging.Severity;
+
+class JNILogging {
+ private final Loggable loggable;
+
+ public JNILogging(Loggable loggable) {
+ this.loggable = loggable;
+ }
+
+ @CalledByNative
+ public void logToInjectable(String message, Integer severity, String tag) {
+ loggable.onLogMessage(message, Severity.values()[severity], tag);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java
new file mode 100644
index 0000000000..e1b2e513d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Class with static JNI helper functions that are used in many places. */
+public class JniCommon {
+ /** Functions to increment/decrement an rtc::RefCountInterface pointer. */
+ public static native void nativeAddRef(long refCountedPointer);
+ public static native void nativeReleaseRef(long refCountedPointer);
+
+ public static native ByteBuffer nativeAllocateByteBuffer(int size);
+ public static native void nativeFreeByteBuffer(ByteBuffer buffer);
+}
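
Since nativeAllocateByteBuffer() hands back a direct buffer whose backing memory is owned by native code, it must be paired with nativeFreeByteBuffer() rather than left to the garbage collector. A typical usage sketch, assuming the native library is already loaded:

    ByteBuffer buffer = JniCommon.nativeAllocateByteBuffer(4096);
    try {
      // ... fill or read the buffer ...
    } finally {
      JniCommon.nativeFreeByteBuffer(buffer);
    }
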
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
new file mode 100644
index 0000000000..d5ccae9688
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Container class for static constants and helpers used with MediaCodec. */
+// We are forced to use the old API because we want to support API level < 21.
+@SuppressWarnings("deprecation")
+class MediaCodecUtils {
+ private static final String TAG = "MediaCodecUtils";
+
+ // Prefixes for supported hardware encoder/decoder component names.
+ static final String EXYNOS_PREFIX = "OMX.Exynos.";
+ static final String INTEL_PREFIX = "OMX.Intel.";
+ static final String NVIDIA_PREFIX = "OMX.Nvidia.";
+ static final String QCOM_PREFIX = "OMX.qcom.";
+ static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = {
+ "OMX.google.", "OMX.SEC.", "c2.android"};
+
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
+ static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+
+ // Color formats supported by hardware decoder - in order of preference.
+ static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+
+ // Color formats supported by hardware encoder - in order of preference.
+ static final int[] ENCODER_COLOR_FORMATS = {
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+
+ // Color formats supported by texture mode encoding - in order of preference.
+ static final int[] TEXTURE_COLOR_FORMATS =
+ new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
+
+ static @Nullable Integer selectColorFormat(
+ int[] supportedColorFormats, CodecCapabilities capabilities) {
+ for (int supportedColorFormat : supportedColorFormats) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ return codecColorFormat;
+ }
+ }
+ }
+ return null;
+ }
+
+ static boolean codecSupportsType(MediaCodecInfo info, VideoCodecMimeType type) {
+ for (String mimeType : info.getSupportedTypes()) {
+ if (type.mimeType().equals(mimeType)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static Map<String, String> getCodecProperties(VideoCodecMimeType type, boolean highProfile) {
+ switch (type) {
+ case VP8:
+ case VP9:
+ case AV1:
+ return new HashMap<String, String>();
+ case H264:
+ return H264Utils.getDefaultH264Params(highProfile);
+ default:
+ throw new IllegalArgumentException("Unsupported codec: " + type);
+ }
+ }
+
+ static boolean isHardwareAccelerated(MediaCodecInfo info) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isHardwareAcceleratedQOrHigher(info);
+ }
+ return !isSoftwareOnly(info);
+ }
+
+ @TargetApi(29)
+ private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isHardwareAccelerated();
+ }
+
+ static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isSoftwareOnlyQOrHigher(codecInfo);
+ }
+ String name = codecInfo.getName();
+ for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) {
+ if (name.startsWith(prefix)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @TargetApi(29)
+ private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isSoftwareOnly();
+ }
+
+ private MediaCodecUtils() {
+ // This class should not be instantiated.
+ }
+}
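selectColorFormat() walks the preference array in order and returns the first entry the codec also advertises, or null if there is no overlap. A minimal sketch, assuming `info` is a MediaCodecInfo for an H.264 decoder:

    @Nullable
    Integer pickDecoderColorFormat(MediaCodecInfo info) {
      CodecCapabilities caps =
          info.getCapabilitiesForType(VideoCodecMimeType.H264.mimeType());
      // First preferred format also advertised by the codec, or null.
      return MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, caps);
    }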
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
new file mode 100644
index 0000000000..bf591dda26
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
+import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Factory for decoders backed by Android MediaCodec API. */
+@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods.
+class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
+ private static final String TAG = "MediaCodecVideoDecoderFactory";
+
+ private final @Nullable EglBase.Context sharedContext;
+ private final @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate;
+
+ /**
+ * MediaCodecVideoDecoderFactory with support of codecs filtering.
+ *
+   * @param sharedContext The textures generated will be accessible from this context. May be null,
+   *     in which case texture support is disabled.
+   * @param codecAllowedPredicate optional predicate to test if a codec is allowed. All codecs are
+   *     allowed when no predicate is provided.
+ */
+ public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
+ @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) {
+ this.sharedContext = sharedContext;
+ this.codecAllowedPredicate = codecAllowedPredicate;
+ }
+
+ @Nullable
+ @Override
+ public VideoDecoder createDecoder(VideoCodecInfo codecType) {
+ VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName());
+ MediaCodecInfo info = findCodecForType(type);
+
+ if (info == null) {
+ return null;
+ }
+
+ CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
+ return new AndroidVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type,
+ MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
+ sharedContext);
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
+    // Generate a list of supported codecs in order of preference:
+    // VP8, VP9, H264 (high profile), H264 (baseline profile), and AV1.
+ for (VideoCodecMimeType type : new VideoCodecMimeType[] {VideoCodecMimeType.VP8,
+ VideoCodecMimeType.VP9, VideoCodecMimeType.H264, VideoCodecMimeType.AV1}) {
+ MediaCodecInfo codec = findCodecForType(type);
+ if (codec != null) {
+ String name = type.name();
+ if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
+ }
+
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
+ }
+ }
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+
+ private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = null;
+ try {
+ info = MediaCodecList.getCodecInfoAt(i);
+ } catch (IllegalArgumentException e) {
+ Logging.e(TAG, "Cannot retrieve decoder codec info", e);
+ }
+
+ if (info == null || info.isEncoder()) {
+ continue;
+ }
+
+ if (isSupportedCodec(info, type)) {
+ return info;
+ }
+ }
+
+ return null; // No support for this type.
+ }
+
+  // Returns true if the given MediaCodecInfo indicates a supported decoder for the given type.
+ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) {
+ if (!MediaCodecUtils.codecSupportsType(info, type)) {
+ return false;
+ }
+ // Check for a supported color format.
+ if (MediaCodecUtils.selectColorFormat(
+ MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
+ == null) {
+ return false;
+ }
+ return isCodecAllowed(info);
+ }
+
+ private boolean isCodecAllowed(MediaCodecInfo info) {
+ if (codecAllowedPredicate == null) {
+ return true;
+ }
+ return codecAllowedPredicate.test(info);
+ }
+
+ private boolean isH264HighProfileSupported(MediaCodecInfo info) {
+ String name = info.getName();
+ // Support H.264 HP decoding on QCOM chips.
+ if (name.startsWith(QCOM_PREFIX)) {
+ return true;
+ }
+ // Support H.264 HP decoding on Exynos chips for Android M and above.
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && name.startsWith(EXYNOS_PREFIX)) {
+ return true;
+ }
+ return false;
+ }
+}
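The allow-predicate makes it straightforward to narrow the factory to a subset of codecs. A minimal sketch that admits only hardware-accelerated decoders and passes a null context to disable texture output:

    // Sketch: only hardware decoders; null sharedContext disables textures.
    VideoDecoderFactory factory = new MediaCodecVideoDecoderFactory(
        /* sharedContext= */ null, MediaCodecUtils::isHardwareAccelerated);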
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java
new file mode 100644
index 0000000000..60c853df35
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import java.nio.ByteBuffer;
+
+/**
+ * Subset of methods defined in {@link android.media.MediaCodec} needed by
+ * {@link HardwareVideoEncoder} and {@link AndroidVideoDecoder}. This interface
+ * exists to allow mocking and using a fake implementation in tests.
+ */
+interface MediaCodecWrapper {
+ void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags);
+
+ void start();
+
+ void flush();
+
+ void stop();
+
+ void release();
+
+ int dequeueInputBuffer(long timeoutUs);
+
+ void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags);
+
+ int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs);
+
+ void releaseOutputBuffer(int index, boolean render);
+
+ MediaFormat getInputFormat();
+
+ MediaFormat getOutputFormat();
+
+ ByteBuffer getInputBuffer(int index);
+
+ ByteBuffer getOutputBuffer(int index);
+
+ Surface createInputSurface();
+
+ void setParameters(Bundle params);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java
new file mode 100644
index 0000000000..2962cb62a7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.io.IOException;
+
+interface MediaCodecWrapperFactory {
+ /**
+ * Creates a new {@link MediaCodecWrapper} by codec name.
+ *
+ * <p>For additional information see {@link android.media.MediaCodec#createByCodecName}.
+ */
+ MediaCodecWrapper createByCodecName(String name) throws IOException;
+}
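Since the interface exists for test injection, a fake factory can hand back a canned wrapper instead of creating a real codec. A minimal sketch, where the fake wrapper itself is assumed to be built elsewhere in the test:

    // Sketch: test-only factory that never touches the real MediaCodec.
    class FakeMediaCodecWrapperFactory implements MediaCodecWrapperFactory {
      private final MediaCodecWrapper fakeWrapper; // Built by the test.

      FakeMediaCodecWrapperFactory(MediaCodecWrapper fakeWrapper) {
        this.fakeWrapper = fakeWrapper;
      }

      @Override
      public MediaCodecWrapper createByCodecName(String name) {
        return fakeWrapper; // No real codec is created.
      }
    }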
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
new file mode 100644
index 0000000000..2ba62ac7d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Implementation of MediaCodecWrapperFactory that returns MediaCodecWrapper instances wrapping
+ * {@link android.media.MediaCodec} objects.
+ */
+class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
+ private static class MediaCodecWrapperImpl implements MediaCodecWrapper {
+ private final MediaCodec mediaCodec;
+
+ public MediaCodecWrapperImpl(MediaCodec mediaCodec) {
+ this.mediaCodec = mediaCodec;
+ }
+
+ @Override
+ public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
+ mediaCodec.configure(format, surface, crypto, flags);
+ }
+
+ @Override
+ public void start() {
+ mediaCodec.start();
+ }
+
+ @Override
+ public void flush() {
+ mediaCodec.flush();
+ }
+
+ @Override
+ public void stop() {
+ mediaCodec.stop();
+ }
+
+ @Override
+ public void release() {
+ mediaCodec.release();
+ }
+
+ @Override
+ public int dequeueInputBuffer(long timeoutUs) {
+ return mediaCodec.dequeueInputBuffer(timeoutUs);
+ }
+
+ @Override
+ public void queueInputBuffer(
+ int index, int offset, int size, long presentationTimeUs, int flags) {
+ mediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
+ }
+
+ @Override
+ public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) {
+ return mediaCodec.dequeueOutputBuffer(info, timeoutUs);
+ }
+
+ @Override
+ public void releaseOutputBuffer(int index, boolean render) {
+ mediaCodec.releaseOutputBuffer(index, render);
+ }
+
+ @Override
+ public MediaFormat getInputFormat() {
+ return mediaCodec.getInputFormat();
+ }
+
+ @Override
+ public MediaFormat getOutputFormat() {
+ return mediaCodec.getOutputFormat();
+ }
+
+ @Override
+ public ByteBuffer getInputBuffer(int index) {
+ return mediaCodec.getInputBuffer(index);
+ }
+
+ @Override
+ public ByteBuffer getOutputBuffer(int index) {
+ return mediaCodec.getOutputBuffer(index);
+ }
+
+ @Override
+ public Surface createInputSurface() {
+ return mediaCodec.createInputSurface();
+ }
+
+ @Override
+ public void setParameters(Bundle params) {
+ mediaCodec.setParameters(params);
+ }
+ }
+
+ @Override
+ public MediaCodecWrapper createByCodecName(String name) throws IOException {
+ return new MediaCodecWrapperImpl(MediaCodec.createByCodecName(name));
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java
new file mode 100644
index 0000000000..fe0221d826
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+public class NV12Buffer implements VideoFrame.Buffer {
+ private final int width;
+ private final int height;
+ private final int stride;
+ private final int sliceHeight;
+ private final ByteBuffer buffer;
+ private final RefCountDelegate refCountDelegate;
+
+ public NV12Buffer(int width, int height, int stride, int sliceHeight, ByteBuffer buffer,
+ @Nullable Runnable releaseCallback) {
+ this.width = width;
+ this.height = height;
+ this.stride = stride;
+ this.sliceHeight = sliceHeight;
+ this.buffer = buffer;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ return (VideoFrame.I420Buffer) cropAndScale(0, 0, width, height, width, height);
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+ nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, buffer, width,
+ height, stride, sliceHeight, newBuffer.getDataY(), newBuffer.getStrideY(),
+ newBuffer.getDataU(), newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
+ return newBuffer;
+ }
+
+ private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+ int scaleWidth, int scaleHeight, ByteBuffer src, int srcWidth, int srcHeight, int srcStride,
+ int srcSliceHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, int dstStrideU,
+ ByteBuffer dstV, int dstStrideV);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java
new file mode 100644
index 0000000000..0fb1afe74b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+public class NV21Buffer implements VideoFrame.Buffer {
+ private final byte[] data;
+ private final int width;
+ private final int height;
+ private final RefCountDelegate refCountDelegate;
+
+ public NV21Buffer(byte[] data, int width, int height, @Nullable Runnable releaseCallback) {
+ this.data = data;
+ this.width = width;
+ this.height = height;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ // Cropping converts the frame to I420. Just crop and scale to the whole image.
+ return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
+ height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+ nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
+ height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
+ newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
+ return newBuffer;
+ }
+
+ private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+ int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
+ int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
+}
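The release callback is the hook for handing the byte array back to its producer. A minimal sketch for the android.hardware.Camera preview path, where `width`, `height` and `timestampNs` are assumed to come from the surrounding capture code:

    // Sketch: inside android.hardware.Camera.PreviewCallback#onPreviewFrame().
    void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
      NV21Buffer buffer =
          new NV21Buffer(data, width, height, () -> camera.addCallbackBuffer(data));
      VideoFrame frame = new VideoFrame(buffer, /* rotation= */ 0, timestampNs);
      // ... hand the frame to a sink ...
      frame.release(); // Runs the callback once the ref count reaches zero.
    }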
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java
new file mode 100644
index 0000000000..d4fba481e8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoProcessor;
+
+/**
+ * This class is meant to be a simple layer that only handles the JNI wrapping of a C++
+ * AndroidVideoTrackSource, so that it can easily be mocked out in Java unit tests. Refrain from
+ * adding any unnecessary logic to this class.
+ * This class is thread safe and methods can be called from any thread, but if frames A, B, ..., are
+ * sent to adaptFrame(), the adapted frames adaptedA, adaptedB, ..., need to be passed in the same
+ * order to onFrameCaptured().
+ */
+class NativeAndroidVideoTrackSource {
+ // Pointer to webrtc::jni::AndroidVideoTrackSource.
+ private final long nativeAndroidVideoTrackSource;
+
+ public NativeAndroidVideoTrackSource(long nativeAndroidVideoTrackSource) {
+ this.nativeAndroidVideoTrackSource = nativeAndroidVideoTrackSource;
+ }
+
+ /**
+ * Set the state for the native MediaSourceInterface. Maps boolean to either
+ * SourceState::kLive or SourceState::kEnded.
+ */
+ public void setState(boolean isLive) {
+ nativeSetState(nativeAndroidVideoTrackSource, isLive);
+ }
+
+ /**
+ * This function should be called before delivering any frame to determine if the frame should be
+ * dropped or what the cropping and scaling parameters should be. If the return value is null, the
+   * frame should be dropped; otherwise, the frame should be adapted in accordance with the frame
+ * adaptation parameters before calling onFrameCaptured().
+ */
+ @Nullable
+ public VideoProcessor.FrameAdaptationParameters adaptFrame(VideoFrame frame) {
+ return nativeAdaptFrame(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(),
+ frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs());
+ }
+
+ /**
+ * Pass an adapted frame to the native AndroidVideoTrackSource. Note that adaptFrame() is
+ * expected to be called first and that the passed frame conforms to those parameters.
+ */
+ public void onFrameCaptured(VideoFrame frame) {
+ nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getRotation(),
+ frame.getTimestampNs(), frame.getBuffer());
+ }
+
+ /**
+ * Calling this function will cause frames to be scaled down to the requested resolution. Also,
+ * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
+ * the requested fps.
+ */
+ public void adaptOutputFormat(VideoSource.AspectRatio targetLandscapeAspectRatio,
+ @Nullable Integer maxLandscapePixelCount, VideoSource.AspectRatio targetPortraitAspectRatio,
+ @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+ nativeAdaptOutputFormat(nativeAndroidVideoTrackSource, targetLandscapeAspectRatio.width,
+ targetLandscapeAspectRatio.height, maxLandscapePixelCount, targetPortraitAspectRatio.width,
+ targetPortraitAspectRatio.height, maxPortraitPixelCount, maxFps);
+ }
+
+ public void setIsScreencast(boolean isScreencast) {
+ nativeSetIsScreencast(nativeAndroidVideoTrackSource, isScreencast);
+ }
+
+ @CalledByNative
+ static VideoProcessor.FrameAdaptationParameters createFrameAdaptationParameters(int cropX,
+ int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight, long timestampNs,
+ boolean drop) {
+ return new VideoProcessor.FrameAdaptationParameters(
+ cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, timestampNs, drop);
+ }
+
+ private static native void nativeSetIsScreencast(
+ long nativeAndroidVideoTrackSource, boolean isScreencast);
+ private static native void nativeSetState(long nativeAndroidVideoTrackSource, boolean isLive);
+ private static native void nativeAdaptOutputFormat(long nativeAndroidVideoTrackSource,
+ int landscapeWidth, int landscapeHeight, @Nullable Integer maxLandscapePixelCount,
+ int portraitWidth, int portraitHeight, @Nullable Integer maxPortraitPixelCount,
+ @Nullable Integer maxFps);
+ @Nullable
+ private static native VideoProcessor.FrameAdaptationParameters nativeAdaptFrame(
+ long nativeAndroidVideoTrackSource, int width, int height, int rotation, long timestampNs);
+ private static native void nativeOnFrameCaptured(
+ long nativeAndroidVideoTrackSource, int rotation, long timestampNs, VideoFrame.Buffer buffer);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java
new file mode 100644
index 0000000000..c195fb3a4c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoFrame;
+
+/**
+ * Used from the native API and implements a simple CapturerObserver that feeds frames to
+ * a webrtc::jni::AndroidVideoTrackSource.
+ */
+class NativeCapturerObserver implements CapturerObserver {
+ private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
+
+ @CalledByNative
+ public NativeCapturerObserver(long nativeSource) {
+ this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
+ }
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeAndroidVideoTrackSource.setState(success);
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
+ }
+
+ @Override
+ public void onFrameCaptured(VideoFrame frame) {
+ final VideoProcessor.FrameAdaptationParameters parameters =
+ nativeAndroidVideoTrackSource.adaptFrame(frame);
+ if (parameters == null) {
+ // Drop frame.
+ return;
+ }
+
+ final VideoFrame.Buffer adaptedBuffer =
+ frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
+ parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
+ nativeAndroidVideoTrackSource.onFrameCaptured(
+ new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs));
+ adaptedBuffer.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java
new file mode 100644
index 0000000000..531c216302
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+class NativeLibrary {
+  private static final String TAG = "NativeLibrary";
+
+ static class DefaultLoader implements NativeLibraryLoader {
+ @Override
+ public boolean load(String name) {
+ Logging.d(TAG, "Loading library: " + name);
+ System.loadLibrary(name);
+
+      // The return value is not meaningful here: System.loadLibrary() throws on
+      // failure, but it is kept for compatibility with NativeLibraryLoader.
+ return true;
+ }
+ }
+
+  private static final Object lock = new Object();
+ private static boolean libraryLoaded;
+
+ /**
+ * Loads the native library. Clients should call PeerConnectionFactory.initialize. It will call
+ * this method for them.
+ */
+ static void initialize(NativeLibraryLoader loader, String libraryName) {
+ synchronized (lock) {
+ if (libraryLoaded) {
+ Logging.d(TAG, "Native library has already been loaded.");
+ return;
+ }
+ Logging.d(TAG, "Loading native library: " + libraryName);
+ libraryLoaded = loader.load(libraryName);
+ }
+ }
+
+ /** Returns true if the library has been loaded successfully. */
+ static boolean isLoaded() {
+ synchronized (lock) {
+ return libraryLoaded;
+ }
+ }
+}
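A custom NativeLibraryLoader can replace DefaultLoader when crashing on a missing library is not acceptable; it is typically wired in through PeerConnectionFactory's initialization options. A minimal sketch:

    // Sketch: report failure instead of letting loadLibrary() crash.
    class SafeLoader implements NativeLibraryLoader {
      @Override
      public boolean load(String name) {
        try {
          System.loadLibrary(name);
          return true;
        } catch (UnsatisfiedLinkError e) {
          return false; // Caller can fall back or surface an error.
        }
      }
    }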
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java
new file mode 100644
index 0000000000..b9210d26a4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Implementation of RefCounted that executes a Runnable once the ref count reaches zero.
+ */
+class RefCountDelegate implements RefCounted {
+ private final AtomicInteger refCount = new AtomicInteger(1);
+ private final @Nullable Runnable releaseCallback;
+
+ /**
+ * @param releaseCallback Callback that will be executed once the ref count reaches zero.
+ */
+ public RefCountDelegate(@Nullable Runnable releaseCallback) {
+ this.releaseCallback = releaseCallback;
+ }
+
+ @Override
+ public void retain() {
+    int updatedCount = refCount.incrementAndGet();
+    if (updatedCount < 2) {
+ throw new IllegalStateException("retain() called on an object with refcount < 1");
+ }
+ }
+
+ @Override
+ public void release() {
+    int updatedCount = refCount.decrementAndGet();
+    if (updatedCount < 0) {
+      throw new IllegalStateException("release() called on an object with refcount < 1");
+    }
+    if (updatedCount == 0 && releaseCallback != null) {
+ releaseCallback.run();
+ }
+ }
+
+ /**
+ * Tries to retain the object. Can be used in scenarios where it is unknown if the object has
+ * already been released. Returns true if successful or false if the object was already released.
+ */
+ boolean safeRetain() {
+ int currentRefCount = refCount.get();
+ while (currentRefCount != 0) {
+ if (refCount.weakCompareAndSet(currentRefCount, currentRefCount + 1)) {
+ return true;
+ }
+ currentRefCount = refCount.get();
+ }
+ return false;
+ }
+}
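safeRetain() is the racy-retain escape hatch: unlike retain(), it reports failure instead of throwing when the count has already reached zero. A minimal usage sketch:

    // Sketch: conditional use of a possibly-released resource.
    boolean tryUse(RefCountDelegate delegate) {
      if (!delegate.safeRetain()) {
        return false; // Already released on another thread; don't touch it.
      }
      try {
        // The underlying resource is guaranteed alive here.
        return true;
      } finally {
        delegate.release();
      }
    }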
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
new file mode 100644
index 0000000000..26a030919d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Enumeration of supported video codec types. */
+enum VideoCodecMimeType {
+ VP8("video/x-vnd.on2.vp8"),
+ VP9("video/x-vnd.on2.vp9"),
+ H264("video/avc"),
+ AV1("video/av01");
+
+ private final String mimeType;
+
+ private VideoCodecMimeType(String mimeType) {
+ this.mimeType = mimeType;
+ }
+
+ String mimeType() {
+ return mimeType;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java
new file mode 100644
index 0000000000..2aae041640
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoDecoder;
+
+/**
+ * This class contains the Java glue code for JNI generation of VideoDecoder.
+ */
+class VideoDecoderWrapper {
+ @CalledByNative
+ static VideoDecoder.Callback createDecoderCallback(final long nativeDecoder) {
+ return (VideoFrame frame, Integer decodeTimeMs,
+ Integer qp) -> nativeOnDecodedFrame(nativeDecoder, frame, decodeTimeMs, qp);
+ }
+
+ private static native void nativeOnDecodedFrame(
+ long nativeVideoDecoderWrapper, VideoFrame frame, Integer decodeTimeMs, Integer qp);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
new file mode 100644
index 0000000000..b5485d4edb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+// Explicit imports necessary for JNI generation.
+import androidx.annotation.Nullable;
+import org.webrtc.VideoEncoder;
+
+/**
+ * This class contains the Java glue code for JNI generation of VideoEncoder.
+ */
+class VideoEncoderWrapper {
+ @CalledByNative
+ static boolean getScalingSettingsOn(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.on;
+ }
+
+ @Nullable
+ @CalledByNative
+ static Integer getScalingSettingsLow(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.low;
+ }
+
+ @Nullable
+ @CalledByNative
+ static Integer getScalingSettingsHigh(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.high;
+ }
+
+ @CalledByNative
+ static VideoEncoder.Callback createEncoderCallback(final long nativeEncoder) {
+ return (EncodedImage frame,
+ VideoEncoder.CodecSpecificInfo info) -> nativeOnEncodedFrame(nativeEncoder, frame);
+ }
+
+ private static native void nativeOnEncodedFrame(
+ long nativeVideoEncoderWrapper, EncodedImage frame);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java
new file mode 100644
index 0000000000..023e92cfb1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * This class provides a ClassLoader that is capable of loading WebRTC Java classes regardless of
+ * what thread it's called from. Such a ClassLoader is needed for the few cases where the JNI
+ * mechanism is unable to automatically determine the appropriate ClassLoader instance.
+ */
+class WebRtcClassLoader {
+ @CalledByNative
+ static Object getClassLoader() {
+ Object loader = WebRtcClassLoader.class.getClassLoader();
+ if (loader == null) {
+ throw new RuntimeException("Failed to get WebRTC class loader.");
+ }
+ return loader;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java
new file mode 100644
index 0000000000..0461660fcf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/**
+ * This class wraps a webrtc::I420BufferInterface into a VideoFrame.I420Buffer.
+ */
+class WrappedNativeI420Buffer implements VideoFrame.I420Buffer {
+ private final int width;
+ private final int height;
+ private final ByteBuffer dataY;
+ private final int strideY;
+ private final ByteBuffer dataU;
+ private final int strideU;
+ private final ByteBuffer dataV;
+ private final int strideV;
+ private final long nativeBuffer;
+
+ @CalledByNative
+ WrappedNativeI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
+ int strideU, ByteBuffer dataV, int strideV, long nativeBuffer) {
+ this.width = width;
+ this.height = height;
+ this.dataY = dataY;
+ this.strideY = strideY;
+ this.dataU = dataU;
+ this.strideU = strideU;
+ this.dataV = dataV;
+ this.strideV = strideV;
+ this.nativeBuffer = nativeBuffer;
+
+ retain();
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public ByteBuffer getDataY() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataY.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataU() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataU.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataV() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataV.slice();
+ }
+
+ @Override
+ public int getStrideY() {
+ return strideY;
+ }
+
+ @Override
+ public int getStrideU() {
+ return strideU;
+ }
+
+ @Override
+ public int getStrideV() {
+ return strideV;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ retain();
+ return this;
+ }
+
+ @Override
+ public void retain() {
+ JniCommon.nativeAddRef(nativeBuffer);
+ }
+
+ @Override
+ public void release() {
+ JniCommon.nativeReleaseRef(nativeBuffer);
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ return JavaI420Buffer.cropAndScaleI420(
+ this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java
new file mode 100644
index 0000000000..70c625ab4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.AudioTrack;
+import android.os.Build;
+import org.webrtc.Logging;
+
+// Lowers the buffer size if no underruns are detected for 100 ms. Once an
+// underrun is detected, the buffer size is increased by 10 ms and it will not
+// be lowered further. The buffer size will never be increased more than
+// 5 times, to avoid the possibility of the buffer size increasing without
+// bounds.
+class LowLatencyAudioBufferManager {
+ private static final String TAG = "LowLatencyAudioBufferManager";
+ // The underrun count that was valid during the previous call to maybeAdjustBufferSize(). Used to
+ // detect increases in the value.
+ private int prevUnderrunCount;
+ // The number of ticks to wait without an underrun before decreasing the buffer size.
+ private int ticksUntilNextDecrease;
+ // Indicate if we should continue to decrease the buffer size.
+ private boolean keepLoweringBufferSize;
+  // The number of times the buffer size has been increased.
+ private int bufferIncreaseCounter;
+
+ public LowLatencyAudioBufferManager() {
+ this.prevUnderrunCount = 0;
+ this.ticksUntilNextDecrease = 10;
+ this.keepLoweringBufferSize = true;
+ this.bufferIncreaseCounter = 0;
+ }
+
+ public void maybeAdjustBufferSize(AudioTrack audioTrack) {
+ if (Build.VERSION.SDK_INT >= 26) {
+ final int underrunCount = audioTrack.getUnderrunCount();
+ if (underrunCount > prevUnderrunCount) {
+ // Don't increase buffer more than 5 times. Continuing to increase the buffer size
+ // could be harmful on low-power devices that regularly experience underruns under
+ // normal conditions.
+ if (bufferIncreaseCounter < 5) {
+ // Underrun detected, increase buffer size by 10ms.
+ final int currentBufferSize = audioTrack.getBufferSizeInFrames();
+ final int newBufferSize = currentBufferSize + audioTrack.getPlaybackRate() / 100;
+ Logging.d(TAG,
+ "Underrun detected! Increasing AudioTrack buffer size from " + currentBufferSize
+ + " to " + newBufferSize);
+ audioTrack.setBufferSizeInFrames(newBufferSize);
+ bufferIncreaseCounter++;
+ }
+ // Stop trying to lower the buffer size.
+ keepLoweringBufferSize = false;
+ prevUnderrunCount = underrunCount;
+ ticksUntilNextDecrease = 10;
+ } else if (keepLoweringBufferSize) {
+ ticksUntilNextDecrease--;
+ if (ticksUntilNextDecrease <= 0) {
+ // No underrun seen for 100 ms, try to lower the buffer size by 10ms.
+ final int bufferSize10ms = audioTrack.getPlaybackRate() / 100;
+ // Never go below a buffer size of 10ms.
+ final int currentBufferSize = audioTrack.getBufferSizeInFrames();
+ final int newBufferSize = Math.max(bufferSize10ms, currentBufferSize - bufferSize10ms);
+ if (newBufferSize != currentBufferSize) {
+ Logging.d(TAG,
+ "Lowering AudioTrack buffer size from " + currentBufferSize + " to "
+ + newBufferSize);
+ audioTrack.setBufferSizeInFrames(newBufferSize);
+ }
+ ticksUntilNextDecrease = 10;
+ }
+ }
+ }
+ }
+}
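The tick counting assumes maybeAdjustBufferSize() is called regularly from the playout loop, roughly once per 10 ms write, so ten quiet ticks correspond to the 100 ms window. A minimal sketch, with `playing` and the audio data assumed to come from the surrounding player:

    // Sketch: poll once per ~10 ms write so ten quiet ticks span ~100 ms.
    void playoutLoop(AudioTrack audioTrack, byte[] audioData) {
      LowLatencyAudioBufferManager bufferManager = new LowLatencyAudioBufferManager();
      while (playing) {
        audioTrack.write(audioData, 0, audioData.length); // ~10 ms of audio.
        bufferManager.maybeAdjustBufferSize(audioTrack);
      }
    }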
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java
new file mode 100644
index 0000000000..06d5cd3a8e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.AudioManager;
+import androidx.annotation.Nullable;
+import java.util.Timer;
+import java.util.TimerTask;
+import org.webrtc.Logging;
+
+// TODO(magjed): Do we really need to spawn a new thread just to log volume? Can we re-use the
+// AudioTrackThread instead?
+/**
+ * Private utility class that periodically checks and logs the volume level of the audio stream that
+ * is currently controlled by the volume control. A timer triggers logs once every 30 seconds and
+ * the timer's associated thread is named "WebRtcVolumeLevelLoggerThread".
+ */
+class VolumeLogger {
+ private static final String TAG = "VolumeLogger";
+ private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread";
+ private static final int TIMER_PERIOD_IN_SECONDS = 30;
+
+ private final AudioManager audioManager;
+ private @Nullable Timer timer;
+
+ public VolumeLogger(AudioManager audioManager) {
+ this.audioManager = audioManager;
+ }
+
+ public void start() {
+ Logging.d(TAG, "start" + WebRtcAudioUtils.getThreadInfo());
+ if (timer != null) {
+ return;
+ }
+ Logging.d(TAG, "audio mode is: " + WebRtcAudioUtils.modeToString(audioManager.getMode()));
+
+ timer = new Timer(THREAD_NAME);
+ timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
+ audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
+ 0, TIMER_PERIOD_IN_SECONDS * 1000);
+ }
+
+ private class LogVolumeTask extends TimerTask {
+ private final int maxRingVolume;
+ private final int maxVoiceCallVolume;
+
+ LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) {
+ this.maxRingVolume = maxRingVolume;
+ this.maxVoiceCallVolume = maxVoiceCallVolume;
+ }
+
+ @Override
+ public void run() {
+ final int mode = audioManager.getMode();
+ if (mode == AudioManager.MODE_RINGTONE) {
+ Logging.d(TAG,
+ "STREAM_RING stream volume: " + audioManager.getStreamVolume(AudioManager.STREAM_RING)
+ + " (max=" + maxRingVolume + ")");
+ } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
+ Logging.d(TAG,
+ "VOICE_CALL stream volume: "
+ + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
+ + " (max=" + maxVoiceCallVolume + ")");
+ }
+ }
+ }
+
+ public void stop() {
+ Logging.d(TAG, "stop" + WebRtcAudioUtils.getThreadInfo());
+ if (timer != null) {
+ timer.cancel();
+ timer = null;
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
new file mode 100644
index 0000000000..a9ff1011b6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -0,0 +1,227 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.audiofx.AcousticEchoCanceler;
+import android.media.audiofx.AudioEffect;
+import android.media.audiofx.AudioEffect.Descriptor;
+import android.media.audiofx.NoiseSuppressor;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.UUID;
+import org.webrtc.Logging;
+
+// This class wraps control of two different platform effects. Supported
+// effects are: AcousticEchoCanceler (AEC) and NoiseSuppressor (NS).
+// Calling enable() will activate all effects that are
+// supported by the device if the corresponding `shouldEnableXXX` member is set.
+class WebRtcAudioEffects {
+ private static final boolean DEBUG = false;
+
+ private static final String TAG = "WebRtcAudioEffectsExternal";
+
+ // UUIDs for Software Audio Effects that we want to avoid using.
+ // The implementor field will be set to "The Android Open Source Project".
+ private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER =
+ UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");
+ private static final UUID AOSP_NOISE_SUPPRESSOR =
+ UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b");
+
+ // Contains the available effect descriptors returned from the
+ // AudioEffect.getEffects() call. This result is cached to avoid doing the
+ // slow OS call multiple times.
+ private static @Nullable Descriptor[] cachedEffects;
+
+ // Contains the audio effect objects. Created in enable() and destroyed
+ // in release().
+ private @Nullable AcousticEchoCanceler aec;
+ private @Nullable NoiseSuppressor ns;
+
+ // Affects the final state given to the setEnabled() method on each effect.
+ // The default state is set to "disabled" but each effect can also be enabled
+ // by calling setAEC() and setNS().
+ private boolean shouldEnableAec;
+ private boolean shouldEnableNs;
+
+ // Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
+ // fulfilled.
+ public static boolean isAcousticEchoCancelerSupported() {
+ return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
+ }
+
+ // Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
+ public static boolean isNoiseSuppressorSupported() {
+ return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
+ }
+
+ public WebRtcAudioEffects() {
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ // Call this method to enable or disable the platform AEC. It modifies
+ // `shouldEnableAec` which is used in enable() where the actual state
+ // of the AEC effect is modified. Returns true if HW AEC is supported and
+ // false otherwise.
+ public boolean setAEC(boolean enable) {
+ Logging.d(TAG, "setAEC(" + enable + ")");
+ if (!isAcousticEchoCancelerSupported()) {
+ Logging.w(TAG, "Platform AEC is not supported");
+ shouldEnableAec = false;
+ return false;
+ }
+ if (aec != null && (enable != shouldEnableAec)) {
+ Logging.e(TAG, "Platform AEC state can't be modified while recording");
+ return false;
+ }
+ shouldEnableAec = enable;
+ return true;
+ }
+
+ // Call this method to enable or disable the platform NS. It modifies
+ // `shouldEnableNs` which is used in enable() where the actual state
+ // of the NS effect is modified. Returns true if HW NS is supported and
+ // false otherwise.
+ public boolean setNS(boolean enable) {
+ Logging.d(TAG, "setNS(" + enable + ")");
+ if (!isNoiseSuppressorSupported()) {
+ Logging.w(TAG, "Platform NS is not supported");
+ shouldEnableNs = false;
+ return false;
+ }
+ if (ns != null && (enable != shouldEnableNs)) {
+ Logging.e(TAG, "Platform NS state can't be modified while recording");
+ return false;
+ }
+ shouldEnableNs = enable;
+ return true;
+ }
+
+ public void enable(int audioSession) {
+ Logging.d(TAG, "enable(audioSession=" + audioSession + ")");
+ assertTrue(aec == null);
+ assertTrue(ns == null);
+
+ if (DEBUG) {
+ // Add logging of supported effects but filter out "VoIP effects", i.e.,
+      // AEC, AGC and NS. Avoid calling AudioEffect.queryEffects() unless the
+ // DEBUG flag is set since we have seen crashes in this API.
+ for (Descriptor d : AudioEffect.queryEffects()) {
+ if (effectTypeIsVoIP(d.type)) {
+ Logging.d(TAG,
+ "name: " + d.name + ", "
+ + "mode: " + d.connectMode + ", "
+ + "implementor: " + d.implementor + ", "
+ + "UUID: " + d.uuid);
+ }
+ }
+ }
+
+ if (isAcousticEchoCancelerSupported()) {
+ // Create an AcousticEchoCanceler and attach it to the AudioRecord on
+ // the specified audio session.
+ aec = AcousticEchoCanceler.create(audioSession);
+ if (aec != null) {
+ boolean enabled = aec.getEnabled();
+ boolean enable = shouldEnableAec && isAcousticEchoCancelerSupported();
+ if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
+ Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
+ }
+ Logging.d(TAG,
+ "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+ + enable + ", is now: " + (aec.getEnabled() ? "enabled" : "disabled"));
+ } else {
+ Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance");
+ }
+ }
+
+ if (isNoiseSuppressorSupported()) {
+      // Create a NoiseSuppressor and attach it to the AudioRecord on the
+ // specified audio session.
+ ns = NoiseSuppressor.create(audioSession);
+ if (ns != null) {
+ boolean enabled = ns.getEnabled();
+ boolean enable = shouldEnableNs && isNoiseSuppressorSupported();
+ if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
+ Logging.e(TAG, "Failed to set the NoiseSuppressor state");
+ }
+ Logging.d(TAG,
+ "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable
+ + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
+ } else {
+ Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
+ }
+ }
+ }
+
+ // Releases all native audio effect resources. It is a good practice to
+ // release the effect engine when not in use as control can be returned
+ // to other applications or the native resources released.
+ public void release() {
+ Logging.d(TAG, "release");
+ if (aec != null) {
+ aec.release();
+ aec = null;
+ }
+ if (ns != null) {
+ ns.release();
+ ns = null;
+ }
+ }
+
+ // Returns true for effect types in `type` that are of "VoIP" types:
+ // Acoustic Echo Canceler (AEC) or Automatic Gain Control (AGC) or
+  // Noise Suppressor (NS). Note that an extra check for support is needed
+  // in each comparison since some devices include effects in the
+ // AudioEffect.Descriptor array that are actually not available on the device.
+ // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
+ // AutomaticGainControl.isAvailable() returns false.
+ private boolean effectTypeIsVoIP(UUID type) {
+ return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
+ || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ // Returns the cached copy of the audio effects array, if available, or
+ // queries the operating system for the list of effects.
+ private static @Nullable Descriptor[] getAvailableEffects() {
+ if (cachedEffects != null) {
+ return cachedEffects;
+ }
+ // The caching is best effort only - if this method is called from several
+ // threads in parallel, they may end up doing the underlying OS call
+ // multiple times. It's normally only called on one thread so there's no
+ // real need to optimize for the multiple threads case.
+ cachedEffects = AudioEffect.queryEffects();
+ return cachedEffects;
+ }
+
+ // Returns true if an effect of the specified type is available. Functionally
+  // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
+ // faster as it avoids the expensive OS call to enumerate effects.
+ private static boolean isEffectTypeAvailable(UUID effectType, UUID blockListedUuid) {
+ Descriptor[] effects = getAvailableEffects();
+ if (effects == null) {
+ return false;
+ }
+ for (Descriptor d : effects) {
+ if (d.type.equals(effectType)) {
+ return !d.uuid.equals(blockListedUuid);
+ }
+ }
+ return false;
+ }
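+
+  // Illustrative sketch (not part of the upstream API): how a caller inside
+  // this class might use isEffectTypeAvailable() to filter out a known-bad AEC
+  // implementation. The UUID below is hypothetical and only stands in for a
+  // blocklisted effect implementation:
+  //
+  //   UUID hypotheticalBadAecUuid = UUID.fromString("00000000-0000-0000-0000-000000000000");
+  //   boolean usable =
+  //       isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, hypotheticalBadAecUuid);
+  //   // True only if the device lists an AEC whose implementation UUID differs
+  //   // from the blocklisted one.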
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
new file mode 100644
index 0000000000..f398602a28
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.os.Build;
+import org.webrtc.Logging;
+import org.webrtc.CalledByNative;
+
+/**
+ * This class contains static functions to query sample rate and input/output audio buffer sizes.
+ */
+class WebRtcAudioManager {
+ private static final String TAG = "WebRtcAudioManagerExternal";
+
+ private static final int DEFAULT_SAMPLE_RATE_HZ = 16000;
+
+ // Default audio data format is PCM 16 bit per sample.
+ // Guaranteed to be supported by all devices.
+ private static final int BITS_PER_SAMPLE = 16;
+
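+  // Default frame count per audio buffer, used as a fallback when the
+  // PROPERTY_OUTPUT_FRAMES_PER_BUFFER system property is unavailable.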
+ private static final int DEFAULT_FRAME_PER_BUFFER = 256;
+
+ @CalledByNative
+ static AudioManager getAudioManager(Context context) {
+ return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+ }
+
+ @CalledByNative
+ static int getOutputBufferSize(
+ Context context, AudioManager audioManager, int sampleRate, int numberOfOutputChannels) {
+ return isLowLatencyOutputSupported(context)
+ ? getLowLatencyFramesPerBuffer(audioManager)
+ : getMinOutputFrameSize(sampleRate, numberOfOutputChannels);
+ }
+
+ @CalledByNative
+ static int getInputBufferSize(
+ Context context, AudioManager audioManager, int sampleRate, int numberOfInputChannels) {
+ return isLowLatencyInputSupported(context)
+ ? getLowLatencyFramesPerBuffer(audioManager)
+ : getMinInputFrameSize(sampleRate, numberOfInputChannels);
+ }
+
+ private static boolean isLowLatencyOutputSupported(Context context) {
+ return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
+ }
+
+ private static boolean isLowLatencyInputSupported(Context context) {
+ // TODO(henrika): investigate if some sort of device list is needed here
+ // as well. The NDK doc states that: "As of API level 21, lower latency
+ // audio input is supported on select devices. To take advantage of this
+ // feature, first confirm that lower latency output is available".
+ return isLowLatencyOutputSupported(context);
+ }
+
+ /**
+ * Returns the native input/output sample rate for this device's output stream.
+ */
+ @CalledByNative
+ static int getSampleRate(AudioManager audioManager) {
+ // Override this if we're running on an old emulator image which only
+ // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE.
+ if (WebRtcAudioUtils.runningOnEmulator()) {
+ Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz.");
+ return 8000;
+ }
+ // Deliver best possible estimate based on default Android AudioManager APIs.
+ final int sampleRateHz = getSampleRateForApiLevel(audioManager);
+ Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
+ return sampleRateHz;
+ }
+
+ private static int getSampleRateForApiLevel(AudioManager audioManager) {
+ String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+ return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
+ }
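+
+  // Illustrative example (hypothetical values): many modern handsets report
+  // PROPERTY_OUTPUT_SAMPLE_RATE as "48000", so getSampleRate() returns 48000 Hz;
+  // a missing property falls back to DEFAULT_SAMPLE_RATE_HZ (16000 Hz), and
+  // emulators are pinned to 8000 Hz.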
+
+ // Returns the native output buffer size for low-latency output streams.
+ private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
+ String framesPerBuffer =
+ audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+ return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
+ }
+
+ // Returns the minimum output buffer size for Java based audio (AudioTrack).
+ // This size can also be used for OpenSL ES implementations on devices that
+  // lack support for low-latency output.
+ private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
+ final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+ final int channelConfig =
+ (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+ return AudioTrack.getMinBufferSize(
+ sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+ / bytesPerFrame;
+ }
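+
+  // Worked example (illustrative, not normative): for 48000 Hz stereo PCM16,
+  // bytesPerFrame = 2 * (16 / 8) = 4; if AudioTrack.getMinBufferSize() were to
+  // return 15360 bytes, this method would yield 15360 / 4 = 3840 frames.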
+
+ // Returns the minimum input buffer size for Java based audio (AudioRecord).
+  // This size can also be used for OpenSL ES implementations on devices that
+  // lack support for low-latency input.
+ private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
+ final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+ final int channelConfig =
+ (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+ return AudioRecord.getMinBufferSize(
+ sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+ / bytesPerFrame;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
new file mode 100644
index 0000000000..6647e5fcbb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
@@ -0,0 +1,743 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioRecordingConfiguration;
+import android.media.AudioTimestamp;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import android.os.Process;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import java.lang.System;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+import org.webrtc.CalledByNative;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
+
+class WebRtcAudioRecord {
+ private static final String TAG = "WebRtcAudioRecordExternal";
+
+ // Requested size of each recorded buffer provided to the client.
+ private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+ // Average number of callbacks per second.
+ private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
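+  // For illustration: with 10 ms buffers this evaluates to 100 callbacks per
+  // second; at 48000 Hz mono PCM16, each callback then carries
+  // 480 frames * 2 bytes = 960 bytes (see initRecording()).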
+
+ // We ask for a native buffer size of BUFFER_SIZE_FACTOR * (minimum required
+ // buffer size). The extra space is allocated to guard against glitches under
+ // high load.
+ private static final int BUFFER_SIZE_FACTOR = 2;
+
+  // The AudioRecordJavaThread is allowed to wait for a successful call to join()
+  // but the wait times out after this amount of time.
+ private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+ public static final int DEFAULT_AUDIO_SOURCE = AudioSource.VOICE_COMMUNICATION;
+
+ // Default audio data format is PCM 16 bit per sample.
+ // Guaranteed to be supported by all devices.
+ public static final int DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+
+ // Indicates AudioRecord has started recording audio.
+ private static final int AUDIO_RECORD_START = 0;
+
+ // Indicates AudioRecord has stopped recording audio.
+ private static final int AUDIO_RECORD_STOP = 1;
+
+ // Time to wait before checking recording status after start has been called. Tests have
+ // shown that the result can sometimes be invalid (our own status might be missing) if we check
+ // directly after start.
+ private static final int CHECK_REC_STATUS_DELAY_MS = 100;
+
+ private final Context context;
+ private final AudioManager audioManager;
+ private final int audioSource;
+ private final int audioFormat;
+
+ private long nativeAudioRecord;
+
+ private final WebRtcAudioEffects effects = new WebRtcAudioEffects();
+
+ private @Nullable ByteBuffer byteBuffer;
+
+ private @Nullable AudioRecord audioRecord;
+ private @Nullable AudioRecordThread audioThread;
+ private @Nullable AudioDeviceInfo preferredDevice;
+
+ private final ScheduledExecutorService executor;
+ private @Nullable ScheduledFuture<String> future;
+
+ private volatile boolean microphoneMute;
+ private final AtomicReference<Boolean> audioSourceMatchesRecordingSessionRef =
+ new AtomicReference<>();
+ private byte[] emptyBytes;
+
+ private final @Nullable AudioRecordErrorCallback errorCallback;
+ private final @Nullable AudioRecordStateCallback stateCallback;
+ private final @Nullable SamplesReadyCallback audioSamplesReadyCallback;
+ private final boolean isAcousticEchoCancelerSupported;
+ private final boolean isNoiseSuppressorSupported;
+
+ /**
+   * Audio thread which keeps calling AudioRecord.read() waiting for audio
+   * to be recorded. Feeds recorded data to the native counterpart as a
+ * periodic sequence of callbacks using DataIsRecorded().
+ * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+ */
+ private class AudioRecordThread extends Thread {
+ private volatile boolean keepAlive = true;
+
+ public AudioRecordThread(String name) {
+ super(name);
+ }
+
+ @Override
+ public void run() {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+ Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
+ assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);
+
+ // Audio recording has started and the client is informed about it.
+ doAudioRecordStateCallback(AUDIO_RECORD_START);
+
+ long lastTime = System.nanoTime();
+ AudioTimestamp audioTimestamp = null;
+ if (Build.VERSION.SDK_INT >= 24) {
+ audioTimestamp = new AudioTimestamp();
+ }
+ while (keepAlive) {
+ int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity());
+ if (bytesRead == byteBuffer.capacity()) {
+ if (microphoneMute) {
+ byteBuffer.clear();
+ byteBuffer.put(emptyBytes);
+ }
+ // It's possible we've been shut down during the read, and stopRecording() tried and
+ // failed to join this thread. To be a bit safer, try to avoid calling any native methods
+ // in case they've been unregistered after stopRecording() returned.
+ if (keepAlive) {
+ long captureTimeNs = 0;
+ if (Build.VERSION.SDK_INT >= 24) {
+ if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC)
+ == AudioRecord.SUCCESS) {
+ captureTimeNs = audioTimestamp.nanoTime;
+ }
+ }
+ nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs);
+ }
+ if (audioSamplesReadyCallback != null) {
+ // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily
+ // at index 0.
+ byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(),
+ byteBuffer.capacity() + byteBuffer.arrayOffset());
+ audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady(
+ new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(),
+ audioRecord.getChannelCount(), audioRecord.getSampleRate(), data));
+ }
+ } else {
+ String errorMessage = "AudioRecord.read failed: " + bytesRead;
+ Logging.e(TAG, errorMessage);
+ if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
+ keepAlive = false;
+ reportWebRtcAudioRecordError(errorMessage);
+ }
+ }
+ }
+
+ try {
+ if (audioRecord != null) {
+ audioRecord.stop();
+ doAudioRecordStateCallback(AUDIO_RECORD_STOP);
+ }
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
+ }
+ }
+
+    // Stops the inner thread loop, which results in calling AudioRecord.stop().
+    // Does not block the calling thread.
+ public void stopThread() {
+ Logging.d(TAG, "stopThread");
+ keepAlive = false;
+ }
+ }
+
+ @CalledByNative
+ WebRtcAudioRecord(Context context, AudioManager audioManager) {
+ this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE,
+ DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */,
+ null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
+ WebRtcAudioEffects.isNoiseSuppressorSupported());
+ }
+
+ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler,
+ AudioManager audioManager, int audioSource, int audioFormat,
+ @Nullable AudioRecordErrorCallback errorCallback,
+ @Nullable AudioRecordStateCallback stateCallback,
+ @Nullable SamplesReadyCallback audioSamplesReadyCallback,
+ boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) {
+ if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) {
+ throw new IllegalArgumentException("HW AEC not supported");
+ }
+ if (isNoiseSuppressorSupported && !WebRtcAudioEffects.isNoiseSuppressorSupported()) {
+ throw new IllegalArgumentException("HW NS not supported");
+ }
+ this.context = context;
+ this.executor = scheduler;
+ this.audioManager = audioManager;
+ this.audioSource = audioSource;
+ this.audioFormat = audioFormat;
+ this.errorCallback = errorCallback;
+ this.stateCallback = stateCallback;
+ this.audioSamplesReadyCallback = audioSamplesReadyCallback;
+ this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported;
+ this.isNoiseSuppressorSupported = isNoiseSuppressorSupported;
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ @CalledByNative
+ public void setNativeAudioRecord(long nativeAudioRecord) {
+ this.nativeAudioRecord = nativeAudioRecord;
+ }
+
+ @CalledByNative
+ boolean isAcousticEchoCancelerSupported() {
+ return isAcousticEchoCancelerSupported;
+ }
+
+ @CalledByNative
+ boolean isNoiseSuppressorSupported() {
+ return isNoiseSuppressorSupported;
+ }
+
+ // Returns true if a valid call to verifyAudioConfig() has been done. Should always be
+ // checked before using the returned value of isAudioSourceMatchingRecordingSession().
+ @CalledByNative
+ boolean isAudioConfigVerified() {
+ return audioSourceMatchesRecordingSessionRef.get() != null;
+ }
+
+  // Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay once
+  // startRecording() has been called. Hence, it should preferably be called in combination with
+  // stopRecording() to ensure that it has been set properly. Check isAudioConfigVerified() first
+  // to ensure that the returned value is valid.
+ @CalledByNative
+ boolean isAudioSourceMatchingRecordingSession() {
+ Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get();
+ if (audioSourceMatchesRecordingSession == null) {
+ Logging.w(TAG, "Audio configuration has not yet been verified");
+ return false;
+ }
+ return audioSourceMatchesRecordingSession;
+ }
+
+ @CalledByNative
+ private boolean enableBuiltInAEC(boolean enable) {
+ Logging.d(TAG, "enableBuiltInAEC(" + enable + ")");
+ return effects.setAEC(enable);
+ }
+
+ @CalledByNative
+ private boolean enableBuiltInNS(boolean enable) {
+ Logging.d(TAG, "enableBuiltInNS(" + enable + ")");
+ return effects.setNS(enable);
+ }
+
+ @CalledByNative
+ private int initRecording(int sampleRate, int channels) {
+ Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+ if (audioRecord != null) {
+ reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording.");
+ return -1;
+ }
+ final int bytesPerFrame = channels * getBytesPerSample(audioFormat);
+ final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND;
+ byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);
+ if (!(byteBuffer.hasArray())) {
+ reportWebRtcAudioRecordInitError("ByteBuffer does not have backing array.");
+ return -1;
+ }
+ Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+ emptyBytes = new byte[byteBuffer.capacity()];
+ // Rather than passing the ByteBuffer with every callback (requiring
+    // the potentially expensive GetDirectBufferAddress) we simply have the
+    // native class cache the address of the memory once.
+ nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer);
+
+ // Get the minimum buffer size required for the successful creation of
+ // an AudioRecord object, in byte units.
+ // Note that this size doesn't guarantee a smooth recording under load.
+ final int channelConfig = channelCountToConfiguration(channels);
+ int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
+ if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
+ reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize);
+ return -1;
+ }
+ Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize);
+
+ // Use a larger buffer size than the minimum required when creating the
+ // AudioRecord instance to ensure smooth recording under load. It has been
+ // verified that it does not increase the actual recording latency.
+ int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
+ Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
+ try {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ // Use the AudioRecord.Builder class on Android M (23) and above.
+ // Throws IllegalArgumentException.
+ audioRecord = createAudioRecordOnMOrHigher(
+ audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ audioSourceMatchesRecordingSessionRef.set(null);
+ if (preferredDevice != null) {
+ setPreferredDevice(preferredDevice);
+ }
+ } else {
+ // Use the old AudioRecord constructor for API levels below 23.
+ // Throws UnsupportedOperationException.
+ audioRecord = createAudioRecordOnLowerThanM(
+ audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ audioSourceMatchesRecordingSessionRef.set(null);
+ }
+ } catch (IllegalArgumentException | UnsupportedOperationException e) {
+ // Report of exception message is sufficient. Example: "Cannot create AudioRecord".
+ reportWebRtcAudioRecordInitError(e.getMessage());
+ releaseAudioResources();
+ return -1;
+ }
+ if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+ reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed.");
+ releaseAudioResources();
+ return -1;
+ }
+ effects.enable(audioRecord.getAudioSessionId());
+ logMainParameters();
+ logMainParametersExtended();
+ // Check number of active recording sessions. Should be zero but we have seen conflict cases
+ // and adding a log for it can help us figure out details about conflicting sessions.
+ final int numActiveRecordingSessions =
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (numActiveRecordingSessions != 0) {
+ // Log the conflict as a warning since initialization did in fact succeed. Most likely, the
+ // upcoming call to startRecording() will fail under these conditions.
+ Logging.w(
+ TAG, "Potential microphone conflict. Active sessions: " + numActiveRecordingSessions);
+ }
+ return framesPerBuffer;
+ }
+
+ /**
+ * Prefer a specific {@link AudioDeviceInfo} device for recording. Calling after recording starts
+ * is valid but may cause a temporary interruption if the audio routing changes.
+ */
+ @RequiresApi(Build.VERSION_CODES.M)
+ @TargetApi(Build.VERSION_CODES.M)
+ void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) {
+ Logging.d(
+ TAG, "setPreferredDevice " + (preferredDevice != null ? preferredDevice.getId() : null));
+ this.preferredDevice = preferredDevice;
+ if (audioRecord != null) {
+ if (!audioRecord.setPreferredDevice(preferredDevice)) {
+ Logging.e(TAG, "setPreferredDevice failed");
+ }
+ }
+ }
+
+ @CalledByNative
+ private boolean startRecording() {
+ Logging.d(TAG, "startRecording");
+ assertTrue(audioRecord != null);
+ assertTrue(audioThread == null);
+ try {
+ audioRecord.startRecording();
+ } catch (IllegalStateException e) {
+ reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
+ "AudioRecord.startRecording failed: " + e.getMessage());
+ return false;
+ }
+ if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+ reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
+ "AudioRecord.startRecording failed - incorrect state: "
+ + audioRecord.getRecordingState());
+ return false;
+ }
+ audioThread = new AudioRecordThread("AudioRecordJavaThread");
+ audioThread.start();
+ scheduleLogRecordingConfigurationsTask(audioRecord);
+ return true;
+ }
+
+ @CalledByNative
+ private boolean stopRecording() {
+ Logging.d(TAG, "stopRecording");
+ assertTrue(audioThread != null);
+ if (future != null) {
+ if (!future.isDone()) {
+ // Might be needed if the client calls startRecording(), stopRecording() back-to-back.
+ future.cancel(true /* mayInterruptIfRunning */);
+ }
+ future = null;
+ }
+ audioThread.stopThread();
+ if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
+ Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ }
+ audioThread = null;
+ effects.release();
+ releaseAudioResources();
+ return true;
+ }
+
+ @TargetApi(Build.VERSION_CODES.M)
+ private static AudioRecord createAudioRecordOnMOrHigher(
+ int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
+ Logging.d(TAG, "createAudioRecordOnMOrHigher");
+ return new AudioRecord.Builder()
+ .setAudioSource(audioSource)
+ .setAudioFormat(new AudioFormat.Builder()
+ .setEncoding(audioFormat)
+ .setSampleRate(sampleRate)
+ .setChannelMask(channelConfig)
+ .build())
+ .setBufferSizeInBytes(bufferSizeInBytes)
+ .build();
+ }
+
+ private static AudioRecord createAudioRecordOnLowerThanM(
+ int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
+ Logging.d(TAG, "createAudioRecordOnLowerThanM");
+ return new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ }
+
+ private void logMainParameters() {
+ Logging.d(TAG,
+ "AudioRecord: "
+ + "session ID: " + audioRecord.getAudioSessionId() + ", "
+ + "channels: " + audioRecord.getChannelCount() + ", "
+ + "sample rate: " + audioRecord.getSampleRate());
+ }
+
+ @TargetApi(Build.VERSION_CODES.M)
+ private void logMainParametersExtended() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Logging.d(TAG,
+ "AudioRecord: "
+ // The frame count of the native AudioRecord buffer.
+ + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ // Checks the number of active recording sessions and logs the states of all active sessions.
+  // Returns the number of active sessions. Note that this can be called on an arbitrary thread.
+ private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
+ Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher");
+ return 0;
+ }
+ if (audioRecord == null) {
+ return 0;
+ }
+
+ // Get a list of the currently active audio recording configurations of the device (can be more
+ // than one). An empty list indicates there is no recording active when queried.
+ List<AudioRecordingConfiguration> configs = audioManager.getActiveRecordingConfigurations();
+ final int numActiveRecordingSessions = configs.size();
+ Logging.d(TAG, "Number of active recording sessions: " + numActiveRecordingSessions);
+ if (numActiveRecordingSessions > 0) {
+ logActiveRecordingConfigs(audioRecord.getAudioSessionId(), configs);
+ if (verifyAudioConfig) {
+ // Run an extra check to verify that the existing audio source doing the recording (tied
+ // to the AudioRecord instance) is matching what the audio recording configuration lists
+ // as its client parameters. If these do not match, recording might work but under invalid
+ // conditions.
+ audioSourceMatchesRecordingSessionRef.set(
+ verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(),
+ audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs));
+ }
+ }
+ return numActiveRecordingSessions;
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ private int channelCountToConfiguration(int channels) {
+ return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+ }
+
+ private native void nativeCacheDirectBufferAddress(
+ long nativeAudioRecordJni, ByteBuffer byteBuffer);
+ private native void nativeDataIsRecorded(
+ long nativeAudioRecordJni, int bytes, long captureTimestampNs);
+
+ // Sets all recorded samples to zero if `mute` is true, i.e., ensures that
+ // the microphone is muted.
+ public void setMicrophoneMute(boolean mute) {
+ Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
+ microphoneMute = mute;
+ }
+
+ // Releases the native AudioRecord resources.
+ private void releaseAudioResources() {
+ Logging.d(TAG, "releaseAudioResources");
+ if (audioRecord != null) {
+ audioRecord.release();
+ audioRecord = null;
+ }
+ audioSourceMatchesRecordingSessionRef.set(null);
+ }
+
+ private void reportWebRtcAudioRecordInitError(String errorMessage) {
+ Logging.e(TAG, "Init recording error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordInitError(errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioRecordStartError(
+ AudioRecordStartErrorCode errorCode, String errorMessage) {
+ Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioRecordError(String errorMessage) {
+ Logging.e(TAG, "Run-time recording error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordError(errorMessage);
+ }
+ }
+
+ private void doAudioRecordStateCallback(int audioState) {
+ Logging.d(TAG, "doAudioRecordStateCallback: " + audioStateToString(audioState));
+ if (stateCallback != null) {
+ if (audioState == WebRtcAudioRecord.AUDIO_RECORD_START) {
+ stateCallback.onWebRtcAudioRecordStart();
+ } else if (audioState == WebRtcAudioRecord.AUDIO_RECORD_STOP) {
+ stateCallback.onWebRtcAudioRecordStop();
+ } else {
+ Logging.e(TAG, "Invalid audio state");
+ }
+ }
+ }
+
+  // Based on Android's AudioFormat.getBytesPerSample: bits per sample / 8.
+  // The default audio data format is PCM 16 bits per sample, which is
+  // guaranteed to be supported by all devices.
+ private static int getBytesPerSample(int audioFormat) {
+ switch (audioFormat) {
+ case AudioFormat.ENCODING_PCM_8BIT:
+ return 1;
+ case AudioFormat.ENCODING_PCM_16BIT:
+ case AudioFormat.ENCODING_IEC61937:
+ case AudioFormat.ENCODING_DEFAULT:
+ return 2;
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ return 4;
+ case AudioFormat.ENCODING_INVALID:
+ default:
+ throw new IllegalArgumentException("Bad audio format " + audioFormat);
+ }
+ }
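+
+  // Example (for illustration): getBytesPerSample(AudioFormat.ENCODING_PCM_16BIT)
+  // returns 2, so a 10 ms mono capture at 16000 Hz occupies
+  // 160 frames * 2 bytes = 320 bytes.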
+
+  // Uses an ExecutorService to schedule a task after a given delay; the task
+  // checks (by logging) the current status of active recording sessions.
+ private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) {
+ Logging.d(TAG, "scheduleLogRecordingConfigurationsTask");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
+ return;
+ }
+
+ Callable<String> callable = () -> {
+ if (this.audioRecord == audioRecord) {
+ logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */);
+ } else {
+ Logging.d(TAG, "audio record has changed");
+ }
+ return "Scheduled task is done";
+ };
+
+ if (future != null && !future.isDone()) {
+ future.cancel(true /* mayInterruptIfRunning */);
+ }
+    // Schedule a call to logRecordingConfigurations() from the executor thread
+    // after a fixed delay.
+    future = executor.schedule(callable, CHECK_REC_STATUS_DELAY_MS, TimeUnit.MILLISECONDS);
+  }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ private static boolean logActiveRecordingConfigs(
+ int session, List<AudioRecordingConfiguration> configs) {
+ assertTrue(!configs.isEmpty());
+ final Iterator<AudioRecordingConfiguration> it = configs.iterator();
+ Logging.d(TAG, "AudioRecordingConfigurations: ");
+ while (it.hasNext()) {
+ final AudioRecordingConfiguration config = it.next();
+ StringBuilder conf = new StringBuilder();
+ // The audio source selected by the client.
+ final int audioSource = config.getClientAudioSource();
+ conf.append(" client audio source=")
+ .append(WebRtcAudioUtils.audioSourceToString(audioSource))
+ .append(", client session id=")
+ .append(config.getClientAudioSessionId())
+ // Compare with our own id (based on AudioRecord#getAudioSessionId()).
+ .append(" (")
+ .append(session)
+ .append(")")
+ .append("\n");
+ // Audio format at which audio is recorded on this Android device. Note that it may differ
+ // from the client application recording format (see getClientFormat()).
+ AudioFormat format = config.getFormat();
+ conf.append(" Device AudioFormat: ")
+ .append("channel count=")
+ .append(format.getChannelCount())
+ .append(", channel index mask=")
+ .append(format.getChannelIndexMask())
+ // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices.
+ .append(", channel mask=")
+ .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask()))
+ .append(", encoding=")
+ .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding()))
+ .append(", sample rate=")
+ .append(format.getSampleRate())
+ .append("\n");
+ // Audio format at which the client application is recording audio.
+ format = config.getClientFormat();
+ conf.append(" Client AudioFormat: ")
+ .append("channel count=")
+ .append(format.getChannelCount())
+ .append(", channel index mask=")
+ .append(format.getChannelIndexMask())
+ // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices.
+ .append(", channel mask=")
+ .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask()))
+ .append(", encoding=")
+ .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding()))
+ .append(", sample rate=")
+ .append(format.getSampleRate())
+ .append("\n");
+ // Audio input device used for this recording session.
+ final AudioDeviceInfo device = config.getAudioDevice();
+ if (device != null) {
+ assertTrue(device.isSource());
+ conf.append(" AudioDevice: ")
+ .append("type=")
+ .append(WebRtcAudioUtils.deviceTypeToString(device.getType()))
+ .append(", id=")
+ .append(device.getId());
+ }
+ Logging.d(TAG, conf.toString());
+ }
+ return true;
+ }
+
+ // Verify that the client audio configuration (device and format) matches the requested
+ // configuration (same as AudioRecord's).
+ @TargetApi(Build.VERSION_CODES.N)
+ private static boolean verifyAudioConfig(int source, int session, AudioFormat format,
+ AudioDeviceInfo device, List<AudioRecordingConfiguration> configs) {
+ assertTrue(!configs.isEmpty());
+ final Iterator<AudioRecordingConfiguration> it = configs.iterator();
+ while (it.hasNext()) {
+ final AudioRecordingConfiguration config = it.next();
+ final AudioDeviceInfo configDevice = config.getAudioDevice();
+ if (configDevice == null) {
+ continue;
+ }
+ if ((config.getClientAudioSource() == source)
+ && (config.getClientAudioSessionId() == session)
+ // Check the client format (should match the format of the AudioRecord instance).
+ && (config.getClientFormat().getEncoding() == format.getEncoding())
+ && (config.getClientFormat().getSampleRate() == format.getSampleRate())
+ && (config.getClientFormat().getChannelMask() == format.getChannelMask())
+ && (config.getClientFormat().getChannelIndexMask() == format.getChannelIndexMask())
+ // Ensure that the device format is properly configured.
+ && (config.getFormat().getEncoding() != AudioFormat.ENCODING_INVALID)
+ && (config.getFormat().getSampleRate() > 0)
+ // For the channel mask, either the position or index-based value must be valid.
+ && ((config.getFormat().getChannelMask() != AudioFormat.CHANNEL_INVALID)
+ || (config.getFormat().getChannelIndexMask() != AudioFormat.CHANNEL_INVALID))
+ && checkDeviceMatch(configDevice, device)) {
+ Logging.d(TAG, "verifyAudioConfig: PASS");
+ return true;
+ }
+ }
+ Logging.e(TAG, "verifyAudioConfig: FAILED");
+ return false;
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+  // Returns true if device A's parameters match those of device B.
+ // TODO(henrika): can be improved by adding AudioDeviceInfo#getAddress() but it requires API 29.
+ private static boolean checkDeviceMatch(AudioDeviceInfo devA, AudioDeviceInfo devB) {
+    return (devA.getId() == devB.getId()) && (devA.getType() == devB.getType());
+ }
+
+ private static String audioStateToString(int state) {
+ switch (state) {
+ case WebRtcAudioRecord.AUDIO_RECORD_START:
+ return "START";
+ case WebRtcAudioRecord.AUDIO_RECORD_STOP:
+ return "STOP";
+ default:
+ return "INVALID";
+ }
+ }
+
+ private static final AtomicInteger nextSchedulerId = new AtomicInteger(0);
+
+ static ScheduledExecutorService newDefaultScheduler() {
+ AtomicInteger nextThreadId = new AtomicInteger(0);
+ return Executors.newScheduledThreadPool(0, new ThreadFactory() {
+ /**
+ * Constructs a new {@code Thread}
+ */
+ @Override
+ public Thread newThread(Runnable r) {
+ Thread thread = Executors.defaultThreadFactory().newThread(r);
+ thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s",
+ nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement()));
+ return thread;
+ }
+ });
+ }
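+
+  // Illustrative sketch (assumed embedder-side usage, not upstream
+  // documentation): a client that wants its own scheduling policy could pass a
+  // custom executor to the public constructor instead of the default one.
+  // `appContext` and `audioManager` are assumed to be supplied by the caller:
+  //
+  //   ScheduledExecutorService scheduler = newDefaultScheduler();
+  //   WebRtcAudioRecord record = new WebRtcAudioRecord(appContext, scheduler,
+  //       audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT,
+  //       null /* errorCallback */, null /* stateCallback */,
+  //       null /* audioSamplesReadyCallback */,
+  //       WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
+  //       WebRtcAudioEffects.isNoiseSuppressorSupported());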
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
new file mode 100644
index 0000000000..2b34e34013
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -0,0 +1,585 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
+import android.os.Process;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.CalledByNative;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
+import org.webrtc.audio.LowLatencyAudioBufferManager;
+
+class WebRtcAudioTrack {
+ private static final String TAG = "WebRtcAudioTrackExternal";
+
+ // Default audio data format is PCM 16 bit per sample.
+ // Guaranteed to be supported by all devices.
+ private static final int BITS_PER_SAMPLE = 16;
+
+  // Requested size of each playout buffer fetched from the native client.
+ private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+ // Average number of callbacks per second.
+ private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+  // The AudioTrackThread is allowed to wait for a successful call to join()
+  // but the wait times out after this amount of time.
+ private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+ // By default, WebRTC creates audio tracks with a usage attribute
+ // corresponding to voice communications, such as telephony or VoIP.
+ private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;
+
+ // Indicates the AudioTrack has started playing audio.
+ private static final int AUDIO_TRACK_START = 0;
+
+ // Indicates the AudioTrack has stopped playing audio.
+ private static final int AUDIO_TRACK_STOP = 1;
+
+ private long nativeAudioTrack;
+ private final Context context;
+ private final AudioManager audioManager;
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+ private ByteBuffer byteBuffer;
+
+ private @Nullable final AudioAttributes audioAttributes;
+ private @Nullable AudioTrack audioTrack;
+ private @Nullable AudioTrackThread audioThread;
+ private final VolumeLogger volumeLogger;
+
+ // Samples to be played are replaced by zeros if `speakerMute` is set to true.
+ // Can be used to ensure that the speaker is fully muted.
+ private volatile boolean speakerMute;
+ private byte[] emptyBytes;
+ private boolean useLowLatency;
+ private int initialBufferSizeInFrames;
+
+ private final @Nullable AudioTrackErrorCallback errorCallback;
+ private final @Nullable AudioTrackStateCallback stateCallback;
+
+ /**
+ * Audio thread which keeps calling AudioTrack.write() to stream audio.
+ * Data is periodically acquired from the native WebRTC layer using the
+ * nativeGetPlayoutData callback function.
+ * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+ */
+ private class AudioTrackThread extends Thread {
+ private volatile boolean keepAlive = true;
+ private LowLatencyAudioBufferManager bufferManager;
+
+ public AudioTrackThread(String name) {
+ super(name);
+ bufferManager = new LowLatencyAudioBufferManager();
+ }
+
+ @Override
+ public void run() {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+ Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
+ assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
+
+ // Audio playout has started and the client is informed about it.
+ doAudioTrackStateCallback(AUDIO_TRACK_START);
+
+ // Fixed size in bytes of each 10ms block of audio data that we ask for
+ // using callbacks to the native WebRTC client.
+ final int sizeInBytes = byteBuffer.capacity();
+
+ while (keepAlive) {
+ // Get 10ms of PCM data from the native WebRTC client. Audio data is
+ // written into the common ByteBuffer using the address that was
+ // cached at construction.
+ nativeGetPlayoutData(nativeAudioTrack, sizeInBytes);
+ // Write data until all data has been written to the audio sink.
+ // Upon return, the buffer position will have been advanced to reflect
+ // the amount of data that was successfully written to the AudioTrack.
+ assertTrue(sizeInBytes <= byteBuffer.remaining());
+ if (speakerMute) {
+ byteBuffer.clear();
+ byteBuffer.put(emptyBytes);
+ byteBuffer.position(0);
+ }
+ int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+ if (bytesWritten != sizeInBytes) {
+ Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
+ // If a write() returns a negative value, an error has occurred.
+ // Stop playing and report an error in this case.
+ if (bytesWritten < 0) {
+ keepAlive = false;
+ reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten);
+ }
+ }
+ if (useLowLatency) {
+ bufferManager.maybeAdjustBufferSize(audioTrack);
+ }
+        // The byte buffer must be rewound since byteBuffer.position() is
+        // increased at each call to AudioTrack.write(). If we don't do this,
+        // the next call to AudioTrack.write() will fail.
+ byteBuffer.rewind();
+
+ // TODO(henrika): it is possible to create a delay estimate here by
+ // counting number of written frames and subtracting the result from
+ // audioTrack.getPlaybackHeadPosition().
+ }
+ }
+
+ // Stops the inner thread loop which results in calling AudioTrack.stop().
+ // Does not block the calling thread.
+ public void stopThread() {
+ Logging.d(TAG, "stopThread");
+ keepAlive = false;
+ }
+ }
+
+ @CalledByNative
+ WebRtcAudioTrack(Context context, AudioManager audioManager) {
+ this(context, audioManager, null /* audioAttributes */, null /* errorCallback */,
+ null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */);
+ }
+
+ WebRtcAudioTrack(Context context, AudioManager audioManager,
+ @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback,
+ @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency,
+ boolean enableVolumeLogger) {
+ threadChecker.detachThread();
+ this.context = context;
+ this.audioManager = audioManager;
+ this.audioAttributes = audioAttributes;
+ this.errorCallback = errorCallback;
+ this.stateCallback = stateCallback;
+ this.volumeLogger = enableVolumeLogger ? new VolumeLogger(audioManager) : null;
+ this.useLowLatency = useLowLatency;
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ @CalledByNative
+ public void setNativeAudioTrack(long nativeAudioTrack) {
+ this.nativeAudioTrack = nativeAudioTrack;
+ }
+
+ @CalledByNative
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG,
+ "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
+ + ", bufferSizeFactor=" + bufferSizeFactor + ")");
+ final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
+ byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
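+    // E.g., 48000 Hz stereo PCM16: 4 bytes/frame * 480 frames = 1920 bytes per 10 ms chunk.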
+ Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+ emptyBytes = new byte[byteBuffer.capacity()];
+ // Rather than passing the ByteBuffer with every callback (requiring
+    // the potentially expensive GetDirectBufferAddress) we simply have the
+    // native class cache the address of the memory once.
+ nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer);
+
+ // Get the minimum buffer size required for the successful creation of an
+ // AudioTrack object to be created in the MODE_STREAM mode.
+ // Note that this size doesn't guarantee a smooth playback under load.
+ final int channelConfig = channelCountToConfiguration(channels);
+ final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig,
+ AudioFormat.ENCODING_PCM_16BIT)
+ * bufferSizeFactor);
+ Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes);
+ // For the streaming mode, data must be written to the audio sink in
+ // chunks of size (given by byteBuffer.capacity()) less than or equal
+ // to the total buffer size `minBufferSizeInBytes`. But, we have seen
+ // reports of "getMinBufferSize(): error querying hardware". Hence, it
+ // can happen that `minBufferSizeInBytes` contains an invalid value.
+ if (minBufferSizeInBytes < byteBuffer.capacity()) {
+ reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
+ return -1;
+ }
+
+ // Don't use low-latency mode when a bufferSizeFactor > 1 is used. When bufferSizeFactor > 1
+ // we want to use a larger buffer to prevent underruns. However, low-latency mode would
+ // decrease the buffer size, which makes the bufferSizeFactor have no effect.
+ if (bufferSizeFactor > 1.0) {
+ useLowLatency = false;
+ }
+
+    // Ensure that the previous audio session was stopped correctly before
+    // trying to create a new AudioTrack.
+ if (audioTrack != null) {
+ reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
+ return -1;
+ }
+ try {
+ // Create an AudioTrack object and initialize its associated audio buffer.
+ // The size of this buffer determines how long an AudioTrack can play
+ // before running out of data.
+ if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ // On API level 26 or higher, we can use a low latency mode.
+ audioTrack = createAudioTrackOnOreoOrHigher(
+ sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+ } else {
+ // As we are on API level 21 or higher, it is possible to use a special AudioTrack
+ // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
+ // supersede the notion of stream types for defining the behavior of audio playback,
+ // and to allow certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
+ audioTrack = createAudioTrackBeforeOreo(
+ sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+ }
+ } catch (IllegalArgumentException e) {
+ reportWebRtcAudioTrackInitError(e.getMessage());
+ releaseAudioResources();
+ return -1;
+ }
+
+ // It can happen that an AudioTrack is created but it was not successfully
+ // initialized upon creation. Seems to be the case e.g. when the maximum
+ // number of globally available audio tracks is exceeded.
+ if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
+ reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
+ releaseAudioResources();
+ return -1;
+ }
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ initialBufferSizeInFrames = audioTrack.getBufferSizeInFrames();
+ } else {
+ initialBufferSizeInFrames = -1;
+ }
+ logMainParameters();
+ logMainParametersExtended();
+ return minBufferSizeInBytes;
+ }
+
+ @CalledByNative
+ private boolean startPlayout() {
+ threadChecker.checkIsOnValidThread();
+ if (volumeLogger != null) {
+ volumeLogger.start();
+ }
+ Logging.d(TAG, "startPlayout");
+ assertTrue(audioTrack != null);
+ assertTrue(audioThread == null);
+
+ // Starts playing an audio track.
+ try {
+ audioTrack.play();
+ } catch (IllegalStateException e) {
+ reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
+ "AudioTrack.play failed: " + e.getMessage());
+ releaseAudioResources();
+ return false;
+ }
+ if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+ reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
+ "AudioTrack.play failed - incorrect state :" + audioTrack.getPlayState());
+ releaseAudioResources();
+ return false;
+ }
+
+ // Create and start new high-priority thread which calls AudioTrack.write()
+ // and where we also call the native nativeGetPlayoutData() callback to
+ // request decoded audio from WebRTC.
+ audioThread = new AudioTrackThread("AudioTrackJavaThread");
+ audioThread.start();
+ return true;
+ }
+
+ @CalledByNative
+ private boolean stopPlayout() {
+ threadChecker.checkIsOnValidThread();
+ if (volumeLogger != null) {
+ volumeLogger.stop();
+ }
+ Logging.d(TAG, "stopPlayout");
+ assertTrue(audioThread != null);
+ logUnderrunCount();
+ audioThread.stopThread();
+
+ Logging.d(TAG, "Stopping the AudioTrackThread...");
+ audioThread.interrupt();
+ if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
+ Logging.e(TAG, "Join of AudioTrackThread timed out.");
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ }
+ Logging.d(TAG, "AudioTrackThread has now been stopped.");
+ audioThread = null;
+ if (audioTrack != null) {
+ Logging.d(TAG, "Calling AudioTrack.stop...");
+ try {
+ audioTrack.stop();
+ Logging.d(TAG, "AudioTrack.stop is done.");
+ doAudioTrackStateCallback(AUDIO_TRACK_STOP);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
+ }
+ }
+ releaseAudioResources();
+ return true;
+ }
+
+ // Get max possible volume index for a phone call audio stream.
+ @CalledByNative
+ private int getStreamMaxVolume() {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "getStreamMaxVolume");
+ return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ // Set current volume level for a phone call audio stream.
+ @CalledByNative
+ private boolean setStreamVolume(int volume) {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "setStreamVolume(" + volume + ")");
+ if (audioManager.isVolumeFixed()) {
+ Logging.e(TAG, "The device implements a fixed volume policy.");
+ return false;
+ }
+ audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
+ return true;
+ }
+
+ /** Get current volume level for a phone call audio stream. */
+ @CalledByNative
+ private int getStreamVolume() {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "getStreamVolume");
+ return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
+ }
+
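+  // Returns the underrun count reported by the AudioTrack, -1 if no AudioTrack
+  // exists, or -2 if the API level (< 24) does not support underrun queries.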
+ @CalledByNative
+ private int GetPlayoutUnderrunCount() {
+ if (Build.VERSION.SDK_INT >= 24) {
+ if (audioTrack != null) {
+ return audioTrack.getUnderrunCount();
+ } else {
+ return -1;
+ }
+ } else {
+ return -2;
+ }
+ }
+
+ private void logMainParameters() {
+ Logging.d(TAG,
+ "AudioTrack: "
+ + "session ID: " + audioTrack.getAudioSessionId() + ", "
+ + "channels: " + audioTrack.getChannelCount() + ", "
+ + "sample rate: " + audioTrack.getSampleRate()
+ + ", "
+ // Gain (>=1.0) expressed as linear multiplier on sample values.
+ + "max gain: " + AudioTrack.getMaxVolume());
+ }
+
+ private static void logNativeOutputSampleRate(int requestedSampleRateInHz) {
+ final int nativeOutputSampleRate =
+ AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
+ Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
+ if (requestedSampleRateInHz != nativeOutputSampleRate) {
+ Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
+ }
+ }
+
+ private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes overrideAttributes) {
+ AudioAttributes.Builder attributesBuilder =
+ new AudioAttributes.Builder()
+ .setUsage(DEFAULT_USAGE)
+ .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
+
+ if (overrideAttributes != null) {
+ if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) {
+ attributesBuilder.setUsage(overrideAttributes.getUsage());
+ }
+ if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) {
+ attributesBuilder.setContentType(overrideAttributes.getContentType());
+ }
+
+ attributesBuilder.setFlags(overrideAttributes.getFlags());
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ attributesBuilder = applyAttributesOnQOrHigher(attributesBuilder, overrideAttributes);
+ }
+ }
+ return attributesBuilder.build();
+ }
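+
+  // Illustrative sketch (assumed caller-side usage): an override can replace the
+  // usage while inheriting the default speech content type, since the builder
+  // only copies fields that differ from their UNKNOWN defaults:
+  //
+  //   AudioAttributes override = new AudioAttributes.Builder()
+  //       .setUsage(AudioAttributes.USAGE_MEDIA)
+  //       .build();
+  //   AudioAttributes merged = getAudioAttributes(override);
+  //   // merged.getUsage() == USAGE_MEDIA; merged.getContentType() == CONTENT_TYPE_SPEECH.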
+
+  // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+ // It allows certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
+ private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig,
+ int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+ Logging.d(TAG, "createAudioTrackBeforeOreo");
+ logNativeOutputSampleRate(sampleRateInHz);
+
+ // Create an audio track where the audio usage is for VoIP and the content type is speech.
+ return new AudioTrack(getAudioAttributes(overrideAttributes),
+ new AudioFormat.Builder()
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .setSampleRate(sampleRateInHz)
+ .setChannelMask(channelConfig)
+ .build(),
+ bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
+ }
+
+  // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+ // Use the low-latency mode to improve audio latency. Note that the low-latency mode may
+ // prevent effects (such as AEC) from working. Assuming AEC is working, the delay changes
+ // that happen in low-latency mode during the call will cause the AEC to perform worse.
+ // The behavior of the low-latency mode may be device dependent, use at your own risk.
+ @TargetApi(Build.VERSION_CODES.O)
+ private static AudioTrack createAudioTrackOnOreoOrHigher(int sampleRateInHz, int channelConfig,
+ int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+ Logging.d(TAG, "createAudioTrackOnOreoOrHigher");
+ logNativeOutputSampleRate(sampleRateInHz);
+
+ // Create an audio track where the audio usage is for VoIP and the content type is speech.
+ return new AudioTrack.Builder()
+ .setAudioAttributes(getAudioAttributes(overrideAttributes))
+ .setAudioFormat(new AudioFormat.Builder()
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .setSampleRate(sampleRateInHz)
+ .setChannelMask(channelConfig)
+ .build())
+ .setBufferSizeInBytes(bufferSizeInBytes)
+ .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
+ .setTransferMode(AudioTrack.MODE_STREAM)
+ .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE)
+ .build();
+ }
+
+ @TargetApi(Build.VERSION_CODES.Q)
+ private static AudioAttributes.Builder applyAttributesOnQOrHigher(
+ AudioAttributes.Builder builder, AudioAttributes overrideAttributes) {
+ return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy());
+ }
+
+ private void logBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Logging.d(TAG,
+ "AudioTrack: "
+ // The effective size of the AudioTrack buffer that the app writes to.
+ + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
+ }
+ }
+
+ @CalledByNative
+ private int getBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ return audioTrack.getBufferSizeInFrames();
+ }
+ return -1;
+ }
+
+ @CalledByNative
+ private int getInitialBufferSizeInFrames() {
+ return initialBufferSizeInFrames;
+ }
+
+ private void logBufferCapacityInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ Logging.d(TAG,
+ "AudioTrack: "
+ // Maximum size of the AudioTrack buffer in frames.
+ + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames());
+ }
+ }
+
+ private void logMainParametersExtended() {
+ logBufferSizeInFrames();
+ logBufferCapacityInFrames();
+ }
+
+ // Prints the number of underrun occurrences in the application-level write
+ // buffer since the AudioTrack was created. An underrun occurs if the app does
+ // not write audio data quickly enough, causing the buffer to underflow and a
+ // potential audio glitch.
+ // TODO(henrika): keep track of this value in the field and possibly add new
+ // UMA stat if needed.
+ private void logUnderrunCount() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
+ }
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ private int channelCountToConfiguration(int channels) {
+ return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+ }
+
+ private static native void nativeCacheDirectBufferAddress(
+ long nativeAudioTrackJni, ByteBuffer byteBuffer);
+ private static native void nativeGetPlayoutData(long nativeAudioTrackJni, int bytes);
+
+ // Sets all samples to be played out to zero if `mute` is true, i.e.,
+ // ensures that the speaker is muted.
+ public void setSpeakerMute(boolean mute) {
+ Logging.w(TAG, "setSpeakerMute(" + mute + ")");
+ speakerMute = mute;
+ }
+
+ // Releases the native AudioTrack resources.
+ private void releaseAudioResources() {
+ Logging.d(TAG, "releaseAudioResources");
+ if (audioTrack != null) {
+ audioTrack.release();
+ audioTrack = null;
+ }
+ }
+
+ private void reportWebRtcAudioTrackInitError(String errorMessage) {
+ Logging.e(TAG, "Init playout error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackInitError(errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioTrackStartError(
+ AudioTrackStartErrorCode errorCode, String errorMessage) {
+ Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioTrackError(String errorMessage) {
+ Logging.e(TAG, "Run-time playback error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackError(errorMessage);
+ }
+ }
+
+ private void doAudioTrackStateCallback(int audioState) {
+ Logging.d(TAG, "doAudioTrackStateCallback: " + audioState);
+ if (stateCallback != null) {
+ if (audioState == WebRtcAudioTrack.AUDIO_TRACK_START) {
+ stateCallback.onWebRtcAudioTrackStart();
+ } else if (audioState == WebRtcAudioTrack.AUDIO_TRACK_STOP) {
+ stateCallback.onWebRtcAudioTrackStop();
+ } else {
+ Logging.e(TAG, "Invalid audio state");
+ }
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
new file mode 100644
index 0000000000..7b4b809ab1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
@@ -0,0 +1,308 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import static android.media.AudioManager.MODE_IN_CALL;
+import static android.media.AudioManager.MODE_IN_COMMUNICATION;
+import static android.media.AudioManager.MODE_NORMAL;
+import static android.media.AudioManager.MODE_RINGTONE;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import java.lang.Thread;
+import java.util.Arrays;
+import org.webrtc.Logging;
+
+final class WebRtcAudioUtils {
+ private static final String TAG = "WebRtcAudioUtilsExternal";
+
+ // Helper method for building a string of thread information.
+ public static String getThreadInfo() {
+ return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+ + "]";
+ }
+
+  // Returns true if we're running on an emulator.
+ public static boolean runningOnEmulator() {
+ return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
+ }
+
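+  // Note (not in the upstream file): this check only matches the legacy
+  // "goldfish" emulator; newer Android emulator images typically report
+  // Build.HARDWARE of "ranchu", so a broader check would also compare
+  // against that value.
+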
+ // Information about the current build, taken from system properties.
+ static void logDeviceInfo(String tag) {
+ Logging.d(tag,
+ "Android SDK: " + Build.VERSION.SDK_INT + ", "
+ + "Release: " + Build.VERSION.RELEASE + ", "
+ + "Brand: " + Build.BRAND + ", "
+ + "Device: " + Build.DEVICE + ", "
+ + "Id: " + Build.ID + ", "
+ + "Hardware: " + Build.HARDWARE + ", "
+ + "Manufacturer: " + Build.MANUFACTURER + ", "
+ + "Model: " + Build.MODEL + ", "
+ + "Product: " + Build.PRODUCT);
+ }
+
+ // Logs information about the current audio state. The idea is to call this
+ // method when errors are detected to log under what conditions the error
+ // occurred. Hopefully it will provide clues to what might be the root cause.
+ static void logAudioState(String tag, Context context, AudioManager audioManager) {
+ logDeviceInfo(tag);
+ logAudioStateBasic(tag, context, audioManager);
+ logAudioStateVolume(tag, audioManager);
+ logAudioDeviceInfo(tag, audioManager);
+ }
+
+  // Converts AudioDeviceInfo types to a local string representation.
+ static String deviceTypeToString(int type) {
+ switch (type) {
+ case AudioDeviceInfo.TYPE_UNKNOWN:
+ return "TYPE_UNKNOWN";
+ case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
+ return "TYPE_BUILTIN_EARPIECE";
+ case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+ return "TYPE_BUILTIN_SPEAKER";
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ return "TYPE_WIRED_HEADSET";
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ return "TYPE_WIRED_HEADPHONES";
+ case AudioDeviceInfo.TYPE_LINE_ANALOG:
+ return "TYPE_LINE_ANALOG";
+ case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+ return "TYPE_LINE_DIGITAL";
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ return "TYPE_BLUETOOTH_SCO";
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ return "TYPE_BLUETOOTH_A2DP";
+ case AudioDeviceInfo.TYPE_HDMI:
+ return "TYPE_HDMI";
+ case AudioDeviceInfo.TYPE_HDMI_ARC:
+ return "TYPE_HDMI_ARC";
+ case AudioDeviceInfo.TYPE_USB_DEVICE:
+ return "TYPE_USB_DEVICE";
+ case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+ return "TYPE_USB_ACCESSORY";
+ case AudioDeviceInfo.TYPE_DOCK:
+ return "TYPE_DOCK";
+ case AudioDeviceInfo.TYPE_FM:
+ return "TYPE_FM";
+ case AudioDeviceInfo.TYPE_BUILTIN_MIC:
+ return "TYPE_BUILTIN_MIC";
+ case AudioDeviceInfo.TYPE_FM_TUNER:
+ return "TYPE_FM_TUNER";
+ case AudioDeviceInfo.TYPE_TV_TUNER:
+ return "TYPE_TV_TUNER";
+ case AudioDeviceInfo.TYPE_TELEPHONY:
+ return "TYPE_TELEPHONY";
+ case AudioDeviceInfo.TYPE_AUX_LINE:
+ return "TYPE_AUX_LINE";
+ case AudioDeviceInfo.TYPE_IP:
+ return "TYPE_IP";
+ case AudioDeviceInfo.TYPE_BUS:
+ return "TYPE_BUS";
+ case AudioDeviceInfo.TYPE_USB_HEADSET:
+ return "TYPE_USB_HEADSET";
+ default:
+ return "TYPE_UNKNOWN";
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ public static String audioSourceToString(int source) {
+    // AudioSource.VOICE_PERFORMANCE requires API level 29. Use a local define instead.
+ final int VOICE_PERFORMANCE = 10;
+ switch (source) {
+ case AudioSource.DEFAULT:
+ return "DEFAULT";
+ case AudioSource.MIC:
+ return "MIC";
+ case AudioSource.VOICE_UPLINK:
+ return "VOICE_UPLINK";
+ case AudioSource.VOICE_DOWNLINK:
+ return "VOICE_DOWNLINK";
+ case AudioSource.VOICE_CALL:
+ return "VOICE_CALL";
+ case AudioSource.CAMCORDER:
+ return "CAMCORDER";
+ case AudioSource.VOICE_RECOGNITION:
+ return "VOICE_RECOGNITION";
+ case AudioSource.VOICE_COMMUNICATION:
+ return "VOICE_COMMUNICATION";
+ case AudioSource.UNPROCESSED:
+ return "UNPROCESSED";
+ case VOICE_PERFORMANCE:
+ return "VOICE_PERFORMANCE";
+ default:
+ return "INVALID";
+ }
+ }
+
+ public static String channelMaskToString(int mask) {
+    // For input (AudioRecord), the mask should be AudioFormat#CHANNEL_IN_MONO or
+ // AudioFormat#CHANNEL_IN_STEREO. AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all
+ // devices.
+ switch (mask) {
+ case AudioFormat.CHANNEL_IN_STEREO:
+ return "IN_STEREO";
+ case AudioFormat.CHANNEL_IN_MONO:
+ return "IN_MONO";
+ default:
+ return "INVALID";
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ public static String audioEncodingToString(int enc) {
+ switch (enc) {
+ case AudioFormat.ENCODING_INVALID:
+ return "INVALID";
+ case AudioFormat.ENCODING_PCM_16BIT:
+ return "PCM_16BIT";
+ case AudioFormat.ENCODING_PCM_8BIT:
+ return "PCM_8BIT";
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ return "PCM_FLOAT";
+ case AudioFormat.ENCODING_AC3:
+ return "AC3";
+ case AudioFormat.ENCODING_E_AC3:
+ return "AC3";
+ case AudioFormat.ENCODING_DTS:
+ return "DTS";
+ case AudioFormat.ENCODING_DTS_HD:
+ return "DTS_HD";
+ case AudioFormat.ENCODING_MP3:
+ return "MP3";
+ default:
+ return "Invalid encoding: " + enc;
+ }
+ }
+
+  // Logs basic information about the current audio state.
+ private static void logAudioStateBasic(String tag, Context context, AudioManager audioManager) {
+ Logging.d(tag,
+ "Audio State: "
+ + "audio mode: " + modeToString(audioManager.getMode()) + ", "
+ + "has mic: " + hasMicrophone(context) + ", "
+ + "mic muted: " + audioManager.isMicrophoneMute() + ", "
+ + "music active: " + audioManager.isMusicActive() + ", "
+ + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", "
+ + "BT SCO: " + audioManager.isBluetoothScoOn());
+ }
+
+  // Logs volume information for the most common stream types.
+ private static void logAudioStateVolume(String tag, AudioManager audioManager) {
+ final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
+ AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
+ AudioManager.STREAM_SYSTEM};
+ Logging.d(tag, "Audio State: ");
+ // Some devices may not have volume controls and might use a fixed volume.
+ boolean fixedVolume = audioManager.isVolumeFixed();
+ Logging.d(tag, " fixed volume=" + fixedVolume);
+ if (!fixedVolume) {
+ for (int stream : streams) {
+ StringBuilder info = new StringBuilder();
+ info.append(" " + streamTypeToString(stream) + ": ");
+ info.append("volume=").append(audioManager.getStreamVolume(stream));
+ info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
+ logIsStreamMute(tag, audioManager, stream, info);
+ Logging.d(tag, info.toString());
+ }
+ }
+ }
+
+ private static void logIsStreamMute(
+ String tag, AudioManager audioManager, int stream, StringBuilder info) {
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ info.append(", muted=").append(audioManager.isStreamMute(stream));
+ }
+ }
+
+ // Moz linting complains even though AudioManager.GET_DEVICES_ALL is
+ // listed in the docs here:
+ // https://developer.android.com/reference/android/media/AudioManager#GET_DEVICES_ALL
+ @SuppressLint("WrongConstant")
+ private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+ return;
+ }
+ final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ if (devices.length == 0) {
+ return;
+ }
+ Logging.d(tag, "Audio Devices: ");
+ for (AudioDeviceInfo device : devices) {
+ StringBuilder info = new StringBuilder();
+ info.append(" ").append(deviceTypeToString(device.getType()));
+ info.append(device.isSource() ? "(in): " : "(out): ");
+ // An empty array indicates that the device supports arbitrary channel counts.
+ if (device.getChannelCounts().length > 0) {
+ info.append("channels=").append(Arrays.toString(device.getChannelCounts()));
+ info.append(", ");
+ }
+ if (device.getEncodings().length > 0) {
+ // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4.
+ info.append("encodings=").append(Arrays.toString(device.getEncodings()));
+ info.append(", ");
+ }
+ if (device.getSampleRates().length > 0) {
+ info.append("sample rates=").append(Arrays.toString(device.getSampleRates()));
+ info.append(", ");
+ }
+ info.append("id=").append(device.getId());
+ Logging.d(tag, info.toString());
+ }
+ }
+
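+  // Illustrative sketch (not upstream code): the same enumeration API can be
+  // restricted to playback endpoints, e.g. to detect an attached wired
+  // headset. The helper name is hypothetical.
+  private static boolean exampleHasWiredHeadset(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+      return false;
+    }
+    for (AudioDeviceInfo device : audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS)) {
+      if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
+        return true;
+      }
+    }
+    return false;
+  }
+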
+  // Converts android.media.AudioManager modes into a local string representation.
+ static String modeToString(int mode) {
+ switch (mode) {
+ case MODE_IN_CALL:
+ return "MODE_IN_CALL";
+ case MODE_IN_COMMUNICATION:
+ return "MODE_IN_COMMUNICATION";
+ case MODE_NORMAL:
+ return "MODE_NORMAL";
+ case MODE_RINGTONE:
+ return "MODE_RINGTONE";
+ default:
+ return "MODE_INVALID";
+ }
+ }
+
+ private static String streamTypeToString(int stream) {
+ switch (stream) {
+ case AudioManager.STREAM_VOICE_CALL:
+ return "STREAM_VOICE_CALL";
+ case AudioManager.STREAM_MUSIC:
+ return "STREAM_MUSIC";
+ case AudioManager.STREAM_RING:
+ return "STREAM_RING";
+ case AudioManager.STREAM_ALARM:
+ return "STREAM_ALARM";
+ case AudioManager.STREAM_NOTIFICATION:
+ return "STREAM_NOTIFICATION";
+ case AudioManager.STREAM_SYSTEM:
+ return "STREAM_SYSTEM";
+ default:
+ return "STREAM_INVALID";
+ }
+ }
+
+ // Returns true if the device can record audio via a microphone.
+ private static boolean hasMicrophone(Context context) {
+ return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
+ }
+}