author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 19:33:14 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 19:33:14 +0000
commit    36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree      105e8c98ddea1c1e4784a60a5a6410fa416be2de /third_party/libwebrtc/sdk/android/src
parent    Initial commit. (diff)
Adding upstream version 115.7.0esr. (upstream/115.7.0esr, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/sdk/android/src')
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java | 684
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java | 38
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java | 31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java | 29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java | 33
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java | 340
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java | 428
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java | 458
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java | 72
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java | 98
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java | 365
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java | 271
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java | 17
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java | 26
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java | 281
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java | 52
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java | 763
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java | 39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java | 28
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java | 23
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java | 129
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java | 139
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java | 55
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java | 22
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java | 115
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java | 73
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java | 69
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java | 99
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java | 53
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java | 51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java | 63
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java | 29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java | 27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java | 46
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java | 27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java | 110
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java | 81
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java | 83
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java | 227
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java | 122
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java | 743
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java | 585
-rw-r--r--  third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java | 308
43 files changed, 7332 insertions(+), 0 deletions(-)
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java
new file mode 100644
index 0000000000..ad40898e4c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java
@@ -0,0 +1,684 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaFormat;
+import android.os.SystemClock;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.ThreadUtils.ThreadChecker;
+
+/**
+ * Android hardware video decoder.
+ */
+@SuppressWarnings("deprecation")
+// Cannot support API 16 without using deprecated methods.
+// TODO(sakal): Rename to MediaCodecVideoDecoder once the deprecated implementation is removed.
+class AndroidVideoDecoder implements VideoDecoder, VideoSink {
+ private static final String TAG = "AndroidVideoDecoder";
+
+ // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
+ private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
+ private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
+ private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
+ private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
+ private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
+ private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
+
+ // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
+ // this timeout.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+
+ // WebRTC queues input frames quickly at the beginning of the call. Wait for input buffers with a
+ // long timeout (500 ms) to prevent this from causing the codec to return an error.
+ private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
+
+ // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
+ // If this timeout is exceeded, the output thread will unblock and check if the decoder is still
+ // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
+ // MediaCodec.
+ private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
+
+ private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
+ private final String codecName;
+ private final VideoCodecMimeType codecType;
+
+ private static class FrameInfo {
+ final long decodeStartTimeMs;
+ final int rotation;
+
+ FrameInfo(long decodeStartTimeMs, int rotation) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.rotation = rotation;
+ }
+ }
+
+ private final BlockingDeque<FrameInfo> frameInfos;
+ private int colorFormat;
+
+ // Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats
+ // those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder
+ // thread and is immutable while the codec is running.
+ @Nullable private Thread outputThread;
+
+ // Checker that ensures work is run on the output thread.
+ private ThreadChecker outputThreadChecker;
+
+ // Checker that ensures work is run on the decoder thread. The decoder thread is owned by the
+ // caller and must be used to call initDecode, decode, and release.
+ private ThreadChecker decoderThreadChecker;
+
+ private volatile boolean running;
+ @Nullable private volatile Exception shutdownException;
+
+ // Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decoder thread
+ // or the output thread. Accesses should be protected with this lock.
+ private final Object dimensionLock = new Object();
+ private int width;
+ private int height;
+ private int stride;
+ private int sliceHeight;
+
+ // Whether the decoder has finished the first frame. The codec may not change output dimensions
+ // after delivering the first frame. Only accessed on the output thread while the decoder is
+ // running.
+ private boolean hasDecodedFirstFrame;
+ // Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed
+ // on the decoder thread.
+ private boolean keyFrameRequired;
+
+ private final @Nullable EglBase.Context sharedContext;
+ // Valid and immutable while the decoder is running.
+ @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+ @Nullable private Surface surface;
+
+ private static class DecodedTextureMetadata {
+ final long presentationTimestampUs;
+ final Integer decodeTimeMs;
+
+ DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) {
+ this.presentationTimestampUs = presentationTimestampUs;
+ this.decodeTimeMs = decodeTimeMs;
+ }
+ }
+
+ // Metadata for the last frame rendered to the texture.
+ private final Object renderedTextureMetadataLock = new Object();
+ @Nullable private DecodedTextureMetadata renderedTextureMetadata;
+
+ // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid
+ // and immutable while the decoder is running.
+ @Nullable private Callback callback;
+
+ // Valid and immutable while the decoder is running.
+ @Nullable private MediaCodecWrapper codec;
+
+ AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) {
+ if (!isSupportedColorFormat(colorFormat)) {
+ throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
+ }
+ Logging.d(TAG,
+ "ctor name: " + codecName + " type: " + codecType + " color format: " + colorFormat
+ + " context: " + sharedContext);
+ this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
+ this.codecName = codecName;
+ this.codecType = codecType;
+ this.colorFormat = colorFormat;
+ this.sharedContext = sharedContext;
+ this.frameInfos = new LinkedBlockingDeque<>();
+ }
+
+ @Override
+ public VideoCodecStatus initDecode(Settings settings, Callback callback) {
+ this.decoderThreadChecker = new ThreadChecker();
+
+ this.callback = callback;
+ if (sharedContext != null) {
+ surfaceTextureHelper = createSurfaceTextureHelper();
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ surfaceTextureHelper.startListening(this);
+ }
+ return initDecodeInternal(settings.width, settings.height);
+ }
+
+ // Internal variant is used when restarting the codec due to reconfiguration.
+ private VideoCodecStatus initDecodeInternal(int width, int height) {
+ decoderThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG,
+ "initDecodeInternal name: " + codecName + " type: " + codecType + " width: " + width
+ + " height: " + height);
+ if (outputThread != null) {
+ Logging.e(TAG, "initDecodeInternal called while the codec is already running");
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ // Note: it is not necessary to initialize dimensions under the lock, since the output thread
+ // is not running.
+ this.width = width;
+ this.height = height;
+
+ stride = width;
+ sliceHeight = height;
+ hasDecodedFirstFrame = false;
+ keyFrameRequired = true;
+
+ try {
+ codec = mediaCodecWrapperFactory.createByCodecName(codecName);
+ } catch (IOException | IllegalArgumentException | IllegalStateException e) {
+ Logging.e(TAG, "Cannot create media decoder " + codecName);
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
+ if (sharedContext == null) {
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ }
+ codec.configure(format, surface, null, 0);
+ codec.start();
+ } catch (IllegalStateException | IllegalArgumentException e) {
+ Logging.e(TAG, "initDecode failed", e);
+ release();
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+ running = true;
+ outputThread = createOutputThread();
+ outputThread.start();
+
+ Logging.d(TAG, "initDecodeInternal done");
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
+ decoderThreadChecker.checkIsOnValidThread();
+ if (codec == null || callback == null) {
+ Logging.d(TAG, "decode uninitalized, codec: " + (codec != null) + ", callback: " + callback);
+ return VideoCodecStatus.UNINITIALIZED;
+ }
+
+ if (frame.buffer == null) {
+ Logging.e(TAG, "decode() - no input data");
+ return VideoCodecStatus.ERR_PARAMETER;
+ }
+
+ int size = frame.buffer.remaining();
+ if (size == 0) {
+ Logging.e(TAG, "decode() - input buffer empty");
+ return VideoCodecStatus.ERR_PARAMETER;
+ }
+
+ // Load dimensions from shared memory under the dimension lock.
+ final int width;
+ final int height;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ }
+
+ // Check if the resolution changed and reset the codec if necessary.
+ if (frame.encodedWidth * frame.encodedHeight > 0
+ && (frame.encodedWidth != width || frame.encodedHeight != height)) {
+ VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight);
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ }
+
+ if (keyFrameRequired) {
+ // Need to process a key frame first.
+ if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
+ Logging.e(TAG, "decode() - key frame required first");
+ return VideoCodecStatus.NO_OUTPUT;
+ }
+ }
+
+ int index;
+ try {
+ index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueInputBuffer failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ if (index < 0) {
+ // Decoder is falling behind. No input buffers available.
+ // The decoder can't simply drop frames; it might lose a key frame.
+ Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind");
+ return VideoCodecStatus.ERROR;
+ }
+
+ ByteBuffer buffer;
+ try {
+ buffer = codec.getInputBuffer(index);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+
+ if (buffer.capacity() < size) {
+ Logging.e(TAG, "decode() - HW buffer too small");
+ return VideoCodecStatus.ERROR;
+ }
+ buffer.put(frame.buffer);
+
+ frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
+ try {
+ codec.queueInputBuffer(index, 0 /* offset */, size,
+ TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "queueInputBuffer failed", e);
+ frameInfos.pollLast();
+ return VideoCodecStatus.ERROR;
+ }
+ if (keyFrameRequired) {
+ keyFrameRequired = false;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return codecName;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ // TODO(sakal): This is not called on the correct thread but is still called synchronously.
+ // Re-enable the check once this is called on the correct thread.
+ // decoderThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "release");
+ VideoCodecStatus status = releaseInternal();
+ if (surface != null) {
+ releaseSurface();
+ surface = null;
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper.dispose();
+ surfaceTextureHelper = null;
+ }
+ synchronized (renderedTextureMetadataLock) {
+ renderedTextureMetadata = null;
+ }
+ callback = null;
+ frameInfos.clear();
+ return status;
+ }
+
+ // Internal variant is used when restarting the codec due to reconfiguration.
+ private VideoCodecStatus releaseInternal() {
+ if (!running) {
+ Logging.d(TAG, "release: Decoder is not running.");
+ return VideoCodecStatus.OK;
+ }
+ try {
+ // The outputThread actually stops and releases the codec once running is false.
+ running = false;
+ if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ // Log an exception to capture the stack trace and turn it into a TIMEOUT error.
+ Logging.e(TAG, "Media decoder release timeout", new RuntimeException());
+ return VideoCodecStatus.TIMEOUT;
+ }
+ if (shutdownException != null) {
+ // Log the exception and turn it into an error. Wrap the exception in a new exception to
+ // capture both the output thread's stack trace and this thread's stack trace.
+ Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException));
+ shutdownException = null;
+ return VideoCodecStatus.ERROR;
+ }
+ } finally {
+ codec = null;
+ outputThread = null;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
+ decoderThreadChecker.checkIsOnValidThread();
+ VideoCodecStatus status = releaseInternal();
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ return initDecodeInternal(newWidth, newHeight);
+ }
+
+ private Thread createOutputThread() {
+ return new Thread("AndroidVideoDecoder.outputThread") {
+ @Override
+ public void run() {
+ outputThreadChecker = new ThreadChecker();
+ while (running) {
+ deliverDecodedFrame();
+ }
+ releaseCodecOnOutputThread();
+ }
+ };
+ }
+
+ // Visible for testing.
+ protected void deliverDecodedFrame() {
+ outputThreadChecker.checkIsOnValidThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ // Block until an output buffer is available (up to 100 milliseconds). If the timeout is
+ // exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
+ // thread's loop. Blocking here prevents the output thread from busy-waiting while the codec
+ // is idle.
+ int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
+ if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ reformat(codec.getOutputFormat());
+ return;
+ }
+
+ if (index < 0) {
+ Logging.v(TAG, "dequeueOutputBuffer returned " + index);
+ return;
+ }
+
+ FrameInfo frameInfo = frameInfos.poll();
+ Integer decodeTimeMs = null;
+ int rotation = 0;
+ if (frameInfo != null) {
+ decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
+ rotation = frameInfo.rotation;
+ }
+
+ hasDecodedFirstFrame = true;
+
+ if (surfaceTextureHelper != null) {
+ deliverTextureFrame(index, info, rotation, decodeTimeMs);
+ } else {
+ deliverByteFrame(index, info, rotation, decodeTimeMs);
+ }
+
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "deliverDecodedFrame failed", e);
+ }
+ }
+
+ private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
+ final int rotation, final Integer decodeTimeMs) {
+ // Load dimensions from shared memory under the dimension lock.
+ final int width;
+ final int height;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ }
+
+ synchronized (renderedTextureMetadataLock) {
+ if (renderedTextureMetadata != null) {
+ codec.releaseOutputBuffer(index, false);
+ return; // We are still waiting for texture for the previous frame, drop this one.
+ }
+ surfaceTextureHelper.setTextureSize(width, height);
+ surfaceTextureHelper.setFrameRotation(rotation);
+ renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
+ codec.releaseOutputBuffer(index, /* render= */ true);
+ }
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ final VideoFrame newFrame;
+ final Integer decodeTimeMs;
+ final long timestampNs;
+ synchronized (renderedTextureMetadataLock) {
+ if (renderedTextureMetadata == null) {
+ throw new IllegalStateException(
+ "Rendered texture metadata was null in onTextureFrameAvailable.");
+ }
+ timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000;
+ decodeTimeMs = renderedTextureMetadata.decodeTimeMs;
+ renderedTextureMetadata = null;
+ }
+ // Change timestamp of frame.
+ final VideoFrame frameWithModifiedTimeStamp =
+ new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
+ callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);
+ }
+
+ private void deliverByteFrame(
+ int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
+ // Load dimensions from shared memory under the dimension lock.
+ int width;
+ int height;
+ int stride;
+ int sliceHeight;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ stride = this.stride;
+ sliceHeight = this.sliceHeight;
+ }
+
+ // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
+ // bytes for each of the U and V channels.
+ if (info.size < width * height * 3 / 2) {
+ Logging.e(TAG, "Insufficient output buffer size: " + info.size);
+ return;
+ }
+
+ if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
+ // Some codecs (Exynos) report an incorrect stride. Correct it here.
+ // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
+ // 2 * size / (3 * height).
+ stride = info.size * 2 / (height * 3);
+ }
+
+ ByteBuffer buffer = codec.getOutputBuffer(index);
+ buffer.position(info.offset);
+ buffer.limit(info.offset + info.size);
+ buffer = buffer.slice();
+
+ final VideoFrame.Buffer frameBuffer;
+ if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
+ frameBuffer = copyI420Buffer(buffer, stride, sliceHeight, width, height);
+ } else {
+ // All other supported color formats are NV12.
+ frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height);
+ }
+ codec.releaseOutputBuffer(index, /* render= */ false);
+
+ long presentationTimeNs = info.presentationTimeUs * 1000;
+ VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
+
+ // Note that qp is parsed on the C++ side.
+ callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
+ frame.release();
+ }
+
+ private VideoFrame.Buffer copyNV12ToI420Buffer(
+ ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+ // toI420 copies the buffer.
+ return new NV12Buffer(width, height, stride, sliceHeight, buffer, null /* releaseCallback */)
+ .toI420();
+ }
+
+ private VideoFrame.Buffer copyI420Buffer(
+ ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+ if (stride % 2 != 0) {
+ throw new AssertionError("Stride is not divisible by two: " + stride);
+ }
+
+ // Note that the case with odd `sliceHeight` is handled in a special way.
+ // The chroma height contained in the payload is rounded down instead of
+ // up, making it one row less than what we expect in WebRTC. Therefore, we
+ // have to duplicate the last chroma rows for this case. Also, the offset
+ // between the Y plane and the U plane is unintuitive for this case. See
+ // http://bugs.webrtc.org/6651 for more info.
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
+
+ final int uvStride = stride / 2;
+
+ final int yPos = 0;
+ final int yEnd = yPos + stride * height;
+ final int uPos = yPos + stride * sliceHeight;
+ final int uEnd = uPos + uvStride * chromaHeight;
+ final int vPos = uPos + uvStride * sliceHeight / 2;
+ final int vEnd = vPos + uvStride * chromaHeight;
+
+ VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height);
+
+ buffer.limit(yEnd);
+ buffer.position(yPos);
+ copyPlane(
+ buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height);
+
+ buffer.limit(uEnd);
+ buffer.position(uPos);
+ copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(),
+ chromaWidth, chromaHeight);
+ if (sliceHeight % 2 == 1) {
+ buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+ ByteBuffer dataU = frameBuffer.getDataU();
+ dataU.position(frameBuffer.getStrideU() * chromaHeight); // Seek to beginning of last row.
+ dataU.put(buffer); // Copy the last row.
+ }
+
+ buffer.limit(vEnd);
+ buffer.position(vPos);
+ copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(),
+ chromaWidth, chromaHeight);
+ if (sliceHeight % 2 == 1) {
+ buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+ ByteBuffer dataV = frameBuffer.getDataV();
+ dataV.position(frameBuffer.getStrideV() * chromaHeight); // Seek to beginning of last row.
+ dataV.put(buffer); // Copy the last row.
+ }
+
+ return frameBuffer;
+ }
+
+ private void reformat(MediaFormat format) {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Decoder format changed: " + format.toString());
+ final int newWidth;
+ final int newHeight;
+ if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM)
+ && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) {
+ newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT)
+ - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT);
+ newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM)
+ - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP);
+ } else {
+ newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+ newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+ }
+ // Compare to existing width, height, and save values under the dimension lock.
+ synchronized (dimensionLock) {
+ if (newWidth != width || newHeight != height) {
+ if (hasDecodedFirstFrame) {
+ stopOnOutputThread(new RuntimeException("Unexpected size change. "
+ + "Configured " + width + "*" + height + ". "
+ + "New " + newWidth + "*" + newHeight));
+ return;
+ } else if (newWidth <= 0 || newHeight <= 0) {
+ Logging.w(TAG,
+ "Unexpected format dimensions. Configured " + width + "*" + height + ". "
+ + "New " + newWidth + "*" + newHeight + ". Skip it");
+ return;
+ }
+ width = newWidth;
+ height = newHeight;
+ }
+ }
+
+ // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
+ // color format updates.
+ if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+ colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+ if (!isSupportedColorFormat(colorFormat)) {
+ stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
+ return;
+ }
+ }
+
+ // Save stride and sliceHeight under the dimension lock.
+ synchronized (dimensionLock) {
+ if (format.containsKey(MEDIA_FORMAT_KEY_STRIDE)) {
+ stride = format.getInteger(MEDIA_FORMAT_KEY_STRIDE);
+ }
+ if (format.containsKey(MEDIA_FORMAT_KEY_SLICE_HEIGHT)) {
+ sliceHeight = format.getInteger(MEDIA_FORMAT_KEY_SLICE_HEIGHT);
+ }
+ Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+ stride = Math.max(width, stride);
+ sliceHeight = Math.max(height, sliceHeight);
+ }
+ }
+
+ private void releaseCodecOnOutputThread() {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Releasing MediaCodec on output thread");
+ try {
+ codec.stop();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder stop failed", e);
+ }
+ try {
+ codec.release();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+ // Propagate exceptions caught during release back to the main thread.
+ shutdownException = e;
+ }
+ Logging.d(TAG, "Release on output thread done");
+ }
+
+ private void stopOnOutputThread(Exception e) {
+ outputThreadChecker.checkIsOnValidThread();
+ running = false;
+ shutdownException = e;
+ }
+
+ private boolean isSupportedColorFormat(int colorFormat) {
+ for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) {
+ if (supported == colorFormat) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ // Visible for testing.
+ protected SurfaceTextureHelper createSurfaceTextureHelper() {
+ return SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
+ }
+
+ // Visible for testing.
+ // TODO(sakal): Remove once Robolectric commit fa991a0 has been rolled to WebRTC.
+ protected void releaseSurface() {
+ surface.release();
+ }
+
+ // Visible for testing.
+ protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) {
+ return JavaI420Buffer.allocate(width, height);
+ }
+
+ // Visible for testing.
+ protected void copyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+ YuvHelper.copyPlane(src, srcStride, dst, dstStride, width, height);
+ }
+}
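
The plane layout handled by copyI420Buffer is the subtle part of the byte-buffer path above: planes are packed at stride granularity, and an odd sliceHeight leaves the chroma planes one row short. A standalone sketch of the same arithmetic, plus the Exynos stride recovery from deliverByteFrame (class and method names below are hypothetical, for illustration only):

// Sketch of the I420 layout math in AndroidVideoDecoder.copyI420Buffer.
// Planes in the codec output buffer are packed as:
//   Y: stride * sliceHeight bytes, then
//   U: (stride / 2) * (sliceHeight / 2) bytes, then
//   V: (stride / 2) * (sliceHeight / 2) bytes.
final class I420LayoutSketch {
  static void printOffsets(int width, int height, int stride, int sliceHeight) {
    final int chromaWidth = (width + 1) / 2;
    // Odd sliceHeight: the payload rounds the chroma height down, one row
    // short of what WebRTC expects (see http://bugs.webrtc.org/6651).
    final int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
    final int uvStride = stride / 2;
    final int uPos = stride * sliceHeight;
    final int vPos = uPos + uvStride * sliceHeight / 2;
    System.out.println("U at " + uPos + ", V at " + vPos + ", chroma "
        + chromaWidth + "x" + chromaHeight);
  }

  // Stride recovery used in deliverByteFrame for codecs (e.g. Exynos) that
  // report a wrong stride: size == stride * height * 3 / 2, so
  // stride == 2 * size / (3 * height).
  static int recoverStride(int bufferSize, int height) {
    return bufferSize * 2 / (height * 3);
  }

  public static void main(String[] args) {
    printOffsets(640, 480, /* stride= */ 640, /* sliceHeight= */ 480);
    // Prints: U at 307200, V at 384000, chroma 320x240.
  }
}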
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java
new file mode 100644
index 0000000000..3b5f5d2931
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
+class BaseBitrateAdjuster implements BitrateAdjuster {
+ protected int targetBitrateBps;
+ protected double targetFramerateFps;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ this.targetBitrateBps = targetBitrateBps;
+ this.targetFramerateFps = targetFramerateFps;
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ // No op.
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return targetBitrateBps;
+ }
+
+ @Override
+ public double getAdjustedFramerateFps() {
+ return targetFramerateFps;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java
new file mode 100644
index 0000000000..bfa08bad89
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Object that adjusts the bitrate of a hardware codec. */
+interface BitrateAdjuster {
+ /**
+ * Sets the target bitrate in bits per second and framerate in frames per second.
+ */
+ void setTargets(int targetBitrateBps, double targetFramerateFps);
+
+ /**
+ * Should be used to report the size of an encoded frame to the bitrate adjuster. Use
+ * getAdjustedBitrateBps to get the updated bitrate after calling this method.
+ */
+ void reportEncodedFrame(int size);
+
+ /** Gets the current bitrate. */
+ int getAdjustedBitrateBps();
+
+ /** Gets the current framerate. */
+ double getAdjustedFramerateFps();
+}
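
A sketch of how an encoder might drive this interface; the driver class and frame sizes below are hypothetical, only the interface calls come from the two files above:

package org.webrtc; // both types are package-private, so the sketch lives in-package

final class BitrateAdjusterSketch {
  public static void main(String[] args) {
    BitrateAdjuster adjuster = new BaseBitrateAdjuster();
    adjuster.setTargets(/* targetBitrateBps= */ 1_000_000, /* targetFramerateFps= */ 30.0);
    for (int encodedSizeBytes : new int[] {4200, 3900, 4500}) { // made-up frame sizes
      adjuster.reportEncodedFrame(encodedSizeBytes);
      // BaseBitrateAdjuster echoes the targets back unchanged; an adjusting
      // implementation such as DynamicBitrateAdjuster (also in this diff)
      // would adapt them to the observed encoder output.
      System.out.println(adjuster.getAdjustedBitrateBps() + " bps @ "
          + adjuster.getAdjustedFramerateFps() + " fps");
    }
  }
}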
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java
new file mode 100644
index 0000000000..9b410ceaef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @CalledByNative is used by the JNI generator to create the necessary JNI
+ * bindings and expose this method to native code.
+ */
+@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
+@Retention(RetentionPolicy.CLASS)
+public @interface CalledByNative {
+ /*
+ * If present, tells which inner class the method belongs to.
+ */
+ public String value() default "";
+}
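
A hypothetical usage sketch; the class and methods below are invented for illustration, only the annotation and its value() contract come from the file above:

import org.webrtc.CalledByNative;

class PeerStats {
  @CalledByNative // the JNI generator emits a binding for this method
  int frameCount() {
    return 42;
  }

  static class Snapshot {
    @CalledByNative("Snapshot") // value() names the inner class the method belongs to
    long timestampMs() {
      return 0;
    }
  }
}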
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java
new file mode 100644
index 0000000000..8a00a7fadb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @CalledByNativeUnchecked is used to generate JNI bindings that do not check for exceptions.
+ * It only makes sense to use this annotation on methods that declare a throws... spec.
+ * However, note that the exception received on the native side may be an 'unchecked' exception
+ * (RuntimeException) such as NullPointerException, so the native code should differentiate these cases.
+ * Usage of this should be very rare; where possible handle exceptions in the Java side and use a
+ * return value to indicate success / failure.
+ */
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.CLASS)
+public @interface CalledByNativeUnchecked {
+ /*
+ * If present, tells which inner class the method belongs to.
+ */
+ public String value() default "";
+}
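
A matching hypothetical sketch for the unchecked variant; per the javadoc above it belongs on methods that declare a throws spec, with exception handling left to the native caller:

import java.io.IOException;
import org.webrtc.CalledByNativeUnchecked;

class NativeCallbacks { // invented class, for illustration only
  @CalledByNativeUnchecked
  void parseConfig(String json) throws IOException {
    // The generated JNI wrapper does not check for a pending exception here;
    // the native caller must inspect and clear it itself.
    if (json.isEmpty()) {
      throw new IOException("empty config");
    }
  }
}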
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java
new file mode 100644
index 0000000000..a54f7201b2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.os.Handler;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+@SuppressWarnings("deprecation")
+class Camera1Session implements CameraSession {
+ private static final String TAG = "Camera1Session";
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+
+ private static final Histogram camera1StartTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
+ private static final Histogram camera1StopTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
+ private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
+ "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
+
+ private static enum SessionState { RUNNING, STOPPED }
+
+ private final Handler cameraThreadHandler;
+ private final Events events;
+ private final boolean captureToTexture;
+ private final Context applicationContext;
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final int cameraId;
+ private final Camera camera;
+ private final Camera.CameraInfo info;
+ private final CaptureFormat captureFormat;
+ // Used only for stats. Only used on the camera thread.
+ private final long constructionTimeNs; // Construction time of this class.
+
+ private SessionState state;
+ private boolean firstFrameReported;
+
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ public static void create(final CreateSessionCallback callback, final Events events,
+ final boolean captureToTexture, final Context applicationContext,
+ final SurfaceTextureHelper surfaceTextureHelper, final String cameraName,
+ final int width, final int height, final int framerate) {
+ final long constructionTimeNs = System.nanoTime();
+ Logging.d(TAG, "Open camera " + cameraName);
+ events.onCameraOpening();
+
+ final int cameraId;
+ try {
+ cameraId = Camera1Enumerator.getCameraIndex(cameraName);
+ } catch (IllegalArgumentException e) {
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ final Camera camera;
+ try {
+ camera = Camera.open(cameraId);
+ } catch (RuntimeException e) {
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ if (camera == null) {
+ callback.onFailure(
+ FailureType.ERROR, "Camera.open returned null for camera id = " + cameraId);
+ return;
+ }
+
+ try {
+ camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
+ } catch (IOException | RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ final Camera.CameraInfo info = new Camera.CameraInfo();
+ Camera.getCameraInfo(cameraId, info);
+
+ final CaptureFormat captureFormat;
+ try {
+ final Camera.Parameters parameters = camera.getParameters();
+ captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
+ final Size pictureSize = findClosestPictureSize(parameters, width, height);
+ updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
+ } catch (RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ if (!captureToTexture) {
+ final int frameSize = captureFormat.frameSize();
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+ camera.addCallbackBuffer(buffer.array());
+ }
+ }
+
+ // Calculate orientation manually and send it as CVO instead.
+ try {
+ camera.setDisplayOrientation(0 /* degrees */);
+ } catch (RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
+ surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
+ }
+
+ private static void updateCameraParameters(Camera camera, Camera.Parameters parameters,
+ CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) {
+ final List<String> focusModes = parameters.getSupportedFocusModes();
+
+ parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
+ parameters.setPreviewSize(captureFormat.width, captureFormat.height);
+ parameters.setPictureSize(pictureSize.width, pictureSize.height);
+ if (!captureToTexture) {
+ parameters.setPreviewFormat(captureFormat.imageFormat);
+ }
+
+ if (parameters.isVideoStabilizationSupported()) {
+ parameters.setVideoStabilization(true);
+ }
+ if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ }
+ camera.setParameters(parameters);
+ }
+
+ private static CaptureFormat findClosestCaptureFormat(
+ Camera.Parameters parameters, int width, int height, int framerate) {
+ // Find closest supported format for `width` x `height` @ `framerate`.
+ final List<CaptureFormat.FramerateRange> supportedFramerates =
+ Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
+ Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
+
+ final CaptureFormat.FramerateRange fpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
+
+ final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
+ CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
+
+ return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
+ }
+
+ private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) {
+ return CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
+ }
+
+ private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera,
+ Camera.CameraInfo info, CaptureFormat captureFormat, long constructionTimeNs) {
+ Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
+
+ this.cameraThreadHandler = new Handler();
+ this.events = events;
+ this.captureToTexture = captureToTexture;
+ this.applicationContext = applicationContext;
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.cameraId = cameraId;
+ this.camera = camera;
+ this.info = info;
+ this.captureFormat = captureFormat;
+ this.constructionTimeNs = constructionTimeNs;
+
+ surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+
+ startCapturing();
+ }
+
+ @Override
+ public void stop() {
+ Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
+ checkIsOnCameraThread();
+ if (state != SessionState.STOPPED) {
+ final long stopStartTime = System.nanoTime();
+ stopInternal();
+ final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+ camera1StopTimeMsHistogram.addSample(stopTimeMs);
+ }
+ }
+
+ private void startCapturing() {
+ Logging.d(TAG, "Start capturing");
+ checkIsOnCameraThread();
+
+ state = SessionState.RUNNING;
+
+ camera.setErrorCallback(new Camera.ErrorCallback() {
+ @Override
+ public void onError(int error, Camera camera) {
+ String errorMessage;
+ if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
+ errorMessage = "Camera server died!";
+ } else {
+ errorMessage = "Camera error: " + error;
+ }
+ Logging.e(TAG, errorMessage);
+ stopInternal();
+ if (error == Camera.CAMERA_ERROR_EVICTED) {
+ events.onCameraDisconnected(Camera1Session.this);
+ } else {
+ events.onCameraError(Camera1Session.this, errorMessage);
+ }
+ }
+ });
+
+ if (captureToTexture) {
+ listenForTextureFrames();
+ } else {
+ listenForBytebufferFrames();
+ }
+ try {
+ camera.startPreview();
+ } catch (RuntimeException e) {
+ stopInternal();
+ events.onCameraError(this, e.getMessage());
+ }
+ }
+
+ private void stopInternal() {
+ Logging.d(TAG, "Stop internal");
+ checkIsOnCameraThread();
+ if (state == SessionState.STOPPED) {
+ Logging.d(TAG, "Camera is already stopped");
+ return;
+ }
+
+ state = SessionState.STOPPED;
+ surfaceTextureHelper.stopListening();
+ // Note: stopPreview or other driver code might deadlock. Deadlock in
+ // Camera._stopPreview(Native Method) has been observed on
+ // Nexus 5 (hammerhead), OS version LMY48I.
+ camera.stopPreview();
+ camera.release();
+ events.onCameraClosed(this);
+ Logging.d(TAG, "Stop done");
+ }
+
+ private void listenForTextureFrames() {
+ surfaceTextureHelper.startListening((VideoFrame frame) -> {
+ checkIsOnCameraThread();
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+ return;
+ }
+
+ if (!firstFrameReported) {
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera1StartTimeMsHistogram.addSample(startTimeMs);
+ firstFrameReported = true;
+ }
+
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ final VideoFrame modifiedFrame =
+ new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+ (TextureBufferImpl) frame.getBuffer(),
+ /* mirror= */ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT,
+ /* rotation= */ 0),
+ /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+ events.onFrameCaptured(Camera1Session.this, modifiedFrame);
+ modifiedFrame.release();
+ });
+ }
+
+ private void listenForBytebufferFrames() {
+ camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
+ @Override
+ public void onPreviewFrame(final byte[] data, Camera callbackCamera) {
+ checkIsOnCameraThread();
+
+ if (callbackCamera != camera) {
+ Logging.e(TAG, "Callback from a different camera. This should never happen.");
+ return;
+ }
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
+ return;
+ }
+
+ final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+
+ if (!firstFrameReported) {
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera1StartTimeMsHistogram.addSample(startTimeMs);
+ firstFrameReported = true;
+ }
+
+ VideoFrame.Buffer frameBuffer = new NV21Buffer(
+ data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
+ if (state == SessionState.RUNNING) {
+ camera.addCallbackBuffer(data);
+ }
+ }));
+ final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
+ events.onFrameCaptured(Camera1Session.this, frame);
+ frame.release();
+ }
+ });
+ }
+
+ private int getFrameOrientation() {
+ int rotation = CameraSession.getDeviceOrientation(applicationContext);
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+ rotation = 360 - rotation;
+ }
+ return (info.orientation + rotation) % 360;
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+}
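
The orientation math at the end of Camera1Session is compact enough to miss: the device rotation is mirrored for back-facing cameras before being added to the sensor's mounting orientation. A standalone restatement with a worked example (helper names are hypothetical; the arithmetic is taken from getFrameOrientation above):

final class FrameOrientationSketch {
  static int frameOrientation(int sensorOrientation, int deviceRotation, boolean frontFacing) {
    // Back-facing cameras rotate the opposite way relative to the display.
    int rotation = frontFacing ? deviceRotation : 360 - deviceRotation;
    return (sensorOrientation + rotation) % 360;
  }

  public static void main(String[] args) {
    // Back camera mounted at 90 degrees on a device rotated 270 degrees:
    // (90 + (360 - 270)) % 360 == 180.
    System.out.println(frameOrientation(90, 270, /* frontFacing= */ false));
  }
}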
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java
new file mode 100644
index 0000000000..d5ee80c73e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java
@@ -0,0 +1,428 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Handler;
+import android.util.Range;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+class Camera2Session implements CameraSession {
+ private static final String TAG = "Camera2Session";
+
+ private static final Histogram camera2StartTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
+ private static final Histogram camera2StopTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
+ private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
+ "WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
+
+ private static enum SessionState { RUNNING, STOPPED }
+
+ private final Handler cameraThreadHandler;
+ private final CreateSessionCallback callback;
+ private final Events events;
+ private final Context applicationContext;
+ private final CameraManager cameraManager;
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final String cameraId;
+ private final int width;
+ private final int height;
+ private final int framerate;
+
+ // Initialized at start
+ private CameraCharacteristics cameraCharacteristics;
+ private int cameraOrientation;
+ private boolean isCameraFrontFacing;
+ private int fpsUnitFactor;
+ private CaptureFormat captureFormat;
+
+ // Initialized when camera opens
+ @Nullable private CameraDevice cameraDevice;
+ @Nullable private Surface surface;
+
+ // Initialized when capture session is created
+ @Nullable private CameraCaptureSession captureSession;
+
+ // State
+ private SessionState state = SessionState.RUNNING;
+ private boolean firstFrameReported;
+
+ // Used only for stats. Only used on the camera thread.
+ private final long constructionTimeNs; // Construction time of this class.
+
+ private class CameraStateCallback extends CameraDevice.StateCallback {
+ private String getErrorDescription(int errorCode) {
+ switch (errorCode) {
+ case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
+ return "Camera device has encountered a fatal error.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
+ return "Camera device could not be opened due to a device policy.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
+ return "Camera device is in use already.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
+ return "Camera service has encountered a fatal error.";
+ case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
+ return "Camera device could not be opened because"
+ + " there are too many other open camera devices.";
+ default:
+ return "Unknown camera error: " + errorCode;
+ }
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice camera) {
+ checkIsOnCameraThread();
+ final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
+ state = SessionState.STOPPED;
+ stopInternal();
+ if (startFailure) {
+ callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
+ } else {
+ events.onCameraDisconnected(Camera2Session.this);
+ }
+ }
+
+ @Override
+ public void onError(CameraDevice camera, int errorCode) {
+ checkIsOnCameraThread();
+ reportError(getErrorDescription(errorCode));
+ }
+
+ @Override
+ public void onOpened(CameraDevice camera) {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Camera opened.");
+ cameraDevice = camera;
+
+ surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ try {
+ camera.createCaptureSession(
+ Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to create capture session. " + e);
+ return;
+ }
+ }
+
+ @Override
+ public void onClosed(CameraDevice camera) {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Camera device closed.");
+ events.onCameraClosed(Camera2Session.this);
+ }
+ }
+
+ private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
+ @Override
+ public void onConfigureFailed(CameraCaptureSession session) {
+ checkIsOnCameraThread();
+ session.close();
+ reportError("Failed to configure capture session.");
+ }
+
+ @Override
+ public void onConfigured(CameraCaptureSession session) {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "Camera capture session configured.");
+ captureSession = session;
+ try {
+ /*
+ * The viable options for video capture requests are:
+ * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
+ * post-processing.
+ * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
+ * quality.
+ */
+ final CaptureRequest.Builder captureRequestBuilder =
+ cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+ // Set auto exposure fps range.
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
+ new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
+ captureFormat.framerate.max / fpsUnitFactor));
+ captureRequestBuilder.set(
+ CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
+ chooseStabilizationMode(captureRequestBuilder);
+ chooseFocusMode(captureRequestBuilder);
+
+ captureRequestBuilder.addTarget(surface);
+ session.setRepeatingRequest(
+ captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to start capture request. " + e);
+ return;
+ }
+
+ surfaceTextureHelper.startListening((VideoFrame frame) -> {
+ checkIsOnCameraThread();
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+ return;
+ }
+
+ if (!firstFrameReported) {
+ firstFrameReported = true;
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera2StartTimeMsHistogram.addSample(startTimeMs);
+ }
+
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ // Also, undo camera orientation, we report it as rotation instead.
+ final VideoFrame modifiedFrame =
+ new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+ (TextureBufferImpl) frame.getBuffer(),
+ /* mirror= */ isCameraFrontFacing,
+ /* rotation= */ -cameraOrientation),
+ /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+ events.onFrameCaptured(Camera2Session.this, modifiedFrame);
+ modifiedFrame.release();
+ });
+ Logging.d(TAG, "Camera device successfully started.");
+ callback.onDone(Camera2Session.this);
+ }
+
+ // Prefers optical stabilization over software stabilization if available. Only enables one of
+ // the stabilization modes at a time because having both enabled can cause strange results.
+ private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
+ final int[] availableOpticalStabilization = cameraCharacteristics.get(
+ CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
+ if (availableOpticalStabilization != null) {
+ for (int mode : availableOpticalStabilization) {
+ if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
+ captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+ CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+ CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
+ Logging.d(TAG, "Using optical stabilization.");
+ return;
+ }
+ }
+ }
+ // If no optical mode is available, try software.
+ final int[] availableVideoStabilization = cameraCharacteristics.get(
+ CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
+ if (availableVideoStabilization != null) {
+ for (int mode : availableVideoStabilization) {
+ if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
+ captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+ CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+ CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
+ Logging.d(TAG, "Using video stabilization.");
+ return;
+ }
+ }
+ }
+ Logging.d(TAG, "Stabilization not available.");
+ }
+
+ private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
+ final int[] availableFocusModes =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+ for (int mode : availableFocusModes) {
+ if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
+ captureRequestBuilder.set(
+ CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
+ Logging.d(TAG, "Using continuous video auto-focus.");
+ return;
+ }
+ }
+ Logging.d(TAG, "Auto-focus is not available.");
+ }
+ }
+
+ private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
+ @Override
+ public void onCaptureFailed(
+ CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
+ Logging.d(TAG, "Capture failed: " + failure);
+ }
+ }
+
+ public static void create(CreateSessionCallback callback, Events events,
+ Context applicationContext, CameraManager cameraManager,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
+ int framerate) {
+ new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
+ cameraId, width, height, framerate);
+ }
+
+ private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
+ CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
+ int width, int height, int framerate) {
+ Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
+
+ constructionTimeNs = System.nanoTime();
+
+ this.cameraThreadHandler = new Handler();
+ this.callback = callback;
+ this.events = events;
+ this.applicationContext = applicationContext;
+ this.cameraManager = cameraManager;
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.cameraId = cameraId;
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+
+ start();
+ }
+
+ private void start() {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "start");
+
+ try {
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+ } catch (CameraAccessException | IllegalArgumentException e) {
+ reportError("getCameraCharacteristics(): " + e.getMessage());
+ return;
+ }
+ cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_FRONT;
+
+ findCaptureFormat();
+
+ if (captureFormat == null) {
+ // findCaptureFormat reports an error already.
+ return;
+ }
+
+ openCamera();
+ }
+
+ private void findCaptureFormat() {
+ checkIsOnCameraThread();
+
+ Range<Integer>[] fpsRanges =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
+ List<CaptureFormat.FramerateRange> framerateRanges =
+ Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
+ List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
+ Logging.d(TAG, "Available preview sizes: " + sizes);
+ Logging.d(TAG, "Available fps ranges: " + framerateRanges);
+
+ if (framerateRanges.isEmpty() || sizes.isEmpty()) {
+ reportError("No supported capture formats.");
+ return;
+ }
+
+ final CaptureFormat.FramerateRange bestFpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
+
+ final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
+ CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
+
+ captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
+ Logging.d(TAG, "Using capture format: " + captureFormat);
+ }
+
+ @SuppressLint("MissingPermission")
+ private void openCamera() {
+ checkIsOnCameraThread();
+
+ Logging.d(TAG, "Opening camera " + cameraId);
+ events.onCameraOpening();
+
+ try {
+ cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
+ } catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
+ reportError("Failed to open camera: " + e);
+ return;
+ }
+ }
+
+ @Override
+ public void stop() {
+ Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
+ checkIsOnCameraThread();
+ if (state != SessionState.STOPPED) {
+ final long stopStartTime = System.nanoTime();
+ state = SessionState.STOPPED;
+ stopInternal();
+ final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+ camera2StopTimeMsHistogram.addSample(stopTimeMs);
+ }
+ }
+
+ private void stopInternal() {
+ Logging.d(TAG, "Stop internal");
+ checkIsOnCameraThread();
+
+ surfaceTextureHelper.stopListening();
+
+ if (captureSession != null) {
+ captureSession.close();
+ captureSession = null;
+ }
+ if (surface != null) {
+ surface.release();
+ surface = null;
+ }
+ if (cameraDevice != null) {
+ cameraDevice.close();
+ cameraDevice = null;
+ }
+
+ Logging.d(TAG, "Stop done");
+ }
+
+ private void reportError(String error) {
+ checkIsOnCameraThread();
+ Logging.e(TAG, "Error: " + error);
+
+ final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
+ state = SessionState.STOPPED;
+ stopInternal();
+ if (startFailure) {
+ callback.onFailure(FailureType.ERROR, error);
+ } else {
+ events.onCameraError(this, error);
+ }
+ }
+
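+  // Worked example of the math below: with the device in natural portrait
+  // (getDeviceOrientation() == 0) and a back-facing sensor mounted at 90 degrees
+  // (cameraOrientation == 90), rotation becomes 360 - 0 = 360 and the frame is
+  // reported with rotation (90 + 360) % 360 = 90. A front-facing sensor at 270
+  // in the same position yields (270 + 0) % 360 = 270.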
+ private int getFrameOrientation() {
+ int rotation = CameraSession.getDeviceOrientation(applicationContext);
+ if (!isCameraFrontFacing) {
+ rotation = 360 - rotation;
+ }
+ return (cameraOrientation + rotation) % 360;
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java
new file mode 100644
index 0000000000..1922a529e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java
@@ -0,0 +1,458 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.Looper;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+abstract class CameraCapturer implements CameraVideoCapturer {
+ enum SwitchState {
+ IDLE, // No switch requested.
+ PENDING, // Waiting for previous capture session to open.
+ IN_PROGRESS, // Waiting for new switched capture session to start.
+ }
+
+ private static final String TAG = "CameraCapturer";
+  private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+  private static final int OPEN_CAMERA_DELAY_MS = 500;
+  private static final int OPEN_CAMERA_TIMEOUT_MS = 10000;
+
+ private final CameraEnumerator cameraEnumerator;
+ private final CameraEventsHandler eventsHandler;
+ private final Handler uiThreadHandler;
+
+ private final CameraSession.CreateSessionCallback createSessionCallback =
+ new CameraSession.CreateSessionCallback() {
+ @Override
+ public void onDone(CameraSession session) {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "Create session done. Switch state: " + switchState);
+ uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
+ synchronized (stateLock) {
+ capturerObserver.onCapturerStarted(true /* success */);
+ sessionOpening = false;
+ currentSession = session;
+ cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
+ firstFrameObserved = false;
+ stateLock.notifyAll();
+
+ if (switchState == SwitchState.IN_PROGRESS) {
+ switchState = SwitchState.IDLE;
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
+ switchEventsHandler = null;
+ }
+ } else if (switchState == SwitchState.PENDING) {
+ String selectedCameraName = pendingCameraName;
+ pendingCameraName = null;
+ switchState = SwitchState.IDLE;
+ switchCameraInternal(switchEventsHandler, selectedCameraName);
+ }
+ }
+ }
+
+ @Override
+ public void onFailure(CameraSession.FailureType failureType, String error) {
+ checkIsOnCameraThread();
+ uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
+ synchronized (stateLock) {
+ capturerObserver.onCapturerStarted(false /* success */);
+ openAttemptsRemaining--;
+
+ if (openAttemptsRemaining <= 0) {
+ Logging.w(TAG, "Opening camera failed, passing: " + error);
+ sessionOpening = false;
+ stateLock.notifyAll();
+
+ if (switchState != SwitchState.IDLE) {
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(error);
+ switchEventsHandler = null;
+ }
+ switchState = SwitchState.IDLE;
+ }
+
+ if (failureType == CameraSession.FailureType.DISCONNECTED) {
+ eventsHandler.onCameraDisconnected();
+ } else {
+ eventsHandler.onCameraError(error);
+ }
+ } else {
+ Logging.w(TAG, "Opening camera failed, retry: " + error);
+ createSessionInternal(OPEN_CAMERA_DELAY_MS);
+ }
+ }
+ }
+ };
+
+ private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
+ @Override
+ public void onCameraOpening() {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (currentSession != null) {
+ Logging.w(TAG, "onCameraOpening while session was open.");
+ return;
+ }
+ eventsHandler.onCameraOpening(cameraName);
+ }
+ }
+
+ @Override
+ public void onCameraError(CameraSession session, String error) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onCameraError from another session: " + error);
+ return;
+ }
+ eventsHandler.onCameraError(error);
+ stopCapture();
+ }
+ }
+
+ @Override
+ public void onCameraDisconnected(CameraSession session) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onCameraDisconnected from another session.");
+ return;
+ }
+ eventsHandler.onCameraDisconnected();
+ stopCapture();
+ }
+ }
+
+ @Override
+ public void onCameraClosed(CameraSession session) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession && currentSession != null) {
+ Logging.d(TAG, "onCameraClosed from another session.");
+ return;
+ }
+ eventsHandler.onCameraClosed();
+ }
+ }
+
+ @Override
+ public void onFrameCaptured(CameraSession session, VideoFrame frame) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onFrameCaptured from another session.");
+ return;
+ }
+ if (!firstFrameObserved) {
+ eventsHandler.onFirstFrameAvailable();
+ firstFrameObserved = true;
+ }
+ cameraStatistics.addFrame();
+ capturerObserver.onFrameCaptured(frame);
+ }
+ }
+ };
+
+ private final Runnable openCameraTimeoutRunnable = new Runnable() {
+ @Override
+ public void run() {
+ eventsHandler.onCameraError("Camera failed to start within timeout.");
+ }
+ };
+
+ // Initialized on initialize
+ // -------------------------
+ private Handler cameraThreadHandler;
+ private Context applicationContext;
+ private org.webrtc.CapturerObserver capturerObserver;
+ private SurfaceTextureHelper surfaceHelper;
+
+ private final Object stateLock = new Object();
+ private boolean sessionOpening; /* guarded by stateLock */
+ @Nullable private CameraSession currentSession; /* guarded by stateLock */
+ private String cameraName; /* guarded by stateLock */
+ private String pendingCameraName; /* guarded by stateLock */
+ private int width; /* guarded by stateLock */
+ private int height; /* guarded by stateLock */
+ private int framerate; /* guarded by stateLock */
+ private int openAttemptsRemaining; /* guarded by stateLock */
+ private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
+ @Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
+ // Valid from onDone call until stopCapture, otherwise null.
+ @Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
+ private boolean firstFrameObserved; /* guarded by stateLock */
+
+ public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
+ CameraEnumerator cameraEnumerator) {
+ if (eventsHandler == null) {
+ eventsHandler = new CameraEventsHandler() {
+ @Override
+ public void onCameraError(String errorDescription) {}
+ @Override
+ public void onCameraDisconnected() {}
+ @Override
+ public void onCameraFreezed(String errorDescription) {}
+ @Override
+ public void onCameraOpening(String cameraName) {}
+ @Override
+ public void onFirstFrameAvailable() {}
+ @Override
+ public void onCameraClosed() {}
+ };
+ }
+
+ this.eventsHandler = eventsHandler;
+ this.cameraEnumerator = cameraEnumerator;
+ this.cameraName = cameraName;
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+ uiThreadHandler = new Handler(Looper.getMainLooper());
+
+ if (deviceNames.isEmpty()) {
+ throw new RuntimeException("No cameras attached.");
+ }
+ if (!deviceNames.contains(this.cameraName)) {
+ throw new IllegalArgumentException(
+ "Camera name " + this.cameraName + " does not match any known camera device.");
+ }
+ }
+
+ @Override
+ public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ org.webrtc.CapturerObserver capturerObserver) {
+ this.applicationContext = applicationContext;
+ this.capturerObserver = capturerObserver;
+ this.surfaceHelper = surfaceTextureHelper;
+ this.cameraThreadHandler = surfaceTextureHelper.getHandler();
+ }
+
+ @Override
+ public void startCapture(int width, int height, int framerate) {
+ Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
+ if (applicationContext == null) {
+ throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
+ }
+
+ synchronized (stateLock) {
+ if (sessionOpening || currentSession != null) {
+ Logging.w(TAG, "Session already open");
+ return;
+ }
+
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+
+ sessionOpening = true;
+ openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
+ createSessionInternal(0);
+ }
+ }
+
+ private void createSessionInternal(int delayMs) {
+    uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT_MS);
+ cameraThreadHandler.postDelayed(new Runnable() {
+ @Override
+ public void run() {
+ createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
+ surfaceHelper, cameraName, width, height, framerate);
+ }
+ }, delayMs);
+ }
+
+ @Override
+ public void stopCapture() {
+ Logging.d(TAG, "Stop capture");
+
+ synchronized (stateLock) {
+ while (sessionOpening) {
+ Logging.d(TAG, "Stop capture: Waiting for session to open");
+ try {
+ stateLock.wait();
+ } catch (InterruptedException e) {
+ Logging.w(TAG, "Stop capture interrupted while waiting for the session to open.");
+ Thread.currentThread().interrupt();
+ return;
+ }
+ }
+
+ if (currentSession != null) {
+ Logging.d(TAG, "Stop capture: Nulling session");
+ cameraStatistics.release();
+ cameraStatistics = null;
+ final CameraSession oldSession = currentSession;
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ oldSession.stop();
+ }
+ });
+ currentSession = null;
+ capturerObserver.onCapturerStopped();
+ } else {
+ Logging.d(TAG, "Stop capture: No session open");
+ }
+ }
+
+ Logging.d(TAG, "Stop capture done");
+ }
+
+ @Override
+ public void changeCaptureFormat(int width, int height, int framerate) {
+ Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
+ synchronized (stateLock) {
+ stopCapture();
+ startCapture(width, height, framerate);
+ }
+ }
+
+ @Override
+ public void dispose() {
+ Logging.d(TAG, "dispose");
+ stopCapture();
+ }
+
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
+ Logging.d(TAG, "switchCamera");
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+
+ if (deviceNames.size() < 2) {
+ reportCameraSwitchError("No camera to switch to.", switchEventsHandler);
+ return;
+ }
+
+        int cameraNameIndex = deviceNames.indexOf(cameraName);
+        String nextCameraName = deviceNames.get((cameraNameIndex + 1) % deviceNames.size());
+        switchCameraInternal(switchEventsHandler, nextCameraName);
+ }
+ });
+ }
+
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler, final String cameraName) {
+ Logging.d(TAG, "switchCamera");
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ switchCameraInternal(switchEventsHandler, cameraName);
+ }
+ });
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return false;
+ }
+
+ public void printStackTrace() {
+ Thread cameraThread = null;
+ if (cameraThreadHandler != null) {
+ cameraThread = cameraThreadHandler.getLooper().getThread();
+ }
+ if (cameraThread != null) {
+ StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
+ if (cameraStackTrace.length > 0) {
+ Logging.d(TAG, "CameraCapturer stack trace:");
+ for (StackTraceElement traceElem : cameraStackTrace) {
+ Logging.d(TAG, traceElem.toString());
+ }
+ }
+ }
+ }
+
+ private void reportCameraSwitchError(
+ String error, @Nullable CameraSwitchHandler switchEventsHandler) {
+ Logging.e(TAG, error);
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(error);
+ }
+ }
+
+ private void switchCameraInternal(
+ @Nullable final CameraSwitchHandler switchEventsHandler, final String selectedCameraName) {
+ Logging.d(TAG, "switchCamera internal");
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+
+ if (!deviceNames.contains(selectedCameraName)) {
+ reportCameraSwitchError("Attempted to switch to unknown camera device " + selectedCameraName,
+ switchEventsHandler);
+ return;
+ }
+
+ synchronized (stateLock) {
+ if (switchState != SwitchState.IDLE) {
+ reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
+ return;
+ }
+ if (!sessionOpening && currentSession == null) {
+ reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
+ return;
+ }
+
+ this.switchEventsHandler = switchEventsHandler;
+ if (sessionOpening) {
+ switchState = SwitchState.PENDING;
+ pendingCameraName = selectedCameraName;
+ return;
+ } else {
+ switchState = SwitchState.IN_PROGRESS;
+ }
+
+ Logging.d(TAG, "switchCamera: Stopping session");
+ cameraStatistics.release();
+ cameraStatistics = null;
+ final CameraSession oldSession = currentSession;
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ oldSession.stop();
+ }
+ });
+ currentSession = null;
+
+ cameraName = selectedCameraName;
+
+ sessionOpening = true;
+ openAttemptsRemaining = 1;
+ createSessionInternal(0);
+ }
+ Logging.d(TAG, "switchCamera done");
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ Logging.e(TAG, "Check is on camera thread failed.");
+ throw new RuntimeException("Not on camera thread.");
+ }
+ }
+
+ protected String getCameraName() {
+ synchronized (stateLock) {
+ return cameraName;
+ }
+ }
+
+  protected abstract void createCameraSession(
+ CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
+ Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
+ int width, int height, int framerate);
+}
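
Applications normally obtain a concrete CameraCapturer through a CameraEnumerator rather than subclassing it directly. A minimal usage sketch under that assumption (Camera2Enumerator, SurfaceTextureHelper, and VideoSource come from the public SDK surface, not from this patch; the class and method names below are illustrative):

    import android.content.Context;
    import org.webrtc.Camera2Enumerator;
    import org.webrtc.CameraEnumerator;
    import org.webrtc.CameraVideoCapturer;
    import org.webrtc.SurfaceTextureHelper;
    import org.webrtc.VideoSource;

    final class CaptureExample {
      // Picks the first front-facing camera and starts capture at 1280x720, 30 fps.
      static CameraVideoCapturer startFrontCamera(
          Context appContext, SurfaceTextureHelper helper, VideoSource source) {
        CameraEnumerator enumerator = new Camera2Enumerator(appContext);
        for (String name : enumerator.getDeviceNames()) {
          if (enumerator.isFrontFacing(name)) {
            // A null events handler is allowed; CameraCapturer installs a no-op one.
            CameraVideoCapturer capturer =
                enumerator.createCapturer(name, /* eventsHandler= */ null);
            capturer.initialize(helper, appContext, source.getCapturerObserver());
            capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);
            return capturer;
          }
        }
        throw new IllegalStateException("No front-facing camera found.");
      }
    }
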
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java
new file mode 100644
index 0000000000..8d137854d8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Matrix;
+import android.view.Surface;
+import android.view.WindowManager;
+
+interface CameraSession {
+ enum FailureType { ERROR, DISCONNECTED }
+
+ // Callbacks are fired on the camera thread.
+ interface CreateSessionCallback {
+ void onDone(CameraSession session);
+ void onFailure(FailureType failureType, String error);
+ }
+
+ // Events are fired on the camera thread.
+ interface Events {
+ void onCameraOpening();
+ void onCameraError(CameraSession session, String error);
+ void onCameraDisconnected(CameraSession session);
+ void onCameraClosed(CameraSession session);
+ void onFrameCaptured(CameraSession session, VideoFrame frame);
+ }
+
+ /**
+   * Stops the capture. Waits until no more calls to the capture observer will be made.
+ */
+ void stop();
+
+ static int getDeviceOrientation(Context context) {
+ final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+ switch (wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ return 90;
+ case Surface.ROTATION_180:
+ return 180;
+ case Surface.ROTATION_270:
+ return 270;
+ case Surface.ROTATION_0:
+ default:
+ return 0;
+ }
+ }
+
+ static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
+ TextureBufferImpl buffer, boolean mirror, int rotation) {
+ final Matrix transformMatrix = new Matrix();
+ // Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
+ transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
+ if (mirror) {
+ transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
+ }
+ transformMatrix.preRotate(rotation);
+ transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
+
+ // The width and height are not affected by rotation since Camera2Session has set them to the
+ // value they should be after undoing the rotation.
+ return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
+ }
+}
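
The helper above composes translate(0.5, 0.5) * mirror * rotate * translate(-0.5, -0.5), so a point is first re-centered on the origin, then rotated, then mirrored, then moved back. A dependency-free sketch of the same composition (the class below is illustrative, not part of the SDK) shows, for instance, that with mirror=true and rotation=0 the texture corner (0, 0) maps to (1, 0):

    final class TransformSketch {
      // Mirrors the point mapping performed by createTextureBufferWithModifiedTransformMatrix:
      // translate to the center, rotate, optionally mirror horizontally, translate back.
      static double[] map(double x, double y, boolean mirror, int rotationDegrees) {
        double px = x - 0.5, py = y - 0.5;
        double r = Math.toRadians(rotationDegrees);
        double rx = px * Math.cos(r) - py * Math.sin(r);
        double ry = px * Math.sin(r) + py * Math.cos(r);
        if (mirror) {
          rx = -rx;
        }
        return new double[] {rx + 0.5, ry + 0.5};
      }

      public static void main(String[] args) {
        // Mirroring about the center: (0, 0) -> (1.0, 0.0).
        System.out.println(java.util.Arrays.toString(map(0, 0, true, 0)));
        // Rotating by -90 degrees (as Camera2Session does with -cameraOrientation):
        // (0, 0) -> approximately (0.0, 1.0), up to floating-point noise.
        System.out.println(java.util.Arrays.toString(map(0, 0, false, -90)));
      }
    }
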
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
new file mode 100644
index 0000000000..96a15bbfe1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
+ * bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
+ * target bitrate by unacceptable margins.
+ */
+class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
+ // Change the bitrate at most once every three seconds.
+ private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
+ // Maximum bitrate adjustment scale - no more than 4 times.
+ private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
+ // Amount of adjustment steps to reach maximum scale.
+ private static final int BITRATE_ADJUSTMENT_STEPS = 20;
+
+ private static final double BITS_PER_BYTE = 8.0;
+
+ // How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
+ private double deviationBytes;
+ private double timeSinceLastAdjustmentMs;
+ private int bitrateAdjustmentScaleExp;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
+ // Rescale the accumulator level if the accumulator max decreases
+ deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
+ }
+ super.setTargets(targetBitrateBps, targetFramerateFps);
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ if (targetFramerateFps == 0) {
+ return;
+ }
+
+ // Accumulate the difference between actual and expected frame sizes.
+ double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFramerateFps;
+ deviationBytes += (size - expectedBytesPerFrame);
+ timeSinceLastAdjustmentMs += 1000.0 / targetFramerateFps;
+
+ // Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
+ // shortfall of the target.
+ double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
+
+ // Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
+ // bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
+ double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
+ deviationBytes = Math.min(deviationBytes, deviationCap);
+ deviationBytes = Math.max(deviationBytes, -deviationCap);
+
+ // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
+ // from the target value.
+ if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
+ return;
+ }
+
+ if (deviationBytes > deviationThresholdBytes) {
+ // Encoder generates too high bitrate - need to reduce the scale.
+ int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
+ // Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
+ // This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = deviationThresholdBytes;
+ } else if (deviationBytes < -deviationThresholdBytes) {
+ // Encoder generates too low bitrate - need to increase the scale.
+ int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
+ // Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
+ // This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = -deviationThresholdBytes;
+ }
+ timeSinceLastAdjustmentMs = 0;
+ }
+
+ private double getBitrateAdjustmentScale() {
+ return Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
+ (double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS);
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return (int) (targetBitrateBps * getBitrateAdjustmentScale());
+ }
+}
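
To make the control loop concrete, here is a standalone simulation of the same accumulate-and-step logic (a simplified sketch; the class name and the fixed 25% overshoot are illustrative, and a real encoder would react to the lowered target). An encoder that consistently overshoots the per-frame budget by 25% accumulates more than a second of excess within two 3-second windows, after which the scale exponent steps down and the adjusted bitrate drops below the target:

    final class BitrateAdjustSim {
      public static void main(String[] args) {
        final double targetBps = 1_000_000;
        final double fps = 30;
        final double budgetBytesPerFrame = targetBps / 8 / fps; // ~4167 bytes.
        final double thresholdBytes = targetBps / 8;            // One second's worth of data.
        double deviationBytes = 0;
        double timeSinceAdjustmentMs = 0;
        int scaleExp = 0;

        for (int frame = 1; frame <= (int) fps * 15; frame++) { // Simulate 15 seconds.
          // The simulated encoder produces 25% more bytes than budgeted for every frame.
          deviationBytes += 0.25 * budgetBytesPerFrame;
          deviationBytes = Math.min(deviationBytes, 3 * thresholdBytes); // Deviation cap.
          timeSinceAdjustmentMs += 1000.0 / fps;
          if (timeSinceAdjustmentMs <= 3000) {
            continue;
          }
          if (deviationBytes > thresholdBytes) {
            scaleExp = Math.max(scaleExp - (int) (deviationBytes / thresholdBytes + 0.5), -20);
            deviationBytes = thresholdBytes;
          }
          timeSinceAdjustmentMs = 0;
          // Scale = 4^(exp/20): prints 1000000, then 870551, 757858, ... as exp steps down.
          System.out.printf("t=%2ds adjusted bitrate: %.0f bps%n", frame / (int) fps,
              targetBps * Math.pow(4, scaleExp / 20.0));
        }
      }
    }
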
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java
new file mode 100644
index 0000000000..254a17c750
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java
@@ -0,0 +1,365 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLException;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import androidx.annotation.Nullable;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+class EglBase10Impl implements EglBase10 {
+ private static final String TAG = "EglBase10Impl";
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ @Nullable private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGL wrapper for an actual EGLContext.
+ private static class Context implements EglBase10.Context {
+ private final EGL10 egl;
+ private final EGLContext eglContext;
+ private final EGLConfig eglContextConfig;
+
+ @Override
+ public EGLContext getRawContext() {
+ return eglContext;
+ }
+
+ @Override
+ public long getNativeEglContext() {
+ EGLContext previousContext = egl.eglGetCurrentContext();
+ EGLDisplay currentDisplay = egl.eglGetCurrentDisplay();
+ EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL10.EGL_DRAW);
+ EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL10.EGL_READ);
+ EGLSurface tempEglSurface = null;
+
+ if (currentDisplay == EGL10.EGL_NO_DISPLAY) {
+ currentDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ }
+
+ try {
+ if (previousContext != eglContext) {
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE};
+ tempEglSurface =
+ egl.eglCreatePbufferSurface(currentDisplay, eglContextConfig, surfaceAttribs);
+ if (!egl.eglMakeCurrent(currentDisplay, tempEglSurface, tempEglSurface, eglContext)) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to make temporary EGL surface active: " + egl.eglGetError());
+ }
+ }
+
+ return nativeGetCurrentNativeEGLContext();
+ } finally {
+ if (tempEglSurface != null) {
+ egl.eglMakeCurrent(
+ currentDisplay, previousDrawSurface, previousReadSurface, previousContext);
+ egl.eglDestroySurface(currentDisplay, tempEglSurface);
+ }
+ }
+ }
+
+ public Context(EGL10 egl, EGLContext eglContext, EGLConfig eglContextConfig) {
+ this.egl = egl;
+ this.eglContext = eglContext;
+ this.eglContextConfig = eglContextConfig;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) {
+ this.egl = (EGL10) EGLContext.getEGL();
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(egl, eglDisplay, configAttributes);
+ final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
+ Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
+ }
+
+ @Override
+ public void createSurface(Surface surface) {
+ /**
+ * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+ * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+ * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+ */
+ class FakeSurfaceHolder implements SurfaceHolder {
+ private final Surface surface;
+
+ FakeSurfaceHolder(Surface surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ public void addCallback(Callback callback) {}
+
+ @Override
+ public void removeCallback(Callback callback) {}
+
+ @Override
+ public boolean isCreating() {
+ return false;
+ }
+
+ @Deprecated
+ @Override
+ public void setType(int i) {}
+
+ @Override
+ public void setFixedSize(int i, int i2) {}
+
+ @Override
+ public void setSizeFromLayout() {}
+
+ @Override
+ public void setFormat(int i) {}
+
+ @Override
+ public void setKeepScreenOn(boolean b) {}
+
+ @Nullable
+ @Override
+ public Canvas lockCanvas() {
+ return null;
+ }
+
+ @Nullable
+ @Override
+ public Canvas lockCanvas(Rect rect) {
+ return null;
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas) {}
+
+ @Nullable
+ @Override
+ public Rect getSurfaceFrame() {
+ return null;
+ }
+
+ @Override
+ public Surface getSurface() {
+ return surface;
+ }
+ }
+
+ createSurfaceInternal(new FakeSurfaceHolder(surface));
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+
+ @Override
+ public org.webrtc.EglBase.Context getEglBaseContext() {
+ return new Context(egl, eglContext, eglConfig);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+    final int[] widthArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+    final int[] heightArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ egl.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new GLException(egl.eglGetError(),
+ "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new GLException(egl.eglGetError(),
+ "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ egl.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ @Override
+ public void swapBuffers(long timeStampNs) {
+ // Setting presentation time is not supported for EGL 1.0.
+ swapBuffers();
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGL10 egl, EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new GLException(
+ egl.eglGetError(), "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+  // Return an EGLContext, or die trying.
+ private EGLContext createEglContext(@Nullable EGLContext sharedContext, EGLDisplay eglDisplay,
+ EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ }
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglContext;
+ }
+
+ private static native long nativeGetCurrentNativeEGLContext();
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java
new file mode 100644
index 0000000000..caf45b091e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLException;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
+class EglBase14Impl implements EglBase14 {
+ private static final String TAG = "EglBase14Impl";
+ private EGLContext eglContext;
+ @Nullable private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
+ public static class Context implements EglBase14.Context {
+ private final EGLContext egl14Context;
+
+ @Override
+ public EGLContext getRawContext() {
+ return egl14Context;
+ }
+
+ @Override
+ public long getNativeEglContext() {
+ return egl14Context.getNativeHandle();
+ }
+
+ public Context(android.opengl.EGLContext eglContext) {
+ this.egl14Context = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ // `sharedContext` may be null.
+ public EglBase14Impl(EGLContext sharedContext, int[] configAttributes) {
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
+ Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
+ }
+
+ // Create EGLSurface from the Android Surface.
+ @Override
+ public void createSurface(Surface surface) {
+ createSurfaceInternal(surface);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either Surface or SurfaceTexture.
+ private void createSurfaceInternal(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+
+ @Override
+ public Context getEglBaseContext() {
+ return new Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL14.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+    final int[] widthArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+    final int[] heightArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ synchronized (EglBase.lock) {
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ }
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ @Override
+ public void swapBuffers(long timeStampNs) {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ // See
+ // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+ EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new GLException(EGL14.eglGetError(),
+ "Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ throw new GLException(EGL14.eglGetError(),
+ "Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+  // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(@Nullable EGLContext sharedContext,
+ EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL14.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ }
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ return eglContext;
+ }
+}
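
Both Impl classes are package-private and are normally reached through the EglBase factory. A minimal off-screen usage sketch, assuming the EglBase.create(...) factory and the EglBase.CONFIG_PIXEL_BUFFER attribute array from the public SDK surface (neither appears in this patch; OffscreenGl is an illustrative name):

    import org.webrtc.EglBase;

    final class OffscreenGl {
      static void runOffscreen(Runnable glWork) {
        // The factory prefers the EGL14 implementation and falls back to EGL10.
        EglBase eglBase = EglBase.create(/* sharedContext= */ null, EglBase.CONFIG_PIXEL_BUFFER);
        try {
          eglBase.createDummyPbufferSurface(); // 1x1 pbuffer so the context can be made current.
          eglBase.makeCurrent();
          glWork.run(); // GLES calls are valid on this thread now.
        } finally {
          eglBase.release(); // Releases the surface, detaches, and tears down the context.
        }
      }
    }
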
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java
new file mode 100644
index 0000000000..fe9481e182
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Empty class for use in libjingle_peerconnection_java because all targets require at least one
+ * Java file.
+ */
+class Empty {}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
new file mode 100644
index 0000000000..e28b7b5a26
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
+ * hardware codecs that assume the framerate never changes.
+ */
+class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
+ private static final int DEFAULT_FRAMERATE_FPS = 30;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+    // Keep the codec framerate fixed at the default and scale the bitrate to compensate.
+ this.targetFramerateFps = DEFAULT_FRAMERATE_FPS;
+ this.targetBitrateBps = (int) (targetBitrateBps * DEFAULT_FRAMERATE_FPS / targetFramerateFps);
+ }
+}
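
The rescaling keeps the per-frame byte budget intact: a codec that believes it runs at 30 fps must be handed proportionally more bits when the true capture rate is lower. A worked sketch (the class name is illustrative):

    final class FramerateAdjustExample {
      public static void main(String[] args) {
        int requestedBitrateBps = 1_000_000; // 1 Mbps requested by the bandwidth estimator.
        double actualFramerateFps = 15;      // True capture rate.
        int codecFramerateFps = 30;          // DEFAULT_FRAMERATE_FPS handed to the codec.
        int adjustedBitrateBps =
            (int) (requestedBitrateBps * codecFramerateFps / actualFramerateFps);
        // Per-frame budget is preserved: 1000000 / 8 / 15 == 2000000 / 8 / 30 bytes per frame.
        System.out.println(adjustedBitrateBps); // 2000000
      }
    }
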
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java
new file mode 100644
index 0000000000..34144e2f75
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.FloatBuffer;
+
+/**
+ * Helper class implementing RendererCommon.GlDrawer for multiple input sources (OES, RGB, or
+ * YUV) driven by a generic fragment shader. The generic fragment shader should sample pixel
+ * values from the function "sample", which this class provides as an abstraction over the input
+ * source type (OES, RGB, or YUV). The texture coordinate variable name will be "tc" and the
+ * texture matrix in the vertex shader will be "tex_mat". The simplest possible generic shader
+ * that just draws pixels from the frame unmodified looks like:
+ * void main() {
+ *   gl_FragColor = sample(tc);
+ * }
+ * This class covers the cases for most simple shaders and generates the necessary boilerplate.
+ * Advanced shaders can always implement RendererCommon.GlDrawer directly.
+ */
+class GlGenericDrawer implements RendererCommon.GlDrawer {
+ /**
+ * The different shader types representing different input sources. YUV here represents three
+ * separate Y, U, V textures.
+ */
+  public enum ShaderType { OES, RGB, YUV }
+
+ /**
+   * The shader callbacks are used to customize behavior for a GlDrawer. They provide a hook to
+   * set uniform variables in the shader before a frame is drawn.
+ * uniform variables in the shader before a frame is drawn.
+ */
+  public interface ShaderCallbacks {
+ /**
+ * This callback is called when a new shader has been compiled and created. It will be called
+ * for the first frame as well as when the shader type is changed. This callback can be used to
+ * do custom initialization of the shader that only needs to happen once.
+ */
+ void onNewShader(GlShader shader);
+
+ /**
+ * This callback is called before rendering a frame. It can be used to do custom preparation of
+ * the shader that needs to happen every frame.
+ */
+ void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight);
+ }
+
+ private static final String INPUT_VERTEX_COORDINATE_NAME = "in_pos";
+ private static final String INPUT_TEXTURE_COORDINATE_NAME = "in_tc";
+ private static final String TEXTURE_MATRIX_NAME = "tex_mat";
+ private static final String DEFAULT_VERTEX_SHADER_STRING = "varying vec2 tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "uniform mat4 tex_mat;\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " tc = (tex_mat * in_tc).xy;\n"
+ + "}\n";
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1)
+ // is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) {
+ final StringBuilder stringBuilder = new StringBuilder();
+ if (shaderType == ShaderType.OES) {
+ stringBuilder.append("#extension GL_OES_EGL_image_external : require\n");
+ }
+ stringBuilder.append("precision mediump float;\n");
+ stringBuilder.append("varying vec2 tc;\n");
+
+ if (shaderType == ShaderType.YUV) {
+ stringBuilder.append("uniform sampler2D y_tex;\n");
+ stringBuilder.append("uniform sampler2D u_tex;\n");
+ stringBuilder.append("uniform sampler2D v_tex;\n");
+
+ // Add separate function for sampling texture.
+ // yuv_to_rgb_mat is inverse of the matrix defined in YuvConverter.
+ stringBuilder.append("vec4 sample(vec2 p) {\n");
+ stringBuilder.append(" float y = texture2D(y_tex, p).r * 1.16438;\n");
+ stringBuilder.append(" float u = texture2D(u_tex, p).r;\n");
+ stringBuilder.append(" float v = texture2D(v_tex, p).r;\n");
+ stringBuilder.append(" return vec4(y + 1.59603 * v - 0.874202,\n");
+ stringBuilder.append(" y - 0.391762 * u - 0.812968 * v + 0.531668,\n");
+ stringBuilder.append(" y + 2.01723 * u - 1.08563, 1);\n");
+ stringBuilder.append("}\n");
+ stringBuilder.append(genericFragmentSource);
+ } else {
+ final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D";
+ stringBuilder.append("uniform ").append(samplerName).append(" tex;\n");
+
+ // Update the sampling function in-place.
+ stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, "));
+ }
+
+ return stringBuilder.toString();
+ }
+
+ private final String genericFragmentSource;
+ private final String vertexShader;
+ private final ShaderCallbacks shaderCallbacks;
+ @Nullable private ShaderType currentShaderType;
+ @Nullable private GlShader currentShader;
+ private int inPosLocation;
+ private int inTcLocation;
+ private int texMatrixLocation;
+
+ public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks);
+ }
+
+ public GlGenericDrawer(
+ String vertexShader, String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this.vertexShader = vertexShader;
+ this.genericFragmentSource = genericFragmentSource;
+ this.shaderCallbacks = shaderCallbacks;
+ }
+
+ // Visible for testing.
+ GlShader createShader(ShaderType shaderType) {
+ return new GlShader(
+ vertexShader, createFragmentShaderString(genericFragmentSource, shaderType));
+ }
+
+ /**
+ * Draw an OES texture frame with the specified texture transformation matrix. Required
+ * resources are allocated at the first call to this function.
+ */
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ /**
+ * Draw an RGB(A) texture frame with the specified texture transformation matrix. Required
+ * resources are allocated at the first call to this function.
+ */
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ /**
+ * Draw a YUV frame with the specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ }
+ // Draw the textures.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the textures as a precaution.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+ }
+
+ private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth,
+ int frameHeight, int viewportWidth, int viewportHeight) {
+ final GlShader shader;
+ if (shaderType.equals(currentShaderType)) {
+ // Same shader type as before, reuse the existing shader.
+ shader = currentShader;
+ } else {
+ // Allocate new shader.
+ currentShaderType = null;
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ }
+
+ shader = createShader(shaderType);
+ currentShaderType = shaderType;
+ currentShader = shader;
+
+ shader.useProgram();
+ // Set input texture units.
+ if (shaderType == ShaderType.YUV) {
+ GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+ } else {
+ GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
+ }
+
+ GlUtil.checkNoGLES2Error("Create shader");
+ shaderCallbacks.onNewShader(shader);
+ texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME);
+ inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME);
+ inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME);
+ }
+
+ shader.useProgram();
+
+ // Upload the vertex coordinates.
+ GLES20.glEnableVertexAttribArray(inPosLocation);
+ GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_BUFFER);
+
+ // Upload the texture coordinates.
+ GLES20.glEnableVertexAttribArray(inTcLocation);
+ GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_TEXTURE_BUFFER);
+
+ // Upload the texture transformation matrix.
+ GLES20.glUniformMatrix4fv(
+ texMatrixLocation, /* count= */ 1, /* transpose= */ false, texMatrix, /* offset= */ 0);
+
+ // Do custom per-frame shader preparation.
+ shaderCallbacks.onPrepareShader(
+ shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ GlUtil.checkNoGLES2Error("Prepare shader");
+ }
+
+ /**
+ * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+ */
+ @Override
+ public void release() {
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ currentShaderType = null;
+ }
+ }
+}
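The ShaderCallbacks hook above is GlGenericDrawer's extension point: onNewShader runs once per compiled program, onPrepareShader once per drawn frame. A minimal sketch of using it follows; the grayscale fragment source and the `grayness` uniform are hypothetical, and the code assumes it lives in the org.webrtc package, since both types are package-private. Note that the generic fragment source only defines main() in terms of sample() and tc; createFragmentShaderString rewrites sample() per shader type.

import android.opengl.GLES20;

class GrayscaleDrawerFactory {
  // Hypothetical fragment source; sample() is rewritten per shader type.
  private static final String GRAY_FRAGMENT_SOURCE = "uniform float grayness;\n"
      + "void main() {\n"
      + "  vec4 c = sample(tc);\n"
      + "  float g = dot(c.rgb, vec3(0.299, 0.587, 0.114));\n"
      + "  gl_FragColor = vec4(mix(c.rgb, vec3(g), grayness), c.a);\n"
      + "}\n";

  static GlGenericDrawer create() {
    return new GlGenericDrawer(GRAY_FRAGMENT_SOURCE, new GlGenericDrawer.ShaderCallbacks() {
      private int graynessLocation;

      @Override
      public void onNewShader(GlShader shader) {
        // Runs once per compiled shader: cache uniform locations here.
        graynessLocation = shader.getUniformLocation("grayness");
      }

      @Override
      public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth,
          int frameHeight, int viewportWidth, int viewportHeight) {
        // Runs before every frame: update per-frame uniforms here.
        GLES20.glUniform1f(graynessLocation, 0.5f);
      }
    });
  }
}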
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java
new file mode 100644
index 0000000000..abb79c6582
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Map;
+import java.util.HashMap;
+
+/** Container for static helper functions related to dealing with H264 codecs. */
+class H264Utils {
+ public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
+ public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
+ public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
+
+ public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0";
+ public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
+ public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
+ public static final String H264_CONSTRAINED_HIGH_3_1 =
+ H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
+ public static final String H264_CONSTRAINED_BASELINE_3_1 =
+ H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
+
+ public static Map<String, String> getDefaultH264Params(boolean isHighProfile) {
+ final Map<String, String> params = new HashMap<>();
+ params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
+ params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
+ params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
+ isHighProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1
+ : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
+ return params;
+ }
+
+ public static final VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC =
+ new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false));
+ public static final VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC =
+ new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true));
+
+ public static boolean isSameH264Profile(
+ Map<String, String> params1, Map<String, String> params2) {
+ return nativeIsSameH264Profile(params1, params2);
+ }
+
+ private static native boolean nativeIsSameH264Profile(
+ Map<String, String> params1, Map<String, String> params2);
+}
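The profile constants above concatenate into the three-byte profile-level-id carried in SDP fmtp lines: "42e0" + "1f" = "42e01f" for Constrained Baseline 3.1, "640c" + "1f" = "640c1f" for Constrained High 3.1. A short usage sketch; the `false` result assumes the native comparison distinguishes the two profiles, which is the intent of isSameH264Profile:

Map<String, String> high = H264Utils.getDefaultH264Params(/* isHighProfile= */ true);
// high now maps: level-asymmetry-allowed -> "1", packetization-mode -> "1",
//                profile-level-id -> "640c1f"
Map<String, String> baseline = H264Utils.getDefaultH264Params(/* isHighProfile= */ false);
boolean same = H264Utils.isSameH264Profile(high, baseline); // expected: false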
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
new file mode 100644
index 0000000000..42a3ccfbfd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
@@ -0,0 +1,763 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Bundle;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.ThreadUtils.ThreadChecker;
+
+/**
+ * Android hardware video encoder.
+ */
+class HardwareVideoEncoder implements VideoEncoder {
+ private static final String TAG = "HardwareVideoEncoder";
+
+ // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
+ // in OMX_Video.h
+ private static final int VIDEO_ControlRateConstant = 2;
+ // Key associated with the bitrate control mode value (above). Not present as a MediaFormat
+ // constant until API level 21.
+ private static final String KEY_BITRATE_MODE = "bitrate-mode";
+
+ private static final int VIDEO_AVC_PROFILE_HIGH = 8;
+ private static final int VIDEO_AVC_LEVEL_3 = 0x100;
+
+ private static final int MAX_VIDEO_FRAMERATE = 30;
+
+ // See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc.
+ private static final int MAX_ENCODER_Q_SIZE = 2;
+
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+ private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
+
+ // The size of the input frames should be a multiple of 16 for the H/W encoder.
+ private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16;
+
+ /**
+ * Keeps track of the number of output buffers that have been passed down the pipeline and not yet
+ * released. We need to wait for this to go down to zero before performing operations that
+ * invalidate the output buffers, i.e., stop() and getOutputBuffer().
+ */
+ private static class BusyCount {
+ private final Object countLock = new Object();
+ private int count;
+
+ public void increment() {
+ synchronized (countLock) {
+ count++;
+ }
+ }
+
+ // This method may be called on an arbitrary thread.
+ public void decrement() {
+ synchronized (countLock) {
+ count--;
+ if (count == 0) {
+ countLock.notifyAll();
+ }
+ }
+ }
+
+ // The increment and waitForZero methods are called on the same thread (deliverEncodedImage,
+ // running on the output thread). Hence, after waitForZero returns, the count will stay zero
+ // until the same thread calls increment.
+ public void waitForZero() {
+ boolean wasInterrupted = false;
+ synchronized (countLock) {
+ while (count > 0) {
+ try {
+ countLock.wait();
+ } catch (InterruptedException e) {
+ Logging.e(TAG, "Interrupted while waiting on busy count", e);
+ wasInterrupted = true;
+ }
+ }
+ }
+
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ }
+ }
+
+ // --- Initialized on construction.
+ private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
+ private final String codecName;
+ private final VideoCodecMimeType codecType;
+ private final Integer surfaceColorFormat;
+ private final Integer yuvColorFormat;
+ private final YuvFormat yuvFormat;
+ private final Map<String, String> params;
+ private final int keyFrameIntervalSec; // Base interval for generating key frames.
+ // Interval at which to force a key frame. Used to reduce color distortions caused by some
+ // Qualcomm video encoders.
+ private final long forcedKeyFrameNs;
+ private final BitrateAdjuster bitrateAdjuster;
+ // EGL context shared with the application. Used to access texture inputs.
+ private final EglBase14.Context sharedContext;
+
+ // Drawer used to draw input textures onto the codec's input surface.
+ private final GlRectDrawer textureDrawer = new GlRectDrawer();
+ private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
+ // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
+ // pre-populated with all the information that can't be sent through MediaCodec.
+ private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>();
+
+ private final ThreadChecker encodeThreadChecker = new ThreadChecker();
+ private final ThreadChecker outputThreadChecker = new ThreadChecker();
+ private final BusyCount outputBuffersBusyCount = new BusyCount();
+
+ // --- Set on initialize and immutable until release.
+ private Callback callback;
+ private boolean automaticResizeOn;
+
+ // --- Valid and immutable while an encoding session is running.
+ @Nullable private MediaCodecWrapper codec;
+ // Thread that delivers encoded frames to the user callback.
+ @Nullable private Thread outputThread;
+
+ // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
+ // input surface. Making this base current allows textures from the context to be drawn onto the
+ // surface.
+ @Nullable private EglBase14 textureEglBase;
+ // Input surface for the codec. The encoder will draw input textures onto this surface.
+ @Nullable private Surface textureInputSurface;
+
+ private int width;
+ private int height;
+ // Y-plane stride in the encoder's input.
+ private int stride;
+ // Y-plane slice height in the encoder's input.
+ private int sliceHeight;
+ private boolean useSurfaceMode;
+
+ // --- Only accessed from the encoding thread.
+ // Presentation timestamp of next frame to encode.
+ private long nextPresentationTimestampUs;
+ // Presentation timestamp of the last requested (or forced) key frame.
+ private long lastKeyFrameNs;
+
+ // --- Only accessed on the output thread.
+ // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
+ @Nullable private ByteBuffer configBuffer;
+ private int adjustedBitrate;
+
+ // Whether the encoder is running. Volatile so that the output thread can watch this value and
+ // exit when the encoder stops.
+ private volatile boolean running;
+ // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
+ // value to send exceptions thrown during release back to the encoder thread.
+ @Nullable private volatile Exception shutdownException;
+
+ /**
+ * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
+ * intervals, and bitrateAdjuster.
+ *
+ * @param codecName the hardware codec implementation to use
+ * @param codecType the type of the given video codec (e.g. VP8, VP9, H264, or AV1)
+ * @param surfaceColorFormat color format for surface mode or null if not available
+ * @param yuvColorFormat color format for bytebuffer mode
+ * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
+ * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
+ * used to reduce distortion caused by some codec implementations
+ * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
+ * desired bitrates
+ * @throws IllegalArgumentException if colorFormat is unsupported
+ */
+ public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
+ Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
+ BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
+ this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
+ this.codecName = codecName;
+ this.codecType = codecType;
+ this.surfaceColorFormat = surfaceColorFormat;
+ this.yuvColorFormat = yuvColorFormat;
+ this.yuvFormat = YuvFormat.valueOf(yuvColorFormat);
+ this.params = params;
+ this.keyFrameIntervalSec = keyFrameIntervalSec;
+ this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
+ this.bitrateAdjuster = bitrateAdjuster;
+ this.sharedContext = sharedContext;
+
+ // Allow construction on a different thread.
+ encodeThreadChecker.detachThread();
+ }
+
+ @Override
+ public VideoCodecStatus initEncode(Settings settings, Callback callback) {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ this.callback = callback;
+ automaticResizeOn = settings.automaticResizeOn;
+
+ if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0
+ || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) {
+ Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned.");
+ return VideoCodecStatus.ERR_SIZE;
+ }
+ this.width = settings.width;
+ this.height = settings.height;
+ useSurfaceMode = canUseSurface();
+
+ if (settings.startBitrate != 0 && settings.maxFramerate != 0) {
+ bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate);
+ }
+ adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
+
+ Logging.d(TAG,
+ "initEncode: " + width + " x " + height + ". @ " + settings.startBitrate
+ + "kbps. Fps: " + settings.maxFramerate + " Use surface mode: " + useSurfaceMode);
+ return initEncodeInternal();
+ }
+
+ private VideoCodecStatus initEncodeInternal() {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ nextPresentationTimestampUs = 0;
+ lastKeyFrameNs = -1;
+
+ try {
+ codec = mediaCodecWrapperFactory.createByCodecName(codecName);
+ } catch (IOException | IllegalArgumentException e) {
+ Logging.e(TAG, "Cannot create media encoder " + codecName);
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat;
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
+ format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ format.setFloat(
+ MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps());
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
+ if (codecType == VideoCodecMimeType.H264) {
+ String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
+ if (profileLevelId == null) {
+ profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1;
+ }
+ switch (profileLevelId) {
+ case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1:
+ format.setInteger("profile", VIDEO_AVC_PROFILE_HIGH);
+ format.setInteger("level", VIDEO_AVC_LEVEL_3);
+ break;
+ case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1:
+ break;
+ default:
+ Logging.w(TAG, "Unknown profile level id: " + profileLevelId);
+ }
+ }
+ Logging.d(TAG, "Format: " + format);
+ codec.configure(
+ format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (useSurfaceMode) {
+ textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE);
+ textureInputSurface = codec.createInputSurface();
+ textureEglBase.createSurface(textureInputSurface);
+ textureEglBase.makeCurrent();
+ }
+
+ MediaFormat inputFormat = codec.getInputFormat();
+ stride = getStride(inputFormat, width);
+ sliceHeight = getSliceHeight(inputFormat, height);
+
+ codec.start();
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "initEncodeInternal failed", e);
+ release();
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ running = true;
+ outputThreadChecker.detachThread();
+ outputThread = createOutputThread();
+ outputThread.start();
+
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ final VideoCodecStatus returnValue;
+ if (outputThread == null) {
+ returnValue = VideoCodecStatus.OK;
+ } else {
+ // The outputThread actually stops and releases the codec once running is false.
+ running = false;
+ if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media encoder release timeout");
+ returnValue = VideoCodecStatus.TIMEOUT;
+ } else if (shutdownException != null) {
+ // Log the exception and turn it into an error.
+ Logging.e(TAG, "Media encoder release exception", shutdownException);
+ returnValue = VideoCodecStatus.ERROR;
+ } else {
+ returnValue = VideoCodecStatus.OK;
+ }
+ }
+
+ textureDrawer.release();
+ videoFrameDrawer.release();
+ if (textureEglBase != null) {
+ textureEglBase.release();
+ textureEglBase = null;
+ }
+ if (textureInputSurface != null) {
+ textureInputSurface.release();
+ textureInputSurface = null;
+ }
+ outputBuilders.clear();
+
+ codec = null;
+ outputThread = null;
+
+ // Allow changing thread after release.
+ encodeThreadChecker.detachThread();
+
+ return returnValue;
+ }
+
+ @Override
+ public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (codec == null) {
+ return VideoCodecStatus.UNINITIALIZED;
+ }
+
+ final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
+ final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer;
+
+ // If input resolution changed, restart the codec with the new resolution.
+ final int frameWidth = videoFrame.getBuffer().getWidth();
+ final int frameHeight = videoFrame.getBuffer().getHeight();
+ final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer;
+ if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) {
+ VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ }
+
+ if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
+ // Too many frames in the encoder. Drop this frame.
+ Logging.e(TAG, "Dropped frame, encoder queue full");
+ return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
+ }
+
+ boolean requestedKeyFrame = false;
+ for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
+ if (frameType == EncodedImage.FrameType.VideoFrameKey) {
+ requestedKeyFrame = true;
+ }
+ }
+
+ if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
+ requestKeyFrame(videoFrame.getTimestampNs());
+ }
+
+ // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
+ // subsampled at one byte per four pixels.
+ int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
+ EncodedImage.Builder builder = EncodedImage.builder()
+ .setCaptureTimeNs(videoFrame.getTimestampNs())
+ .setEncodedWidth(videoFrame.getBuffer().getWidth())
+ .setEncodedHeight(videoFrame.getBuffer().getHeight())
+ .setRotation(videoFrame.getRotation());
+ outputBuilders.offer(builder);
+
+ long presentationTimestampUs = nextPresentationTimestampUs;
+ // Round frame duration down to avoid bitrate overshoot.
+ long frameDurationUs =
+ (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps());
+ nextPresentationTimestampUs += frameDurationUs;
+
+ final VideoCodecStatus returnValue;
+ if (useSurfaceMode) {
+ returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs);
+ } else {
+ returnValue =
+ encodeByteBuffer(videoFrame, presentationTimestampUs, videoFrameBuffer, bufferSize);
+ }
+
+ // Check if the queue was successful.
+ if (returnValue != VideoCodecStatus.OK) {
+ // Keep the output builders in sync with buffers in the codec.
+ outputBuilders.pollLast();
+ }
+
+ return returnValue;
+ }
+
+ private VideoCodecStatus encodeTextureBuffer(
+ VideoFrame videoFrame, long presentationTimestampUs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ try {
+ // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+ // but it's a workaround for bug webrtc:5147.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // It is not necessary to release this frame because it doesn't own the buffer.
+ VideoFrame derotatedFrame =
+ new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
+ videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
+ textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "encodeTexture failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs,
+ VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
+ encodeThreadChecker.checkIsOnValidThread();
+ // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
+ int index;
+ try {
+ index = codec.dequeueInputBuffer(0 /* timeout */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueInputBuffer failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+
+ if (index == -1) {
+ // Encoder is falling behind. No input buffers available. Drop the frame.
+ Logging.d(TAG, "Dropped frame, no input buffers available");
+ return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
+ }
+
+ ByteBuffer buffer;
+ try {
+ buffer = codec.getInputBuffer(index);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ fillInputBuffer(buffer, videoFrameBuffer);
+
+ try {
+ codec.queueInputBuffer(
+ index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "queueInputBuffer failed", e);
+ // IllegalStateException thrown when the codec is in the wrong state.
+ return VideoCodecStatus.ERROR;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (framerate > MAX_VIDEO_FRAMERATE) {
+ framerate = MAX_VIDEO_FRAMERATE;
+ }
+ bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRates(RateControlParameters rcParameters) {
+ encodeThreadChecker.checkIsOnValidThread();
+ bitrateAdjuster.setTargets(rcParameters.bitrate.getSum(), rcParameters.framerateFps);
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public ScalingSettings getScalingSettings() {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (automaticResizeOn) {
+ if (codecType == VideoCodecMimeType.VP8) {
+ final int kLowVp8QpThreshold = 29;
+ final int kHighVp8QpThreshold = 95;
+ return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold);
+ } else if (codecType == VideoCodecMimeType.H264) {
+ final int kLowH264QpThreshold = 24;
+ final int kHighH264QpThreshold = 37;
+ return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
+ }
+ }
+ return ScalingSettings.OFF;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return codecName;
+ }
+
+ @Override
+ public EncoderInfo getEncoderInfo() {
+ // Since our MediaCodec is guaranteed to encode 16-pixel-aligned frames only, we set the
+ // alignment value to 16. Additionally, this encoder produces a single stream, so alignment
+ // does not need to apply to all simulcast layers.
+ return new EncoderInfo(
+ /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT,
+ /* applyAlignmentToAllSimulcastLayers= */ false);
+ }
+
+ private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) {
+ encodeThreadChecker.checkIsOnValidThread();
+ VideoCodecStatus status = release();
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+
+ if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0
+ || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) {
+ Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned.");
+ return VideoCodecStatus.ERR_SIZE;
+ }
+ width = newWidth;
+ height = newHeight;
+ useSurfaceMode = newUseSurfaceMode;
+ return initEncodeInternal();
+ }
+
+ private boolean shouldForceKeyFrame(long presentationTimestampNs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
+ }
+
+ private void requestKeyFrame(long presentationTimestampNs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
+ // indicate this in queueInputBuffer() below and guarantee _this_ frame
+ // be encoded as a key frame, but sadly that flag is ignored. Instead,
+ // we request a key frame "soon".
+ try {
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ codec.setParameters(b);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "requestKeyFrame failed", e);
+ return;
+ }
+ lastKeyFrameNs = presentationTimestampNs;
+ }
+
+ private Thread createOutputThread() {
+ return new Thread() {
+ @Override
+ public void run() {
+ while (running) {
+ deliverEncodedImage();
+ }
+ releaseCodecOnOutputThread();
+ }
+ };
+ }
+
+ // Visible for testing.
+ protected void deliverEncodedImage() {
+ outputThreadChecker.checkIsOnValidThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
+ if (index < 0) {
+ if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ outputBuffersBusyCount.waitForZero();
+ }
+ return;
+ }
+
+ ByteBuffer codecOutputBuffer = codec.getOutputBuffer(index);
+ codecOutputBuffer.position(info.offset);
+ codecOutputBuffer.limit(info.offset + info.size);
+
+ if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
+ configBuffer = ByteBuffer.allocateDirect(info.size);
+ configBuffer.put(codecOutputBuffer);
+ } else {
+ bitrateAdjuster.reportEncodedFrame(info.size);
+ if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
+ updateBitrate();
+ }
+
+ final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+ if (isKeyFrame) {
+ Logging.d(TAG, "Sync frame generated");
+ }
+
+ final ByteBuffer frameBuffer;
+ if (isKeyFrame && codecType == VideoCodecMimeType.H264) {
+ Logging.d(TAG,
+ "Prepending config frame of size " + configBuffer.capacity()
+ + " to output buffer with offset " + info.offset + ", size " + info.size);
+ // For H.264 key frames, prepend the SPS and PPS NALs at the start.
+ frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
+ configBuffer.rewind();
+ frameBuffer.put(configBuffer);
+ frameBuffer.put(codecOutputBuffer);
+ frameBuffer.rewind();
+ } else {
+ frameBuffer = codecOutputBuffer.slice();
+ }
+
+ final EncodedImage.FrameType frameType = isKeyFrame
+ ? EncodedImage.FrameType.VideoFrameKey
+ : EncodedImage.FrameType.VideoFrameDelta;
+
+ outputBuffersBusyCount.increment();
+ EncodedImage.Builder builder = outputBuilders.poll();
+ EncodedImage encodedImage = builder
+ .setBuffer(frameBuffer,
+ () -> {
+ // This callback should not throw any exceptions since
+ // it may be called on an arbitrary thread.
+ // Check bug webrtc:11230 for more details.
+ try {
+ codec.releaseOutputBuffer(index, false);
+ } catch (Exception e) {
+ Logging.e(TAG, "releaseOutputBuffer failed", e);
+ }
+ outputBuffersBusyCount.decrement();
+ })
+ .setFrameType(frameType)
+ .createEncodedImage();
+ // TODO(mellem): Set codec-specific info.
+ callback.onEncodedFrame(encodedImage, new CodecSpecificInfo());
+ // Note that the callback may have retained the image.
+ encodedImage.release();
+ }
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "deliverOutput failed", e);
+ }
+ }
+
+ private void releaseCodecOnOutputThread() {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Releasing MediaCodec on output thread");
+ outputBuffersBusyCount.waitForZero();
+ try {
+ codec.stop();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder stop failed", e);
+ }
+ try {
+ codec.release();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder release failed", e);
+ // Propagate exceptions caught during release back to the main thread.
+ shutdownException = e;
+ }
+ configBuffer = null;
+ Logging.d(TAG, "Release on output thread done");
+ }
+
+ private VideoCodecStatus updateBitrate() {
+ outputThreadChecker.checkIsOnValidThread();
+ adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
+ try {
+ Bundle params = new Bundle();
+ params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
+ codec.setParameters(params);
+ return VideoCodecStatus.OK;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "updateBitrate failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ }
+
+ private boolean canUseSurface() {
+ return sharedContext != null && surfaceColorFormat != null;
+ }
+
+ private static int getStride(MediaFormat inputFormat, int width) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null
+ && inputFormat.containsKey(MediaFormat.KEY_STRIDE)) {
+ return inputFormat.getInteger(MediaFormat.KEY_STRIDE);
+ }
+ return width;
+ }
+
+ private static int getSliceHeight(MediaFormat inputFormat, int height) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null
+ && inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
+ return inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
+ }
+ return height;
+ }
+
+ // Visible for testing.
+ protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer videoFrameBuffer) {
+ yuvFormat.fillBuffer(buffer, videoFrameBuffer, stride, sliceHeight);
+ }
+
+ /**
+ * Enumeration of supported YUV color formats used for MediaCodec's input.
+ */
+ private enum YuvFormat {
+ I420 {
+ @Override
+ void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) {
+ /*
+ * According to the docs in Android MediaCodec, the stride of the U and V planes can be
+ * calculated based on the color format, though it is generally undefined and depends on the
+ * device and release.
+ *
+ * Assuming the width, height, dstStrideY and dstSliceHeightY are all even, it works fine
+ * when we define the stride and slice-height of the dst U/V plane to be half of the dst
+ * Y plane.
+ */
+ int dstStrideU = dstStrideY / 2;
+ int dstSliceHeight = dstSliceHeightY / 2;
+ VideoFrame.I420Buffer i420 = srcBuffer.toI420();
+ YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+ i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(),
+ dstStrideY, dstSliceHeightY, dstStrideU, dstSliceHeight);
+ i420.release();
+ }
+ },
+ NV12 {
+ @Override
+ void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) {
+ VideoFrame.I420Buffer i420 = srcBuffer.toI420();
+ YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
+ i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(),
+ dstStrideY, dstSliceHeightY);
+ i420.release();
+ }
+ };
+
+ abstract void fillBuffer(
+ ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY);
+
+ static YuvFormat valueOf(int colorFormat) {
+ switch (colorFormat) {
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
+ return I420;
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
+ case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+ return NV12;
+ default:
+ throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
+ }
+ }
+ }
+}
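One detail of HardwareVideoEncoder worth spelling out: MediaCodec presentation timestamps are synthesized from the bitrate adjuster's framerate rather than taken from capture time, and the per-frame duration is truncated downward to avoid bitrate overshoot. A worked sketch with a hypothetical 30 fps adjusted rate:

import java.util.concurrent.TimeUnit;

// Hypothetical value from BitrateAdjuster.getAdjustedFramerateFps().
double adjustedFps = 30.0;
// 1_000_000 / 30 = 33333.33..., truncated to 33333 us per frame.
long frameDurationUs = (long) (TimeUnit.SECONDS.toMicros(1) / adjustedFps); // 33333
long frame0Us = 0;                          // nextPresentationTimestampUs starts at 0
long frame1Us = frame0Us + frameDurationUs; // 33333
long frame2Us = frame1Us + frameDurationUs; // 66666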
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java
new file mode 100644
index 0000000000..c1d2d61a71
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Class for holding the native pointer of a histogram. Since there is no way to destroy a
+ * histogram, please don't create unnecessary instances of this object. This class is thread safe.
+ *
+ * Usage example:
+ * <pre>{@code
+ * private static final Histogram someMetricHistogram =
+ *     Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50);
+ * someMetricHistogram.addSample(someVariable);
+ * }</pre>
+ */
+class Histogram {
+ private final long handle;
+
+ private Histogram(long handle) {
+ this.handle = handle;
+ }
+
+ public static Histogram createCounts(String name, int min, int max, int bucketCount) {
+ return new Histogram(0);
+ }
+
+ public static Histogram createEnumeration(String name, int max) {
+ return new Histogram(0);
+ }
+
+ public void addSample(int sample) {
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java
new file mode 100644
index 0000000000..f391db61a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CalledByNative;
+import org.webrtc.Loggable;
+import org.webrtc.Logging.Severity;
+
+class JNILogging {
+ private final Loggable loggable;
+
+ public JNILogging(Loggable loggable) {
+ this.loggable = loggable;
+ }
+
+ @CalledByNative
+ public void logToInjectable(String message, Integer severity, String tag) {
+ loggable.onLogMessage(message, Severity.values()[severity], tag);
+ }
+}
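JNILogging only bridges native log messages to the application-supplied Loggable. A sketch of a Loggable that forwards to logcat; the severity mapping here is illustrative, and registering the instance (typically through PeerConnectionFactory initialization options) is outside this patch:

import android.util.Log;

class LogcatLoggable implements Loggable {
  @Override
  public void onLogMessage(String message, Logging.Severity severity, String tag) {
    // Map WebRTC severities onto logcat priorities; anything else falls through to DEBUG.
    final int priority;
    switch (severity) {
      case LS_ERROR: priority = Log.ERROR; break;
      case LS_WARNING: priority = Log.WARN; break;
      case LS_INFO: priority = Log.INFO; break;
      default: priority = Log.DEBUG; break;
    }
    Log.println(priority, tag, message);
  }
}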
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java
new file mode 100644
index 0000000000..e1b2e513d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Class with static JNI helper functions that are used in many places. */
+public class JniCommon {
+ /** Functions to increment/decrement an rtc::RefCountInterface pointer. */
+ public static native void nativeAddRef(long refCountedPointer);
+ public static native void nativeReleaseRef(long refCountedPointer);
+
+ public static native ByteBuffer nativeAllocateByteBuffer(int size);
+ public static native void nativeFreeByteBuffer(ByteBuffer buffer);
+}
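A usage sketch for the allocation helpers above: nativeAllocateByteBuffer returns a direct buffer whose backing memory is owned by native code, so every allocation must be paired with nativeFreeByteBuffer, and the native library must already be loaded:

import java.nio.ByteBuffer;

ByteBuffer buffer = JniCommon.nativeAllocateByteBuffer(1024);
try {
  buffer.putInt(42); // behaves like any other direct ByteBuffer
} finally {
  JniCommon.nativeFreeByteBuffer(buffer); // never leak the native allocation
}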
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
new file mode 100644
index 0000000000..d5ccae9688
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Container class for static constants and helpers used with MediaCodec. */
+// We are forced to use the old API because we want to support API level < 21.
+@SuppressWarnings("deprecation")
+class MediaCodecUtils {
+ private static final String TAG = "MediaCodecUtils";
+
+ // Prefixes for supported hardware encoder/decoder component names.
+ static final String EXYNOS_PREFIX = "OMX.Exynos.";
+ static final String INTEL_PREFIX = "OMX.Intel.";
+ static final String NVIDIA_PREFIX = "OMX.Nvidia.";
+ static final String QCOM_PREFIX = "OMX.qcom.";
+ static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = {
+ "OMX.google.", "OMX.SEC.", "c2.android"};
+
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
+ static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+
+ // Color formats supported by hardware decoder - in order of preference.
+ static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+
+ // Color formats supported by hardware encoder - in order of preference.
+ static final int[] ENCODER_COLOR_FORMATS = {
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+
+ // Color formats supported by texture mode encoding - in order of preference.
+ static final int[] TEXTURE_COLOR_FORMATS =
+ new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
+
+ static @Nullable Integer selectColorFormat(
+ int[] supportedColorFormats, CodecCapabilities capabilities) {
+ for (int supportedColorFormat : supportedColorFormats) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ return codecColorFormat;
+ }
+ }
+ }
+ return null;
+ }
+
+ static boolean codecSupportsType(MediaCodecInfo info, VideoCodecMimeType type) {
+ for (String mimeType : info.getSupportedTypes()) {
+ if (type.mimeType().equals(mimeType)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static Map<String, String> getCodecProperties(VideoCodecMimeType type, boolean highProfile) {
+ switch (type) {
+ case VP8:
+ case VP9:
+ case AV1:
+ return new HashMap<String, String>();
+ case H264:
+ return H264Utils.getDefaultH264Params(highProfile);
+ default:
+ throw new IllegalArgumentException("Unsupported codec: " + type);
+ }
+ }
+
+ static boolean isHardwareAccelerated(MediaCodecInfo info) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isHardwareAcceleratedQOrHigher(info);
+ }
+ return !isSoftwareOnly(info);
+ }
+
+ @TargetApi(29)
+ private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isHardwareAccelerated();
+ }
+
+ static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isSoftwareOnlyQOrHigher(codecInfo);
+ }
+ String name = codecInfo.getName();
+ for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) {
+ if (name.startsWith(prefix)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @TargetApi(29)
+ private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isSoftwareOnly();
+ }
+
+ private MediaCodecUtils() {
+ // This class should not be instantiated.
+ }
+}
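A sketch of how the helpers above combine to probe for a usable hardware H.264 decoder. Like the factories in this patch it walks the deprecated MediaCodecList index API, and it assumes the code lives in the org.webrtc package (the helpers are package-private):

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;

final class DecoderProbe {
  @SuppressWarnings("deprecation") // Same MediaCodecList index API as above.
  static MediaCodecInfo findHardwareH264Decoder() {
    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
      if (info.isEncoder() || !MediaCodecUtils.isHardwareAccelerated(info)
          || !MediaCodecUtils.codecSupportsType(info, VideoCodecMimeType.H264)) {
        continue;
      }
      Integer colorFormat =
          MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS,
              info.getCapabilitiesForType(VideoCodecMimeType.H264.mimeType()));
      if (colorFormat != null) {
        return info; // info.getName() plus colorFormat describe a usable configuration
      }
    }
    return null;
  }
}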
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
new file mode 100644
index 0000000000..bf591dda26
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
+import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Factory for decoders backed by Android MediaCodec API. */
+@SuppressWarnings("deprecation") // Supporting API level 16 requires the use of deprecated methods.
+class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
+ private static final String TAG = "MediaCodecVideoDecoderFactory";
+
+ private final @Nullable EglBase.Context sharedContext;
+ private final @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate;
+
+ /**
+ * MediaCodecVideoDecoderFactory with support of codecs filtering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be
+ * null; in that case texture support is disabled.
+ * @param codecAllowedPredicate optional predicate to test whether a codec is allowed. All
+ * codecs are allowed when no predicate is provided.
+ */
+ public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
+ @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) {
+ this.sharedContext = sharedContext;
+ this.codecAllowedPredicate = codecAllowedPredicate;
+ }
+
+ @Nullable
+ @Override
+ public VideoDecoder createDecoder(VideoCodecInfo codecType) {
+ VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName());
+ MediaCodecInfo info = findCodecForType(type);
+
+ if (info == null) {
+ return null;
+ }
+
+ CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
+ return new AndroidVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type,
+ MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
+ sharedContext);
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
+ // Generate a list of supported codecs in order of preference:
+ // VP8, VP9, H264 (high profile), H264 (baseline profile), and AV1.
+ for (VideoCodecMimeType type : new VideoCodecMimeType[] {VideoCodecMimeType.VP8,
+ VideoCodecMimeType.VP9, VideoCodecMimeType.H264, VideoCodecMimeType.AV1}) {
+ MediaCodecInfo codec = findCodecForType(type);
+ if (codec != null) {
+ String name = type.name();
+ if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
+ }
+
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
+ }
+ }
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+
+ private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = null;
+ try {
+ info = MediaCodecList.getCodecInfoAt(i);
+ } catch (IllegalArgumentException e) {
+ Logging.e(TAG, "Cannot retrieve decoder codec info", e);
+ }
+
+ if (info == null || info.isEncoder()) {
+ continue;
+ }
+
+ if (isSupportedCodec(info, type)) {
+ return info;
+ }
+ }
+
+ return null; // No support for this type.
+ }
+
+ // Returns true if the given MediaCodecInfo indicates a supported decoder for the given type.
+ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) {
+ if (!MediaCodecUtils.codecSupportsType(info, type)) {
+ return false;
+ }
+ // Check for a supported color format.
+ if (MediaCodecUtils.selectColorFormat(
+ MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
+ == null) {
+ return false;
+ }
+ return isCodecAllowed(info);
+ }
+
+ private boolean isCodecAllowed(MediaCodecInfo info) {
+ if (codecAllowedPredicate == null) {
+ return true;
+ }
+ return codecAllowedPredicate.test(info);
+ }
+
+ private boolean isH264HighProfileSupported(MediaCodecInfo info) {
+ String name = info.getName();
+ // Support H.264 HP decoding on QCOM chips.
+ if (name.startsWith(QCOM_PREFIX)) {
+ return true;
+ }
+ // Support H.264 HP decoding on Exynos chips for Android M and above.
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && name.startsWith(EXYNOS_PREFIX)) {
+ return true;
+ }
+ return false;
+ }
+}
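A construction sketch for the factory above: pass the rendering EGL context so decoders can output textures, and a predicate that filters to hardware implementations. `eglBase` is assumed to exist, and package access to the class and to MediaCodecUtils.isHardwareAccelerated is assumed (both are package-private):

// Texture output into eglBase's context; only hardware-backed decoders admitted.
VideoDecoderFactory decoderFactory = new MediaCodecVideoDecoderFactory(
    eglBase.getEglBaseContext(), MediaCodecUtils::isHardwareAccelerated);

// Passing null for both arguments yields byte-buffer output and no codec filtering.
VideoDecoderFactory permissiveFactory = new MediaCodecVideoDecoderFactory(
    /* sharedContext= */ null, /* codecAllowedPredicate= */ null);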
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java
new file mode 100644
index 0000000000..60c853df35
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import java.nio.ByteBuffer;
+
+/**
+ * Subset of methods defined in {@link android.media.MediaCodec} needed by
+ * {@link HardwareVideoEncoder} and {@link AndroidVideoDecoder}. This interface
+ * exists to allow mocking and using a fake implementation in tests.
+ */
+interface MediaCodecWrapper {
+ void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags);
+
+ void start();
+
+ void flush();
+
+ void stop();
+
+ void release();
+
+ int dequeueInputBuffer(long timeoutUs);
+
+ void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags);
+
+ int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs);
+
+ void releaseOutputBuffer(int index, boolean render);
+
+ MediaFormat getInputFormat();
+
+ MediaFormat getOutputFormat();
+
+ ByteBuffer getInputBuffer(int index);
+
+ ByteBuffer getOutputBuffer(int index);
+
+ Surface createInputSurface();
+
+ void setParameters(Bundle params);
+}
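Since MediaCodecWrapper exists purely as a testing seam, a sketch of how a test might stand in for the real codec; Mockito is an assumed test dependency, not something this patch pulls in:

import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// A fake codec that always reports input buffer 0 as available.
MediaCodecWrapper fakeCodec = mock(MediaCodecWrapper.class);
when(fakeCodec.dequeueInputBuffer(anyLong())).thenReturn(0);

// MediaCodecWrapperFactory (below) is a one-method interface, so a lambda suffices.
MediaCodecWrapperFactory fakeFactory = codecName -> fakeCodec;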
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java
new file mode 100644
index 0000000000..2962cb62a7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.io.IOException;
+
+interface MediaCodecWrapperFactory {
+ /**
+ * Creates a new {@link MediaCodecWrapper} by codec name.
+ *
+ * <p>For additional information see {@link android.media.MediaCodec#createByCodecName}.
+ */
+ MediaCodecWrapper createByCodecName(String name) throws IOException;
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
new file mode 100644
index 0000000000..2ba62ac7d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Implementation of MediaCodecWrapperFactory that returns {@link MediaCodecWrapper} instances
+ * wrapping {@link android.media.MediaCodec} objects.
+ */
+class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
+ private static class MediaCodecWrapperImpl implements MediaCodecWrapper {
+ private final MediaCodec mediaCodec;
+
+ public MediaCodecWrapperImpl(MediaCodec mediaCodec) {
+ this.mediaCodec = mediaCodec;
+ }
+
+ @Override
+ public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
+ mediaCodec.configure(format, surface, crypto, flags);
+ }
+
+ @Override
+ public void start() {
+ mediaCodec.start();
+ }
+
+ @Override
+ public void flush() {
+ mediaCodec.flush();
+ }
+
+ @Override
+ public void stop() {
+ mediaCodec.stop();
+ }
+
+ @Override
+ public void release() {
+ mediaCodec.release();
+ }
+
+ @Override
+ public int dequeueInputBuffer(long timeoutUs) {
+ return mediaCodec.dequeueInputBuffer(timeoutUs);
+ }
+
+ @Override
+ public void queueInputBuffer(
+ int index, int offset, int size, long presentationTimeUs, int flags) {
+ mediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
+ }
+
+ @Override
+ public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) {
+ return mediaCodec.dequeueOutputBuffer(info, timeoutUs);
+ }
+
+ @Override
+ public void releaseOutputBuffer(int index, boolean render) {
+ mediaCodec.releaseOutputBuffer(index, render);
+ }
+
+ @Override
+ public MediaFormat getInputFormat() {
+ return mediaCodec.getInputFormat();
+ }
+
+ @Override
+ public MediaFormat getOutputFormat() {
+ return mediaCodec.getOutputFormat();
+ }
+
+ @Override
+ public ByteBuffer getInputBuffer(int index) {
+ return mediaCodec.getInputBuffer(index);
+ }
+
+ @Override
+ public ByteBuffer getOutputBuffer(int index) {
+ return mediaCodec.getOutputBuffer(index);
+ }
+
+ @Override
+ public Surface createInputSurface() {
+ return mediaCodec.createInputSurface();
+ }
+
+ @Override
+ public void setParameters(Bundle params) {
+ mediaCodec.setParameters(params);
+ }
+ }
+
+ @Override
+ public MediaCodecWrapper createByCodecName(String name) throws IOException {
+ return new MediaCodecWrapperImpl(MediaCodec.createByCodecName(name));
+ }
+}
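
A minimal usage sketch of the factory above, assuming a caller inside the org.webrtc package; the codec name and format are placeholders and device-dependent:

    // Hypothetical usage; codec availability varies by device.
    MediaCodecWrapperFactory factory = new MediaCodecWrapperFactoryImpl();
    try {
      MediaCodecWrapper codec = factory.createByCodecName("c2.android.avc.decoder");
      MediaFormat format = MediaFormat.createVideoFormat("video/avc", 640, 480);
      codec.configure(format, /* surface= */ null, /* crypto= */ null, /* flags= */ 0);
      codec.start();
      // ... queue input buffers, dequeue output buffers ...
      codec.stop();
      codec.release();
    } catch (IOException e) {
      // Thrown when no codec with the given name exists.
    }
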
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java
new file mode 100644
index 0000000000..fe0221d826
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+public class NV12Buffer implements VideoFrame.Buffer {
+ private final int width;
+ private final int height;
+ private final int stride;
+ private final int sliceHeight;
+ private final ByteBuffer buffer;
+ private final RefCountDelegate refCountDelegate;
+
+ public NV12Buffer(int width, int height, int stride, int sliceHeight, ByteBuffer buffer,
+ @Nullable Runnable releaseCallback) {
+ this.width = width;
+ this.height = height;
+ this.stride = stride;
+ this.sliceHeight = sliceHeight;
+ this.buffer = buffer;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ return (VideoFrame.I420Buffer) cropAndScale(0, 0, width, height, width, height);
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+ nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, buffer, width,
+ height, stride, sliceHeight, newBuffer.getDataY(), newBuffer.getStrideY(),
+ newBuffer.getDataU(), newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
+ return newBuffer;
+ }
+
+ private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+ int scaleWidth, int scaleHeight, ByteBuffer src, int srcWidth, int srcHeight, int srcStride,
+ int srcSliceHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, int dstStrideU,
+ ByteBuffer dstV, int dstStrideV);
+}
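
A sketch of how NV12Buffer might be used, assuming a direct ByteBuffer of NV12 pixels (for example, decoder output); the dimensions and release callback are illustrative:

    // NV12 is 12 bits per pixel: width * height luma plus interleaved half-size chroma.
    ByteBuffer nv12 = ByteBuffer.allocateDirect(640 * 480 * 3 / 2);
    NV12Buffer buffer = new NV12Buffer(/* width= */ 640, /* height= */ 480,
        /* stride= */ 640, /* sliceHeight= */ 480, nv12,
        /* releaseCallback= */ () -> Logging.d("Demo", "NV12 storage can be reused"));
    VideoFrame.I420Buffer i420 = buffer.toI420(); // Independent copy via nativeCropAndScale.
    // ... consume the I420 planes ...
    i420.release();
    buffer.release(); // Ref count hits zero; the callback runs.
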
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java
new file mode 100644
index 0000000000..0fb1afe74b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+public class NV21Buffer implements VideoFrame.Buffer {
+ private final byte[] data;
+ private final int width;
+ private final int height;
+ private final RefCountDelegate refCountDelegate;
+
+ public NV21Buffer(byte[] data, int width, int height, @Nullable Runnable releaseCallback) {
+ this.data = data;
+ this.width = width;
+ this.height = height;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ // Cropping converts the frame to I420. Just crop and scale to the whole image.
+ return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
+ height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+ nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
+ height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
+ newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
+ return newBuffer;
+ }
+
+ private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+ int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
+ int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
+}
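
NV21 is the default android.hardware.Camera preview format, so a preview callback can hand its byte[] straight to this class. A hedged sketch, with camera, previewWidth and previewHeight assumed to exist:

    camera.setPreviewCallback((byte[] data, android.hardware.Camera cam) -> {
      NV21Buffer buffer =
          new NV21Buffer(data, previewWidth, previewHeight, /* releaseCallback= */ null);
      VideoFrame frame = new VideoFrame(buffer, /* rotation= */ 0, System.nanoTime());
      // ... deliver the frame to a sink ...
      frame.release(); // Releases the wrapped buffer as well.
    });
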
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java
new file mode 100644
index 0000000000..d4fba481e8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoProcessor;
+
+/**
+ * This class is meant to be a simple layer that only handles the JNI wrapping of a C++
+ * AndroidVideoTrackSource, so that it can easily be mocked out in Java unit tests. Refrain from
+ * adding any unnecessary logic to this class.
+ * This class is thread safe and methods can be called from any thread, but if frames A, B, ..., are
+ * sent to adaptFrame(), the adapted frames adaptedA, adaptedB, ..., need to be passed in the same
+ * order to onFrameCaptured().
+ */
+class NativeAndroidVideoTrackSource {
+ // Pointer to webrtc::jni::AndroidVideoTrackSource.
+ private final long nativeAndroidVideoTrackSource;
+
+ public NativeAndroidVideoTrackSource(long nativeAndroidVideoTrackSource) {
+ this.nativeAndroidVideoTrackSource = nativeAndroidVideoTrackSource;
+ }
+
+ /**
+ * Set the state for the native MediaSourceInterface. Maps boolean to either
+ * SourceState::kLive or SourceState::kEnded.
+ */
+ public void setState(boolean isLive) {
+ nativeSetState(nativeAndroidVideoTrackSource, isLive);
+ }
+
+ /**
+ * This function should be called before delivering any frame to determine if the frame should be
+ * dropped or what the cropping and scaling parameters should be. If the return value is null, the
+ * frame should be dropped; otherwise the frame should be adapted in accordance with the frame
+ * adaptation parameters before calling onFrameCaptured().
+ */
+ @Nullable
+ public VideoProcessor.FrameAdaptationParameters adaptFrame(VideoFrame frame) {
+ return nativeAdaptFrame(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(),
+ frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs());
+ }
+
+ /**
+ * Pass an adapted frame to the native AndroidVideoTrackSource. Note that adaptFrame() is
+ * expected to be called first and that the passed frame conforms to those parameters.
+ */
+ public void onFrameCaptured(VideoFrame frame) {
+ nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getRotation(),
+ frame.getTimestampNs(), frame.getBuffer());
+ }
+
+ /**
+ * Calling this function will cause frames to be scaled down to the requested resolution. Also,
+ * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
+ * the requested fps.
+ */
+ public void adaptOutputFormat(VideoSource.AspectRatio targetLandscapeAspectRatio,
+ @Nullable Integer maxLandscapePixelCount, VideoSource.AspectRatio targetPortraitAspectRatio,
+ @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+ nativeAdaptOutputFormat(nativeAndroidVideoTrackSource, targetLandscapeAspectRatio.width,
+ targetLandscapeAspectRatio.height, maxLandscapePixelCount, targetPortraitAspectRatio.width,
+ targetPortraitAspectRatio.height, maxPortraitPixelCount, maxFps);
+ }
+
+ public void setIsScreencast(boolean isScreencast) {
+ nativeSetIsScreencast(nativeAndroidVideoTrackSource, isScreencast);
+ }
+
+ @CalledByNative
+ static VideoProcessor.FrameAdaptationParameters createFrameAdaptationParameters(int cropX,
+ int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight, long timestampNs,
+ boolean drop) {
+ return new VideoProcessor.FrameAdaptationParameters(
+ cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, timestampNs, drop);
+ }
+
+ private static native void nativeSetIsScreencast(
+ long nativeAndroidVideoTrackSource, boolean isScreencast);
+ private static native void nativeSetState(long nativeAndroidVideoTrackSource, boolean isLive);
+ private static native void nativeAdaptOutputFormat(long nativeAndroidVideoTrackSource,
+ int landscapeWidth, int landscapeHeight, @Nullable Integer maxLandscapePixelCount,
+ int portraitWidth, int portraitHeight, @Nullable Integer maxPortraitPixelCount,
+ @Nullable Integer maxFps);
+ @Nullable
+ private static native VideoProcessor.FrameAdaptationParameters nativeAdaptFrame(
+ long nativeAndroidVideoTrackSource, int width, int height, int rotation, long timestampNs);
+ private static native void nativeOnFrameCaptured(
+ long nativeAndroidVideoTrackSource, int rotation, long timestampNs, VideoFrame.Buffer buffer);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java
new file mode 100644
index 0000000000..c195fb3a4c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoFrame;
+
+/**
+ * Used from the native API and implements a simple CapturerObserver that feeds frames to
+ * a webrtc::jni::AndroidVideoTrackSource.
+ */
+class NativeCapturerObserver implements CapturerObserver {
+ private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
+
+ @CalledByNative
+ public NativeCapturerObserver(long nativeSource) {
+ this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
+ }
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeAndroidVideoTrackSource.setState(success);
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
+ }
+
+ @Override
+ public void onFrameCaptured(VideoFrame frame) {
+ final VideoProcessor.FrameAdaptationParameters parameters =
+ nativeAndroidVideoTrackSource.adaptFrame(frame);
+ if (parameters == null) {
+ // Drop frame.
+ return;
+ }
+
+ final VideoFrame.Buffer adaptedBuffer =
+ frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
+ parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
+ nativeAndroidVideoTrackSource.onFrameCaptured(
+ new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs));
+ adaptedBuffer.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java
new file mode 100644
index 0000000000..531c216302
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+class NativeLibrary {
+  private static final String TAG = "NativeLibrary";
+
+ static class DefaultLoader implements NativeLibraryLoader {
+ @Override
+ public boolean load(String name) {
+ Logging.d(TAG, "Loading library: " + name);
+ System.loadLibrary(name);
+
+ // Not relevant, but kept for API compatibility.
+ return true;
+ }
+ }
+
+  private static final Object lock = new Object();
+ private static boolean libraryLoaded;
+
+ /**
+ * Loads the native library. Clients should call PeerConnectionFactory.initialize. It will call
+ * this method for them.
+ */
+ static void initialize(NativeLibraryLoader loader, String libraryName) {
+ synchronized (lock) {
+ if (libraryLoaded) {
+ Logging.d(TAG, "Native library has already been loaded.");
+ return;
+ }
+ Logging.d(TAG, "Loading native library: " + libraryName);
+ libraryLoaded = loader.load(libraryName);
+ }
+ }
+
+ /** Returns true if the library has been loaded successfully. */
+ static boolean isLoaded() {
+ synchronized (lock) {
+ return libraryLoaded;
+ }
+ }
+}
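
The NativeLibraryLoader indirection exists so embedders can substitute their own loading strategy. A sketch of a tolerant loader; the library name shown is the conventional WebRTC one but should be treated as an assumption:

    class SafeLoader implements NativeLibraryLoader {
      @Override
      public boolean load(String name) {
        try {
          System.loadLibrary(name);
          return true;
        } catch (UnsatisfiedLinkError e) {
          Logging.e("SafeLoader", "Failed to load " + name, e);
          return false;
        }
      }
    }
    NativeLibrary.initialize(new SafeLoader(), "jingle_peerconnection_so");
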
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java
new file mode 100644
index 0000000000..b9210d26a4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Implementation of RefCounted that executes a Runnable once the ref count reaches zero.
+ */
+class RefCountDelegate implements RefCounted {
+ private final AtomicInteger refCount = new AtomicInteger(1);
+ private final @Nullable Runnable releaseCallback;
+
+ /**
+ * @param releaseCallback Callback that will be executed once the ref count reaches zero.
+ */
+ public RefCountDelegate(@Nullable Runnable releaseCallback) {
+ this.releaseCallback = releaseCallback;
+ }
+
+ @Override
+ public void retain() {
+    int updatedCount = refCount.incrementAndGet();
+    if (updatedCount < 2) {
+ throw new IllegalStateException("retain() called on an object with refcount < 1");
+ }
+ }
+
+ @Override
+ public void release() {
+    int updatedCount = refCount.decrementAndGet();
+    if (updatedCount < 0) {
+      throw new IllegalStateException("release() called on an object with refcount < 1");
+    }
+    if (updatedCount == 0 && releaseCallback != null) {
+ releaseCallback.run();
+ }
+ }
+
+ /**
+ * Tries to retain the object. Can be used in scenarios where it is unknown if the object has
+ * already been released. Returns true if successful or false if the object was already released.
+ */
+ boolean safeRetain() {
+ int currentRefCount = refCount.get();
+ while (currentRefCount != 0) {
+ if (refCount.weakCompareAndSet(currentRefCount, currentRefCount + 1)) {
+ return true;
+ }
+ currentRefCount = refCount.get();
+ }
+ return false;
+ }
+}
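
A short sketch of the retain/release discipline this class enforces; freeNativeResource() is a hypothetical cleanup hook:

    RefCountDelegate refCount = new RefCountDelegate(() -> freeNativeResource());
    refCount.retain();  // A second owner takes a reference; count is now 2.
    refCount.release(); // That owner is done; count is back to 1.
    // safeRetain() is for racy readers that may observe an already-released object:
    if (refCount.safeRetain()) {
      try {
        // ... the underlying resource is guaranteed alive here ...
      } finally {
        refCount.release();
      }
    }
    refCount.release(); // Count reaches zero; the callback runs exactly once.
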
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
new file mode 100644
index 0000000000..26a030919d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Enumeration of supported video codec types. */
+enum VideoCodecMimeType {
+ VP8("video/x-vnd.on2.vp8"),
+ VP9("video/x-vnd.on2.vp9"),
+ H264("video/avc"),
+ AV1("video/av01");
+
+ private final String mimeType;
+
+ private VideoCodecMimeType(String mimeType) {
+ this.mimeType = mimeType;
+ }
+
+ String mimeType() {
+ return mimeType;
+ }
+}
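
The enum's strings line up with the MIME types android.media.MediaFormat expects, so they can be fed straight into format construction; a one-line illustration with placeholder dimensions:

    MediaFormat format =
        MediaFormat.createVideoFormat(VideoCodecMimeType.H264.mimeType(), 1280, 720);
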
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java
new file mode 100644
index 0000000000..2aae041640
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoDecoder;
+
+/**
+ * This class contains the Java glue code for JNI generation of VideoDecoder.
+ */
+class VideoDecoderWrapper {
+ @CalledByNative
+ static VideoDecoder.Callback createDecoderCallback(final long nativeDecoder) {
+ return (VideoFrame frame, Integer decodeTimeMs,
+ Integer qp) -> nativeOnDecodedFrame(nativeDecoder, frame, decodeTimeMs, qp);
+ }
+
+ private static native void nativeOnDecodedFrame(
+ long nativeVideoDecoderWrapper, VideoFrame frame, Integer decodeTimeMs, Integer qp);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
new file mode 100644
index 0000000000..b5485d4edb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+// Explicit imports necessary for JNI generation.
+import androidx.annotation.Nullable;
+import org.webrtc.VideoEncoder;
+
+/**
+ * This class contains the Java glue code for JNI generation of VideoEncoder.
+ */
+class VideoEncoderWrapper {
+ @CalledByNative
+ static boolean getScalingSettingsOn(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.on;
+ }
+
+ @Nullable
+ @CalledByNative
+ static Integer getScalingSettingsLow(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.low;
+ }
+
+ @Nullable
+ @CalledByNative
+ static Integer getScalingSettingsHigh(VideoEncoder.ScalingSettings scalingSettings) {
+ return scalingSettings.high;
+ }
+
+ @CalledByNative
+ static VideoEncoder.Callback createEncoderCallback(final long nativeEncoder) {
+ return (EncodedImage frame,
+ VideoEncoder.CodecSpecificInfo info) -> nativeOnEncodedFrame(nativeEncoder, frame);
+ }
+
+ private static native void nativeOnEncodedFrame(
+ long nativeVideoEncoderWrapper, EncodedImage frame);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java
new file mode 100644
index 0000000000..023e92cfb1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * This class provides a ClassLoader that is capable of loading WebRTC Java classes regardless of
+ * what thread it's called from. Such a ClassLoader is needed for the few cases where the JNI
+ * mechanism is unable to automatically determine the appropriate ClassLoader instance.
+ */
+class WebRtcClassLoader {
+ @CalledByNative
+ static Object getClassLoader() {
+ Object loader = WebRtcClassLoader.class.getClassLoader();
+ if (loader == null) {
+ throw new RuntimeException("Failed to get WebRTC class loader.");
+ }
+ return loader;
+ }
+}
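
A sketch of the kind of lookup this class makes possible: resolving a WebRTC class from a thread whose context ClassLoader cannot see org.webrtc (the class name here is just an example):

    try {
      ClassLoader loader = (ClassLoader) WebRtcClassLoader.getClassLoader();
      Class<?> clazz = Class.forName("org.webrtc.VideoFrame", /* initialize= */ true, loader);
    } catch (ClassNotFoundException e) {
      // Not expected for classes bundled with the SDK.
    }
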
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java
new file mode 100644
index 0000000000..0461660fcf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/**
+ * This class wraps a webrtc::I420BufferInterface into a VideoFrame.I420Buffer.
+ */
+class WrappedNativeI420Buffer implements VideoFrame.I420Buffer {
+ private final int width;
+ private final int height;
+ private final ByteBuffer dataY;
+ private final int strideY;
+ private final ByteBuffer dataU;
+ private final int strideU;
+ private final ByteBuffer dataV;
+ private final int strideV;
+ private final long nativeBuffer;
+
+ @CalledByNative
+ WrappedNativeI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
+ int strideU, ByteBuffer dataV, int strideV, long nativeBuffer) {
+ this.width = width;
+ this.height = height;
+ this.dataY = dataY;
+ this.strideY = strideY;
+ this.dataU = dataU;
+ this.strideU = strideU;
+ this.dataV = dataV;
+ this.strideV = strideV;
+ this.nativeBuffer = nativeBuffer;
+
+ retain();
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public ByteBuffer getDataY() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataY.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataU() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataU.slice();
+ }
+
+ @Override
+ public ByteBuffer getDataV() {
+ // Return a slice to prevent relative reads from changing the position.
+ return dataV.slice();
+ }
+
+ @Override
+ public int getStrideY() {
+ return strideY;
+ }
+
+ @Override
+ public int getStrideU() {
+ return strideU;
+ }
+
+ @Override
+ public int getStrideV() {
+ return strideV;
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ retain();
+ return this;
+ }
+
+ @Override
+ public void retain() {
+ JniCommon.nativeAddRef(nativeBuffer);
+ }
+
+ @Override
+ public void release() {
+ JniCommon.nativeReleaseRef(nativeBuffer);
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ return JavaI420Buffer.cropAndScaleI420(
+ this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ }
+}
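
The slice() calls in the plane getters matter because relative reads advance a ByteBuffer's position; slicing gives each caller an independent position. A sketch, with i420 assumed to be any VideoFrame.I420Buffer:

    ByteBuffer y1 = i420.getDataY();
    y1.get(new byte[y1.remaining()]); // Consumes y1 completely...
    ByteBuffer y2 = i420.getDataY();
    // ...yet y2 still starts at the beginning of the Y plane, unaffected by y1.
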
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java
new file mode 100644
index 0000000000..70c625ab4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.AudioTrack;
+import android.os.Build;
+import org.webrtc.Logging;
+
+// Lowers the buffer size if no underruns are detected for 100 ms. Once an
+// underrun is detected, the buffer size is increased by 10 ms and it will not
+// be lowered further. The buffer size will never be increased more than
+// 5 times, to avoid the possibility of the buffer size increasing without
+// bounds.
+class LowLatencyAudioBufferManager {
+ private static final String TAG = "LowLatencyAudioBufferManager";
+ // The underrun count that was valid during the previous call to maybeAdjustBufferSize(). Used to
+ // detect increases in the value.
+ private int prevUnderrunCount;
+ // The number of ticks to wait without an underrun before decreasing the buffer size.
+ private int ticksUntilNextDecrease;
+  // Indicates whether we should keep trying to decrease the buffer size.
+  private boolean keepLoweringBufferSize;
+  // How many times the buffer size has been increased.
+ private int bufferIncreaseCounter;
+
+ public LowLatencyAudioBufferManager() {
+ this.prevUnderrunCount = 0;
+ this.ticksUntilNextDecrease = 10;
+ this.keepLoweringBufferSize = true;
+ this.bufferIncreaseCounter = 0;
+ }
+
+ public void maybeAdjustBufferSize(AudioTrack audioTrack) {
+ if (Build.VERSION.SDK_INT >= 26) {
+ final int underrunCount = audioTrack.getUnderrunCount();
+ if (underrunCount > prevUnderrunCount) {
+ // Don't increase buffer more than 5 times. Continuing to increase the buffer size
+ // could be harmful on low-power devices that regularly experience underruns under
+ // normal conditions.
+ if (bufferIncreaseCounter < 5) {
+ // Underrun detected, increase buffer size by 10ms.
+ final int currentBufferSize = audioTrack.getBufferSizeInFrames();
+ final int newBufferSize = currentBufferSize + audioTrack.getPlaybackRate() / 100;
+ Logging.d(TAG,
+ "Underrun detected! Increasing AudioTrack buffer size from " + currentBufferSize
+ + " to " + newBufferSize);
+ audioTrack.setBufferSizeInFrames(newBufferSize);
+ bufferIncreaseCounter++;
+ }
+ // Stop trying to lower the buffer size.
+ keepLoweringBufferSize = false;
+ prevUnderrunCount = underrunCount;
+ ticksUntilNextDecrease = 10;
+ } else if (keepLoweringBufferSize) {
+ ticksUntilNextDecrease--;
+ if (ticksUntilNextDecrease <= 0) {
+ // No underrun seen for 100 ms, try to lower the buffer size by 10ms.
+ final int bufferSize10ms = audioTrack.getPlaybackRate() / 100;
+ // Never go below a buffer size of 10ms.
+ final int currentBufferSize = audioTrack.getBufferSizeInFrames();
+ final int newBufferSize = Math.max(bufferSize10ms, currentBufferSize - bufferSize10ms);
+ if (newBufferSize != currentBufferSize) {
+ Logging.d(TAG,
+ "Lowering AudioTrack buffer size from " + currentBufferSize + " to "
+ + newBufferSize);
+ audioTrack.setBufferSizeInFrames(newBufferSize);
+ }
+ ticksUntilNextDecrease = 10;
+ }
+ }
+ }
+ }
+}
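
Since AudioTrack buffer sizes are in frames, 10 ms is getPlaybackRate() / 100 frames (480 frames at 48 kHz). The intended call pattern, sketched with a hypothetical playout loop:

    LowLatencyAudioBufferManager bufferManager = new LowLatencyAudioBufferManager();
    while (playing) {
      writeTenMsOfPcm(audioTrack); // Placeholder for the real AudioTrack.write() call.
      bufferManager.maybeAdjustBufferSize(audioTrack);
    }
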
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java
new file mode 100644
index 0000000000..06d5cd3a8e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.AudioManager;
+import androidx.annotation.Nullable;
+import java.util.Timer;
+import java.util.TimerTask;
+import org.webrtc.Logging;
+
+// TODO(magjed): Do we really need to spawn a new thread just to log volume? Can we re-use the
+// AudioTrackThread instead?
+/**
+ * Private utility class that periodically checks and logs the volume level of the audio stream that
+ * is currently controlled by the volume control. A timer triggers logs once every 30 seconds and
+ * the timer's associated thread is named "WebRtcVolumeLevelLoggerThread".
+ */
+class VolumeLogger {
+ private static final String TAG = "VolumeLogger";
+ private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread";
+ private static final int TIMER_PERIOD_IN_SECONDS = 30;
+
+ private final AudioManager audioManager;
+ private @Nullable Timer timer;
+
+ public VolumeLogger(AudioManager audioManager) {
+ this.audioManager = audioManager;
+ }
+
+ public void start() {
+ Logging.d(TAG, "start" + WebRtcAudioUtils.getThreadInfo());
+ if (timer != null) {
+ return;
+ }
+ Logging.d(TAG, "audio mode is: " + WebRtcAudioUtils.modeToString(audioManager.getMode()));
+
+ timer = new Timer(THREAD_NAME);
+ timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
+ audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
+ 0, TIMER_PERIOD_IN_SECONDS * 1000);
+ }
+
+ private class LogVolumeTask extends TimerTask {
+ private final int maxRingVolume;
+ private final int maxVoiceCallVolume;
+
+ LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) {
+ this.maxRingVolume = maxRingVolume;
+ this.maxVoiceCallVolume = maxVoiceCallVolume;
+ }
+
+ @Override
+ public void run() {
+ final int mode = audioManager.getMode();
+ if (mode == AudioManager.MODE_RINGTONE) {
+ Logging.d(TAG,
+ "STREAM_RING stream volume: " + audioManager.getStreamVolume(AudioManager.STREAM_RING)
+ + " (max=" + maxRingVolume + ")");
+ } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
+ Logging.d(TAG,
+ "VOICE_CALL stream volume: "
+ + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
+ + " (max=" + maxVoiceCallVolume + ")");
+ }
+ }
+ }
+
+ public void stop() {
+ Logging.d(TAG, "stop" + WebRtcAudioUtils.getThreadInfo());
+ if (timer != null) {
+ timer.cancel();
+ timer = null;
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
new file mode 100644
index 0000000000..a9ff1011b6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -0,0 +1,227 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.media.audiofx.AcousticEchoCanceler;
+import android.media.audiofx.AudioEffect;
+import android.media.audiofx.AudioEffect.Descriptor;
+import android.media.audiofx.NoiseSuppressor;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.UUID;
+import org.webrtc.Logging;
+
+// This class wraps control of two different platform effects:
+// AcousticEchoCanceler (AEC) and NoiseSuppressor (NS).
+// Calling enable() will activate all effects that are
+// supported by the device if the corresponding `shouldEnableXXX` member is set.
+class WebRtcAudioEffects {
+ private static final boolean DEBUG = false;
+
+ private static final String TAG = "WebRtcAudioEffectsExternal";
+
+ // UUIDs for Software Audio Effects that we want to avoid using.
+ // The implementor field will be set to "The Android Open Source Project".
+ private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER =
+ UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");
+ private static final UUID AOSP_NOISE_SUPPRESSOR =
+ UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b");
+
+ // Contains the available effect descriptors returned from the
+ // AudioEffect.getEffects() call. This result is cached to avoid doing the
+ // slow OS call multiple times.
+ private static @Nullable Descriptor[] cachedEffects;
+
+ // Contains the audio effect objects. Created in enable() and destroyed
+ // in release().
+ private @Nullable AcousticEchoCanceler aec;
+ private @Nullable NoiseSuppressor ns;
+
+ // Affects the final state given to the setEnabled() method on each effect.
+ // The default state is set to "disabled" but each effect can also be enabled
+ // by calling setAEC() and setNS().
+ private boolean shouldEnableAec;
+ private boolean shouldEnableNs;
+
+ // Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
+ // fulfilled.
+ public static boolean isAcousticEchoCancelerSupported() {
+ return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
+ }
+
+ // Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
+ public static boolean isNoiseSuppressorSupported() {
+ return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
+ }
+
+ public WebRtcAudioEffects() {
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ // Call this method to enable or disable the platform AEC. It modifies
+ // `shouldEnableAec` which is used in enable() where the actual state
+ // of the AEC effect is modified. Returns true if HW AEC is supported and
+ // false otherwise.
+ public boolean setAEC(boolean enable) {
+ Logging.d(TAG, "setAEC(" + enable + ")");
+ if (!isAcousticEchoCancelerSupported()) {
+ Logging.w(TAG, "Platform AEC is not supported");
+ shouldEnableAec = false;
+ return false;
+ }
+ if (aec != null && (enable != shouldEnableAec)) {
+ Logging.e(TAG, "Platform AEC state can't be modified while recording");
+ return false;
+ }
+ shouldEnableAec = enable;
+ return true;
+ }
+
+ // Call this method to enable or disable the platform NS. It modifies
+ // `shouldEnableNs` which is used in enable() where the actual state
+ // of the NS effect is modified. Returns true if HW NS is supported and
+ // false otherwise.
+ public boolean setNS(boolean enable) {
+ Logging.d(TAG, "setNS(" + enable + ")");
+ if (!isNoiseSuppressorSupported()) {
+ Logging.w(TAG, "Platform NS is not supported");
+ shouldEnableNs = false;
+ return false;
+ }
+ if (ns != null && (enable != shouldEnableNs)) {
+ Logging.e(TAG, "Platform NS state can't be modified while recording");
+ return false;
+ }
+ shouldEnableNs = enable;
+ return true;
+ }
+
+ public void enable(int audioSession) {
+ Logging.d(TAG, "enable(audioSession=" + audioSession + ")");
+ assertTrue(aec == null);
+ assertTrue(ns == null);
+
+ if (DEBUG) {
+      // Log the supported effects, keeping only the "VoIP effects", i.e.,
+      // AEC and NS. Avoid calling AudioEffect.queryEffects() unless the
+ // DEBUG flag is set since we have seen crashes in this API.
+ for (Descriptor d : AudioEffect.queryEffects()) {
+ if (effectTypeIsVoIP(d.type)) {
+ Logging.d(TAG,
+ "name: " + d.name + ", "
+ + "mode: " + d.connectMode + ", "
+ + "implementor: " + d.implementor + ", "
+ + "UUID: " + d.uuid);
+ }
+ }
+ }
+
+ if (isAcousticEchoCancelerSupported()) {
+ // Create an AcousticEchoCanceler and attach it to the AudioRecord on
+ // the specified audio session.
+ aec = AcousticEchoCanceler.create(audioSession);
+ if (aec != null) {
+ boolean enabled = aec.getEnabled();
+ boolean enable = shouldEnableAec && isAcousticEchoCancelerSupported();
+ if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
+ Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
+ }
+ Logging.d(TAG,
+ "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+ + enable + ", is now: " + (aec.getEnabled() ? "enabled" : "disabled"));
+ } else {
+ Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance");
+ }
+ }
+
+ if (isNoiseSuppressorSupported()) {
+      // Create a NoiseSuppressor and attach it to the AudioRecord on the
+ // specified audio session.
+ ns = NoiseSuppressor.create(audioSession);
+ if (ns != null) {
+ boolean enabled = ns.getEnabled();
+ boolean enable = shouldEnableNs && isNoiseSuppressorSupported();
+ if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
+ Logging.e(TAG, "Failed to set the NoiseSuppressor state");
+ }
+ Logging.d(TAG,
+ "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable
+ + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
+ } else {
+ Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
+ }
+ }
+ }
+
+ // Releases all native audio effect resources. It is a good practice to
+ // release the effect engine when not in use as control can be returned
+ // to other applications or the native resources released.
+ public void release() {
+ Logging.d(TAG, "release");
+ if (aec != null) {
+ aec.release();
+ aec = null;
+ }
+ if (ns != null) {
+ ns.release();
+ ns = null;
+ }
+ }
+
+  // Returns true for effect types in `type` that are of "VoIP" types:
+  // Acoustic Echo Canceler (AEC) or Noise Suppressor (NS). Note that an
+  // extra check for support is needed in each comparison since some devices
+  // include effects in the AudioEffect.Descriptor array that are actually
+  // not available on the device. As an example: the Samsung Galaxy S6
+  // includes an AGC in the descriptor but AutomaticGainControl.isAvailable()
+  // returns false.
+ private boolean effectTypeIsVoIP(UUID type) {
+ return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
+ || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ // Returns the cached copy of the audio effects array, if available, or
+ // queries the operating system for the list of effects.
+ private static @Nullable Descriptor[] getAvailableEffects() {
+ if (cachedEffects != null) {
+ return cachedEffects;
+ }
+ // The caching is best effort only - if this method is called from several
+ // threads in parallel, they may end up doing the underlying OS call
+ // multiple times. It's normally only called on one thread so there's no
+ // real need to optimize for the multiple threads case.
+ cachedEffects = AudioEffect.queryEffects();
+ return cachedEffects;
+ }
+
+ // Returns true if an effect of the specified type is available. Functionally
+  // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
+ // faster as it avoids the expensive OS call to enumerate effects.
+ private static boolean isEffectTypeAvailable(UUID effectType, UUID blockListedUuid) {
+ Descriptor[] effects = getAvailableEffects();
+ if (effects == null) {
+ return false;
+ }
+ for (Descriptor d : effects) {
+ if (d.type.equals(effectType)) {
+ return !d.uuid.equals(blockListedUuid);
+ }
+ }
+ return false;
+ }
+}
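
A hedged sketch of the intended lifecycle around a recording session, with audioRecord assumed to be a configured android.media.AudioRecord:

    WebRtcAudioEffects effects = new WebRtcAudioEffects();
    effects.setAEC(true); // Returns false and stays off if the platform AEC is unsupported.
    effects.setNS(true);
    effects.enable(audioRecord.getAudioSessionId());
    // ... record ...
    effects.release(); // Frees the effect engines for other applications.
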
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
new file mode 100644
index 0000000000..f398602a28
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.os.Build;
+import org.webrtc.Logging;
+import org.webrtc.CalledByNative;
+
+/**
+ * This class contains static functions to query sample rate and input/output audio buffer sizes.
+ */
+class WebRtcAudioManager {
+ private static final String TAG = "WebRtcAudioManagerExternal";
+
+ private static final int DEFAULT_SAMPLE_RATE_HZ = 16000;
+
+ // Default audio data format is PCM 16 bit per sample.
+ // Guaranteed to be supported by all devices.
+ private static final int BITS_PER_SAMPLE = 16;
+
+ private static final int DEFAULT_FRAME_PER_BUFFER = 256;
+
+ @CalledByNative
+ static AudioManager getAudioManager(Context context) {
+ return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+ }
+
+ @CalledByNative
+ static int getOutputBufferSize(
+ Context context, AudioManager audioManager, int sampleRate, int numberOfOutputChannels) {
+ return isLowLatencyOutputSupported(context)
+ ? getLowLatencyFramesPerBuffer(audioManager)
+ : getMinOutputFrameSize(sampleRate, numberOfOutputChannels);
+ }
+
+ @CalledByNative
+ static int getInputBufferSize(
+ Context context, AudioManager audioManager, int sampleRate, int numberOfInputChannels) {
+ return isLowLatencyInputSupported(context)
+ ? getLowLatencyFramesPerBuffer(audioManager)
+ : getMinInputFrameSize(sampleRate, numberOfInputChannels);
+ }
+
+ private static boolean isLowLatencyOutputSupported(Context context) {
+ return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
+ }
+
+ private static boolean isLowLatencyInputSupported(Context context) {
+ // TODO(henrika): investigate if some sort of device list is needed here
+ // as well. The NDK doc states that: "As of API level 21, lower latency
+ // audio input is supported on select devices. To take advantage of this
+ // feature, first confirm that lower latency output is available".
+ return isLowLatencyOutputSupported(context);
+ }
+
+ /**
+ * Returns the native input/output sample rate for this device's output stream.
+ */
+ @CalledByNative
+ static int getSampleRate(AudioManager audioManager) {
+ // Override this if we're running on an old emulator image which only
+ // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE.
+ if (WebRtcAudioUtils.runningOnEmulator()) {
+ Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz.");
+ return 8000;
+ }
+ // Deliver best possible estimate based on default Android AudioManager APIs.
+ final int sampleRateHz = getSampleRateForApiLevel(audioManager);
+ Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
+ return sampleRateHz;
+ }
+
+ private static int getSampleRateForApiLevel(AudioManager audioManager) {
+ String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+ return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
+ }
+
+ // Returns the native output buffer size for low-latency output streams.
+ private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
+ String framesPerBuffer =
+ audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+ return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
+ }
+
+ // Returns the minimum output buffer size for Java based audio (AudioTrack).
+ // This size can also be used for OpenSL ES implementations on devices that
+  // lack support for low-latency output.
+ private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
+ final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+ final int channelConfig =
+ (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+ return AudioTrack.getMinBufferSize(
+ sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+ / bytesPerFrame;
+ }
+
+ // Returns the minimum input buffer size for Java based audio (AudioRecord).
+  // This size can also be used for OpenSL ES implementations on devices that
+  // lack support for low-latency input.
+ private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
+ final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+ final int channelConfig =
+ (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+ return AudioRecord.getMinBufferSize(
+ sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+ / bytesPerFrame;
+ }
+}
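
A worked example of the frame-size arithmetic in getMinOutputFrameSize(), for 48 kHz stereo PCM16 where bytesPerFrame = 2 * (16 / 8) = 4:

    int bytes = AudioTrack.getMinBufferSize(
        48000, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    int frames = bytes / 4; // If the OS reports 15360 bytes, that is 3840 frames (80 ms).
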
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
new file mode 100644
index 0000000000..6647e5fcbb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
@@ -0,0 +1,743 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioRecordingConfiguration;
+import android.media.AudioTimestamp;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import android.os.Process;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import java.lang.System;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+import org.webrtc.CalledByNative;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
+
+class WebRtcAudioRecord {
+ private static final String TAG = "WebRtcAudioRecordExternal";
+
+ // Requested size of each recorded buffer provided to the client.
+ private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+ // Average number of callbacks per second.
+ private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+ // We ask for a native buffer size of BUFFER_SIZE_FACTOR * (minimum required
+ // buffer size). The extra space is allocated to guard against glitches under
+ // high load.
+ private static final int BUFFER_SIZE_FACTOR = 2;
+
+  // The AudioRecordJavaThread is allowed to wait for a successful call to join()
+  // but the wait times out after this amount of time.
+ private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+ public static final int DEFAULT_AUDIO_SOURCE = AudioSource.VOICE_COMMUNICATION;
+
+ // Default audio data format is PCM 16 bit per sample.
+ // Guaranteed to be supported by all devices.
+ public static final int DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+
+ // Indicates AudioRecord has started recording audio.
+ private static final int AUDIO_RECORD_START = 0;
+
+ // Indicates AudioRecord has stopped recording audio.
+ private static final int AUDIO_RECORD_STOP = 1;
+
+ // Time to wait before checking recording status after start has been called. Tests have
+ // shown that the result can sometimes be invalid (our own status might be missing) if we check
+ // directly after start.
+ private static final int CHECK_REC_STATUS_DELAY_MS = 100;
+
+ private final Context context;
+ private final AudioManager audioManager;
+ private final int audioSource;
+ private final int audioFormat;
+
+ private long nativeAudioRecord;
+
+ private final WebRtcAudioEffects effects = new WebRtcAudioEffects();
+
+ private @Nullable ByteBuffer byteBuffer;
+
+ private @Nullable AudioRecord audioRecord;
+ private @Nullable AudioRecordThread audioThread;
+ private @Nullable AudioDeviceInfo preferredDevice;
+
+ private final ScheduledExecutorService executor;
+ private @Nullable ScheduledFuture<String> future;
+
+ private volatile boolean microphoneMute;
+ private final AtomicReference<Boolean> audioSourceMatchesRecordingSessionRef =
+ new AtomicReference<>();
+ private byte[] emptyBytes;
+
+ private final @Nullable AudioRecordErrorCallback errorCallback;
+ private final @Nullable AudioRecordStateCallback stateCallback;
+ private final @Nullable SamplesReadyCallback audioSamplesReadyCallback;
+ private final boolean isAcousticEchoCancelerSupported;
+ private final boolean isNoiseSuppressorSupported;
+
+ /**
+   * Audio thread which keeps calling AudioRecord.read() waiting for audio
+ * to be recorded. Feeds recorded data to the native counterpart as a
+ * periodic sequence of callbacks using DataIsRecorded().
+ * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+ */
+ private class AudioRecordThread extends Thread {
+ private volatile boolean keepAlive = true;
+
+ public AudioRecordThread(String name) {
+ super(name);
+ }
+
+ @Override
+ public void run() {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+ Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
+ assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);
+
+ // Audio recording has started and the client is informed about it.
+ doAudioRecordStateCallback(AUDIO_RECORD_START);
+
+ long lastTime = System.nanoTime();
+ AudioTimestamp audioTimestamp = null;
+ if (Build.VERSION.SDK_INT >= 24) {
+ audioTimestamp = new AudioTimestamp();
+ }
+ while (keepAlive) {
+ int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity());
+ if (bytesRead == byteBuffer.capacity()) {
+ if (microphoneMute) {
+ byteBuffer.clear();
+ byteBuffer.put(emptyBytes);
+ }
+ // It's possible we've been shut down during the read, and stopRecording() tried and
+ // failed to join this thread. To be a bit safer, try to avoid calling any native methods
+ // in case they've been unregistered after stopRecording() returned.
+ if (keepAlive) {
+ long captureTimeNs = 0;
+ if (Build.VERSION.SDK_INT >= 24) {
+ if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC)
+ == AudioRecord.SUCCESS) {
+ captureTimeNs = audioTimestamp.nanoTime;
+ }
+ }
+ nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs);
+ }
+ if (audioSamplesReadyCallback != null) {
+ // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily
+ // at index 0.
+ byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(),
+ byteBuffer.capacity() + byteBuffer.arrayOffset());
+ audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady(
+ new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(),
+ audioRecord.getChannelCount(), audioRecord.getSampleRate(), data));
+ }
+ } else {
+ String errorMessage = "AudioRecord.read failed: " + bytesRead;
+ Logging.e(TAG, errorMessage);
+ if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
+ keepAlive = false;
+ reportWebRtcAudioRecordError(errorMessage);
+ }
+ }
+ }
+
+ try {
+ if (audioRecord != null) {
+ audioRecord.stop();
+ doAudioRecordStateCallback(AUDIO_RECORD_STOP);
+ }
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
+ }
+ }
+
+ // Stops the inner thread loop and also calls AudioRecord.stop().
+ // Does not block the calling thread.
+ public void stopThread() {
+ Logging.d(TAG, "stopThread");
+ keepAlive = false;
+ }
+ }
+
+ @CalledByNative
+ WebRtcAudioRecord(Context context, AudioManager audioManager) {
+ this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE,
+ DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */,
+ null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
+ WebRtcAudioEffects.isNoiseSuppressorSupported());
+ }
+
+ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler,
+ AudioManager audioManager, int audioSource, int audioFormat,
+ @Nullable AudioRecordErrorCallback errorCallback,
+ @Nullable AudioRecordStateCallback stateCallback,
+ @Nullable SamplesReadyCallback audioSamplesReadyCallback,
+ boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) {
+ if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) {
+ throw new IllegalArgumentException("HW AEC not supported");
+ }
+ if (isNoiseSuppressorSupported && !WebRtcAudioEffects.isNoiseSuppressorSupported()) {
+ throw new IllegalArgumentException("HW NS not supported");
+ }
+ this.context = context;
+ this.executor = scheduler;
+ this.audioManager = audioManager;
+ this.audioSource = audioSource;
+ this.audioFormat = audioFormat;
+ this.errorCallback = errorCallback;
+ this.stateCallback = stateCallback;
+ this.audioSamplesReadyCallback = audioSamplesReadyCallback;
+ this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported;
+ this.isNoiseSuppressorSupported = isNoiseSuppressorSupported;
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ @CalledByNative
+ public void setNativeAudioRecord(long nativeAudioRecord) {
+ this.nativeAudioRecord = nativeAudioRecord;
+ }
+
+ @CalledByNative
+ boolean isAcousticEchoCancelerSupported() {
+ return isAcousticEchoCancelerSupported;
+ }
+
+ @CalledByNative
+ boolean isNoiseSuppressorSupported() {
+ return isNoiseSuppressorSupported;
+ }
+
+ // Returns true if a valid call to verifyAudioConfig() has been done. Should always be
+ // checked before using the returned value of isAudioSourceMatchingRecordingSession().
+ @CalledByNative
+ boolean isAudioConfigVerified() {
+ return audioSourceMatchesRecordingSessionRef.get() != null;
+ }
+
+ // Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay when
+  // startRecording() has been called. Hence, it should preferably be called in combination with
+ // stopRecording() to ensure that it has been set properly. `isAudioConfigVerified` is
+ // enabled in WebRtcAudioRecord to ensure that the returned value is valid.
+ @CalledByNative
+ boolean isAudioSourceMatchingRecordingSession() {
+ Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get();
+ if (audioSourceMatchesRecordingSession == null) {
+ Logging.w(TAG, "Audio configuration has not yet been verified");
+ return false;
+ }
+ return audioSourceMatchesRecordingSession;
+ }
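+
+ // Illustrative usage (not upstream code): the two queries above are meant to be
+ // combined, gating on isAudioConfigVerified() first, e.g.:
+ //
+ //   if (record.isAudioConfigVerified()
+ //       && !record.isAudioSourceMatchingRecordingSession()) {
+ //     // The active recording session does not match the requested configuration.
+ //   }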
+
+ @CalledByNative
+ private boolean enableBuiltInAEC(boolean enable) {
+ Logging.d(TAG, "enableBuiltInAEC(" + enable + ")");
+ return effects.setAEC(enable);
+ }
+
+ @CalledByNative
+ private boolean enableBuiltInNS(boolean enable) {
+ Logging.d(TAG, "enableBuiltInNS(" + enable + ")");
+ return effects.setNS(enable);
+ }
+
+ @CalledByNative
+ private int initRecording(int sampleRate, int channels) {
+ Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+ if (audioRecord != null) {
+ reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording.");
+ return -1;
+ }
+ final int bytesPerFrame = channels * getBytesPerSample(audioFormat);
+ final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND;
+ byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);
+ if (!(byteBuffer.hasArray())) {
+ reportWebRtcAudioRecordInitError("ByteBuffer does not have backing array.");
+ return -1;
+ }
+ Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+ emptyBytes = new byte[byteBuffer.capacity()];
+ // Rather than passing the ByteBuffer with every callback (requiring
+ // the potentially expensive GetDirectBufferAddress) we simply have the
+ // native class cache the address to the memory once.
+ nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer);
+
+ // Get the minimum buffer size required for the successful creation of
+ // an AudioRecord object, in byte units.
+ // Note that this size doesn't guarantee a smooth recording under load.
+ final int channelConfig = channelCountToConfiguration(channels);
+ int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
+ if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
+ reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize);
+ return -1;
+ }
+ Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize);
+
+ // Use a larger buffer size than the minimum required when creating the
+ // AudioRecord instance to ensure smooth recording under load. It has been
+ // verified that it does not increase the actual recording latency.
+ int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
+ Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
+ try {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ // Use the AudioRecord.Builder class on Android M (23) and above.
+ // Throws IllegalArgumentException.
+ audioRecord = createAudioRecordOnMOrHigher(
+ audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ audioSourceMatchesRecordingSessionRef.set(null);
+ if (preferredDevice != null) {
+ setPreferredDevice(preferredDevice);
+ }
+ } else {
+ // Use the old AudioRecord constructor for API levels below 23.
+ // Throws UnsupportedOperationException.
+ audioRecord = createAudioRecordOnLowerThanM(
+ audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ audioSourceMatchesRecordingSessionRef.set(null);
+ }
+ } catch (IllegalArgumentException | UnsupportedOperationException e) {
+ // Reporting the exception message is sufficient. Example: "Cannot create AudioRecord".
+ reportWebRtcAudioRecordInitError(e.getMessage());
+ releaseAudioResources();
+ return -1;
+ }
+ if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+ reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed.");
+ releaseAudioResources();
+ return -1;
+ }
+ effects.enable(audioRecord.getAudioSessionId());
+ logMainParameters();
+ logMainParametersExtended();
+ // Check number of active recording sessions. Should be zero but we have seen conflict cases
+ // and adding a log for it can help us figure out details about conflicting sessions.
+ final int numActiveRecordingSessions =
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (numActiveRecordingSessions != 0) {
+ // Log the conflict as a warning since initialization did in fact succeed. Most likely, the
+ // upcoming call to startRecording() will fail under these conditions.
+ Logging.w(
+ TAG, "Potential microphone conflict. Active sessions: " + numActiveRecordingSessions);
+ }
+ return framesPerBuffer;
+ }
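+
+ // Worked example of the sizing above (illustrative, assuming 10 ms callbacks so
+ // that BUFFERS_PER_SECOND == 100): for 48000 Hz mono PCM 16-bit audio,
+ // bytesPerFrame = 1 * 2 = 2 and framesPerBuffer = 48000 / 100 = 480, giving a
+ // 960-byte direct ByteBuffer and a return value of 480 frames.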
+
+ /**
+ * Prefer a specific {@link AudioDeviceInfo} device for recording. Calling after recording starts
+ * is valid but may cause a temporary interruption if the audio routing changes.
+ */
+ @RequiresApi(Build.VERSION_CODES.M)
+ @TargetApi(Build.VERSION_CODES.M)
+ void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) {
+ Logging.d(
+ TAG, "setPreferredDevice " + (preferredDevice != null ? preferredDevice.getId() : null));
+ this.preferredDevice = preferredDevice;
+ if (audioRecord != null) {
+ if (!audioRecord.setPreferredDevice(preferredDevice)) {
+ Logging.e(TAG, "setPreferredDevice failed");
+ }
+ }
+ }
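+
+ // Illustrative sketch (not upstream code): on API 23+ a caller in this package
+ // could route recording to, e.g., a wired headset microphone:
+ //
+ //   for (AudioDeviceInfo info : audioManager.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
+ //     if (info.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
+ //       record.setPreferredDevice(info);
+ //       break;
+ //     }
+ //   }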
+
+ @CalledByNative
+ private boolean startRecording() {
+ Logging.d(TAG, "startRecording");
+ assertTrue(audioRecord != null);
+ assertTrue(audioThread == null);
+ try {
+ audioRecord.startRecording();
+ } catch (IllegalStateException e) {
+ reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
+ "AudioRecord.startRecording failed: " + e.getMessage());
+ return false;
+ }
+ if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+ reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
+ "AudioRecord.startRecording failed - incorrect state: "
+ + audioRecord.getRecordingState());
+ return false;
+ }
+ audioThread = new AudioRecordThread("AudioRecordJavaThread");
+ audioThread.start();
+ scheduleLogRecordingConfigurationsTask(audioRecord);
+ return true;
+ }
+
+ @CalledByNative
+ private boolean stopRecording() {
+ Logging.d(TAG, "stopRecording");
+ assertTrue(audioThread != null);
+ if (future != null) {
+ if (!future.isDone()) {
+ // Might be needed if the client calls startRecording() and stopRecording() back-to-back.
+ future.cancel(true /* mayInterruptIfRunning */);
+ }
+ future = null;
+ }
+ audioThread.stopThread();
+ if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
+ Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ }
+ audioThread = null;
+ effects.release();
+ releaseAudioResources();
+ return true;
+ }
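+
+ // Note (illustrative): the @CalledByNative entry points above are driven by the
+ // native audio device module in the order initRecording() -> startRecording() ->
+ // stopRecording(); they are not intended to be called directly from Java.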
+
+ @TargetApi(Build.VERSION_CODES.M)
+ private static AudioRecord createAudioRecordOnMOrHigher(
+ int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
+ Logging.d(TAG, "createAudioRecordOnMOrHigher");
+ return new AudioRecord.Builder()
+ .setAudioSource(audioSource)
+ .setAudioFormat(new AudioFormat.Builder()
+ .setEncoding(audioFormat)
+ .setSampleRate(sampleRate)
+ .setChannelMask(channelConfig)
+ .build())
+ .setBufferSizeInBytes(bufferSizeInBytes)
+ .build();
+ }
+
+ private static AudioRecord createAudioRecordOnLowerThanM(
+ int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
+ Logging.d(TAG, "createAudioRecordOnLowerThanM");
+ return new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+ }
+
+ private void logMainParameters() {
+ Logging.d(TAG,
+ "AudioRecord: "
+ + "session ID: " + audioRecord.getAudioSessionId() + ", "
+ + "channels: " + audioRecord.getChannelCount() + ", "
+ + "sample rate: " + audioRecord.getSampleRate());
+ }
+
+ @TargetApi(Build.VERSION_CODES.M)
+ private void logMainParametersExtended() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Logging.d(TAG,
+ "AudioRecord: "
+ // The frame count of the native AudioRecord buffer.
+ + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ // Checks the number of active recording sessions and logs the states of all active sessions.
+ // Returns the number of active sessions. Note that this can occur on an arbitrary thread.
+ private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
+ Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher");
+ return 0;
+ }
+ if (audioRecord == null) {
+ return 0;
+ }
+
+ // Get a list of the currently active audio recording configurations of the device (can be more
+ // than one). An empty list indicates there is no recording active when queried.
+ List<AudioRecordingConfiguration> configs = audioManager.getActiveRecordingConfigurations();
+ final int numActiveRecordingSessions = configs.size();
+ Logging.d(TAG, "Number of active recording sessions: " + numActiveRecordingSessions);
+ if (numActiveRecordingSessions > 0) {
+ logActiveRecordingConfigs(audioRecord.getAudioSessionId(), configs);
+ if (verifyAudioConfig) {
+ // Run an extra check to verify that the existing audio source doing the recording (tied
+ // to the AudioRecord instance) is matching what the audio recording configuration lists
+ // as its client parameters. If these do not match, recording might work but under invalid
+ // conditions.
+ audioSourceMatchesRecordingSessionRef.set(
+ verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(),
+ audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs));
+ }
+ }
+ return numActiveRecordingSessions;
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ private int channelCountToConfiguration(int channels) {
+ return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+ }
+
+ private native void nativeCacheDirectBufferAddress(
+ long nativeAudioRecordJni, ByteBuffer byteBuffer);
+ private native void nativeDataIsRecorded(
+ long nativeAudioRecordJni, int bytes, long captureTimestampNs);
+
+ // Sets all recorded samples to zero if `mute` is true, i.e., ensures that
+ // the microphone is muted.
+ public void setMicrophoneMute(boolean mute) {
+ Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
+ microphoneMute = mute;
+ }
+
+ // Releases the native AudioRecord resources.
+ private void releaseAudioResources() {
+ Logging.d(TAG, "releaseAudioResources");
+ if (audioRecord != null) {
+ audioRecord.release();
+ audioRecord = null;
+ }
+ audioSourceMatchesRecordingSessionRef.set(null);
+ }
+
+ private void reportWebRtcAudioRecordInitError(String errorMessage) {
+ Logging.e(TAG, "Init recording error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordInitError(errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioRecordStartError(
+ AudioRecordStartErrorCode errorCode, String errorMessage) {
+ Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioRecordError(String errorMessage) {
+ Logging.e(TAG, "Run-time recording error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordError(errorMessage);
+ }
+ }
+
+ private void doAudioRecordStateCallback(int audioState) {
+ Logging.d(TAG, "doAudioRecordStateCallback: " + audioStateToString(audioState));
+ if (stateCallback != null) {
+ if (audioState == WebRtcAudioRecord.AUDIO_RECORD_START) {
+ stateCallback.onWebRtcAudioRecordStart();
+ } else if (audioState == WebRtcAudioRecord.AUDIO_RECORD_STOP) {
+ stateCallback.onWebRtcAudioRecordStop();
+ } else {
+ Logging.e(TAG, "Invalid audio state");
+ }
+ }
+ }
+
+ // Reference: AudioFormat.getBytesPerSample() in the Android platform code (bitsPerSample / 8).
+ // The default audio data format is PCM 16 bits per sample, which is
+ // guaranteed to be supported by all devices.
+ private static int getBytesPerSample(int audioFormat) {
+ switch (audioFormat) {
+ case AudioFormat.ENCODING_PCM_8BIT:
+ return 1;
+ case AudioFormat.ENCODING_PCM_16BIT:
+ case AudioFormat.ENCODING_IEC61937:
+ case AudioFormat.ENCODING_DEFAULT:
+ return 2;
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ return 4;
+ case AudioFormat.ENCODING_INVALID:
+ default:
+ throw new IllegalArgumentException("Bad audio format " + audioFormat);
+ }
+ }
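+
+ // Example (illustrative): for ENCODING_PCM_16BIT stereo audio, each sample is
+ // 2 bytes, so one frame (one sample per channel) occupies 2 * 2 = 4 bytes.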
+
+ // Use an ExecutorService to schedule a task after a given delay. The task
+ // checks (by logging) the current status of active recording sessions.
+ private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) {
+ Logging.d(TAG, "scheduleLogRecordingConfigurationsTask");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
+ return;
+ }
+
+ Callable<String> callable = () -> {
+ if (this.audioRecord == audioRecord) {
+ logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */);
+ } else {
+ Logging.d(TAG, "audio record has changed");
+ }
+ return "Scheduled task is done";
+ };
+
+ if (future != null && !future.isDone()) {
+ future.cancel(true /* mayInterruptIfRunning */);
+ }
+ // Schedule call to logRecordingConfigurations() from executor thread after fixed delay.
+ future = executor.schedule(callable, CHECK_REC_STATUS_DELAY_MS, TimeUnit.MILLISECONDS);
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ private static boolean logActiveRecordingConfigs(
+ int session, List<AudioRecordingConfiguration> configs) {
+ assertTrue(!configs.isEmpty());
+ final Iterator<AudioRecordingConfiguration> it = configs.iterator();
+ Logging.d(TAG, "AudioRecordingConfigurations: ");
+ while (it.hasNext()) {
+ final AudioRecordingConfiguration config = it.next();
+ StringBuilder conf = new StringBuilder();
+ // The audio source selected by the client.
+ final int audioSource = config.getClientAudioSource();
+ conf.append(" client audio source=")
+ .append(WebRtcAudioUtils.audioSourceToString(audioSource))
+ .append(", client session id=")
+ .append(config.getClientAudioSessionId())
+ // Compare with our own id (based on AudioRecord#getAudioSessionId()).
+ .append(" (")
+ .append(session)
+ .append(")")
+ .append("\n");
+ // Audio format at which audio is recorded on this Android device. Note that it may differ
+ // from the client application recording format (see getClientFormat()).
+ AudioFormat format = config.getFormat();
+ conf.append(" Device AudioFormat: ")
+ .append("channel count=")
+ .append(format.getChannelCount())
+ .append(", channel index mask=")
+ .append(format.getChannelIndexMask())
+ // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices.
+ .append(", channel mask=")
+ .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask()))
+ .append(", encoding=")
+ .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding()))
+ .append(", sample rate=")
+ .append(format.getSampleRate())
+ .append("\n");
+ // Audio format at which the client application is recording audio.
+ format = config.getClientFormat();
+ conf.append(" Client AudioFormat: ")
+ .append("channel count=")
+ .append(format.getChannelCount())
+ .append(", channel index mask=")
+ .append(format.getChannelIndexMask())
+ // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices.
+ .append(", channel mask=")
+ .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask()))
+ .append(", encoding=")
+ .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding()))
+ .append(", sample rate=")
+ .append(format.getSampleRate())
+ .append("\n");
+ // Audio input device used for this recording session.
+ final AudioDeviceInfo device = config.getAudioDevice();
+ if (device != null) {
+ assertTrue(device.isSource());
+ conf.append(" AudioDevice: ")
+ .append("type=")
+ .append(WebRtcAudioUtils.deviceTypeToString(device.getType()))
+ .append(", id=")
+ .append(device.getId());
+ }
+ Logging.d(TAG, conf.toString());
+ }
+ return true;
+ }
+
+ // Verify that the client audio configuration (device and format) matches the requested
+ // configuration (same as AudioRecord's).
+ @TargetApi(Build.VERSION_CODES.N)
+ private static boolean verifyAudioConfig(int source, int session, AudioFormat format,
+ AudioDeviceInfo device, List<AudioRecordingConfiguration> configs) {
+ assertTrue(!configs.isEmpty());
+ final Iterator<AudioRecordingConfiguration> it = configs.iterator();
+ while (it.hasNext()) {
+ final AudioRecordingConfiguration config = it.next();
+ final AudioDeviceInfo configDevice = config.getAudioDevice();
+ if (configDevice == null) {
+ continue;
+ }
+ if ((config.getClientAudioSource() == source)
+ && (config.getClientAudioSessionId() == session)
+ // Check the client format (should match the format of the AudioRecord instance).
+ && (config.getClientFormat().getEncoding() == format.getEncoding())
+ && (config.getClientFormat().getSampleRate() == format.getSampleRate())
+ && (config.getClientFormat().getChannelMask() == format.getChannelMask())
+ && (config.getClientFormat().getChannelIndexMask() == format.getChannelIndexMask())
+ // Ensure that the device format is properly configured.
+ && (config.getFormat().getEncoding() != AudioFormat.ENCODING_INVALID)
+ && (config.getFormat().getSampleRate() > 0)
+ // For the channel mask, either the position or index-based value must be valid.
+ && ((config.getFormat().getChannelMask() != AudioFormat.CHANNEL_INVALID)
+ || (config.getFormat().getChannelIndexMask() != AudioFormat.CHANNEL_INVALID))
+ && checkDeviceMatch(configDevice, device)) {
+ Logging.d(TAG, "verifyAudioConfig: PASS");
+ return true;
+ }
+ }
+ Logging.e(TAG, "verifyAudioConfig: FAILED");
+ return false;
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ // Returns true if device A's parameters match those of device B.
+ // TODO(henrika): can be improved by adding AudioDeviceInfo#getAddress() but it requires API 29.
+ private static boolean checkDeviceMatch(AudioDeviceInfo devA, AudioDeviceInfo devB) {
+ return devA.getId() == devB.getId() && devA.getType() == devB.getType();
+ }
+
+ private static String audioStateToString(int state) {
+ switch (state) {
+ case WebRtcAudioRecord.AUDIO_RECORD_START:
+ return "START";
+ case WebRtcAudioRecord.AUDIO_RECORD_STOP:
+ return "STOP";
+ default:
+ return "INVALID";
+ }
+ }
+
+ private static final AtomicInteger nextSchedulerId = new AtomicInteger(0);
+
+ static ScheduledExecutorService newDefaultScheduler() {
+ AtomicInteger nextThreadId = new AtomicInteger(0);
+ return Executors.newScheduledThreadPool(0, new ThreadFactory() {
+ /**
+ * Constructs a new {@code Thread} whose name encodes the scheduler and thread
+ * ids, e.g. "WebRtcAudioRecordScheduler-0-0" for the first thread created by
+ * the first scheduler.
+ */
+ @Override
+ public Thread newThread(Runnable r) {
+ Thread thread = Executors.defaultThreadFactory().newThread(r);
+ thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s",
+ nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement()));
+ return thread;
+ }
+ });
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
new file mode 100644
index 0000000000..2b34e34013
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -0,0 +1,585 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
+import android.os.Process;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.CalledByNative;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
+import org.webrtc.audio.LowLatencyAudioBufferManager;
+
+class WebRtcAudioTrack {
+ private static final String TAG = "WebRtcAudioTrackExternal";
+
+ // Default audio data format is PCM 16 bit per sample.
+ // Guaranteed to be supported by all devices.
+ private static final int BITS_PER_SAMPLE = 16;
+
+ // Requested size of each recorded buffer provided to the client.
+ private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+ // Average number of callbacks per second.
+ private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+ // The AudioTrackThread is allowed to wait for a successful call to join()
+ // but the wait times out after this amount of time.
+ private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+ // By default, WebRTC creates audio tracks with a usage attribute
+ // corresponding to voice communications, such as telephony or VoIP.
+ private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;
+
+ // Indicates the AudioTrack has started playing audio.
+ private static final int AUDIO_TRACK_START = 0;
+
+ // Indicates the AudioTrack has stopped playing audio.
+ private static final int AUDIO_TRACK_STOP = 1;
+
+ private long nativeAudioTrack;
+ private final Context context;
+ private final AudioManager audioManager;
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+ private ByteBuffer byteBuffer;
+
+ private @Nullable final AudioAttributes audioAttributes;
+ private @Nullable AudioTrack audioTrack;
+ private @Nullable AudioTrackThread audioThread;
+ private final VolumeLogger volumeLogger;
+
+ // Samples to be played are replaced by zeros if `speakerMute` is set to true.
+ // Can be used to ensure that the speaker is fully muted.
+ private volatile boolean speakerMute;
+ private byte[] emptyBytes;
+ private boolean useLowLatency;
+ private int initialBufferSizeInFrames;
+
+ private final @Nullable AudioTrackErrorCallback errorCallback;
+ private final @Nullable AudioTrackStateCallback stateCallback;
+
+ /**
+ * Audio thread which keeps calling AudioTrack.write() to stream audio.
+ * Data is periodically acquired from the native WebRTC layer using the
+ * nativeGetPlayoutData callback function.
+ * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+ */
+ private class AudioTrackThread extends Thread {
+ private volatile boolean keepAlive = true;
+ private LowLatencyAudioBufferManager bufferManager;
+
+ public AudioTrackThread(String name) {
+ super(name);
+ bufferManager = new LowLatencyAudioBufferManager();
+ }
+
+ @Override
+ public void run() {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+ Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
+ assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
+
+ // Audio playout has started and the client is informed about it.
+ doAudioTrackStateCallback(AUDIO_TRACK_START);
+
+ // Fixed size in bytes of each 10ms block of audio data that we ask for
+ // using callbacks to the native WebRTC client.
+ final int sizeInBytes = byteBuffer.capacity();
+
+ while (keepAlive) {
+ // Get 10ms of PCM data from the native WebRTC client. Audio data is
+ // written into the common ByteBuffer using the address that was
+ // cached at construction.
+ nativeGetPlayoutData(nativeAudioTrack, sizeInBytes);
+ // Write data until all data has been written to the audio sink.
+ // Upon return, the buffer position will have been advanced to reflect
+ // the amount of data that was successfully written to the AudioTrack.
+ assertTrue(sizeInBytes <= byteBuffer.remaining());
+ if (speakerMute) {
+ byteBuffer.clear();
+ byteBuffer.put(emptyBytes);
+ byteBuffer.position(0);
+ }
+ int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+ if (bytesWritten != sizeInBytes) {
+ Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
+ // If a write() returns a negative value, an error has occurred.
+ // Stop playing and report an error in this case.
+ if (bytesWritten < 0) {
+ keepAlive = false;
+ reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten);
+ }
+ }
+ if (useLowLatency) {
+ bufferManager.maybeAdjustBufferSize(audioTrack);
+ }
+ // The byte buffer must be rewound since byteBuffer.position() is
+ // increased at each call to AudioTrack.write(). If we don't do this,
+ // the next call to AudioTrack.write() will fail.
+ byteBuffer.rewind();
+
+ // TODO(henrika): it is possible to create a delay estimate here by
+ // counting number of written frames and subtracting the result from
+ // audioTrack.getPlaybackHeadPosition().
+ }
+ }
+
+ // Stops the inner thread loop which results in calling AudioTrack.stop().
+ // Does not block the calling thread.
+ public void stopThread() {
+ Logging.d(TAG, "stopThread");
+ keepAlive = false;
+ }
+ }
+
+ @CalledByNative
+ WebRtcAudioTrack(Context context, AudioManager audioManager) {
+ this(context, audioManager, null /* audioAttributes */, null /* errorCallback */,
+ null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */);
+ }
+
+ WebRtcAudioTrack(Context context, AudioManager audioManager,
+ @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback,
+ @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency,
+ boolean enableVolumeLogger) {
+ threadChecker.detachThread();
+ this.context = context;
+ this.audioManager = audioManager;
+ this.audioAttributes = audioAttributes;
+ this.errorCallback = errorCallback;
+ this.stateCallback = stateCallback;
+ this.volumeLogger = enableVolumeLogger ? new VolumeLogger(audioManager) : null;
+ this.useLowLatency = useLowLatency;
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ @CalledByNative
+ public void setNativeAudioTrack(long nativeAudioTrack) {
+ this.nativeAudioTrack = nativeAudioTrack;
+ }
+
+ @CalledByNative
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG,
+ "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
+ + ", bufferSizeFactor=" + bufferSizeFactor + ")");
+ final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
+ byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
+ Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+ emptyBytes = new byte[byteBuffer.capacity()];
+ // Rather than passing the ByteBuffer with every callback (requiring
+ // the potentially expensive GetDirectBufferAddress) we simply have the
+ // native class cache the address to the memory once.
+ nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer);
+
+ // Get the minimum buffer size required for the successful creation of an
+ // AudioTrack object operating in MODE_STREAM mode.
+ // Note that this size doesn't guarantee a smooth playback under load.
+ final int channelConfig = channelCountToConfiguration(channels);
+ final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig,
+ AudioFormat.ENCODING_PCM_16BIT)
+ * bufferSizeFactor);
+ Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes);
+ // For the streaming mode, data must be written to the audio sink in
+ // chunks of size (given by byteBuffer.capacity()) less than or equal
+ // to the total buffer size `minBufferSizeInBytes`. But, we have seen
+ // reports of "getMinBufferSize(): error querying hardware". Hence, it
+ // can happen that `minBufferSizeInBytes` contains an invalid value.
+ if (minBufferSizeInBytes < byteBuffer.capacity()) {
+ reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
+ return -1;
+ }
+
+ // Don't use low-latency mode when a bufferSizeFactor > 1 is used. When bufferSizeFactor > 1
+ // we want to use a larger buffer to prevent underruns. However, low-latency mode would
+ // decrease the buffer size, which makes the bufferSizeFactor have no effect.
+ if (bufferSizeFactor > 1.0) {
+ useLowLatency = false;
+ }
+
+ // Ensure that the previous audio session was stopped correctly before trying
+ // to create a new AudioTrack.
+ if (audioTrack != null) {
+ reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
+ return -1;
+ }
+ try {
+ // Create an AudioTrack object and initialize its associated audio buffer.
+ // The size of this buffer determines how long an AudioTrack can play
+ // before running out of data.
+ if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ // On API level 26 or higher, we can use a low latency mode.
+ audioTrack = createAudioTrackOnOreoOrHigher(
+ sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+ } else {
+ // As we are on API level 21 or higher, it is possible to use a special AudioTrack
+ // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
+ // supersede the notion of stream types for defining the behavior of audio playback,
+ // and to allow certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
+ audioTrack = createAudioTrackBeforeOreo(
+ sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+ }
+ } catch (IllegalArgumentException e) {
+ reportWebRtcAudioTrackInitError(e.getMessage());
+ releaseAudioResources();
+ return -1;
+ }
+
+ // It can happen that an AudioTrack is created but not successfully
+ // initialized upon creation. This seems to be the case e.g. when the maximum
+ // number of globally available audio tracks is exceeded.
+ if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
+ reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
+ releaseAudioResources();
+ return -1;
+ }
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ initialBufferSizeInFrames = audioTrack.getBufferSizeInFrames();
+ } else {
+ initialBufferSizeInFrames = -1;
+ }
+ logMainParameters();
+ logMainParametersExtended();
+ return minBufferSizeInBytes;
+ }
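+
+ // Worked example of the sizing above (illustrative): for 48000 Hz stereo,
+ // bytesPerFrame = 2 * (16 / 8) = 4 and the 10 ms ByteBuffer holds
+ // 4 * (48000 / 100) = 1920 bytes, so minBufferSizeInBytes (after scaling by
+ // bufferSizeFactor) must be at least 1920 for initPlayout() to succeed.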
+
+ @CalledByNative
+ private boolean startPlayout() {
+ threadChecker.checkIsOnValidThread();
+ if (volumeLogger != null) {
+ volumeLogger.start();
+ }
+ Logging.d(TAG, "startPlayout");
+ assertTrue(audioTrack != null);
+ assertTrue(audioThread == null);
+
+ // Starts playing an audio track.
+ try {
+ audioTrack.play();
+ } catch (IllegalStateException e) {
+ reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
+ "AudioTrack.play failed: " + e.getMessage());
+ releaseAudioResources();
+ return false;
+ }
+ if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+ reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
+ "AudioTrack.play failed - incorrect state :" + audioTrack.getPlayState());
+ releaseAudioResources();
+ return false;
+ }
+
+ // Create and start new high-priority thread which calls AudioTrack.write()
+ // and where we also call the native nativeGetPlayoutData() callback to
+ // request decoded audio from WebRTC.
+ audioThread = new AudioTrackThread("AudioTrackJavaThread");
+ audioThread.start();
+ return true;
+ }
+
+ @CalledByNative
+ private boolean stopPlayout() {
+ threadChecker.checkIsOnValidThread();
+ if (volumeLogger != null) {
+ volumeLogger.stop();
+ }
+ Logging.d(TAG, "stopPlayout");
+ assertTrue(audioThread != null);
+ logUnderrunCount();
+ audioThread.stopThread();
+
+ Logging.d(TAG, "Stopping the AudioTrackThread...");
+ audioThread.interrupt();
+ if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
+ Logging.e(TAG, "Join of AudioTrackThread timed out.");
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ }
+ Logging.d(TAG, "AudioTrackThread has now been stopped.");
+ audioThread = null;
+ if (audioTrack != null) {
+ Logging.d(TAG, "Calling AudioTrack.stop...");
+ try {
+ audioTrack.stop();
+ Logging.d(TAG, "AudioTrack.stop is done.");
+ doAudioTrackStateCallback(AUDIO_TRACK_STOP);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
+ }
+ }
+ releaseAudioResources();
+ return true;
+ }
+
+ // Get max possible volume index for a phone call audio stream.
+ @CalledByNative
+ private int getStreamMaxVolume() {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "getStreamMaxVolume");
+ return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ // Set current volume level for a phone call audio stream.
+ @CalledByNative
+ private boolean setStreamVolume(int volume) {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "setStreamVolume(" + volume + ")");
+ if (audioManager.isVolumeFixed()) {
+ Logging.e(TAG, "The device implements a fixed volume policy.");
+ return false;
+ }
+ audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
+ return true;
+ }
+
+ /** Get current volume level for a phone call audio stream. */
+ @CalledByNative
+ private int getStreamVolume() {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "getStreamVolume");
+ return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
+ }
+
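+ // Returns the number of underrun events for the playout buffer since the
+ // AudioTrack was created (API 24+); returns -1 if the AudioTrack is gone and
+ // -2 on API levels below 24, where AudioTrack.getUnderrunCount() is unavailable.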
+ @CalledByNative
+ private int GetPlayoutUnderrunCount() {
+ if (Build.VERSION.SDK_INT >= 24) {
+ if (audioTrack != null) {
+ return audioTrack.getUnderrunCount();
+ } else {
+ return -1;
+ }
+ } else {
+ return -2;
+ }
+ }
+
+ private void logMainParameters() {
+ Logging.d(TAG,
+ "AudioTrack: "
+ + "session ID: " + audioTrack.getAudioSessionId() + ", "
+ + "channels: " + audioTrack.getChannelCount() + ", "
+ + "sample rate: " + audioTrack.getSampleRate()
+ + ", "
+ // Gain (>=1.0) expressed as linear multiplier on sample values.
+ + "max gain: " + AudioTrack.getMaxVolume());
+ }
+
+ private static void logNativeOutputSampleRate(int requestedSampleRateInHz) {
+ final int nativeOutputSampleRate =
+ AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
+ Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
+ if (requestedSampleRateInHz != nativeOutputSampleRate) {
+ Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
+ }
+ }
+
+ private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes overrideAttributes) {
+ AudioAttributes.Builder attributesBuilder =
+ new AudioAttributes.Builder()
+ .setUsage(DEFAULT_USAGE)
+ .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
+
+ if (overrideAttributes != null) {
+ if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) {
+ attributesBuilder.setUsage(overrideAttributes.getUsage());
+ }
+ if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) {
+ attributesBuilder.setContentType(overrideAttributes.getContentType());
+ }
+
+ attributesBuilder.setFlags(overrideAttributes.getFlags());
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ attributesBuilder = applyAttributesOnQOrHigher(attributesBuilder, overrideAttributes);
+ }
+ }
+ return attributesBuilder.build();
+ }
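+
+ // Illustrative sketch (not upstream code): an embedder preferring media-style
+ // playout over the VoIP defaults could pass override attributes such as:
+ //
+ //   AudioAttributes attrs = new AudioAttributes.Builder()
+ //       .setUsage(AudioAttributes.USAGE_MEDIA)
+ //       .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
+ //       .build();
+ //
+ // Fields left as USAGE_UNKNOWN / CONTENT_TYPE_UNKNOWN keep the defaults above.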
+
+ // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+ // It allows certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
+ private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig,
+ int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+ Logging.d(TAG, "createAudioTrackBeforeOreo");
+ logNativeOutputSampleRate(sampleRateInHz);
+
+ // Create an audio track where the audio usage is for VoIP and the content type is speech.
+ return new AudioTrack(getAudioAttributes(overrideAttributes),
+ new AudioFormat.Builder()
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .setSampleRate(sampleRateInHz)
+ .setChannelMask(channelConfig)
+ .build(),
+ bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
+ }
+
+ // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+ // Use the low-latency mode to improve audio latency. Note that the low-latency mode may
+ // prevent effects (such as AEC) from working. Assuming AEC is working, the delay changes
+ // that happen in low-latency mode during the call will cause the AEC to perform worse.
+ // The behavior of the low-latency mode may be device dependent, use at your own risk.
+ @TargetApi(Build.VERSION_CODES.O)
+ private static AudioTrack createAudioTrackOnOreoOrHigher(int sampleRateInHz, int channelConfig,
+ int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+ Logging.d(TAG, "createAudioTrackOnOreoOrHigher");
+ logNativeOutputSampleRate(sampleRateInHz);
+
+ // Create an audio track where the audio usage is for VoIP and the content type is speech.
+ return new AudioTrack.Builder()
+ .setAudioAttributes(getAudioAttributes(overrideAttributes))
+ .setAudioFormat(new AudioFormat.Builder()
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .setSampleRate(sampleRateInHz)
+ .setChannelMask(channelConfig)
+ .build())
+ .setBufferSizeInBytes(bufferSizeInBytes)
+ .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
+ .setTransferMode(AudioTrack.MODE_STREAM)
+ .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE)
+ .build();
+ }
+
+ @TargetApi(Build.VERSION_CODES.Q)
+ private static AudioAttributes.Builder applyAttributesOnQOrHigher(
+ AudioAttributes.Builder builder, AudioAttributes overrideAttributes) {
+ return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy());
+ }
+
+ private void logBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Logging.d(TAG,
+ "AudioTrack: "
+ // The effective size of the AudioTrack buffer that the app writes to.
+ + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
+ }
+ }
+
+ @CalledByNative
+ private int getBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ return audioTrack.getBufferSizeInFrames();
+ }
+ return -1;
+ }
+
+ @CalledByNative
+ private int getInitialBufferSizeInFrames() {
+ return initialBufferSizeInFrames;
+ }
+
+ private void logBufferCapacityInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ Logging.d(TAG,
+ "AudioTrack: "
+ // Maximum size of the AudioTrack buffer in frames.
+ + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames());
+ }
+ }
+
+ private void logMainParametersExtended() {
+ logBufferSizeInFrames();
+ logBufferCapacityInFrames();
+ }
+
+ // Prints the number of underrun occurrences in the application-level write
+ // buffer since the AudioTrack was created. An underrun occurs if the app does
+ // not write audio data quickly enough, causing the buffer to underflow and a
+ // potential audio glitch.
+ // TODO(henrika): keep track of this value in the field and possibly add new
+ // UMA stat if needed.
+ private void logUnderrunCount() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
+ }
+ }
+
+ // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ private int channelCountToConfiguration(int channels) {
+ return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+ }
+
+ private static native void nativeCacheDirectBufferAddress(
+ long nativeAudioTrackJni, ByteBuffer byteBuffer);
+ private static native void nativeGetPlayoutData(long nativeAudioTrackJni, int bytes);
+
+ // Sets all samples to be played out to zero if `mute` is true, i.e.,
+ // ensures that the speaker is muted.
+ public void setSpeakerMute(boolean mute) {
+ Logging.w(TAG, "setSpeakerMute(" + mute + ")");
+ speakerMute = mute;
+ }
+
+ // Releases the native AudioTrack resources.
+ private void releaseAudioResources() {
+ Logging.d(TAG, "releaseAudioResources");
+ if (audioTrack != null) {
+ audioTrack.release();
+ audioTrack = null;
+ }
+ }
+
+ private void reportWebRtcAudioTrackInitError(String errorMessage) {
+ Logging.e(TAG, "Init playout error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackInitError(errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioTrackStartError(
+ AudioTrackStartErrorCode errorCode, String errorMessage) {
+ Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioTrackError(String errorMessage) {
+ Logging.e(TAG, "Run-time playback error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioTrackError(errorMessage);
+ }
+ }
+
+ private void doAudioTrackStateCallback(int audioState) {
+ Logging.d(TAG, "doAudioTrackStateCallback: " + audioState);
+ if (stateCallback != null) {
+ if (audioState == WebRtcAudioTrack.AUDIO_TRACK_START) {
+ stateCallback.onWebRtcAudioTrackStart();
+ } else if (audioState == WebRtcAudioTrack.AUDIO_TRACK_STOP) {
+ stateCallback.onWebRtcAudioTrackStop();
+ } else {
+ Logging.e(TAG, "Invalid audio state");
+ }
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
new file mode 100644
index 0000000000..7b4b809ab1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
@@ -0,0 +1,308 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+import static android.media.AudioManager.MODE_IN_CALL;
+import static android.media.AudioManager.MODE_IN_COMMUNICATION;
+import static android.media.AudioManager.MODE_NORMAL;
+import static android.media.AudioManager.MODE_RINGTONE;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import java.lang.Thread;
+import java.util.Arrays;
+import org.webrtc.Logging;
+
+final class WebRtcAudioUtils {
+ private static final String TAG = "WebRtcAudioUtilsExternal";
+
+ // Helper method for building a string of thread information.
+ public static String getThreadInfo() {
+ return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+ + "]";
+ }
+
+ // Returns true if we're running on an emulator.
+ public static boolean runningOnEmulator() {
+ return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
+ }
+
+ // Information about the current build, taken from system properties.
+ static void logDeviceInfo(String tag) {
+ Logging.d(tag,
+ "Android SDK: " + Build.VERSION.SDK_INT + ", "
+ + "Release: " + Build.VERSION.RELEASE + ", "
+ + "Brand: " + Build.BRAND + ", "
+ + "Device: " + Build.DEVICE + ", "
+ + "Id: " + Build.ID + ", "
+ + "Hardware: " + Build.HARDWARE + ", "
+ + "Manufacturer: " + Build.MANUFACTURER + ", "
+ + "Model: " + Build.MODEL + ", "
+ + "Product: " + Build.PRODUCT);
+ }
+
+ // Logs information about the current audio state. The idea is to call this
+ // method when errors are detected to log under what conditions the error
+ // occurred. Hopefully it will provide clues to what might be the root cause.
+ static void logAudioState(String tag, Context context, AudioManager audioManager) {
+ logDeviceInfo(tag);
+ logAudioStateBasic(tag, context, audioManager);
+ logAudioStateVolume(tag, audioManager);
+ logAudioDeviceInfo(tag, audioManager);
+ }
+
+ // Converts AudioDeviceInfo types to local string representation.
+ static String deviceTypeToString(int type) {
+ switch (type) {
+ case AudioDeviceInfo.TYPE_UNKNOWN:
+ return "TYPE_UNKNOWN";
+ case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
+ return "TYPE_BUILTIN_EARPIECE";
+ case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+ return "TYPE_BUILTIN_SPEAKER";
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ return "TYPE_WIRED_HEADSET";
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ return "TYPE_WIRED_HEADPHONES";
+ case AudioDeviceInfo.TYPE_LINE_ANALOG:
+ return "TYPE_LINE_ANALOG";
+ case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+ return "TYPE_LINE_DIGITAL";
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ return "TYPE_BLUETOOTH_SCO";
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ return "TYPE_BLUETOOTH_A2DP";
+ case AudioDeviceInfo.TYPE_HDMI:
+ return "TYPE_HDMI";
+ case AudioDeviceInfo.TYPE_HDMI_ARC:
+ return "TYPE_HDMI_ARC";
+ case AudioDeviceInfo.TYPE_USB_DEVICE:
+ return "TYPE_USB_DEVICE";
+ case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+ return "TYPE_USB_ACCESSORY";
+ case AudioDeviceInfo.TYPE_DOCK:
+ return "TYPE_DOCK";
+ case AudioDeviceInfo.TYPE_FM:
+ return "TYPE_FM";
+ case AudioDeviceInfo.TYPE_BUILTIN_MIC:
+ return "TYPE_BUILTIN_MIC";
+ case AudioDeviceInfo.TYPE_FM_TUNER:
+ return "TYPE_FM_TUNER";
+ case AudioDeviceInfo.TYPE_TV_TUNER:
+ return "TYPE_TV_TUNER";
+ case AudioDeviceInfo.TYPE_TELEPHONY:
+ return "TYPE_TELEPHONY";
+ case AudioDeviceInfo.TYPE_AUX_LINE:
+ return "TYPE_AUX_LINE";
+ case AudioDeviceInfo.TYPE_IP:
+ return "TYPE_IP";
+ case AudioDeviceInfo.TYPE_BUS:
+ return "TYPE_BUS";
+ case AudioDeviceInfo.TYPE_USB_HEADSET:
+ return "TYPE_USB_HEADSET";
+ default:
+ return "TYPE_UNKNOWN";
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ public static String audioSourceToString(int source) {
+ // AudioSource.VOICE_PERFORMANCE requires API level 29. Use a local define instead.
+ final int VOICE_PERFORMANCE = 10;
+ switch (source) {
+ case AudioSource.DEFAULT:
+ return "DEFAULT";
+ case AudioSource.MIC:
+ return "MIC";
+ case AudioSource.VOICE_UPLINK:
+ return "VOICE_UPLINK";
+ case AudioSource.VOICE_DOWNLINK:
+ return "VOICE_DOWNLINK";
+ case AudioSource.VOICE_CALL:
+ return "VOICE_CALL";
+ case AudioSource.CAMCORDER:
+ return "CAMCORDER";
+ case AudioSource.VOICE_RECOGNITION:
+ return "VOICE_RECOGNITION";
+ case AudioSource.VOICE_COMMUNICATION:
+ return "VOICE_COMMUNICATION";
+ case AudioSource.UNPROCESSED:
+ return "UNPROCESSED";
+ case VOICE_PERFORMANCE:
+ return "VOICE_PERFORMANCE";
+ default:
+ return "INVALID";
+ }
+ }
+
+ public static String channelMaskToString(int mask) {
+ // For input or AudioRecord, the mask should be AudioFormat#CHANNEL_IN_MONO or
+ // AudioFormat#CHANNEL_IN_STEREO. AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all
+ // devices.
+ switch (mask) {
+ case AudioFormat.CHANNEL_IN_STEREO:
+ return "IN_STEREO";
+ case AudioFormat.CHANNEL_IN_MONO:
+ return "IN_MONO";
+ default:
+ return "INVALID";
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ public static String audioEncodingToString(int enc) {
+ switch (enc) {
+ case AudioFormat.ENCODING_INVALID:
+ return "INVALID";
+ case AudioFormat.ENCODING_PCM_16BIT:
+ return "PCM_16BIT";
+ case AudioFormat.ENCODING_PCM_8BIT:
+ return "PCM_8BIT";
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ return "PCM_FLOAT";
+ case AudioFormat.ENCODING_AC3:
+ return "AC3";
+ case AudioFormat.ENCODING_E_AC3:
+ return "E_AC3";
+ case AudioFormat.ENCODING_DTS:
+ return "DTS";
+ case AudioFormat.ENCODING_DTS_HD:
+ return "DTS_HD";
+ case AudioFormat.ENCODING_MP3:
+ return "MP3";
+ default:
+ return "Invalid encoding: " + enc;
+ }
+ }
+
+ // Reports basic audio statistics.
+ private static void logAudioStateBasic(String tag, Context context, AudioManager audioManager) {
+ Logging.d(tag,
+ "Audio State: "
+ + "audio mode: " + modeToString(audioManager.getMode()) + ", "
+ + "has mic: " + hasMicrophone(context) + ", "
+ + "mic muted: " + audioManager.isMicrophoneMute() + ", "
+ + "music active: " + audioManager.isMusicActive() + ", "
+ + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", "
+ + "BT SCO: " + audioManager.isBluetoothScoOn());
+ }
+
+ // Adds volume information for all possible stream types.
+ private static void logAudioStateVolume(String tag, AudioManager audioManager) {
+ final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
+ AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
+ AudioManager.STREAM_SYSTEM};
+ Logging.d(tag, "Audio State: ");
+ // Some devices may not have volume controls and might use a fixed volume.
+ boolean fixedVolume = audioManager.isVolumeFixed();
+ Logging.d(tag, " fixed volume=" + fixedVolume);
+ if (!fixedVolume) {
+ for (int stream : streams) {
+ StringBuilder info = new StringBuilder();
+ info.append(" " + streamTypeToString(stream) + ": ");
+ info.append("volume=").append(audioManager.getStreamVolume(stream));
+ info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
+ logIsStreamMute(tag, audioManager, stream, info);
+ Logging.d(tag, info.toString());
+ }
+ }
+ }
+
+ private static void logIsStreamMute(
+ String tag, AudioManager audioManager, int stream, StringBuilder info) {
+ if (Build.VERSION.SDK_INT >= 23) {
+ info.append(", muted=").append(audioManager.isStreamMute(stream));
+ }
+ }
+
+ // Moz linting complains even though AudioManager.GET_DEVICES_ALL is
+ // listed in the docs here:
+ // https://developer.android.com/reference/android/media/AudioManager#GET_DEVICES_ALL
+ @SuppressLint("WrongConstant")
+ private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
+ if (Build.VERSION.SDK_INT < 23) {
+ return;
+ }
+ final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ if (devices.length == 0) {
+ return;
+ }
+ Logging.d(tag, "Audio Devices: ");
+ for (AudioDeviceInfo device : devices) {
+ StringBuilder info = new StringBuilder();
+ info.append(" ").append(deviceTypeToString(device.getType()));
+ info.append(device.isSource() ? "(in): " : "(out): ");
+ // An empty array indicates that the device supports arbitrary channel counts.
+ if (device.getChannelCounts().length > 0) {
+ info.append("channels=").append(Arrays.toString(device.getChannelCounts()));
+ info.append(", ");
+ }
+ if (device.getEncodings().length > 0) {
+ // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4.
+ info.append("encodings=").append(Arrays.toString(device.getEncodings()));
+ info.append(", ");
+ }
+ if (device.getSampleRates().length > 0) {
+ info.append("sample rates=").append(Arrays.toString(device.getSampleRates()));
+ info.append(", ");
+ }
+ info.append("id=").append(device.getId());
+ Logging.d(tag, info.toString());
+ }
+ }
+
+ // Converts android.media.AudioManager modes into a local string representation.
+ static String modeToString(int mode) {
+ switch (mode) {
+ case MODE_IN_CALL:
+ return "MODE_IN_CALL";
+ case MODE_IN_COMMUNICATION:
+ return "MODE_IN_COMMUNICATION";
+ case MODE_NORMAL:
+ return "MODE_NORMAL";
+ case MODE_RINGTONE:
+ return "MODE_RINGTONE";
+ default:
+ return "MODE_INVALID";
+ }
+ }
+
+ private static String streamTypeToString(int stream) {
+ switch (stream) {
+ case AudioManager.STREAM_VOICE_CALL:
+ return "STREAM_VOICE_CALL";
+ case AudioManager.STREAM_MUSIC:
+ return "STREAM_MUSIC";
+ case AudioManager.STREAM_RING:
+ return "STREAM_RING";
+ case AudioManager.STREAM_ALARM:
+ return "STREAM_ALARM";
+ case AudioManager.STREAM_NOTIFICATION:
+ return "STREAM_NOTIFICATION";
+ case AudioManager.STREAM_SYSTEM:
+ return "STREAM_SYSTEM";
+ default:
+ return "STREAM_INVALID";
+ }
+ }
+
+ // Returns true if the device can record audio via a microphone.
+ private static boolean hasMicrophone(Context context) {
+ return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
+ }
+}