Diffstat (limited to 'third_party/libwebrtc/sdk/android/src')
176 files changed, 21652 insertions, 0 deletions
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java new file mode 100644 index 0000000000..ad40898e4c --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java @@ -0,0 +1,684 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo.CodecCapabilities; +import android.media.MediaFormat; +import android.os.SystemClock; +import android.view.Surface; +import androidx.annotation.Nullable; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.concurrent.BlockingDeque; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import org.webrtc.ThreadUtils.ThreadChecker; + +/** + * Android hardware video decoder. + */ +@SuppressWarnings("deprecation") +// Cannot support API 16 without using deprecated methods. +// TODO(sakal): Rename to MediaCodecVideoDecoder once the deprecated implementation is removed. +class AndroidVideoDecoder implements VideoDecoder, VideoSink { + private static final String TAG = "AndroidVideoDecoder"; + + // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API. + private static final String MEDIA_FORMAT_KEY_STRIDE = "stride"; + private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height"; + private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left"; + private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right"; + private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top"; + private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom"; + + // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after + // this timeout. + private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; + + // WebRTC queues input frames quickly at the beginning of a call. Wait for input buffers with a + // long timeout (500 ms) to prevent this from causing the codec to return an error. + private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000; + + // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds). + // If this timeout is exceeded, the output thread will unblock and check if the decoder is still + // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the + // MediaCodec. + private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; + + private final MediaCodecWrapperFactory mediaCodecWrapperFactory; + private final String codecName; + private final VideoCodecMimeType codecType; + + private static class FrameInfo { + final long decodeStartTimeMs; + final int rotation; + + FrameInfo(long decodeStartTimeMs, int rotation) { + this.decodeStartTimeMs = decodeStartTimeMs; + this.rotation = rotation; + } + } + + private final BlockingDeque<FrameInfo> frameInfos; + private int colorFormat; + + // Output thread runs a loop which polls MediaCodec for decoded output buffers.
It reformats + // those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder + // thread and is immutable while the codec is running. + @Nullable private Thread outputThread; + + // Checker that ensures work is run on the output thread. + private ThreadChecker outputThreadChecker; + + // Checker that ensures work is run on the decoder thread. The decoder thread is owned by the + // caller and must be used to call initDecode, decode, and release. + private ThreadChecker decoderThreadChecker; + + private volatile boolean running; + @Nullable private volatile Exception shutdownException; + + // Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decode thread + // or the output thread. Accesses should be protected with this lock. + private final Object dimensionLock = new Object(); + private int width; + private int height; + private int stride; + private int sliceHeight; + + // Whether the decoder has finished the first frame. The codec may not change output dimensions + // after delivering the first frame. Only accessed on the output thread while the decoder is + // running. + private boolean hasDecodedFirstFrame; + // Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed + // on the decoder thread. + private boolean keyFrameRequired; + + private final @Nullable EglBase.Context sharedContext; + // Valid and immutable while the decoder is running. + @Nullable private SurfaceTextureHelper surfaceTextureHelper; + @Nullable private Surface surface; + + private static class DecodedTextureMetadata { + final long presentationTimestampUs; + final Integer decodeTimeMs; + + DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) { + this.presentationTimestampUs = presentationTimestampUs; + this.decodeTimeMs = decodeTimeMs; + } + } + + // Metadata for the last frame rendered to the texture. + private final Object renderedTextureMetadataLock = new Object(); + @Nullable private DecodedTextureMetadata renderedTextureMetadata; + + // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid + // and immutable while the decoder is running. + @Nullable private Callback callback; + + // Valid and immutable while the decoder is running. 
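+ // (The codec is accessed through the MediaCodecWrapper interface, presumably so tests can + // substitute a fake codec via the MediaCodecWrapperFactory passed to the constructor.)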
+ @Nullable private MediaCodecWrapper codec; + + AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, + VideoCodecMimeType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) { + if (!isSupportedColorFormat(colorFormat)) { + throw new IllegalArgumentException("Unsupported color format: " + colorFormat); + } + Logging.d(TAG, + "ctor name: " + codecName + " type: " + codecType + " color format: " + colorFormat + + " context: " + sharedContext); + this.mediaCodecWrapperFactory = mediaCodecWrapperFactory; + this.codecName = codecName; + this.codecType = codecType; + this.colorFormat = colorFormat; + this.sharedContext = sharedContext; + this.frameInfos = new LinkedBlockingDeque<>(); + } + + @Override + public VideoCodecStatus initDecode(Settings settings, Callback callback) { + this.decoderThreadChecker = new ThreadChecker(); + + this.callback = callback; + if (sharedContext != null) { + surfaceTextureHelper = createSurfaceTextureHelper(); + surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); + surfaceTextureHelper.startListening(this); + } + return initDecodeInternal(settings.width, settings.height); + } + + // Internal variant is used when restarting the codec due to reconfiguration. + private VideoCodecStatus initDecodeInternal(int width, int height) { + decoderThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, + "initDecodeInternal name: " + codecName + " type: " + codecType + " width: " + width + + " height: " + height); + if (outputThread != null) { + Logging.e(TAG, "initDecodeInternal called while the codec is already running"); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + + // Note: it is not necessary to initialize dimensions under the lock, since the output thread + // is not running. + this.width = width; + this.height = height; + + stride = width; + sliceHeight = height; + hasDecodedFirstFrame = false; + keyFrameRequired = true; + + try { + codec = mediaCodecWrapperFactory.createByCodecName(codecName); + } catch (IOException | IllegalArgumentException | IllegalStateException e) { + Logging.e(TAG, "Cannot create media decoder " + codecName); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + try { + MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); + if (sharedContext == null) { + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); + } + codec.configure(format, surface, null, 0); + codec.start(); + } catch (IllegalStateException | IllegalArgumentException e) { + Logging.e(TAG, "initDecode failed", e); + release(); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + running = true; + outputThread = createOutputThread(); + outputThread.start(); + + Logging.d(TAG, "initDecodeInternal done"); + return VideoCodecStatus.OK; + } + + @Override + public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) { + decoderThreadChecker.checkIsOnValidThread(); + if (codec == null || callback == null) { + Logging.d(TAG, "decode uninitialized, codec: " + (codec != null) + ", callback: " + callback); + return VideoCodecStatus.UNINITIALIZED; + } + + if (frame.buffer == null) { + Logging.e(TAG, "decode() - no input data"); + return VideoCodecStatus.ERR_PARAMETER; + } + + int size = frame.buffer.remaining(); + if (size == 0) { + Logging.e(TAG, "decode() - input buffer empty"); + return VideoCodecStatus.ERR_PARAMETER; + } + + // Load dimensions from shared memory under the dimension lock.
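+ // (Copying into locals under the lock takes a consistent snapshot; the output thread may + // update width and height concurrently in reformat().)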
+ final int width; + final int height; + synchronized (dimensionLock) { + width = this.width; + height = this.height; + } + + // Check if the resolution changed and reset the codec if necessary. + if (frame.encodedWidth * frame.encodedHeight > 0 + && (frame.encodedWidth != width || frame.encodedHeight != height)) { + VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight); + if (status != VideoCodecStatus.OK) { + return status; + } + } + + if (keyFrameRequired) { + // Need to process a key frame first. + if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) { + Logging.e(TAG, "decode() - key frame required first"); + return VideoCodecStatus.NO_OUTPUT; + } + } + + int index; + try { + index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US); + } catch (IllegalStateException e) { + Logging.e(TAG, "dequeueInputBuffer failed", e); + return VideoCodecStatus.ERROR; + } + if (index < 0) { + // Decoder is falling behind. No input buffers available. + // The decoder can't simply drop frames; it might lose a key frame. + Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind"); + return VideoCodecStatus.ERROR; + } + + ByteBuffer buffer; + try { + buffer = codec.getInputBuffer(index); + } catch (IllegalStateException e) { + Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e); + return VideoCodecStatus.ERROR; + } + + if (buffer.capacity() < size) { + Logging.e(TAG, "decode() - HW buffer too small"); + return VideoCodecStatus.ERROR; + } + buffer.put(frame.buffer); + + frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation)); + try { + codec.queueInputBuffer(index, 0 /* offset */, size, + TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */); + } catch (IllegalStateException e) { + Logging.e(TAG, "queueInputBuffer failed", e); + frameInfos.pollLast(); + return VideoCodecStatus.ERROR; + } + if (keyFrameRequired) { + keyFrameRequired = false; + } + return VideoCodecStatus.OK; + } + + @Override + public String getImplementationName() { + return codecName; + } + + @Override + public VideoCodecStatus release() { + // TODO(sakal): This is not called on the correct thread but is still called synchronously. + // Re-enable the check once this is called on the correct thread. + // decoderThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, "release"); + VideoCodecStatus status = releaseInternal(); + if (surface != null) { + releaseSurface(); + surface = null; + surfaceTextureHelper.stopListening(); + surfaceTextureHelper.dispose(); + surfaceTextureHelper = null; + } + synchronized (renderedTextureMetadataLock) { + renderedTextureMetadata = null; + } + callback = null; + frameInfos.clear(); + return status; + } + + // Internal variant is used when restarting the codec due to reconfiguration. + private VideoCodecStatus releaseInternal() { + if (!running) { + Logging.d(TAG, "release: Decoder is not running."); + return VideoCodecStatus.OK; + } + try { + // The outputThread actually stops and releases the codec once running is false. + running = false; + if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { + // Log an exception to capture the stack trace and turn it into a TIMEOUT error. + Logging.e(TAG, "Media decoder release timeout", new RuntimeException()); + return VideoCodecStatus.TIMEOUT; + } + if (shutdownException != null) { + // Log the exception and turn it into an error. 
Wrap the exception in a new exception to + // capture both the output thread's stack trace and this thread's stack trace. + Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException)); + shutdownException = null; + return VideoCodecStatus.ERROR; + } + } finally { + codec = null; + outputThread = null; + } + return VideoCodecStatus.OK; + } + + private VideoCodecStatus reinitDecode(int newWidth, int newHeight) { + decoderThreadChecker.checkIsOnValidThread(); + VideoCodecStatus status = releaseInternal(); + if (status != VideoCodecStatus.OK) { + return status; + } + return initDecodeInternal(newWidth, newHeight); + } + + private Thread createOutputThread() { + return new Thread("AndroidVideoDecoder.outputThread") { + @Override + public void run() { + outputThreadChecker = new ThreadChecker(); + while (running) { + deliverDecodedFrame(); + } + releaseCodecOnOutputThread(); + } + }; + } + + // Visible for testing. + protected void deliverDecodedFrame() { + outputThreadChecker.checkIsOnValidThread(); + try { + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + // Block until an output buffer is available (up to 100 milliseconds). If the timeout is + // exceeded, deliverDecodedFrame() will be called again on the next iteration of the output + // thread's loop. Blocking here prevents the output thread from busy-waiting while the codec + // is idle. + int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US); + if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + reformat(codec.getOutputFormat()); + return; + } + + if (index < 0) { + Logging.v(TAG, "dequeueOutputBuffer returned " + index); + return; + } + + FrameInfo frameInfo = frameInfos.poll(); + Integer decodeTimeMs = null; + int rotation = 0; + if (frameInfo != null) { + decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs); + rotation = frameInfo.rotation; + } + + hasDecodedFirstFrame = true; + + if (surfaceTextureHelper != null) { + deliverTextureFrame(index, info, rotation, decodeTimeMs); + } else { + deliverByteFrame(index, info, rotation, decodeTimeMs); + } + + } catch (IllegalStateException e) { + Logging.e(TAG, "deliverDecodedFrame failed", e); + } + } + + private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info, + final int rotation, final Integer decodeTimeMs) { + // Load dimensions from shared memory under the dimension lock. + final int width; + final int height; + synchronized (dimensionLock) { + width = this.width; + height = this.height; + } + + synchronized (renderedTextureMetadataLock) { + if (renderedTextureMetadata != null) { + codec.releaseOutputBuffer(index, false); + return; // We are still waiting for texture for the previous frame, drop this one. 
+ } + surfaceTextureHelper.setTextureSize(width, height); + surfaceTextureHelper.setFrameRotation(rotation); + renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs); + codec.releaseOutputBuffer(index, /* render= */ true); + } + } + + @Override + public void onFrame(VideoFrame frame) { + final VideoFrame newFrame; + final Integer decodeTimeMs; + final long timestampNs; + synchronized (renderedTextureMetadataLock) { + if (renderedTextureMetadata == null) { + throw new IllegalStateException( + "Rendered texture metadata was null in onTextureFrameAvailable."); + } + timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000; + decodeTimeMs = renderedTextureMetadata.decodeTimeMs; + renderedTextureMetadata = null; + } + // Change timestamp of frame. + final VideoFrame frameWithModifiedTimeStamp = + new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs); + callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */); + } + + private void deliverByteFrame( + int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) { + // Load dimensions from shared memory under the dimension lock. + int width; + int height; + int stride; + int sliceHeight; + synchronized (dimensionLock) { + width = this.width; + height = this.height; + stride = this.stride; + sliceHeight = this.sliceHeight; + } + + // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2) + // bytes for each of the U and V channels. + if (info.size < width * height * 3 / 2) { + Logging.e(TAG, "Insufficient output buffer size: " + info.size); + return; + } + + if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) { + // Some codecs (Exynos) report an incorrect stride. Correct it here. + // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as + // 2 * size / (3 * height). + stride = info.size * 2 / (height * 3); + } + + ByteBuffer buffer = codec.getOutputBuffer(index); + buffer.position(info.offset); + buffer.limit(info.offset + info.size); + buffer = buffer.slice(); + + final VideoFrame.Buffer frameBuffer; + if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) { + frameBuffer = copyI420Buffer(buffer, stride, sliceHeight, width, height); + } else { + // All other supported color formats are NV12. + frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height); + } + codec.releaseOutputBuffer(index, /* render= */ false); + + long presentationTimeNs = info.presentationTimeUs * 1000; + VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs); + + // Note that qp is parsed on the C++ side. + callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */); + frame.release(); + } + + private VideoFrame.Buffer copyNV12ToI420Buffer( + ByteBuffer buffer, int stride, int sliceHeight, int width, int height) { + // toI420 copies the buffer. + return new NV12Buffer(width, height, stride, sliceHeight, buffer, null /* releaseCallback */) + .toI420(); + } + + private VideoFrame.Buffer copyI420Buffer( + ByteBuffer buffer, int stride, int sliceHeight, int width, int height) { + if (stride % 2 != 0) { + throw new AssertionError("Stride is not divisible by two: " + stride); + } + + // Note that the case with odd `sliceHeight` is handled in a special way. + // The chroma height contained in the payload is rounded down instead of + // up, making it one row less than what we expect in WebRTC. 
Therefore, we + // have to duplicate the last chroma rows for this case. Also, the offset + // between the Y plane and the U plane is unintuitive for this case. See + // http://bugs.webrtc.org/6651 for more info. + final int chromaWidth = (width + 1) / 2; + final int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2; + + final int uvStride = stride / 2; + + final int yPos = 0; + final int yEnd = yPos + stride * height; + final int uPos = yPos + stride * sliceHeight; + final int uEnd = uPos + uvStride * chromaHeight; + final int vPos = uPos + uvStride * sliceHeight / 2; + final int vEnd = vPos + uvStride * chromaHeight; + + VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height); + + buffer.limit(yEnd); + buffer.position(yPos); + copyPlane( + buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height); + + buffer.limit(uEnd); + buffer.position(uPos); + copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(), + chromaWidth, chromaHeight); + if (sliceHeight % 2 == 1) { + buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row. + + ByteBuffer dataU = frameBuffer.getDataU(); + dataU.position(frameBuffer.getStrideU() * chromaHeight); // Seek to beginning of last row. + dataU.put(buffer); // Copy the last row. + } + + buffer.limit(vEnd); + buffer.position(vPos); + copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(), + chromaWidth, chromaHeight); + if (sliceHeight % 2 == 1) { + buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row. + + ByteBuffer dataV = frameBuffer.getDataV(); + dataV.position(frameBuffer.getStrideV() * chromaHeight); // Seek to beginning of last row. + dataV.put(buffer); // Copy the last row. + } + + return frameBuffer; + } + + private void reformat(MediaFormat format) { + outputThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, "Decoder format changed: " + format.toString()); + final int newWidth; + final int newHeight; + if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT) + && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT) + && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM) + && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) { + newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT) + - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT); + newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM) + - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP); + } else { + newWidth = format.getInteger(MediaFormat.KEY_WIDTH); + newHeight = format.getInteger(MediaFormat.KEY_HEIGHT); + } + // Compare to existing width, height, and save values under the dimension lock. + synchronized (dimensionLock) { + if (newWidth != width || newHeight != height) { + if (hasDecodedFirstFrame) { + stopOnOutputThread(new RuntimeException("Unexpected size change. " + + "Configured " + width + "*" + height + ". " + + "New " + newWidth + "*" + newHeight)); + return; + } else if (newWidth <= 0 || newHeight <= 0) { + Logging.w(TAG, + "Unexpected format dimensions. Configured " + width + "*" + height + ". " + + "New " + newWidth + "*" + newHeight + ". Skip it"); + return; + } + width = newWidth; + height = newHeight; + } + } + + // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip + // color format updates. 
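+ // (In byte-buffer mode the supported layouts are planar I420 (COLOR_FormatYUV420Planar) and + // NV12-style semi-planar formats; see deliverByteFrame() and MediaCodecUtils.DECODER_COLOR_FORMATS.)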
+ if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { + colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); + Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); + if (!isSupportedColorFormat(colorFormat)) { + stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat)); + return; + } + } + + // Save stride and sliceHeight under the dimension lock. + synchronized (dimensionLock) { + if (format.containsKey(MEDIA_FORMAT_KEY_STRIDE)) { + stride = format.getInteger(MEDIA_FORMAT_KEY_STRIDE); + } + if (format.containsKey(MEDIA_FORMAT_KEY_SLICE_HEIGHT)) { + sliceHeight = format.getInteger(MEDIA_FORMAT_KEY_SLICE_HEIGHT); + } + Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight); + stride = Math.max(width, stride); + sliceHeight = Math.max(height, sliceHeight); + } + } + + private void releaseCodecOnOutputThread() { + outputThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, "Releasing MediaCodec on output thread"); + try { + codec.stop(); + } catch (Exception e) { + Logging.e(TAG, "Media decoder stop failed", e); + } + try { + codec.release(); + } catch (Exception e) { + Logging.e(TAG, "Media decoder release failed", e); + // Propagate exceptions caught during release back to the main thread. + shutdownException = e; + } + Logging.d(TAG, "Release on output thread done"); + } + + private void stopOnOutputThread(Exception e) { + outputThreadChecker.checkIsOnValidThread(); + running = false; + shutdownException = e; + } + + private boolean isSupportedColorFormat(int colorFormat) { + for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) { + if (supported == colorFormat) { + return true; + } + } + return false; + } + + // Visible for testing. + protected SurfaceTextureHelper createSurfaceTextureHelper() { + return SurfaceTextureHelper.create("decoder-texture-thread", sharedContext); + } + + // Visible for testing. + // TODO(sakal): Remove once Robolectric commit fa991a0 has been rolled to WebRTC. + protected void releaseSurface() { + surface.release(); + } + + // Visible for testing. + protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) { + return JavaI420Buffer.allocate(width, height); + } + + // Visible for testing. + protected void copyPlane( + ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) { + YuvHelper.copyPlane(src, srcStride, dst, dstStride, width, height); + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java new file mode 100644 index 0000000000..3b5f5d2931 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java @@ -0,0 +1,38 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. 
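+ * Other adjusters can override reportEncodedFrame() to react to the observed frame sizes; this + * base class simply echoes the configured targets back unchanged.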
*/ +class BaseBitrateAdjuster implements BitrateAdjuster { + protected int targetBitrateBps; + protected double targetFramerateFps; + + @Override + public void setTargets(int targetBitrateBps, double targetFramerateFps) { + this.targetBitrateBps = targetBitrateBps; + this.targetFramerateFps = targetFramerateFps; + } + + @Override + public void reportEncodedFrame(int size) { + // No op. + } + + @Override + public int getAdjustedBitrateBps() { + return targetBitrateBps; + } + + @Override + public double getAdjustedFramerateFps() { + return targetFramerateFps; + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java new file mode 100644 index 0000000000..bfa08bad89 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/BitrateAdjuster.java @@ -0,0 +1,31 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Object that adjusts the bitrate of a hardware codec. */ +interface BitrateAdjuster { + /** + * Sets the target bitrate in bits per second and framerate in frames per second. + */ + void setTargets(int targetBitrateBps, double targetFramerateFps); + + /** + * Should be used to report the size of an encoded frame to the bitrate adjuster. Use + * getAdjustedBitrateBps to get the updated bitrate after calling this method. + */ + void reportEncodedFrame(int size); + + /** Gets the current bitrate. */ + int getAdjustedBitrateBps(); + + /** Gets the current framerate. */ + double getAdjustedFramerateFps(); +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java new file mode 100644 index 0000000000..9b410ceaef --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNative.java @@ -0,0 +1,29 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @CalledByNative is used by the JNI generator to create the necessary JNI + * bindings and expose this method to native code. + */ +@Target({ElementType.CONSTRUCTOR, ElementType.METHOD}) +@Retention(RetentionPolicy.CLASS) +public @interface CalledByNative { + /* + * If present, tells which inner class the method belongs to. 
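+ * For example, annotating a method on an inner class Observer with @CalledByNative("Observer") + * tells the generator which enclosing class the binding belongs to.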
*/ + public String value() default ""; +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java new file mode 100644 index 0000000000..8a00a7fadb --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java @@ -0,0 +1,33 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @CalledByNativeUnchecked is used to generate JNI bindings that do not check for exceptions. + * It only makes sense to use this annotation on methods that declare a throws... spec. + * However, note that the exception received on the native side may be an 'unchecked' exception + * (RuntimeException) such as NullPointerException, so the native code should differentiate these + * cases. Usage of this should be very rare; where possible handle exceptions on the Java side and + * use a return value to indicate success / failure. + */ +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.CLASS) +public @interface CalledByNativeUnchecked { + /* + * If present, tells which inner class the method belongs to. + */ + public String value() default ""; +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java new file mode 100644 index 0000000000..a54f7201b2 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera1Session.java @@ -0,0 +1,340 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +package org.webrtc; + +import android.content.Context; +import android.hardware.Camera; +import android.os.Handler; +import android.os.SystemClock; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.webrtc.CameraEnumerationAndroid.CaptureFormat; + +@SuppressWarnings("deprecation") +class Camera1Session implements CameraSession { + private static final String TAG = "Camera1Session"; + private static final int NUMBER_OF_CAPTURE_BUFFERS = 3; + + private static final Histogram camera1StartTimeMsHistogram = + Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50); + private static final Histogram camera1StopTimeMsHistogram = + Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50); + private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration( + "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size()); + + private static enum SessionState { RUNNING, STOPPED } + + private final Handler cameraThreadHandler; + private final Events events; + private final boolean captureToTexture; + private final Context applicationContext; + private final SurfaceTextureHelper surfaceTextureHelper; + private final int cameraId; + private final Camera camera; + private final Camera.CameraInfo info; + private final CaptureFormat captureFormat; + // Used only for stats. Only used on the camera thread. + private final long constructionTimeNs; // Construction time of this class. + + private SessionState state; + private boolean firstFrameReported; + + // TODO(titovartem) make correct fix during webrtc:9175 + @SuppressWarnings("ByteBufferBackingArray") + public static void create(final CreateSessionCallback callback, final Events events, + final boolean captureToTexture, final Context applicationContext, + final SurfaceTextureHelper surfaceTextureHelper, final String cameraName, + final int width, final int height, final int framerate) { + final long constructionTimeNs = System.nanoTime(); + Logging.d(TAG, "Open camera " + cameraName); + events.onCameraOpening(); + + final int cameraId; + try { + cameraId = Camera1Enumerator.getCameraIndex(cameraName); + } catch (IllegalArgumentException e) { + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + final Camera camera; + try { + camera = Camera.open(cameraId); + } catch (RuntimeException e) { + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + if (camera == null) { + callback.onFailure( + FailureType.ERROR, "Camera.open returned null for camera id = " + cameraId); + return; + } + + try { + camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture()); + } catch (IOException | RuntimeException e) { + camera.release(); + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + final Camera.CameraInfo info = new Camera.CameraInfo(); + Camera.getCameraInfo(cameraId, info); + + final CaptureFormat captureFormat; + try { + final Camera.Parameters parameters = camera.getParameters(); + captureFormat = findClosestCaptureFormat(parameters, width, height, framerate); + final Size pictureSize = findClosestPictureSize(parameters, width, height); + updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture); + } catch (RuntimeException e) { + camera.release(); + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + if (!captureToTexture) { + final int frameSize = captureFormat.frameSize(); 
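+ // (frameSize() should be width * height * bitsPerPixel(imageFormat) / 8, i.e. + // width * height * 3 / 2 bytes for the NV21 preview format, so each callback buffer holds one + // full frame.)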
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) { + final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize); + camera.addCallbackBuffer(buffer.array()); + } + } + + // Calculate orientation manually and send it as CVO instead. + try { + camera.setDisplayOrientation(0 /* degrees */); + } catch (RuntimeException e) { + camera.release(); + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + callback.onDone(new Camera1Session(events, captureToTexture, applicationContext, + surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs)); + } + + private static void updateCameraParameters(Camera camera, Camera.Parameters parameters, + CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) { + final List<String> focusModes = parameters.getSupportedFocusModes(); + + parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max); + parameters.setPreviewSize(captureFormat.width, captureFormat.height); + parameters.setPictureSize(pictureSize.width, pictureSize.height); + if (!captureToTexture) { + parameters.setPreviewFormat(captureFormat.imageFormat); + } + + if (parameters.isVideoStabilizationSupported()) { + parameters.setVideoStabilization(true); + } + if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); + } + camera.setParameters(parameters); + } + + private static CaptureFormat findClosestCaptureFormat( + Camera.Parameters parameters, int width, int height, int framerate) { + // Find closest supported format for `width` x `height` @ `framerate`. + final List<CaptureFormat.FramerateRange> supportedFramerates = + Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange()); + Logging.d(TAG, "Available fps ranges: " + supportedFramerates); + + final CaptureFormat.FramerateRange fpsRange = + CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate); + + final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize( + Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height); + CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize); + + return new CaptureFormat(previewSize.width, previewSize.height, fpsRange); + } + + private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) { + return CameraEnumerationAndroid.getClosestSupportedSize( + Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height); + } + + private Camera1Session(Events events, boolean captureToTexture, Context applicationContext, + SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera, + Camera.CameraInfo info, CaptureFormat captureFormat, long constructionTimeNs) { + Logging.d(TAG, "Create new camera1 session on camera " + cameraId); + + this.cameraThreadHandler = new Handler(); + this.events = events; + this.captureToTexture = captureToTexture; + this.applicationContext = applicationContext; + this.surfaceTextureHelper = surfaceTextureHelper; + this.cameraId = cameraId; + this.camera = camera; + this.info = info; + this.captureFormat = captureFormat; + this.constructionTimeNs = constructionTimeNs; + + surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height); + + startCapturing(); + } + + @Override + public void stop() { + Logging.d(TAG, "Stop camera1 session on camera " + cameraId); + 
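+ // (stop() runs synchronously on the camera thread; the elapsed time is sampled below into the + // WebRTC.Android.Camera1.StopTimeMs histogram.)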
checkIsOnCameraThread(); + if (state != SessionState.STOPPED) { + final long stopStartTime = System.nanoTime(); + stopInternal(); + final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); + camera1StopTimeMsHistogram.addSample(stopTimeMs); + } + } + + private void startCapturing() { + Logging.d(TAG, "Start capturing"); + checkIsOnCameraThread(); + + state = SessionState.RUNNING; + + camera.setErrorCallback(new Camera.ErrorCallback() { + @Override + public void onError(int error, Camera camera) { + String errorMessage; + if (error == Camera.CAMERA_ERROR_SERVER_DIED) { + errorMessage = "Camera server died!"; + } else { + errorMessage = "Camera error: " + error; + } + Logging.e(TAG, errorMessage); + stopInternal(); + if (error == Camera.CAMERA_ERROR_EVICTED) { + events.onCameraDisconnected(Camera1Session.this); + } else { + events.onCameraError(Camera1Session.this, errorMessage); + } + } + }); + + if (captureToTexture) { + listenForTextureFrames(); + } else { + listenForBytebufferFrames(); + } + try { + camera.startPreview(); + } catch (RuntimeException e) { + stopInternal(); + events.onCameraError(this, e.getMessage()); + } + } + + private void stopInternal() { + Logging.d(TAG, "Stop internal"); + checkIsOnCameraThread(); + if (state == SessionState.STOPPED) { + Logging.d(TAG, "Camera is already stopped"); + return; + } + + state = SessionState.STOPPED; + surfaceTextureHelper.stopListening(); + // Note: stopPreview or other driver code might deadlock. Deadlock in + // Camera._stopPreview(Native Method) has been observed on + // Nexus 5 (hammerhead), OS version LMY48I. + camera.stopPreview(); + camera.release(); + events.onCameraClosed(this); + Logging.d(TAG, "Stop done"); + } + + private void listenForTextureFrames() { + surfaceTextureHelper.startListening((VideoFrame frame) -> { + checkIsOnCameraThread(); + + if (state != SessionState.RUNNING) { + Logging.d(TAG, "Texture frame captured but camera is no longer running."); + return; + } + + if (!firstFrameReported) { + final int startTimeMs = + (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); + camera1StartTimeMsHistogram.addSample(startTimeMs); + firstFrameReported = true; + } + + // Undo the mirror that the OS "helps" us with. + // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) + final VideoFrame modifiedFrame = + new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix( + (TextureBufferImpl) frame.getBuffer(), + /* mirror= */ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT, + /* rotation= */ 0), + /* rotation= */ getFrameOrientation(), frame.getTimestampNs()); + events.onFrameCaptured(Camera1Session.this, modifiedFrame); + modifiedFrame.release(); + }); + } + + private void listenForBytebufferFrames() { + camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() { + @Override + public void onPreviewFrame(final byte[] data, Camera callbackCamera) { + checkIsOnCameraThread(); + + if (callbackCamera != camera) { + Logging.e(TAG, "Callback from a different camera. 
This should never happen."); + return; + } + + if (state != SessionState.RUNNING) { + Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running."); + return; + } + + final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime()); + + if (!firstFrameReported) { + final int startTimeMs = + (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); + camera1StartTimeMsHistogram.addSample(startTimeMs); + firstFrameReported = true; + } + + VideoFrame.Buffer frameBuffer = new NV21Buffer( + data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> { + if (state == SessionState.RUNNING) { + camera.addCallbackBuffer(data); + } + })); + final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs); + events.onFrameCaptured(Camera1Session.this, frame); + frame.release(); + } + }); + } + + private int getFrameOrientation() { + int rotation = CameraSession.getDeviceOrientation(applicationContext); + if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { + rotation = 360 - rotation; + } + return (info.orientation + rotation) % 360; + } + + private void checkIsOnCameraThread() { + if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { + throw new IllegalStateException("Wrong thread"); + } + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java new file mode 100644 index 0000000000..d5ee80c73e --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Camera2Session.java @@ -0,0 +1,428 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.CaptureFailure; +import android.hardware.camera2.CaptureRequest; +import android.os.Handler; +import android.util.Range; +import android.view.Surface; +import androidx.annotation.Nullable; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.webrtc.CameraEnumerationAndroid.CaptureFormat; + +class Camera2Session implements CameraSession { + private static final String TAG = "Camera2Session"; + + private static final Histogram camera2StartTimeMsHistogram = + Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50); + private static final Histogram camera2StopTimeMsHistogram = + Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50); + private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration( + "WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size()); + + private static enum SessionState { RUNNING, STOPPED } + + private final Handler cameraThreadHandler; + private final CreateSessionCallback callback; + private final Events events; + private final Context applicationContext; + private final CameraManager cameraManager; + private final SurfaceTextureHelper surfaceTextureHelper; + private final String cameraId; + private final int width; + private final int height; + private final int framerate; + + // Initialized at start + private CameraCharacteristics cameraCharacteristics; + private int cameraOrientation; + private boolean isCameraFrontFacing; + private int fpsUnitFactor; + private CaptureFormat captureFormat; + + // Initialized when camera opens + @Nullable private CameraDevice cameraDevice; + @Nullable private Surface surface; + + // Initialized when capture session is created + @Nullable private CameraCaptureSession captureSession; + + // State + private SessionState state = SessionState.RUNNING; + private boolean firstFrameReported; + + // Used only for stats. Only used on the camera thread. + private final long constructionTimeNs; // Construction time of this class. 
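+ // Rough lifecycle: openCamera() -> CameraStateCallback.onOpened() -> createCaptureSession() + // -> CaptureSessionCallback.onConfigured() -> setRepeatingRequest(), after which frames are + // delivered through the SurfaceTextureHelper listener.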
+ + private class CameraStateCallback extends CameraDevice.StateCallback { + private String getErrorDescription(int errorCode) { + switch (errorCode) { + case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE: + return "Camera device has encountered a fatal error."; + case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED: + return "Camera device could not be opened due to a device policy."; + case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE: + return "Camera device is in use already."; + case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE: + return "Camera service has encountered a fatal error."; + case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE: + return "Camera device could not be opened because" + + " there are too many other open camera devices."; + default: + return "Unknown camera error: " + errorCode; + } + } + + @Override + public void onDisconnected(CameraDevice camera) { + checkIsOnCameraThread(); + final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED); + state = SessionState.STOPPED; + stopInternal(); + if (startFailure) { + callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted."); + } else { + events.onCameraDisconnected(Camera2Session.this); + } + } + + @Override + public void onError(CameraDevice camera, int errorCode) { + checkIsOnCameraThread(); + reportError(getErrorDescription(errorCode)); + } + + @Override + public void onOpened(CameraDevice camera) { + checkIsOnCameraThread(); + + Logging.d(TAG, "Camera opened."); + cameraDevice = camera; + + surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height); + surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); + try { + camera.createCaptureSession( + Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler); + } catch (CameraAccessException e) { + reportError("Failed to create capture session. " + e); + return; + } + } + + @Override + public void onClosed(CameraDevice camera) { + checkIsOnCameraThread(); + + Logging.d(TAG, "Camera device closed."); + events.onCameraClosed(Camera2Session.this); + } + } + + private class CaptureSessionCallback extends CameraCaptureSession.StateCallback { + @Override + public void onConfigureFailed(CameraCaptureSession session) { + checkIsOnCameraThread(); + session.close(); + reportError("Failed to configure capture session."); + } + + @Override + public void onConfigured(CameraCaptureSession session) { + checkIsOnCameraThread(); + Logging.d(TAG, "Camera capture session configured."); + captureSession = session; + try { + /* + * The viable options for video capture requests are: + * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality + * post-processing. + * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording + * quality. + */ + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + // Set auto exposure fps range. 
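+ // (CaptureFormat framerates are normalized by fpsUnitFactor because some HALs report ranges + // in units of 1/1000 fps; dividing by the same factor here converts back to the units this + // device reported.)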
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + chooseStabilizationMode(captureRequestBuilder); + chooseFocusMode(captureRequestBuilder); + + captureRequestBuilder.addTarget(surface); + session.setRepeatingRequest( + captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler); + } catch (CameraAccessException e) { + reportError("Failed to start capture request. " + e); + return; + } + + surfaceTextureHelper.startListening((VideoFrame frame) -> { + checkIsOnCameraThread(); + + if (state != SessionState.RUNNING) { + Logging.d(TAG, "Texture frame captured but camera is no longer running."); + return; + } + + if (!firstFrameReported) { + firstFrameReported = true; + final int startTimeMs = + (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); + camera2StartTimeMsHistogram.addSample(startTimeMs); + } + + // Undo the mirror that the OS "helps" us with. + // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) + // Also, undo camera orientation, we report it as rotation instead. + final VideoFrame modifiedFrame = + new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix( + (TextureBufferImpl) frame.getBuffer(), + /* mirror= */ isCameraFrontFacing, + /* rotation= */ -cameraOrientation), + /* rotation= */ getFrameOrientation(), frame.getTimestampNs()); + events.onFrameCaptured(Camera2Session.this, modifiedFrame); + modifiedFrame.release(); + }); + Logging.d(TAG, "Camera device successfully started."); + callback.onDone(Camera2Session.this); + } + + // Prefers optical stabilization over software stabilization if available. Only enables one of + // the stabilization modes at a time because having both enabled can cause strange results. + private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) { + final int[] availableOpticalStabilization = cameraCharacteristics.get( + CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION); + if (availableOpticalStabilization != null) { + for (int mode : availableOpticalStabilization) { + if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) { + captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, + CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, + CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF); + Logging.d(TAG, "Using optical stabilization."); + return; + } + } + } + // If no optical mode is available, try software. 
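+ // (The optical branch above forces video stabilization OFF, and this branch forces optical + // stabilization OFF, so at most one mode is ever active.)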
+ final int[] availableVideoStabilization = cameraCharacteristics.get( + CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); + if (availableVideoStabilization != null) { + for (int mode : availableVideoStabilization) { + if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) { + captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, + CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON); + captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, + CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF); + Logging.d(TAG, "Using video stabilization."); + return; + } + } + } + Logging.d(TAG, "Stabilization not available."); + } + + private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) { + final int[] availableFocusModes = + cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES); + for (int mode : availableFocusModes) { + if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) { + captureRequestBuilder.set( + CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO); + Logging.d(TAG, "Using continuous video auto-focus."); + return; + } + } + Logging.d(TAG, "Auto-focus is not available."); + } + } + + private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback { + @Override + public void onCaptureFailed( + CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { + Logging.d(TAG, "Capture failed: " + failure); + } + } + + public static void create(CreateSessionCallback callback, Events events, + Context applicationContext, CameraManager cameraManager, + SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height, + int framerate) { + new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper, + cameraId, width, height, framerate); + } + + private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext, + CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId, + int width, int height, int framerate) { + Logging.d(TAG, "Create new camera2 session on camera " + cameraId); + + constructionTimeNs = System.nanoTime(); + + this.cameraThreadHandler = new Handler(); + this.callback = callback; + this.events = events; + this.applicationContext = applicationContext; + this.cameraManager = cameraManager; + this.surfaceTextureHelper = surfaceTextureHelper; + this.cameraId = cameraId; + this.width = width; + this.height = height; + this.framerate = framerate; + + start(); + } + + private void start() { + checkIsOnCameraThread(); + Logging.d(TAG, "start"); + + try { + cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId); + } catch (CameraAccessException | IllegalArgumentException e) { + reportError("getCameraCharacteristics(): " + e.getMessage()); + return; + } + cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); + isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) + == CameraMetadata.LENS_FACING_FRONT; + + findCaptureFormat(); + + if (captureFormat == null) { + // findCaptureFormat reports an error already. 
+ return; + } + + openCamera(); + } + + private void findCaptureFormat() { + checkIsOnCameraThread(); + + Range<Integer>[] fpsRanges = + cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES); + fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges); + List<CaptureFormat.FramerateRange> framerateRanges = + Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor); + List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics); + Logging.d(TAG, "Available preview sizes: " + sizes); + Logging.d(TAG, "Available fps ranges: " + framerateRanges); + + if (framerateRanges.isEmpty() || sizes.isEmpty()) { + reportError("No supported capture formats."); + return; + } + + final CaptureFormat.FramerateRange bestFpsRange = + CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate); + + final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height); + CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize); + + captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange); + Logging.d(TAG, "Using capture format: " + captureFormat); + } + + @SuppressLint("MissingPermission") + private void openCamera() { + checkIsOnCameraThread(); + + Logging.d(TAG, "Opening camera " + cameraId); + events.onCameraOpening(); + + try { + cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler); + } catch (CameraAccessException | IllegalArgumentException | SecurityException e) { + reportError("Failed to open camera: " + e); + return; + } + } + + @Override + public void stop() { + Logging.d(TAG, "Stop camera2 session on camera " + cameraId); + checkIsOnCameraThread(); + if (state != SessionState.STOPPED) { + final long stopStartTime = System.nanoTime(); + state = SessionState.STOPPED; + stopInternal(); + final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); + camera2StopTimeMsHistogram.addSample(stopTimeMs); + } + } + + private void stopInternal() { + Logging.d(TAG, "Stop internal"); + checkIsOnCameraThread(); + + surfaceTextureHelper.stopListening(); + + if (captureSession != null) { + captureSession.close(); + captureSession = null; + } + if (surface != null) { + surface.release(); + surface = null; + } + if (cameraDevice != null) { + cameraDevice.close(); + cameraDevice = null; + } + + Logging.d(TAG, "Stop done"); + } + + private void reportError(String error) { + checkIsOnCameraThread(); + Logging.e(TAG, "Error: " + error); + + final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED); + state = SessionState.STOPPED; + stopInternal(); + if (startFailure) { + callback.onFailure(FailureType.ERROR, error); + } else { + events.onCameraError(this, error); + } + } + + private int getFrameOrientation() { + int rotation = CameraSession.getDeviceOrientation(applicationContext); + if (!isCameraFrontFacing) { + rotation = 360 - rotation; + } + return (cameraOrientation + rotation) % 360; + } + + private void checkIsOnCameraThread() { + if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { + throw new IllegalStateException("Wrong thread"); + } + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java new file mode 100644 index 0000000000..1922a529e2 --- /dev/null +++ 
b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java @@ -0,0 +1,458 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; +import android.os.Handler; +import android.os.Looper; +import androidx.annotation.Nullable; +import java.util.Arrays; +import java.util.List; + +@SuppressWarnings("deprecation") +abstract class CameraCapturer implements CameraVideoCapturer { + enum SwitchState { + IDLE, // No switch requested. + PENDING, // Waiting for previous capture session to open. + IN_PROGRESS, // Waiting for new switched capture session to start. + } + + private static final String TAG = "CameraCapturer"; + private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3; + private final static int OPEN_CAMERA_DELAY_MS = 500; + private final static int OPEN_CAMERA_TIMEOUT = 10000; + + private final CameraEnumerator cameraEnumerator; + private final CameraEventsHandler eventsHandler; + private final Handler uiThreadHandler; + + @Nullable + private final CameraSession.CreateSessionCallback createSessionCallback = + new CameraSession.CreateSessionCallback() { + @Override + public void onDone(CameraSession session) { + checkIsOnCameraThread(); + Logging.d(TAG, "Create session done. Switch state: " + switchState); + uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable); + synchronized (stateLock) { + capturerObserver.onCapturerStarted(true /* success */); + sessionOpening = false; + currentSession = session; + cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler); + firstFrameObserved = false; + stateLock.notifyAll(); + + if (switchState == SwitchState.IN_PROGRESS) { + switchState = SwitchState.IDLE; + if (switchEventsHandler != null) { + switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName)); + switchEventsHandler = null; + } + } else if (switchState == SwitchState.PENDING) { + String selectedCameraName = pendingCameraName; + pendingCameraName = null; + switchState = SwitchState.IDLE; + switchCameraInternal(switchEventsHandler, selectedCameraName); + } + } + } + + @Override + public void onFailure(CameraSession.FailureType failureType, String error) { + checkIsOnCameraThread(); + uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable); + synchronized (stateLock) { + capturerObserver.onCapturerStarted(false /* success */); + openAttemptsRemaining--; + + if (openAttemptsRemaining <= 0) { + Logging.w(TAG, "Opening camera failed, passing: " + error); + sessionOpening = false; + stateLock.notifyAll(); + + if (switchState != SwitchState.IDLE) { + if (switchEventsHandler != null) { + switchEventsHandler.onCameraSwitchError(error); + switchEventsHandler = null; + } + switchState = SwitchState.IDLE; + } + + if (failureType == CameraSession.FailureType.DISCONNECTED) { + eventsHandler.onCameraDisconnected(); + } else { + eventsHandler.onCameraError(error); + } + } else { + Logging.w(TAG, "Opening camera failed, retry: " + error); + createSessionInternal(OPEN_CAMERA_DELAY_MS); + } + } + } + }; + + @Nullable + private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() { + @Override + public void 
onCameraOpening() { + checkIsOnCameraThread(); + synchronized (stateLock) { + if (currentSession != null) { + Logging.w(TAG, "onCameraOpening while session was open."); + return; + } + eventsHandler.onCameraOpening(cameraName); + } + } + + @Override + public void onCameraError(CameraSession session, String error) { + checkIsOnCameraThread(); + synchronized (stateLock) { + if (session != currentSession) { + Logging.w(TAG, "onCameraError from another session: " + error); + return; + } + eventsHandler.onCameraError(error); + stopCapture(); + } + } + + @Override + public void onCameraDisconnected(CameraSession session) { + checkIsOnCameraThread(); + synchronized (stateLock) { + if (session != currentSession) { + Logging.w(TAG, "onCameraDisconnected from another session."); + return; + } + eventsHandler.onCameraDisconnected(); + stopCapture(); + } + } + + @Override + public void onCameraClosed(CameraSession session) { + checkIsOnCameraThread(); + synchronized (stateLock) { + if (session != currentSession && currentSession != null) { + Logging.d(TAG, "onCameraClosed from another session."); + return; + } + eventsHandler.onCameraClosed(); + } + } + + @Override + public void onFrameCaptured(CameraSession session, VideoFrame frame) { + checkIsOnCameraThread(); + synchronized (stateLock) { + if (session != currentSession) { + Logging.w(TAG, "onFrameCaptured from another session."); + return; + } + if (!firstFrameObserved) { + eventsHandler.onFirstFrameAvailable(); + firstFrameObserved = true; + } + cameraStatistics.addFrame(); + capturerObserver.onFrameCaptured(frame); + } + } + }; + + private final Runnable openCameraTimeoutRunnable = new Runnable() { + @Override + public void run() { + eventsHandler.onCameraError("Camera failed to start within timeout."); + } + }; + + // Initialized on initialize + // ------------------------- + private Handler cameraThreadHandler; + private Context applicationContext; + private org.webrtc.CapturerObserver capturerObserver; + private SurfaceTextureHelper surfaceHelper; + + private final Object stateLock = new Object(); + private boolean sessionOpening; /* guarded by stateLock */ + @Nullable private CameraSession currentSession; /* guarded by stateLock */ + private String cameraName; /* guarded by stateLock */ + private String pendingCameraName; /* guarded by stateLock */ + private int width; /* guarded by stateLock */ + private int height; /* guarded by stateLock */ + private int framerate; /* guarded by stateLock */ + private int openAttemptsRemaining; /* guarded by stateLock */ + private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */ + @Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */ + // Valid from onDone call until stopCapture, otherwise null. 
+ @Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */ + private boolean firstFrameObserved; /* guarded by stateLock */ + + public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler, + CameraEnumerator cameraEnumerator) { + if (eventsHandler == null) { + eventsHandler = new CameraEventsHandler() { + @Override + public void onCameraError(String errorDescription) {} + @Override + public void onCameraDisconnected() {} + @Override + public void onCameraFreezed(String errorDescription) {} + @Override + public void onCameraOpening(String cameraName) {} + @Override + public void onFirstFrameAvailable() {} + @Override + public void onCameraClosed() {} + }; + } + + this.eventsHandler = eventsHandler; + this.cameraEnumerator = cameraEnumerator; + this.cameraName = cameraName; + List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames()); + uiThreadHandler = new Handler(Looper.getMainLooper()); + + if (deviceNames.isEmpty()) { + throw new RuntimeException("No cameras attached."); + } + if (!deviceNames.contains(this.cameraName)) { + throw new IllegalArgumentException( + "Camera name " + this.cameraName + " does not match any known camera device."); + } + } + + @Override + public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, + org.webrtc.CapturerObserver capturerObserver) { + this.applicationContext = applicationContext; + this.capturerObserver = capturerObserver; + this.surfaceHelper = surfaceTextureHelper; + this.cameraThreadHandler = surfaceTextureHelper.getHandler(); + } + + @Override + public void startCapture(int width, int height, int framerate) { + Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate); + if (applicationContext == null) { + throw new RuntimeException("CameraCapturer must be initialized before calling startCapture."); + } + + synchronized (stateLock) { + if (sessionOpening || currentSession != null) { + Logging.w(TAG, "Session already open"); + return; + } + + this.width = width; + this.height = height; + this.framerate = framerate; + + sessionOpening = true; + openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS; + createSessionInternal(0); + } + } + + private void createSessionInternal(int delayMs) { + uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT); + cameraThreadHandler.postDelayed(new Runnable() { + @Override + public void run() { + createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext, + surfaceHelper, cameraName, width, height, framerate); + } + }, delayMs); + } + + @Override + public void stopCapture() { + Logging.d(TAG, "Stop capture"); + + synchronized (stateLock) { + while (sessionOpening) { + Logging.d(TAG, "Stop capture: Waiting for session to open"); + try { + stateLock.wait(); + } catch (InterruptedException e) { + Logging.w(TAG, "Stop capture interrupted while waiting for the session to open."); + Thread.currentThread().interrupt(); + return; + } + } + + if (currentSession != null) { + Logging.d(TAG, "Stop capture: Nulling session"); + cameraStatistics.release(); + cameraStatistics = null; + final CameraSession oldSession = currentSession; + cameraThreadHandler.post(new Runnable() { + @Override + public void run() { + oldSession.stop(); + } + }); + currentSession = null; + capturerObserver.onCapturerStopped(); + } else { + Logging.d(TAG, "Stop capture: No session open"); + } + } + + Logging.d(TAG, "Stop capture done"); + } + + @Override + public void 
changeCaptureFormat(int width, int height, int framerate) { + Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate); + synchronized (stateLock) { + stopCapture(); + startCapture(width, height, framerate); + } + } + + @Override + public void dispose() { + Logging.d(TAG, "dispose"); + stopCapture(); + } + + @Override + public void switchCamera(final CameraSwitchHandler switchEventsHandler) { + Logging.d(TAG, "switchCamera"); + cameraThreadHandler.post(new Runnable() { + @Override + public void run() { + List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames()); + + if (deviceNames.size() < 2) { + reportCameraSwitchError("No camera to switch to.", switchEventsHandler); + return; + } + + int cameraNameIndex = deviceNames.indexOf(cameraName); + String cameraName = deviceNames.get((cameraNameIndex + 1) % deviceNames.size()); + switchCameraInternal(switchEventsHandler, cameraName); + } + }); + } + + @Override + public void switchCamera(final CameraSwitchHandler switchEventsHandler, final String cameraName) { + Logging.d(TAG, "switchCamera"); + cameraThreadHandler.post(new Runnable() { + @Override + public void run() { + switchCameraInternal(switchEventsHandler, cameraName); + } + }); + } + + @Override + public boolean isScreencast() { + return false; + } + + public void printStackTrace() { + Thread cameraThread = null; + if (cameraThreadHandler != null) { + cameraThread = cameraThreadHandler.getLooper().getThread(); + } + if (cameraThread != null) { + StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace(); + if (cameraStackTrace.length > 0) { + Logging.d(TAG, "CameraCapturer stack trace:"); + for (StackTraceElement traceElem : cameraStackTrace) { + Logging.d(TAG, traceElem.toString()); + } + } + } + } + + private void reportCameraSwitchError( + String error, @Nullable CameraSwitchHandler switchEventsHandler) { + Logging.e(TAG, error); + if (switchEventsHandler != null) { + switchEventsHandler.onCameraSwitchError(error); + } + } + + private void switchCameraInternal( + @Nullable final CameraSwitchHandler switchEventsHandler, final String selectedCameraName) { + Logging.d(TAG, "switchCamera internal"); + List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames()); + + if (!deviceNames.contains(selectedCameraName)) { + reportCameraSwitchError("Attempted to switch to unknown camera device " + selectedCameraName, + switchEventsHandler); + return; + } + + synchronized (stateLock) { + if (switchState != SwitchState.IDLE) { + reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler); + return; + } + if (!sessionOpening && currentSession == null) { + reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler); + return; + } + + this.switchEventsHandler = switchEventsHandler; + if (sessionOpening) { + switchState = SwitchState.PENDING; + pendingCameraName = selectedCameraName; + return; + } else { + switchState = SwitchState.IN_PROGRESS; + } + + Logging.d(TAG, "switchCamera: Stopping session"); + cameraStatistics.release(); + cameraStatistics = null; + final CameraSession oldSession = currentSession; + cameraThreadHandler.post(new Runnable() { + @Override + public void run() { + oldSession.stop(); + } + }); + currentSession = null; + + cameraName = selectedCameraName; + + sessionOpening = true; + openAttemptsRemaining = 1; + createSessionInternal(0); + } + Logging.d(TAG, "switchCamera done"); + } + + private void checkIsOnCameraThread() { + if (Thread.currentThread() != 
cameraThreadHandler.getLooper().getThread()) {
+ Logging.e(TAG, "Check is on camera thread failed.");
+ throw new RuntimeException("Not on camera thread.");
+ }
+ }
+
+ protected String getCameraName() {
+ synchronized (stateLock) {
+ return cameraName;
+ }
+ }
+
+ abstract protected void createCameraSession(
+ CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
+ Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
+ int width, int height, int framerate);
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java
new file mode 100644
index 0000000000..8d137854d8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/CameraSession.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Matrix;
+import android.view.Surface;
+import android.view.WindowManager;
+
+interface CameraSession {
+ enum FailureType { ERROR, DISCONNECTED }
+
+ // Callbacks are fired on the camera thread.
+ interface CreateSessionCallback {
+ void onDone(CameraSession session);
+ void onFailure(FailureType failureType, String error);
+ }
+
+ // Events are fired on the camera thread.
+ interface Events {
+ void onCameraOpening();
+ void onCameraError(CameraSession session, String error);
+ void onCameraDisconnected(CameraSession session);
+ void onCameraClosed(CameraSession session);
+ void onFrameCaptured(CameraSession session, VideoFrame frame);
+ }
+
+ /**
+ * Stops the capture. Waits until no more calls to the capture observer will be made.
+ */
+ void stop();
+
+ static int getDeviceOrientation(Context context) {
+ final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+ switch (wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ return 90;
+ case Surface.ROTATION_180:
+ return 180;
+ case Surface.ROTATION_270:
+ return 270;
+ case Surface.ROTATION_0:
+ default:
+ return 0;
+ }
+ }
+
+ static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
+ TextureBufferImpl buffer, boolean mirror, int rotation) {
+ final Matrix transformMatrix = new Matrix();
+ // Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
+ transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
+ if (mirror) {
+ transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
+ }
+ transformMatrix.preRotate(rotation);
+ transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
+
+ // The width and height are not affected by rotation since Camera2Session has set them to the
+ // value they should be after undoing the rotation.
+ return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
+ }
+}
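The transform above composes a mirror and a rotation about the texture center (0.5, 0.5). A standalone sketch of the same composition can be useful for sanity-checking the mapping; the class and method names below are illustrative, not part of the diff:

import android.graphics.Matrix;

final class TransformMatrixSketch {
  // Builds the same matrix as createTextureBufferWithModifiedTransformMatrix()
  // and maps the texture corner (0, 0) through it.
  static float[] mapOrigin(boolean mirror, int rotation) {
    final Matrix m = new Matrix();
    m.preTranslate(0.5f, 0.5f);   // move the pivot to the texture center
    if (mirror) {
      m.preScale(-1f, 1f);        // horizontal mirror about the center
    }
    m.preRotate(rotation);        // rotation about the center, in degrees
    m.preTranslate(-0.5f, -0.5f); // undo the pivot shift
    final float[] p = {0f, 0f};
    m.mapPoints(p); // e.g. rotation=180, mirror=false maps (0, 0) to (1, 1)
    return p;
  }
}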
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
new file mode 100644
index 0000000000..96a15bbfe1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
+ * bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
+ * target bitrate by unacceptable margins.
+ */
+class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
+ // Change the bitrate at most once every three seconds.
+ private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
+ // Maximum bitrate adjustment scale - no more than 4 times.
+ private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
+ // Number of adjustment steps to reach maximum scale.
+ private static final int BITRATE_ADJUSTMENT_STEPS = 20;
+
+ private static final double BITS_PER_BYTE = 8.0;
+
+ // How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
+ private double deviationBytes;
+ private double timeSinceLastAdjustmentMs;
+ private int bitrateAdjustmentScaleExp;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
+ // Rescale the accumulator level if the accumulator max decreases.
+ deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
+ }
+ super.setTargets(targetBitrateBps, targetFramerateFps);
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ if (targetFramerateFps == 0) {
+ return;
+ }
+
+ // Accumulate the difference between actual and expected frame sizes.
+ double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFramerateFps;
+ deviationBytes += (size - expectedBytesPerFrame);
+ timeSinceLastAdjustmentMs += 1000.0 / targetFramerateFps;
+
+ // Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
+ // shortfall of the target.
+ double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
+
+ // Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
+ // bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
+ double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
+ deviationBytes = Math.min(deviationBytes, deviationCap);
+ deviationBytes = Math.max(deviationBytes, -deviationCap);
+
+ // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
+ // from the target value.
+ if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
+ return;
+ }
+
+ if (deviationBytes > deviationThresholdBytes) {
+ // Encoder generates too high bitrate - need to reduce the scale.
+ int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
+ // Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
+ // This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = deviationThresholdBytes;
+ } else if (deviationBytes < -deviationThresholdBytes) {
+ // Encoder generates too low bitrate - need to increase the scale.
+ int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
+ // Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
+ // This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = -deviationThresholdBytes;
+ }
+ timeSinceLastAdjustmentMs = 0;
+ }
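Worked numbers make the accounting above easier to follow. The target values below are assumptions for illustration, not taken from the code:

// Assumed targets: 1 Mbps at 30 fps (illustrative only).
double expectedBytesPerFrame = (1_000_000 / 8.0) / 30; // ~4167 bytes per frame
double deviationThresholdBytes = 1_000_000 / 8.0;      // 125000 bytes = 1 s of data
double deviationCap = 3.0 * deviationThresholdBytes;   // 375000 bytes, at most 3 "steps"
// An encoder that emits 5000-byte frames overshoots by ~833 bytes per frame, so the
// accumulated deviation crosses the 125000-byte threshold after ~150 frames (~5 s).
// The next adjustment check (at most every 3 s) then lowers bitrateAdjustmentScaleExp.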
+
+ private double getBitrateAdjustmentScale() {
+ return Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
+ (double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS);
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return (int) (targetBitrateBps * getBitrateAdjustmentScale());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java
new file mode 100644
index 0000000000..254a17c750
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase10Impl.java
@@ -0,0 +1,365 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.GLException;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import androidx.annotation.Nullable;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+class EglBase10Impl implements EglBase10 {
+ private static final String TAG = "EglBase10Impl";
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ @Nullable private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGL wrapper for an actual EGLContext.
+ private static class Context implements EglBase10.Context { + private final EGL10 egl; + private final EGLContext eglContext; + private final EGLConfig eglContextConfig; + + @Override + public EGLContext getRawContext() { + return eglContext; + } + + @Override + public long getNativeEglContext() { + EGLContext previousContext = egl.eglGetCurrentContext(); + EGLDisplay currentDisplay = egl.eglGetCurrentDisplay(); + EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL10.EGL_DRAW); + EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL10.EGL_READ); + EGLSurface tempEglSurface = null; + + if (currentDisplay == EGL10.EGL_NO_DISPLAY) { + currentDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + } + + try { + if (previousContext != eglContext) { + int[] surfaceAttribs = {EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE}; + tempEglSurface = + egl.eglCreatePbufferSurface(currentDisplay, eglContextConfig, surfaceAttribs); + if (!egl.eglMakeCurrent(currentDisplay, tempEglSurface, tempEglSurface, eglContext)) { + throw new GLException(egl.eglGetError(), + "Failed to make temporary EGL surface active: " + egl.eglGetError()); + } + } + + return nativeGetCurrentNativeEGLContext(); + } finally { + if (tempEglSurface != null) { + egl.eglMakeCurrent( + currentDisplay, previousDrawSurface, previousReadSurface, previousContext); + egl.eglDestroySurface(currentDisplay, tempEglSurface); + } + } + } + + public Context(EGL10 egl, EGLContext eglContext, EGLConfig eglContextConfig) { + this.egl = egl; + this.eglContext = eglContext; + this.eglContextConfig = eglContextConfig; + } + } + + // Create a new context with the specified config type, sharing data with sharedContext. + public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) { + this.egl = (EGL10) EGLContext.getEGL(); + eglDisplay = getEglDisplay(); + eglConfig = getEglConfig(egl, eglDisplay, configAttributes); + final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes); + Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion); + eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion); + } + + @Override + public void createSurface(Surface surface) { + /** + * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface + * couldn't actually take a Surface object until API 17. Older versions fortunately just call + * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant. 
+ */ + class FakeSurfaceHolder implements SurfaceHolder { + private final Surface surface; + + FakeSurfaceHolder(Surface surface) { + this.surface = surface; + } + + @Override + public void addCallback(Callback callback) {} + + @Override + public void removeCallback(Callback callback) {} + + @Override + public boolean isCreating() { + return false; + } + + @Deprecated + @Override + public void setType(int i) {} + + @Override + public void setFixedSize(int i, int i2) {} + + @Override + public void setSizeFromLayout() {} + + @Override + public void setFormat(int i) {} + + @Override + public void setKeepScreenOn(boolean b) {} + + @Nullable + @Override + public Canvas lockCanvas() { + return null; + } + + @Nullable + @Override + public Canvas lockCanvas(Rect rect) { + return null; + } + + @Override + public void unlockCanvasAndPost(Canvas canvas) {} + + @Nullable + @Override + public Rect getSurfaceFrame() { + return null; + } + + @Override + public Surface getSurface() { + return surface; + } + } + + createSurfaceInternal(new FakeSurfaceHolder(surface)); + } + + // Create EGLSurface from the Android SurfaceTexture. + @Override + public void createSurface(SurfaceTexture surfaceTexture) { + createSurfaceInternal(surfaceTexture); + } + + // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture. + private void createSurfaceInternal(Object nativeWindow) { + if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) { + throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture"); + } + checkIsNotReleased(); + if (eglSurface != EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL10.EGL_NONE}; + eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new GLException(egl.eglGetError(), + "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + + // Create dummy 1x1 pixel buffer surface so the context can be made current. 
+ @Override + public void createDummyPbufferSurface() { + createPbufferSurface(1, 1); + } + + @Override + public void createPbufferSurface(int width, int height) { + checkIsNotReleased(); + if (eglSurface != EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE}; + eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new GLException(egl.eglGetError(), + "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x" + + Integer.toHexString(egl.eglGetError())); + } + } + + @Override + public org.webrtc.EglBase.Context getEglBaseContext() { + return new Context(egl, eglContext, eglConfig); + } + + @Override + public boolean hasSurface() { + return eglSurface != EGL10.EGL_NO_SURFACE; + } + + @Override + public int surfaceWidth() { + final int widthArray[] = new int[1]; + egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray); + return widthArray[0]; + } + + @Override + public int surfaceHeight() { + final int heightArray[] = new int[1]; + egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray); + return heightArray[0]; + } + + @Override + public void releaseSurface() { + if (eglSurface != EGL10.EGL_NO_SURFACE) { + egl.eglDestroySurface(eglDisplay, eglSurface); + eglSurface = EGL10.EGL_NO_SURFACE; + } + } + + private void checkIsNotReleased() { + if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT + || eglConfig == null) { + throw new RuntimeException("This object has been released"); + } + } + + @Override + public void release() { + checkIsNotReleased(); + releaseSurface(); + detachCurrent(); + egl.eglDestroyContext(eglDisplay, eglContext); + egl.eglTerminate(eglDisplay); + eglContext = EGL10.EGL_NO_CONTEXT; + eglDisplay = EGL10.EGL_NO_DISPLAY; + eglConfig = null; + } + + @Override + public void makeCurrent() { + checkIsNotReleased(); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't make current"); + } + synchronized (EglBase.lock) { + if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { + throw new GLException(egl.eglGetError(), + "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + } + + // Detach the current EGL context, so that it can be made current on another thread. + @Override + public void detachCurrent() { + synchronized (EglBase.lock) { + if (!egl.eglMakeCurrent( + eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) { + throw new GLException(egl.eglGetError(), + "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + } + + @Override + public void swapBuffers() { + checkIsNotReleased(); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't swap buffers"); + } + synchronized (EglBase.lock) { + egl.eglSwapBuffers(eglDisplay, eglSurface); + } + } + + @Override + public void swapBuffers(long timeStampNs) { + // Setting presentation time is not supported for EGL 1.0. + swapBuffers(); + } + + // Return an EGLDisplay, or die trying. 
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGL10 egl, EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new GLException(
+ egl.eglGetError(), "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private EGLContext createEglContext(@Nullable EGLContext sharedContext, EGLDisplay eglDisplay,
+ EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ }
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglContext;
+ }
+
+ private static native long nativeGetCurrentNativeEGLContext();
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java
new file mode 100644
index 0000000000..caf45b091e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/EglBase14Impl.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLException;
+import android.os.Build;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
+class EglBase14Impl implements EglBase14 { + private static final String TAG = "EglBase14Impl"; + private EGLContext eglContext; + @Nullable private EGLConfig eglConfig; + private EGLDisplay eglDisplay; + private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE; + + public static class Context implements EglBase14.Context { + private final EGLContext egl14Context; + + @Override + public EGLContext getRawContext() { + return egl14Context; + } + + @Override + public long getNativeEglContext() { + return egl14Context.getNativeHandle(); + } + + public Context(android.opengl.EGLContext eglContext) { + this.egl14Context = eglContext; + } + } + + // Create a new context with the specified config type, sharing data with sharedContext. + // `sharedContext` may be null. + public EglBase14Impl(EGLContext sharedContext, int[] configAttributes) { + eglDisplay = getEglDisplay(); + eglConfig = getEglConfig(eglDisplay, configAttributes); + final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes); + Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion); + eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion); + } + + // Create EGLSurface from the Android Surface. + @Override + public void createSurface(Surface surface) { + createSurfaceInternal(surface); + } + + // Create EGLSurface from the Android SurfaceTexture. + @Override + public void createSurface(SurfaceTexture surfaceTexture) { + createSurfaceInternal(surfaceTexture); + } + + // Create EGLSurface from either Surface or SurfaceTexture. + private void createSurfaceInternal(Object surface) { + if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) { + throw new IllegalStateException("Input must be either a Surface or SurfaceTexture"); + } + checkIsNotReleased(); + if (eglSurface != EGL14.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL14.EGL_NONE}; + eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0); + if (eglSurface == EGL14.EGL_NO_SURFACE) { + throw new GLException(EGL14.eglGetError(), + "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError())); + } + } + + @Override + public void createDummyPbufferSurface() { + createPbufferSurface(1, 1); + } + + @Override + public void createPbufferSurface(int width, int height) { + checkIsNotReleased(); + if (eglSurface != EGL14.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE}; + eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0); + if (eglSurface == EGL14.EGL_NO_SURFACE) { + throw new GLException(EGL14.eglGetError(), + "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x" + + Integer.toHexString(EGL14.eglGetError())); + } + } + + @Override + public Context getEglBaseContext() { + return new Context(eglContext); + } + + @Override + public boolean hasSurface() { + return eglSurface != EGL14.EGL_NO_SURFACE; + } + + @Override + public int surfaceWidth() { + final int widthArray[] = new int[1]; + EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0); + return widthArray[0]; + } + + @Override + public int surfaceHeight() { + final int heightArray[] = new int[1]; + EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0); + return heightArray[0]; + } + + @Override + public void 
releaseSurface() { + if (eglSurface != EGL14.EGL_NO_SURFACE) { + EGL14.eglDestroySurface(eglDisplay, eglSurface); + eglSurface = EGL14.EGL_NO_SURFACE; + } + } + + private void checkIsNotReleased() { + if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT + || eglConfig == null) { + throw new RuntimeException("This object has been released"); + } + } + + @Override + public void release() { + checkIsNotReleased(); + releaseSurface(); + detachCurrent(); + synchronized (EglBase.lock) { + EGL14.eglDestroyContext(eglDisplay, eglContext); + } + EGL14.eglReleaseThread(); + EGL14.eglTerminate(eglDisplay); + eglContext = EGL14.EGL_NO_CONTEXT; + eglDisplay = EGL14.EGL_NO_DISPLAY; + eglConfig = null; + } + + @Override + public void makeCurrent() { + checkIsNotReleased(); + if (eglSurface == EGL14.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't make current"); + } + synchronized (EglBase.lock) { + if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { + throw new GLException(EGL14.eglGetError(), + "eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError())); + } + } + } + + // Detach the current EGL context, so that it can be made current on another thread. + @Override + public void detachCurrent() { + synchronized (EglBase.lock) { + if (!EGL14.eglMakeCurrent( + eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) { + throw new GLException(EGL14.eglGetError(), + "eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError())); + } + } + } + + @Override + public void swapBuffers() { + checkIsNotReleased(); + if (eglSurface == EGL14.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't swap buffers"); + } + synchronized (EglBase.lock) { + EGL14.eglSwapBuffers(eglDisplay, eglSurface); + } + } + + @Override + public void swapBuffers(long timeStampNs) { + checkIsNotReleased(); + if (eglSurface == EGL14.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't swap buffers"); + } + synchronized (EglBase.lock) { + // See + // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt + EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs); + EGL14.eglSwapBuffers(eglDisplay, eglSurface); + } + } + + // Return an EGLDisplay, or die trying. + private static EGLDisplay getEglDisplay() { + EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); + if (eglDisplay == EGL14.EGL_NO_DISPLAY) { + throw new GLException(EGL14.eglGetError(), + "Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError())); + } + int[] version = new int[2]; + if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) { + throw new GLException(EGL14.eglGetError(), + "Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError())); + } + return eglDisplay; + } + + // Return an EGLConfig, or die trying. 
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(@Nullable EGLContext sharedContext,
+ EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL14.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ }
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ return eglContext;
+ }
+}
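A sketch of how these helpers combine into a pair of contexts that share texture objects. EglBase.create() and EglBase.CONFIG_PLAIN come from the public EglBase interface; the variable names are illustrative:

// Root context for the application, e.g. owned by the renderer.
EglBase rootEgl = EglBase.create(/* sharedContext= */ null, EglBase.CONFIG_PLAIN);
// Second context sharing textures with the root; createEglContext() above passes
// the root context as the share_context argument of eglCreateContext().
EglBase encoderEgl = EglBase.create(rootEgl.getEglBaseContext(), EglBase.CONFIG_PLAIN);
encoderEgl.createDummyPbufferSurface(); // 1x1 pbuffer so makeCurrent() has a surface
encoderEgl.makeCurrent();
// ... textures created under rootEgl are now usable on this thread ...
encoderEgl.release();
rootEgl.release();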
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java
new file mode 100644
index 0000000000..fe9481e182
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Empty.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Empty class for use in libjingle_peerconnection_java because all targets require at least one
+ * Java file.
+ */
+class Empty {}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
new file mode 100644
index 0000000000..e28b7b5a26
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/FramerateBitrateAdjuster.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
+ * hardware codecs that assume the framerate never changes.
+ */
+class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
+ private static final int DEFAULT_FRAMERATE_FPS = 30;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ // Report a fixed framerate to the codec and scale the bitrate instead, so the per-frame
+ // bit budget still matches the requested targets.
+ this.targetFramerateFps = DEFAULT_FRAMERATE_FPS;
+ this.targetBitrateBps = (int) (targetBitrateBps * DEFAULT_FRAMERATE_FPS / targetFramerateFps);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java
new file mode 100644
index 0000000000..34144e2f75
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/GlGenericDrawer.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.FloatBuffer;
+
+/**
+ * Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input
+ * sources (OES, RGB, or YUV) using a generic fragment shader as input. The generic fragment shader
+ * should sample pixel values from the function "sample" that will be provided by this class and
+ * provides an abstraction for the input source type (OES, RGB, or YUV). The texture coordinate
+ * variable name will be "tc" and the texture matrix in the vertex shader will be "tex_mat". The
+ * simplest possible generic shader that just draws pixels from the frame unmodified looks like:
+ * void main() {
+ *   gl_FragColor = sample(tc);
+ * }
+ * This class covers the cases for most simple shaders and generates the necessary boilerplate.
+ * Advanced shaders can always implement RendererCommon.GlDrawer directly.
+ */
+class GlGenericDrawer implements RendererCommon.GlDrawer {
+ /**
+ * The different shader types representing different input sources. YUV here represents three
+ * separate Y, U, V textures.
+ */
+ public static enum ShaderType { OES, RGB, YUV }
+
+ /**
+ * The shader callbacks are used to customize behavior for a GlDrawer. They provide a hook to set
+ * uniform variables in the shader before a frame is drawn.
+ */
+ public static interface ShaderCallbacks {
+ /**
+ * This callback is called when a new shader has been compiled and created. It will be called
+ * for the first frame as well as when the shader type is changed. This callback can be used to
+ * do custom initialization of the shader that only needs to happen once.
+ */
+ void onNewShader(GlShader shader);
+
+ /**
+ * This callback is called before rendering a frame. It can be used to do custom preparation of
+ * the shader that needs to happen every frame.
+ */ + void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight, + int viewportWidth, int viewportHeight); + } + + private static final String INPUT_VERTEX_COORDINATE_NAME = "in_pos"; + private static final String INPUT_TEXTURE_COORDINATE_NAME = "in_tc"; + private static final String TEXTURE_MATRIX_NAME = "tex_mat"; + private static final String DEFAULT_VERTEX_SHADER_STRING = "varying vec2 tc;\n" + + "attribute vec4 in_pos;\n" + + "attribute vec4 in_tc;\n" + + "uniform mat4 tex_mat;\n" + + "void main() {\n" + + " gl_Position = in_pos;\n" + + " tc = (tex_mat * in_tc).xy;\n" + + "}\n"; + + // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) + // is top-right. + private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[] { + -1.0f, -1.0f, // Bottom left. + 1.0f, -1.0f, // Bottom right. + -1.0f, 1.0f, // Top left. + 1.0f, 1.0f, // Top right. + }); + + // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right. + private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER = + GlUtil.createFloatBuffer(new float[] { + 0.0f, 0.0f, // Bottom left. + 1.0f, 0.0f, // Bottom right. + 0.0f, 1.0f, // Top left. + 1.0f, 1.0f, // Top right. + }); + + static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) { + final StringBuilder stringBuilder = new StringBuilder(); + if (shaderType == ShaderType.OES) { + stringBuilder.append("#extension GL_OES_EGL_image_external : require\n"); + } + stringBuilder.append("precision mediump float;\n"); + stringBuilder.append("varying vec2 tc;\n"); + + if (shaderType == ShaderType.YUV) { + stringBuilder.append("uniform sampler2D y_tex;\n"); + stringBuilder.append("uniform sampler2D u_tex;\n"); + stringBuilder.append("uniform sampler2D v_tex;\n"); + + // Add separate function for sampling texture. + // yuv_to_rgb_mat is inverse of the matrix defined in YuvConverter. + stringBuilder.append("vec4 sample(vec2 p) {\n"); + stringBuilder.append(" float y = texture2D(y_tex, p).r * 1.16438;\n"); + stringBuilder.append(" float u = texture2D(u_tex, p).r;\n"); + stringBuilder.append(" float v = texture2D(v_tex, p).r;\n"); + stringBuilder.append(" return vec4(y + 1.59603 * v - 0.874202,\n"); + stringBuilder.append(" y - 0.391762 * u - 0.812968 * v + 0.531668,\n"); + stringBuilder.append(" y + 2.01723 * u - 1.08563, 1);\n"); + stringBuilder.append("}\n"); + stringBuilder.append(genericFragmentSource); + } else { + final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D"; + stringBuilder.append("uniform ").append(samplerName).append(" tex;\n"); + + // Update the sampling function in-place. 
+ stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, "));
+ }
+
+ return stringBuilder.toString();
+ }
+
+ private final String genericFragmentSource;
+ private final String vertexShader;
+ private final ShaderCallbacks shaderCallbacks;
+ @Nullable private ShaderType currentShaderType;
+ @Nullable private GlShader currentShader;
+ private int inPosLocation;
+ private int inTcLocation;
+ private int texMatrixLocation;
+
+ public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks);
+ }
+
+ public GlGenericDrawer(
+ String vertexShader, String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this.vertexShader = vertexShader;
+ this.genericFragmentSource = genericFragmentSource;
+ this.shaderCallbacks = shaderCallbacks;
+ }
+
+ // Visible for testing.
+ GlShader createShader(ShaderType shaderType) {
+ return new GlShader(
+ vertexShader, createFragmentShaderString(genericFragmentSource, shaderType));
+ }
+
+ /**
+ * Draw an OES texture frame with the specified texture transformation matrix. Required resources
+ * are allocated at the first call to this function.
+ */
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ /**
+ * Draw an RGB(A) texture frame with the specified texture transformation matrix. Required
+ * resources are allocated at the first call to this function.
+ */
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ /**
+ * Draw a YUV frame with the specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ }
+ // Draw the textures.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the textures as a precaution.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+ }
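A hypothetical caller-side example of the pattern these methods implement: the generic fragment source only defines main() in terms of sample() and tc, and createFragmentShaderString() wraps it for OES, RGB, or YUV input. The grayscale shader below is an assumption for illustration, not part of the diff:

static final String GRAYSCALE_FRAGMENT = "void main() {\n"
    + "  vec4 c = sample(tc);\n"
    + "  float y = dot(c.rgb, vec3(0.299, 0.587, 0.114));\n" // BT.601 luma weights
    + "  gl_FragColor = vec4(y, y, y, c.a);\n"
    + "}\n";

static GlGenericDrawer createGrayscaleDrawer() {
  return new GlGenericDrawer(GRAYSCALE_FRAGMENT, new GlGenericDrawer.ShaderCallbacks() {
    @Override
    public void onNewShader(GlShader shader) {
      // One-time uniforms would be set here.
    }

    @Override
    public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth,
        int frameHeight, int viewportWidth, int viewportHeight) {
      // Per-frame uniforms would be set here.
    }
  });
}

For RGB input, the string rewrite above turns "sample(tc)" into "texture2D(tex, tc)"; for YUV, a sample() helper that converts the three planes to RGB is prepended instead.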
+
+ private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth,
+ int frameHeight, int viewportWidth, int viewportHeight) {
+ final GlShader shader;
+ if (shaderType.equals(currentShaderType)) {
+ // Same shader type as before, reuse the existing shader.
+ shader = currentShader;
+ } else {
+ // Allocate new shader.
+ currentShaderType = null;
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ }
+
+ shader = createShader(shaderType);
+ currentShaderType = shaderType;
+ currentShader = shader;
+
+ shader.useProgram();
+ // Set input texture units.
+ if (shaderType == ShaderType.YUV) {
+ GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+ } else {
+ GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
+ }
+
+ GlUtil.checkNoGLES2Error("Create shader");
+ shaderCallbacks.onNewShader(shader);
+ texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME);
+ inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME);
+ inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME);
+ }
+
+ shader.useProgram();
+
+ // Upload the vertex coordinates.
+ GLES20.glEnableVertexAttribArray(inPosLocation);
+ GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_BUFFER);
+
+ // Upload the texture coordinates.
+ GLES20.glEnableVertexAttribArray(inTcLocation);
+ GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_TEXTURE_BUFFER);
+
+ // Upload the texture transformation matrix.
+ GLES20.glUniformMatrix4fv(
+ texMatrixLocation, /* count= */ 1, /* transpose= */ false, texMatrix, /* offset= */ 0);
+
+ // Do custom per-frame shader preparation.
+ shaderCallbacks.onPrepareShader(
+ shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ GlUtil.checkNoGLES2Error("Prepare shader");
+ }
+
+ /**
+ * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+ */
+ @Override
+ public void release() {
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ currentShaderType = null;
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java
new file mode 100644
index 0000000000..abb79c6582
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/H264Utils.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/** Container for static helper functions related to dealing with H264 codecs. */
*/ +class H264Utils { + public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id"; + public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed"; + public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode"; + + public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0"; + public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c"; + public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex. + public static final String H264_CONSTRAINED_HIGH_3_1 = + H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1; + public static final String H264_CONSTRAINED_BASELINE_3_1 = + H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1; + + public static Map<String, String> getDefaultH264Params(boolean isHighProfile) { + final Map<String, String> params = new HashMap<>(); + params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1"); + params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1"); + params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, + isHighProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1 + : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1); + return params; + } + + public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC = + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false)); + public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC = + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true)); + + public static boolean isSameH264Profile( + Map<String, String> params1, Map<String, String> params2) { + return nativeIsSameH264Profile(params1, params2); + } + + private static native boolean nativeIsSameH264Profile( + Map<String, String> params1, Map<String, String> params2); +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java new file mode 100644 index 0000000000..42a3ccfbfd --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java @@ -0,0 +1,763 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.opengl.GLES20; +import android.os.Build; +import android.os.Bundle; +import android.view.Surface; +import androidx.annotation.Nullable; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.concurrent.BlockingDeque; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import org.webrtc.ThreadUtils.ThreadChecker; + +/** + * Android hardware video encoder. + */ +class HardwareVideoEncoder implements VideoEncoder { + private static final String TAG = "HardwareVideoEncoder"; + + // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined + // in OMX_Video.h + private static final int VIDEO_ControlRateConstant = 2; + // Key associated with the bitrate control mode value (above). Not present as a MediaFormat + // constant until API level 21. 
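+ // NOTE(editor): Illustrative aside, not part of this patch. On API level 21+ the same + // configuration can be written with the public constants, whose values match the OMX ones used + // here: + // format.setInteger(MediaFormat.KEY_BITRATE_MODE, + // MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR); // BITRATE_MODE_CBR == 2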
+ private static final String KEY_BITRATE_MODE = "bitrate-mode"; + + private static final int VIDEO_AVC_PROFILE_HIGH = 8; + private static final int VIDEO_AVC_LEVEL_3 = 0x100; + + private static final int MAX_VIDEO_FRAMERATE = 30; + + // See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc. + private static final int MAX_ENCODER_Q_SIZE = 2; + + private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; + private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; + + // Size of the input frames should be multiple of 16 for the H/W encoder. + private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16; + + /** + * Keeps track of the number of output buffers that have been passed down the pipeline and not yet + * released. We need to wait for this to go down to zero before operations invalidating the output + * buffers, i.e., stop() and getOutputBuffer(). + */ + private static class BusyCount { + private final Object countLock = new Object(); + private int count; + + public void increment() { + synchronized (countLock) { + count++; + } + } + + // This method may be called on an arbitrary thread. + public void decrement() { + synchronized (countLock) { + count--; + if (count == 0) { + countLock.notifyAll(); + } + } + } + + // The increment and waitForZero methods are called on the same thread (deliverEncodedImage, + // running on the output thread). Hence, after waitForZero returns, the count will stay zero + // until the same thread calls increment. + public void waitForZero() { + boolean wasInterrupted = false; + synchronized (countLock) { + while (count > 0) { + try { + countLock.wait(); + } catch (InterruptedException e) { + Logging.e(TAG, "Interrupted while waiting on busy count", e); + wasInterrupted = true; + } + } + } + + if (wasInterrupted) { + Thread.currentThread().interrupt(); + } + } + } + // --- Initialized on construction. + private final MediaCodecWrapperFactory mediaCodecWrapperFactory; + private final String codecName; + private final VideoCodecMimeType codecType; + private final Integer surfaceColorFormat; + private final Integer yuvColorFormat; + private final YuvFormat yuvFormat; + private final Map<String, String> params; + private final int keyFrameIntervalSec; // Base interval for generating key frames. + // Interval at which to force a key frame. Used to reduce color distortions caused by some + // Qualcomm video encoders. + private final long forcedKeyFrameNs; + private final BitrateAdjuster bitrateAdjuster; + // EGL context shared with the application. Used to access texture inputs. + private final EglBase14.Context sharedContext; + + // Drawer used to draw input textures onto the codec's input surface. + private final GlRectDrawer textureDrawer = new GlRectDrawer(); + private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer(); + // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are + // pre-populated with all the information that can't be sent through MediaCodec. + private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>(); + + private final ThreadChecker encodeThreadChecker = new ThreadChecker(); + private final ThreadChecker outputThreadChecker = new ThreadChecker(); + private final BusyCount outputBuffersBusyCount = new BusyCount(); + + // --- Set on initialize and immutable until release. + private Callback callback; + private boolean automaticResizeOn; + + // --- Valid and immutable while an encoding session is running. 
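+ // NOTE(editor): Illustrative sketch, not part of this patch, of the BusyCount hand-off defined + // above. The output thread increments the count before publishing a buffer, the consumer's + // release callback decrements it (possibly from another thread), and shutdown waits for the + // count to drain: + // outputBuffersBusyCount.increment(); // before handing an output buffer to the callback + // // ... later, in the buffer's release callback: + // outputBuffersBusyCount.decrement(); + // outputBuffersBusyCount.waitForZero(); // before codec.stop() invalidates the buffers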
+ @Nullable private MediaCodecWrapper codec; + // Thread that delivers encoded frames to the user callback. + @Nullable private Thread outputThread; + + // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the + // input surface. Making this base current allows textures from the context to be drawn onto the + // surface. + @Nullable private EglBase14 textureEglBase; + // Input surface for the codec. The encoder will draw input textures onto this surface. + @Nullable private Surface textureInputSurface; + + private int width; + private int height; + // Y-plane stride in the encoder's input + private int stride; + // Y-plane slice-height in the encoder's input + private int sliceHeight; + private boolean useSurfaceMode; + + // --- Only accessed from the encoding thread. + // Presentation timestamp of next frame to encode. + private long nextPresentationTimestampUs; + // Presentation timestamp of the last requested (or forced) key frame. + private long lastKeyFrameNs; + + // --- Only accessed on the output thread. + // Contents of the last observed config frame output by the MediaCodec. Used by H.264. + @Nullable private ByteBuffer configBuffer; + private int adjustedBitrate; + + // Whether the encoder is running. Volatile so that the output thread can watch this value and + // exit when the encoder stops. + private volatile boolean running; + // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this + // value to send exceptions thrown during release back to the encoder thread. + @Nullable private volatile Exception shutdownException; + + /** + * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame + * intervals, and bitrateAdjuster. + * + * @param codecName the hardware codec implementation to use + * @param codecType the type of the given video codec (e.g. VP8, VP9, H264 or AV1) + * @param surfaceColorFormat color format for surface mode or null if not available + * @param yuvColorFormat color format for bytebuffer mode + * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec + * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested; + * used to reduce distortion caused by some codec implementations + * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the + * desired bitrates + * @throws IllegalArgumentException if colorFormat is unsupported + */ + public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, + VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, + Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs, + BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) { + this.mediaCodecWrapperFactory = mediaCodecWrapperFactory; + this.codecName = codecName; + this.codecType = codecType; + this.surfaceColorFormat = surfaceColorFormat; + this.yuvColorFormat = yuvColorFormat; + this.yuvFormat = YuvFormat.valueOf(yuvColorFormat); + this.params = params; + this.keyFrameIntervalSec = keyFrameIntervalSec; + this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs); + this.bitrateAdjuster = bitrateAdjuster; + this.sharedContext = sharedContext; + + // Allow construction on a different thread.
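+ // NOTE(editor): Illustrative sketch, not part of this patch. Applications normally reach this + // constructor through a factory rather than calling it directly, along the lines of: + // VideoEncoderFactory factory = new HardwareVideoEncoderFactory( + // eglBase.getEglBaseContext(), /* enableIntelVp8Encoder= */ true, + // /* enableH264HighProfile= */ true); + // VideoEncoder encoder = factory.createEncoder(codecInfo); // may be a HardwareVideoEncoder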
+ encodeThreadChecker.detachThread(); + } + + @Override + public VideoCodecStatus initEncode(Settings settings, Callback callback) { + encodeThreadChecker.checkIsOnValidThread(); + + this.callback = callback; + automaticResizeOn = settings.automaticResizeOn; + + if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned."); + return VideoCodecStatus.ERR_SIZE; + } + this.width = settings.width; + this.height = settings.height; + useSurfaceMode = canUseSurface(); + + if (settings.startBitrate != 0 && settings.maxFramerate != 0) { + bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate); + } + adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps(); + + Logging.d(TAG, + "initEncode: " + width + " x " + height + ". @ " + settings.startBitrate + + "kbps. Fps: " + settings.maxFramerate + " Use surface mode: " + useSurfaceMode); + return initEncodeInternal(); + } + + private VideoCodecStatus initEncodeInternal() { + encodeThreadChecker.checkIsOnValidThread(); + + nextPresentationTimestampUs = 0; + lastKeyFrameNs = -1; + + try { + codec = mediaCodecWrapperFactory.createByCodecName(codecName); + } catch (IOException | IllegalArgumentException e) { + Logging.e(TAG, "Cannot create media encoder " + codecName); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + + final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat; + try { + MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); + format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate); + format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant); + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); + format.setFloat( + MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps()); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec); + if (codecType == VideoCodecMimeType.H264) { + String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID); + if (profileLevelId == null) { + profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1; + } + switch (profileLevelId) { + case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1: + format.setInteger("profile", VIDEO_AVC_PROFILE_HIGH); + format.setInteger("level", VIDEO_AVC_LEVEL_3); + break; + case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1: + break; + default: + Logging.w(TAG, "Unknown profile level id: " + profileLevelId); + } + } + Logging.d(TAG, "Format: " + format); + codec.configure( + format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE); + + if (useSurfaceMode) { + textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE); + textureInputSurface = codec.createInputSurface(); + textureEglBase.createSurface(textureInputSurface); + textureEglBase.makeCurrent(); + } + + MediaFormat inputFormat = codec.getInputFormat(); + stride = getStride(inputFormat, width); + sliceHeight = getSliceHeight(inputFormat, height); + + codec.start(); + } catch (IllegalStateException e) { + Logging.e(TAG, "initEncodeInternal failed", e); + release(); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + + running = true; + outputThreadChecker.detachThread(); + outputThread = createOutputThread(); + outputThread.start(); + + return VideoCodecStatus.OK; + } + + @Override + public VideoCodecStatus release() { + encodeThreadChecker.checkIsOnValidThread(); + + final VideoCodecStatus 
returnValue; + if (outputThread == null) { + returnValue = VideoCodecStatus.OK; + } else { + // The outputThread actually stops and releases the codec once running is false. + running = false; + if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { + Logging.e(TAG, "Media encoder release timeout"); + returnValue = VideoCodecStatus.TIMEOUT; + } else if (shutdownException != null) { + // Log the exception and turn it into an error. + Logging.e(TAG, "Media encoder release exception", shutdownException); + returnValue = VideoCodecStatus.ERROR; + } else { + returnValue = VideoCodecStatus.OK; + } + } + + textureDrawer.release(); + videoFrameDrawer.release(); + if (textureEglBase != null) { + textureEglBase.release(); + textureEglBase = null; + } + if (textureInputSurface != null) { + textureInputSurface.release(); + textureInputSurface = null; + } + outputBuilders.clear(); + + codec = null; + outputThread = null; + + // Allow changing thread after release. + encodeThreadChecker.detachThread(); + + return returnValue; + } + + @Override + public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) { + encodeThreadChecker.checkIsOnValidThread(); + if (codec == null) { + return VideoCodecStatus.UNINITIALIZED; + } + + final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer(); + final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer; + + // If input resolution changed, restart the codec with the new resolution. + final int frameWidth = videoFrame.getBuffer().getWidth(); + final int frameHeight = videoFrame.getBuffer().getHeight(); + final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer; + if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) { + VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode); + if (status != VideoCodecStatus.OK) { + return status; + } + } + + if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) { + // Too many frames in the encoder. Drop this frame. + Logging.e(TAG, "Dropped frame, encoder queue full"); + return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887. + } + + boolean requestedKeyFrame = false; + for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) { + if (frameType == EncodedImage.FrameType.VideoFrameKey) { + requestedKeyFrame = true; + } + } + + if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) { + requestKeyFrame(videoFrame.getTimestampNs()); + } + + // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are + // subsampled at one byte per four pixels. + int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2; + EncodedImage.Builder builder = EncodedImage.builder() + .setCaptureTimeNs(videoFrame.getTimestampNs()) + .setEncodedWidth(videoFrame.getBuffer().getWidth()) + .setEncodedHeight(videoFrame.getBuffer().getHeight()) + .setRotation(videoFrame.getRotation()); + outputBuilders.offer(builder); + + long presentationTimestampUs = nextPresentationTimestampUs; + // Round frame duration down to avoid bitrate overshoot. 
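+ // NOTE(editor): Worked example, not part of this patch. With an adjusted framerate of 30 fps, + // TimeUnit.SECONDS.toMicros(1) / 30 = 33,333 us after the (long) cast truncates (rather than + // 33,333.3), so the per-frame duration errs slightly low and the effective bitrate stays at or + // below the configured target.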
+ long frameDurationUs = + (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps()); + nextPresentationTimestampUs += frameDurationUs; + + final VideoCodecStatus returnValue; + if (useSurfaceMode) { + returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs); + } else { + returnValue = + encodeByteBuffer(videoFrame, presentationTimestampUs, videoFrameBuffer, bufferSize); + } + + // Check if the queue was successful. + if (returnValue != VideoCodecStatus.OK) { + // Keep the output builders in sync with buffers in the codec. + outputBuilders.pollLast(); + } + + return returnValue; + } + + private VideoCodecStatus encodeTextureBuffer( + VideoFrame videoFrame, long presentationTimestampUs) { + encodeThreadChecker.checkIsOnValidThread(); + try { + // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway, + // but it's a workaround for bug webrtc:5147. + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + // It is not necessary to release this frame because it doesn't own the buffer. + VideoFrame derotatedFrame = + new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs()); + videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */); + textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs)); + } catch (RuntimeException e) { + Logging.e(TAG, "encodeTexture failed", e); + return VideoCodecStatus.ERROR; + } + return VideoCodecStatus.OK; + } + + private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs, + VideoFrame.Buffer videoFrameBuffer, int bufferSize) { + encodeThreadChecker.checkIsOnValidThread(); + // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind. + int index; + try { + index = codec.dequeueInputBuffer(0 /* timeout */); + } catch (IllegalStateException e) { + Logging.e(TAG, "dequeueInputBuffer failed", e); + return VideoCodecStatus.ERROR; + } + + if (index == -1) { + // Encoder is falling behind. No input buffers available. Drop the frame. + Logging.d(TAG, "Dropped frame, no input buffers available"); + return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887. + } + + ByteBuffer buffer; + try { + buffer = codec.getInputBuffer(index); + } catch (IllegalStateException e) { + Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e); + return VideoCodecStatus.ERROR; + } + fillInputBuffer(buffer, videoFrameBuffer); + + try { + codec.queueInputBuffer( + index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */); + } catch (IllegalStateException e) { + Logging.e(TAG, "queueInputBuffer failed", e); + // IllegalStateException thrown when the codec is in the wrong state. 
+ return VideoCodecStatus.ERROR; + } + return VideoCodecStatus.OK; + } + + @Override + public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) { + encodeThreadChecker.checkIsOnValidThread(); + if (framerate > MAX_VIDEO_FRAMERATE) { + framerate = MAX_VIDEO_FRAMERATE; + } + bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate); + return VideoCodecStatus.OK; + } + + @Override + public VideoCodecStatus setRates(RateControlParameters rcParameters) { + encodeThreadChecker.checkIsOnValidThread(); + bitrateAdjuster.setTargets(rcParameters.bitrate.getSum(), rcParameters.framerateFps); + return VideoCodecStatus.OK; + } + + @Override + public ScalingSettings getScalingSettings() { + encodeThreadChecker.checkIsOnValidThread(); + if (automaticResizeOn) { + if (codecType == VideoCodecMimeType.VP8) { + final int kLowVp8QpThreshold = 29; + final int kHighVp8QpThreshold = 95; + return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold); + } else if (codecType == VideoCodecMimeType.H264) { + final int kLowH264QpThreshold = 24; + final int kHighH264QpThreshold = 37; + return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold); + } + } + return ScalingSettings.OFF; + } + + @Override + public String getImplementationName() { + return codecName; + } + + @Override + public EncoderInfo getEncoderInfo() { + // Since our MediaCodec is guaranteed to encode 16-pixel-aligned frames only, we set alignment + // value to be 16. Additionally, this encoder produces a single stream. So it should not require + // alignment for all layers. + return new EncoderInfo( + /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT, + /* applyAlignmentToAllSimulcastLayers= */ false); + } + + private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) { + encodeThreadChecker.checkIsOnValidThread(); + VideoCodecStatus status = release(); + if (status != VideoCodecStatus.OK) { + return status; + } + + if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned."); + return VideoCodecStatus.ERR_SIZE; + } + width = newWidth; + height = newHeight; + useSurfaceMode = newUseSurfaceMode; + return initEncodeInternal(); + } + + private boolean shouldForceKeyFrame(long presentationTimestampNs) { + encodeThreadChecker.checkIsOnValidThread(); + return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs; + } + + private void requestKeyFrame(long presentationTimestampNs) { + encodeThreadChecker.checkIsOnValidThread(); + // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could + // indicate this in queueInputBuffer() below and guarantee _this_ frame + // be encoded as a key frame, but sadly that flag is ignored. Instead, + // we request a key frame "soon". + try { + Bundle b = new Bundle(); + b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); + codec.setParameters(b); + } catch (IllegalStateException e) { + Logging.e(TAG, "requestKeyFrame failed", e); + return; + } + lastKeyFrameNs = presentationTimestampNs; + } + + private Thread createOutputThread() { + return new Thread() { + @Override + public void run() { + while (running) { + deliverEncodedImage(); + } + releaseCodecOnOutputThread(); + } + }; + } + + // Visible for testing. 
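+ // NOTE(editor): Illustrative sketch, not part of this patch. Because the codec is accessed + // through the MediaCodecWrapper interface (defined later in this patch), a unit test can drive + // this method synchronously with a fake wrapper, e.g.: + // HardwareVideoEncoder encoder = new HardwareVideoEncoder( + // fakeMediaCodecWrapperFactory, /* ...remaining constructor arguments... */); + // encoder.deliverEncodedImage(); // pulls one canned buffer from the fake codec + // where fakeMediaCodecWrapperFactory is a hypothetical test double that returns scripted buffers.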
+ protected void deliverEncodedImage() { + outputThreadChecker.checkIsOnValidThread(); + try { + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US); + if (index < 0) { + if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + outputBuffersBusyCount.waitForZero(); + } + return; + } + + ByteBuffer codecOutputBuffer = codec.getOutputBuffer(index); + codecOutputBuffer.position(info.offset); + codecOutputBuffer.limit(info.offset + info.size); + + if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size); + configBuffer = ByteBuffer.allocateDirect(info.size); + configBuffer.put(codecOutputBuffer); + } else { + bitrateAdjuster.reportEncodedFrame(info.size); + if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) { + updateBitrate(); + } + + final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0; + if (isKeyFrame) { + Logging.d(TAG, "Sync frame generated"); + } + + final ByteBuffer frameBuffer; + if (isKeyFrame && codecType == VideoCodecMimeType.H264) { + Logging.d(TAG, + "Prepending config frame of size " + configBuffer.capacity() + + " to output buffer with offset " + info.offset + ", size " + info.size); + // For H.264 key frame prepend SPS and PPS NALs at the start. + frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity()); + configBuffer.rewind(); + frameBuffer.put(configBuffer); + frameBuffer.put(codecOutputBuffer); + frameBuffer.rewind(); + } else { + frameBuffer = codecOutputBuffer.slice(); + } + + final EncodedImage.FrameType frameType = isKeyFrame + ? EncodedImage.FrameType.VideoFrameKey + : EncodedImage.FrameType.VideoFrameDelta; + + outputBuffersBusyCount.increment(); + EncodedImage.Builder builder = outputBuilders.poll(); + EncodedImage encodedImage = builder + .setBuffer(frameBuffer, + () -> { + // This callback should not throw any exceptions since + // it may be called on an arbitrary thread. + // Check bug webrtc:11230 for more details. + try { + codec.releaseOutputBuffer(index, false); + } catch (Exception e) { + Logging.e(TAG, "releaseOutputBuffer failed", e); + } + outputBuffersBusyCount.decrement(); + }) + .setFrameType(frameType) + .createEncodedImage(); + // TODO(mellem): Set codec-specific info. + callback.onEncodedFrame(encodedImage, new CodecSpecificInfo()); + // Note that the callback may have retained the image. + encodedImage.release(); + } + } catch (IllegalStateException e) { + Logging.e(TAG, "deliverOutput failed", e); + } + } + + private void releaseCodecOnOutputThread() { + outputThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, "Releasing MediaCodec on output thread"); + outputBuffersBusyCount.waitForZero(); + try { + codec.stop(); + } catch (Exception e) { + Logging.e(TAG, "Media encoder stop failed", e); + } + try { + codec.release(); + } catch (Exception e) { + Logging.e(TAG, "Media encoder release failed", e); + // Propagate exceptions caught during release back to the main thread. 
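+ // NOTE(editor): Illustrative aside, not part of this patch. The volatile shutdownException + // field declared above carries this failure across threads: release() joins the output thread + // and only afterwards reads the field, as in the release() code earlier in this file: + // if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { + // returnValue = VideoCodecStatus.TIMEOUT; + // } else if (shutdownException != null) { + // returnValue = VideoCodecStatus.ERROR; + // }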
+ shutdownException = e; + } + configBuffer = null; + Logging.d(TAG, "Release on output thread done"); + } + + private VideoCodecStatus updateBitrate() { + outputThreadChecker.checkIsOnValidThread(); + adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps(); + try { + Bundle params = new Bundle(); + params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate); + codec.setParameters(params); + return VideoCodecStatus.OK; + } catch (IllegalStateException e) { + Logging.e(TAG, "updateBitrate failed", e); + return VideoCodecStatus.ERROR; + } + } + + private boolean canUseSurface() { + return sharedContext != null && surfaceColorFormat != null; + } + + private static int getStride(MediaFormat inputFormat, int width) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null + && inputFormat.containsKey(MediaFormat.KEY_STRIDE)) { + return inputFormat.getInteger(MediaFormat.KEY_STRIDE); + } + return width; + } + + private static int getSliceHeight(MediaFormat inputFormat, int height) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null + && inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) { + return inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT); + } + return height; + } + + // Visible for testing. + protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer videoFrameBuffer) { + yuvFormat.fillBuffer(buffer, videoFrameBuffer, stride, sliceHeight); + } + + /** + * Enumeration of supported YUV color formats used for MediaCodec's input. + */ + private enum YuvFormat { + I420 { + @Override + void fillBuffer( + ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) { + /* + * According to the docs in Android MediaCodec, the stride of the U and V planes can be + * calculated based on the color format, though it is generally undefined and depends on the + * device and release. + * <p/> Assuming the width and height, dstStrideY and dstSliceHeightY are + * even, it works fine when we define the stride and slice-height of the dst U/V plane to be + * half of the dst Y plane. 
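+ * <p/> For example, with illustrative values dstStrideY = 320 and dstSliceHeightY = 240 (not + * taken from this patch), the U and V planes are written with stride 160 and slice height 120.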
+ */ + int dstStrideU = dstStrideY / 2; + int dstSliceHeight = dstSliceHeightY / 2; + VideoFrame.I420Buffer i420 = srcBuffer.toI420(); + YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), + i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(), + dstStrideY, dstSliceHeightY, dstStrideU, dstSliceHeight); + i420.release(); + } + }, + NV12 { + @Override + void fillBuffer( + ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) { + VideoFrame.I420Buffer i420 = srcBuffer.toI420(); + YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), + i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(), + dstStrideY, dstSliceHeightY); + i420.release(); + } + }; + + abstract void fillBuffer( + ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY); + + static YuvFormat valueOf(int colorFormat) { + switch (colorFormat) { + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: + return I420; + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar: + case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: + return NV12; + default: + throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat); + } + } + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java new file mode 100644 index 0000000000..c1d2d61a71 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/Histogram.java @@ -0,0 +1,39 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Class for holding the native pointer of a histogram. Since there is no way to destroy a + * histogram, please don't create unnecessary instances of this object. This class is thread safe. + * + * Usage example: + * private static final Histogram someMetricHistogram = + * Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50); + * someMetricHistogram.addSample(someVariable); + */ +class Histogram { + private final long handle; + + private Histogram(long handle) { + this.handle = handle; + } + + static public Histogram createCounts(String name, int min, int max, int bucketCount) { + return new Histogram(0); + } + + static public Histogram createEnumeration(String name, int max) { + return new Histogram(0); + } + + public void addSample(int sample) { + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java new file mode 100644 index 0000000000..f391db61a1 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JNILogging.java @@ -0,0 +1,28 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.webrtc.CalledByNative; +import org.webrtc.Loggable; +import org.webrtc.Logging.Severity; + +class JNILogging { + private final Loggable loggable; + + public JNILogging(Loggable loggable) { + this.loggable = loggable; + } + + @CalledByNative + public void logToInjectable(String message, Integer severity, String tag) { + loggable.onLogMessage(message, Severity.values()[severity], tag); + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java new file mode 100644 index 0000000000..e1b2e513d7 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/JniCommon.java @@ -0,0 +1,23 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** Class with static JNI helper functions that are used in many places. */ +public class JniCommon { + /** Functions to increment/decrement an rtc::RefCountInterface pointer. */ + public static native void nativeAddRef(long refCountedPointer); + public static native void nativeReleaseRef(long refCountedPointer); + + public static native ByteBuffer nativeAllocateByteBuffer(int size); + public static native void nativeFreeByteBuffer(ByteBuffer buffer); +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java new file mode 100644 index 0000000000..d5ccae9688 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecUtils.java @@ -0,0 +1,129 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.annotation.TargetApi; +import android.media.MediaCodecInfo; +import android.media.MediaCodecInfo.CodecCapabilities; +import android.os.Build; +import androidx.annotation.Nullable; +import java.util.HashMap; +import java.util.Map; + +/** Container class for static constants and helpers used with MediaCodec. */ +// We are forced to use the old API because we want to support API level < 21. +@SuppressWarnings("deprecation") +class MediaCodecUtils { + private static final String TAG = "MediaCodecUtils"; + + // Prefixes for supported hardware encoder/decoder component names. 
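+ // NOTE(editor): Illustrative sketch, not part of this patch. These prefixes back the pre-Q + // name-based heuristic in isSoftwareOnly() below; a caller can combine them with the codec + // list, e.g.: + // for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { + // MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i); + // if (!info.isEncoder() && MediaCodecUtils.isHardwareAccelerated(info)) { + // // Candidate hardware decoder. + // } + // }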
+ static final String EXYNOS_PREFIX = "OMX.Exynos."; + static final String INTEL_PREFIX = "OMX.Intel."; + static final String NVIDIA_PREFIX = "OMX.Nvidia."; + static final String QCOM_PREFIX = "OMX.qcom."; + static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = { + "OMX.google.", "OMX.SEC.", "c2.android"}; + + // NV12 color formats supported by QCOM codecs, but not declared in MediaCodec - + // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h + static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01; + static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02; + static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03; + static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04; + + // Color formats supported by hardware decoder - in order of preference. + static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar, + CodecCapabilities.COLOR_FormatYUV420SemiPlanar, + CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka, + MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; + + // Color formats supported by hardware encoder - in order of preference. + static final int[] ENCODER_COLOR_FORMATS = { + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar, + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar, + MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, + MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; + + // Color formats supported by texture mode encoding - in order of preference.
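+ // NOTE(editor): Illustrative aside, not part of this patch. selectColorFormat() below scans one + // of these preference-ordered arrays and returns the first format the codec also advertises: + // Integer colorFormat = MediaCodecUtils.selectColorFormat( + // MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType("video/avc")); + // if (colorFormat == null) { /* no common byte-buffer format; reject this codec */ }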
+ static final int[] TEXTURE_COLOR_FORMATS = + new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface}; + + static @Nullable Integer selectColorFormat( + int[] supportedColorFormats, CodecCapabilities capabilities) { + for (int supportedColorFormat : supportedColorFormats) { + for (int codecColorFormat : capabilities.colorFormats) { + if (codecColorFormat == supportedColorFormat) { + return codecColorFormat; + } + } + } + return null; + } + + static boolean codecSupportsType(MediaCodecInfo info, VideoCodecMimeType type) { + for (String mimeType : info.getSupportedTypes()) { + if (type.mimeType().equals(mimeType)) { + return true; + } + } + return false; + } + + static Map<String, String> getCodecProperties(VideoCodecMimeType type, boolean highProfile) { + switch (type) { + case VP8: + case VP9: + case AV1: + return new HashMap<String, String>(); + case H264: + return H264Utils.getDefaultH264Params(highProfile); + default: + throw new IllegalArgumentException("Unsupported codec: " + type); + } + } + + static boolean isHardwareAccelerated(MediaCodecInfo info) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + return isHardwareAcceleratedQOrHigher(info); + } + return !isSoftwareOnly(info); + } + + @TargetApi(29) + private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) { + return codecInfo.isHardwareAccelerated(); + } + + static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + return isSoftwareOnlyQOrHigher(codecInfo); + } + String name = codecInfo.getName(); + for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) { + if (name.startsWith(prefix)) { + return true; + } + } + return false; + } + + @TargetApi(29) + private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) { + return codecInfo.isSoftwareOnly(); + } + + private MediaCodecUtils() { + // This class should not be instantiated. + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java new file mode 100644 index 0000000000..bf591dda26 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -0,0 +1,139 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX; +import static org.webrtc.MediaCodecUtils.QCOM_PREFIX; + +import android.media.MediaCodecInfo; +import android.media.MediaCodecInfo.CodecCapabilities; +import android.media.MediaCodecList; +import android.os.Build; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.List; + +/** Factory for decoders backed by Android MediaCodec API. */ +@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods. 
+class MediaCodecVideoDecoderFactory implements VideoDecoderFactory { + private static final String TAG = "MediaCodecVideoDecoderFactory"; + + private final @Nullable EglBase.Context sharedContext; + private final @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate; + + /** + * MediaCodecVideoDecoderFactory with support for codec filtering. + * + * @param sharedContext The textures generated will be accessible from this context. May be null; + * this disables texture support. + * @param codecAllowedPredicate optional predicate to test if a codec is allowed. All codecs are + * allowed when no predicate is provided. + */ + public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext, + @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) { + this.sharedContext = sharedContext; + this.codecAllowedPredicate = codecAllowedPredicate; + } + + @Nullable + @Override + public VideoDecoder createDecoder(VideoCodecInfo codecType) { + VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName()); + MediaCodecInfo info = findCodecForType(type); + + if (info == null) { + return null; + } + + CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType()); + return new AndroidVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type, + MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities), + sharedContext); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>(); + // Generate a list of supported codecs in order of preference: + // VP8, VP9, H264 (high profile), H264 (baseline profile), and AV1. + for (VideoCodecMimeType type : new VideoCodecMimeType[] {VideoCodecMimeType.VP8, + VideoCodecMimeType.VP9, VideoCodecMimeType.H264, VideoCodecMimeType.AV1}) { + MediaCodecInfo codec = findCodecForType(type); + if (codec != null) { + String name = type.name(); + if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { + supportedCodecInfos.add(new VideoCodecInfo( + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + } + + supportedCodecInfos.add(new VideoCodecInfo( + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + } + } + + return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } + + private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) { + for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { + MediaCodecInfo info = null; + try { + info = MediaCodecList.getCodecInfoAt(i); + } catch (IllegalArgumentException e) { + Logging.e(TAG, "Cannot retrieve decoder codec info", e); + } + + if (info == null || info.isEncoder()) { + continue; + } + + if (isSupportedCodec(info, type)) { + return info; + } + } + + return null; // No support for this type. + } + + // Returns true if the given MediaCodecInfo indicates a supported decoder for the given type. + private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { + if (!MediaCodecUtils.codecSupportsType(info, type)) { + return false; + } + // Check for a supported color format.
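+ // NOTE(editor): Illustrative sketch, not part of this patch. The codecAllowedPredicate + // consulted below is supplied at construction; for example, a hypothetical hardware-only + // factory could exclude one known software prefix: + // VideoDecoderFactory factory = new MediaCodecVideoDecoderFactory( + // eglBase.getEglBaseContext(), + // codecInfo -> !codecInfo.getName().startsWith("OMX.google."));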
+ if (MediaCodecUtils.selectColorFormat( + MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType())) + == null) { + return false; + } + return isCodecAllowed(info); + } + + private boolean isCodecAllowed(MediaCodecInfo info) { + if (codecAllowedPredicate == null) { + return true; + } + return codecAllowedPredicate.test(info); + } + + private boolean isH264HighProfileSupported(MediaCodecInfo info) { + String name = info.getName(); + // Support H.264 HP decoding on QCOM chips. + if (name.startsWith(QCOM_PREFIX)) { + return true; + } + // Support H.264 HP decoding on Exynos chips for Android M and above. + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && name.startsWith(EXYNOS_PREFIX)) { + return true; + } + return false; + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java new file mode 100644 index 0000000000..60c853df35 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapper.java @@ -0,0 +1,55 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodec; +import android.media.MediaCrypto; +import android.media.MediaFormat; +import android.os.Bundle; +import android.view.Surface; +import java.nio.ByteBuffer; + +/** + * Subset of methods defined in {@link android.media.MediaCodec} needed by + * {@link HardwareVideoEncoder} and {@link AndroidVideoDecoder}. This interface + * exists to allow mocking and using a fake implementation in tests. + */ +interface MediaCodecWrapper { + void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags); + + void start(); + + void flush(); + + void stop(); + + void release(); + + int dequeueInputBuffer(long timeoutUs); + + void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags); + + int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs); + + void releaseOutputBuffer(int index, boolean render); + + MediaFormat getInputFormat(); + + MediaFormat getOutputFormat(); + + ByteBuffer getInputBuffer(int index); + + ByteBuffer getOutputBuffer(int index); + + Surface createInputSurface(); + + void setParameters(Bundle params); +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java new file mode 100644 index 0000000000..2962cb62a7 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactory.java @@ -0,0 +1,22 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import java.io.IOException; + +interface MediaCodecWrapperFactory { + /** + * Creates a new {@link MediaCodecWrapper} by codec name. + * + * <p>For additional information see {@link android.media.MediaCodec#createByCodecName}. + */ + MediaCodecWrapper createByCodecName(String name) throws IOException; +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java new file mode 100644 index 0000000000..2ba62ac7d6 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java @@ -0,0 +1,115 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodec; +import android.media.MediaCodec.BufferInfo; +import android.media.MediaCrypto; +import android.media.MediaFormat; +import android.os.Bundle; +import android.view.Surface; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * Implementation of MediaCodecWrapperFactory that returns MediaCodecInterfaces wrapping + * {@link android.media.MediaCodec} objects. + */ +class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory { + private static class MediaCodecWrapperImpl implements MediaCodecWrapper { + private final MediaCodec mediaCodec; + + public MediaCodecWrapperImpl(MediaCodec mediaCodec) { + this.mediaCodec = mediaCodec; + } + + @Override + public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) { + mediaCodec.configure(format, surface, crypto, flags); + } + + @Override + public void start() { + mediaCodec.start(); + } + + @Override + public void flush() { + mediaCodec.flush(); + } + + @Override + public void stop() { + mediaCodec.stop(); + } + + @Override + public void release() { + mediaCodec.release(); + } + + @Override + public int dequeueInputBuffer(long timeoutUs) { + return mediaCodec.dequeueInputBuffer(timeoutUs); + } + + @Override + public void queueInputBuffer( + int index, int offset, int size, long presentationTimeUs, int flags) { + mediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags); + } + + @Override + public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) { + return mediaCodec.dequeueOutputBuffer(info, timeoutUs); + } + + @Override + public void releaseOutputBuffer(int index, boolean render) { + mediaCodec.releaseOutputBuffer(index, render); + } + + @Override + public MediaFormat getInputFormat() { + return mediaCodec.getInputFormat(); + } + + @Override + public MediaFormat getOutputFormat() { + return mediaCodec.getOutputFormat(); + } + + @Override + public ByteBuffer getInputBuffer(int index) { + return mediaCodec.getInputBuffer(index); + } + + @Override + public ByteBuffer getOutputBuffer(int index) { + return mediaCodec.getOutputBuffer(index); + } + + @Override + public Surface createInputSurface() { + return mediaCodec.createInputSurface(); + } + + @Override + public void setParameters(Bundle params) { + mediaCodec.setParameters(params); + } + } + + @Override + public MediaCodecWrapper createByCodecName(String name) throws 
IOException { + return new MediaCodecWrapperImpl(MediaCodec.createByCodecName(name)); + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java new file mode 100644 index 0000000000..fe0221d826 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV12Buffer.java @@ -0,0 +1,73 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; + +public class NV12Buffer implements VideoFrame.Buffer { + private final int width; + private final int height; + private final int stride; + private final int sliceHeight; + private final ByteBuffer buffer; + private final RefCountDelegate refCountDelegate; + + public NV12Buffer(int width, int height, int stride, int sliceHeight, ByteBuffer buffer, + @Nullable Runnable releaseCallback) { + this.width = width; + this.height = height; + this.stride = stride; + this.sliceHeight = sliceHeight; + this.buffer = buffer; + this.refCountDelegate = new RefCountDelegate(releaseCallback); + } + + @Override + public int getWidth() { + return width; + } + + @Override + public int getHeight() { + return height; + } + + @Override + public VideoFrame.I420Buffer toI420() { + return (VideoFrame.I420Buffer) cropAndScale(0, 0, width, height, width, height); + } + + @Override + public void retain() { + refCountDelegate.retain(); + } + + @Override + public void release() { + refCountDelegate.release(); + } + + @Override + public VideoFrame.Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight); + nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, buffer, width, + height, stride, sliceHeight, newBuffer.getDataY(), newBuffer.getStrideY(), + newBuffer.getDataU(), newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV()); + return newBuffer; + } + + private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight, + int scaleWidth, int scaleHeight, ByteBuffer src, int srcWidth, int srcHeight, int srcStride, + int srcSliceHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, int dstStrideU, + ByteBuffer dstV, int dstStrideV); +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java new file mode 100644 index 0000000000..0fb1afe74b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java @@ -0,0 +1,69 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; + +public class NV21Buffer implements VideoFrame.Buffer { + private final byte[] data; + private final int width; + private final int height; + private final RefCountDelegate refCountDelegate; + + public NV21Buffer(byte[] data, int width, int height, @Nullable Runnable releaseCallback) { + this.data = data; + this.width = width; + this.height = height; + this.refCountDelegate = new RefCountDelegate(releaseCallback); + } + + @Override + public int getWidth() { + return width; + } + + @Override + public int getHeight() { + return height; + } + + @Override + public VideoFrame.I420Buffer toI420() { + // Cropping converts the frame to I420. Just crop and scale to the whole image. + return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */, + height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */); + } + + @Override + public void retain() { + refCountDelegate.retain(); + } + + @Override + public void release() { + refCountDelegate.release(); + } + + @Override + public VideoFrame.Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight); + nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width, + height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(), + newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV()); + return newBuffer; + } + + private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight, + int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY, + int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV); +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java new file mode 100644 index 0000000000..d4fba481e8 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import org.webrtc.VideoFrame; +import org.webrtc.VideoProcessor; + +/** + * This class is meant to be a simple layer that only handles the JNI wrapping of a C++ + * AndroidVideoTrackSource, so that it can easily be mocked out in Java unit tests. Refrain from + * adding any unnecessary logic to this class. + * This class is thread safe and methods can be called from any thread, but if frames A, B, ..., are + * sent to adaptFrame(), the adapted frames adaptedA, adaptedB, ..., need to be passed in the same + * order to onFrameCaptured(). + */ +class NativeAndroidVideoTrackSource { + // Pointer to webrtc::jni::AndroidVideoTrackSource.
+ private final long nativeAndroidVideoTrackSource; + + public NativeAndroidVideoTrackSource(long nativeAndroidVideoTrackSource) { + this.nativeAndroidVideoTrackSource = nativeAndroidVideoTrackSource; + } + + /** + * Set the state for the native MediaSourceInterface. Maps boolean to either + * SourceState::kLive or SourceState::kEnded. + */ + public void setState(boolean isLive) { + nativeSetState(nativeAndroidVideoTrackSource, isLive); + } + + /** + * This function should be called before delivering any frame to determine if the frame should be + * dropped or what the cropping and scaling parameters should be. If the return value is null, the + * frame should be dropped; otherwise, the frame should be adapted in accordance with the frame + * adaptation parameters before calling onFrameCaptured(). + */ + @Nullable + public VideoProcessor.FrameAdaptationParameters adaptFrame(VideoFrame frame) { + return nativeAdaptFrame(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(), + frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs()); + } + + /** + * Pass an adapted frame to the native AndroidVideoTrackSource. Note that adaptFrame() is + * expected to be called first, and that the passed frame should conform to the parameters it + * returned. + */ + public void onFrameCaptured(VideoFrame frame) { + nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getRotation(), + frame.getTimestampNs(), frame.getBuffer()); + } + + /** + * Calling this function will cause frames to be scaled down to the requested resolution. Also, + * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match + * the requested fps. + */ + public void adaptOutputFormat(VideoSource.AspectRatio targetLandscapeAspectRatio, + @Nullable Integer maxLandscapePixelCount, VideoSource.AspectRatio targetPortraitAspectRatio, + @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) { + nativeAdaptOutputFormat(nativeAndroidVideoTrackSource, targetLandscapeAspectRatio.width, + targetLandscapeAspectRatio.height, maxLandscapePixelCount, targetPortraitAspectRatio.width, + targetPortraitAspectRatio.height, maxPortraitPixelCount, maxFps); + } + + public void setIsScreencast(boolean isScreencast) { + nativeSetIsScreencast(nativeAndroidVideoTrackSource, isScreencast); + } + + @CalledByNative + static VideoProcessor.FrameAdaptationParameters createFrameAdaptationParameters(int cropX, + int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight, long timestampNs, + boolean drop) { + return new VideoProcessor.FrameAdaptationParameters( + cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, timestampNs, drop); + } + + private static native void nativeSetIsScreencast( + long nativeAndroidVideoTrackSource, boolean isScreencast); + private static native void nativeSetState(long nativeAndroidVideoTrackSource, boolean isLive); + private static native void nativeAdaptOutputFormat(long nativeAndroidVideoTrackSource, + int landscapeWidth, int landscapeHeight, @Nullable Integer maxLandscapePixelCount, + int portraitWidth, int portraitHeight, @Nullable Integer maxPortraitPixelCount, + @Nullable Integer maxFps); + @Nullable + private static native VideoProcessor.FrameAdaptationParameters nativeAdaptFrame( + long nativeAndroidVideoTrackSource, int width, int height, int rotation, long timestampNs); + private static native void nativeOnFrameCaptured( + long nativeAndroidVideoTrackSource, int rotation, long timestampNs, VideoFrame.Buffer buffer); +} diff --git 
a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java new file mode 100644 index 0000000000..c195fb3a4c --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeCapturerObserver.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.webrtc.VideoFrame; + +/** + * Used from the native API; implements a simple CapturerObserver that feeds frames to + * a webrtc::jni::AndroidVideoTrackSource. + */ +class NativeCapturerObserver implements CapturerObserver { + private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource; + + @CalledByNative + public NativeCapturerObserver(long nativeSource) { + this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource); + } + + @Override + public void onCapturerStarted(boolean success) { + nativeAndroidVideoTrackSource.setState(success); + } + + @Override + public void onCapturerStopped() { + nativeAndroidVideoTrackSource.setState(/* isLive= */ false); + } + + @Override + public void onFrameCaptured(VideoFrame frame) { + final VideoProcessor.FrameAdaptationParameters parameters = + nativeAndroidVideoTrackSource.adaptFrame(frame); + if (parameters == null) { + // Drop frame. + return; + } + + final VideoFrame.Buffer adaptedBuffer = + frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth, + parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight); + nativeAndroidVideoTrackSource.onFrameCaptured( + new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs)); + adaptedBuffer.release(); + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java new file mode 100644 index 0000000000..531c216302 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/NativeLibrary.java @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +class NativeLibrary { + private static final String TAG = "NativeLibrary"; + + static class DefaultLoader implements NativeLibraryLoader { + @Override + public boolean load(String name) { + Logging.d(TAG, "Loading library: " + name); + System.loadLibrary(name); + + // Not relevant, but kept for API compatibility. + return true; + } + } + + private static final Object lock = new Object(); + private static boolean libraryLoaded; + + /** + * Loads the native library. Clients should call PeerConnectionFactory.initialize. It will call + * this method for them.
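   * For illustration, a direct call would look like the sketch below (the library name is an
   * assumed example, not taken from this file):
   * {@code NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), "jingle_peerconnection_so");}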
+ */ + static void initialize(NativeLibraryLoader loader, String libraryName) { + synchronized (lock) { + if (libraryLoaded) { + Logging.d(TAG, "Native library has already been loaded."); + return; + } + Logging.d(TAG, "Loading native library: " + libraryName); + libraryLoaded = loader.load(libraryName); + } + } + + /** Returns true if the library has been loaded successfully. */ + static boolean isLoaded() { + synchronized (lock) { + return libraryLoaded; + } + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java new file mode 100644 index 0000000000..b9210d26a4 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/RefCountDelegate.java @@ -0,0 +1,63 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Implementation of RefCounted that executes a Runnable once the ref count reaches zero. + */ +class RefCountDelegate implements RefCounted { + private final AtomicInteger refCount = new AtomicInteger(1); + private final @Nullable Runnable releaseCallback; + + /** + * @param releaseCallback Callback that will be executed once the ref count reaches zero. + */ + public RefCountDelegate(@Nullable Runnable releaseCallback) { + this.releaseCallback = releaseCallback; + } + + @Override + public void retain() { + int updated_count = refCount.incrementAndGet(); + if (updated_count < 2) { + throw new IllegalStateException("retain() called on an object with refcount < 1"); + } + } + + @Override + public void release() { + int updated_count = refCount.decrementAndGet(); + if (updated_count < 0) { + throw new IllegalStateException("release() called on an object with refcount < 1"); + } + if (updated_count == 0 && releaseCallback != null) { + releaseCallback.run(); + } + } + + /** + * Tries to retain the object. Can be used in scenarios where it is unknown if the object has + * already been released. Returns true if successful or false if the object was already released. + */ + boolean safeRetain() { + int currentRefCount = refCount.get(); + while (currentRefCount != 0) { + if (refCount.weakCompareAndSet(currentRefCount, currentRefCount + 1)) { + return true; + } + currentRefCount = refCount.get(); + } + return false; + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java new file mode 100644 index 0000000000..26a030919d --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java @@ -0,0 +1,29 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +/** Enumeration of supported video codec types. */ +enum VideoCodecMimeType { + VP8("video/x-vnd.on2.vp8"), + VP9("video/x-vnd.on2.vp9"), + H264("video/avc"), + AV1("video/av01"); + + private final String mimeType; + + private VideoCodecMimeType(String mimeType) { + this.mimeType = mimeType; + } + + String mimeType() { + return mimeType; + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java new file mode 100644 index 0000000000..2aae041640 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java @@ -0,0 +1,27 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.webrtc.VideoDecoder; + +/** + * This class contains the Java glue code for JNI generation of VideoDecoder. + */ +class VideoDecoderWrapper { + @CalledByNative + static VideoDecoder.Callback createDecoderCallback(final long nativeDecoder) { + return (VideoFrame frame, Integer decodeTimeMs, + Integer qp) -> nativeOnDecodedFrame(nativeDecoder, frame, decodeTimeMs, qp); + } + + private static native void nativeOnDecodedFrame( + long nativeVideoDecoderWrapper, VideoFrame frame, Integer decodeTimeMs, Integer qp); +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java new file mode 100644 index 0000000000..b5485d4edb --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java @@ -0,0 +1,46 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +// Explicit imports necessary for JNI generation. +import androidx.annotation.Nullable; +import org.webrtc.VideoEncoder; + +/** + * This class contains the Java glue code for JNI generation of VideoEncoder. 
+ */ +class VideoEncoderWrapper { + @CalledByNative + static boolean getScalingSettingsOn(VideoEncoder.ScalingSettings scalingSettings) { + return scalingSettings.on; + } + + @Nullable + @CalledByNative + static Integer getScalingSettingsLow(VideoEncoder.ScalingSettings scalingSettings) { + return scalingSettings.low; + } + + @Nullable + @CalledByNative + static Integer getScalingSettingsHigh(VideoEncoder.ScalingSettings scalingSettings) { + return scalingSettings.high; + } + + @CalledByNative + static VideoEncoder.Callback createEncoderCallback(final long nativeEncoder) { + return (EncodedImage frame, + VideoEncoder.CodecSpecificInfo info) -> nativeOnEncodedFrame(nativeEncoder, frame); + } + + private static native void nativeOnEncodedFrame( + long nativeVideoEncoderWrapper, EncodedImage frame); +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java new file mode 100644 index 0000000000..023e92cfb1 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WebRtcClassLoader.java @@ -0,0 +1,27 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * This class provides a ClassLoader that is capable of loading WebRTC Java classes regardless of + * what thread it's called from. Such a ClassLoader is needed for the few cases where the JNI + * mechanism is unable to automatically determine the appropriate ClassLoader instance. + */ +class WebRtcClassLoader { + @CalledByNative + static Object getClassLoader() { + Object loader = WebRtcClassLoader.class.getClassLoader(); + if (loader == null) { + throw new RuntimeException("Failed to get WebRTC class loader."); + } + return loader; + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java new file mode 100644 index 0000000000..0461660fcf --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java @@ -0,0 +1,110 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** + * This class wraps a webrtc::I420BufferInterface into a VideoFrame.I420Buffer. 
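 * Note that retain() and release() below delegate directly to the native reference count via
 * JniCommon.nativeAddRef()/nativeReleaseRef(), so the lifetime of the wrapped native buffer is
 * tied to this Java object's reference counting.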
+ */ +class WrappedNativeI420Buffer implements VideoFrame.I420Buffer { + private final int width; + private final int height; + private final ByteBuffer dataY; + private final int strideY; + private final ByteBuffer dataU; + private final int strideU; + private final ByteBuffer dataV; + private final int strideV; + private final long nativeBuffer; + + @CalledByNative + WrappedNativeI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU, + int strideU, ByteBuffer dataV, int strideV, long nativeBuffer) { + this.width = width; + this.height = height; + this.dataY = dataY; + this.strideY = strideY; + this.dataU = dataU; + this.strideU = strideU; + this.dataV = dataV; + this.strideV = strideV; + this.nativeBuffer = nativeBuffer; + + retain(); + } + + @Override + public int getWidth() { + return width; + } + + @Override + public int getHeight() { + return height; + } + + @Override + public ByteBuffer getDataY() { + // Return a slice to prevent relative reads from changing the position. + return dataY.slice(); + } + + @Override + public ByteBuffer getDataU() { + // Return a slice to prevent relative reads from changing the position. + return dataU.slice(); + } + + @Override + public ByteBuffer getDataV() { + // Return a slice to prevent relative reads from changing the position. + return dataV.slice(); + } + + @Override + public int getStrideY() { + return strideY; + } + + @Override + public int getStrideU() { + return strideU; + } + + @Override + public int getStrideV() { + return strideV; + } + + @Override + public VideoFrame.I420Buffer toI420() { + retain(); + return this; + } + + @Override + public void retain() { + JniCommon.nativeAddRef(nativeBuffer); + } + + @Override + public void release() { + JniCommon.nativeReleaseRef(nativeBuffer); + } + + @Override + public VideoFrame.Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + return JavaI420Buffer.cropAndScaleI420( + this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight); + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java new file mode 100644 index 0000000000..70c625ab4f --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/LowLatencyAudioBufferManager.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.audio; + +import android.media.AudioTrack; +import android.os.Build; +import org.webrtc.Logging; + +// Lowers the buffer size if no underruns are detected for 100 ms. Once an +// underrun is detected, the buffer size is increased by 10 ms and it will not +// be lowered further. The buffer size will never be increased more than +// 5 times, to avoid the possibility of the buffer size increasing without +// bounds. +class LowLatencyAudioBufferManager { + private static final String TAG = "LowLatencyAudioBufferManager"; + // The underrun count that was valid during the previous call to maybeAdjustBufferSize(). Used to + // detect increases in the value. 
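  // As a worked example of the 10 ms step used below (assuming a 48000 Hz playback rate, which is
  // an illustrative value, not one taken from this class): getPlaybackRate() / 100 = 480 frames,
  // so each increase or decrease moves the AudioTrack buffer by 480 frames per adjustment.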
+ private int prevUnderrunCount; + // The number of ticks to wait without an underrun before decreasing the buffer size. + private int ticksUntilNextDecrease; + // Indicates whether we should keep trying to decrease the buffer size. + private boolean keepLoweringBufferSize; + // The number of times the buffer size has been increased. + private int bufferIncreaseCounter; + + public LowLatencyAudioBufferManager() { + this.prevUnderrunCount = 0; + this.ticksUntilNextDecrease = 10; + this.keepLoweringBufferSize = true; + this.bufferIncreaseCounter = 0; + } + + public void maybeAdjustBufferSize(AudioTrack audioTrack) { + if (Build.VERSION.SDK_INT >= 26) { + final int underrunCount = audioTrack.getUnderrunCount(); + if (underrunCount > prevUnderrunCount) { + // Don't increase the buffer more than 5 times. Continuing to increase the buffer size + // could be harmful on low-power devices that regularly experience underruns under + // normal conditions. + if (bufferIncreaseCounter < 5) { + // Underrun detected, increase buffer size by 10 ms. + final int currentBufferSize = audioTrack.getBufferSizeInFrames(); + final int newBufferSize = currentBufferSize + audioTrack.getPlaybackRate() / 100; + Logging.d(TAG, + "Underrun detected! Increasing AudioTrack buffer size from " + currentBufferSize + + " to " + newBufferSize); + audioTrack.setBufferSizeInFrames(newBufferSize); + bufferIncreaseCounter++; + } + // Stop trying to lower the buffer size. + keepLoweringBufferSize = false; + prevUnderrunCount = underrunCount; + ticksUntilNextDecrease = 10; + } else if (keepLoweringBufferSize) { + ticksUntilNextDecrease--; + if (ticksUntilNextDecrease <= 0) { + // No underrun seen for 100 ms, try to lower the buffer size by 10 ms. + final int bufferSize10ms = audioTrack.getPlaybackRate() / 100; + // Never go below a buffer size of 10 ms. + final int currentBufferSize = audioTrack.getBufferSizeInFrames(); + final int newBufferSize = Math.max(bufferSize10ms, currentBufferSize - bufferSize10ms); + if (newBufferSize != currentBufferSize) { + Logging.d(TAG, + "Lowering AudioTrack buffer size from " + currentBufferSize + " to " + + newBufferSize); + audioTrack.setBufferSizeInFrames(newBufferSize); + } + ticksUntilNextDecrease = 10; + } + } + } + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java new file mode 100644 index 0000000000..06d5cd3a8e --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/VolumeLogger.java @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.audio; + +import android.media.AudioManager; +import androidx.annotation.Nullable; +import java.util.Timer; +import java.util.TimerTask; +import org.webrtc.Logging; + +// TODO(magjed): Do we really need to spawn a new thread just to log volume? Can we re-use the +// AudioTrackThread instead? +/** + * Private utility class that periodically checks and logs the volume level of the audio stream that + * is currently controlled by the volume control.
A timer triggers logs once every 30 seconds and + * the timer's associated thread is named "WebRtcVolumeLevelLoggerThread". + */ +class VolumeLogger { + private static final String TAG = "VolumeLogger"; + private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread"; + private static final int TIMER_PERIOD_IN_SECONDS = 30; + + private final AudioManager audioManager; + private @Nullable Timer timer; + + public VolumeLogger(AudioManager audioManager) { + this.audioManager = audioManager; + } + + public void start() { + Logging.d(TAG, "start" + WebRtcAudioUtils.getThreadInfo()); + if (timer != null) { + return; + } + Logging.d(TAG, "audio mode is: " + WebRtcAudioUtils.modeToString(audioManager.getMode())); + + timer = new Timer(THREAD_NAME); + timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING), + audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)), + 0, TIMER_PERIOD_IN_SECONDS * 1000); + } + + private class LogVolumeTask extends TimerTask { + private final int maxRingVolume; + private final int maxVoiceCallVolume; + + LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) { + this.maxRingVolume = maxRingVolume; + this.maxVoiceCallVolume = maxVoiceCallVolume; + } + + @Override + public void run() { + final int mode = audioManager.getMode(); + if (mode == AudioManager.MODE_RINGTONE) { + Logging.d(TAG, + "STREAM_RING stream volume: " + audioManager.getStreamVolume(AudioManager.STREAM_RING) + + " (max=" + maxRingVolume + ")"); + } else if (mode == AudioManager.MODE_IN_COMMUNICATION) { + Logging.d(TAG, + "VOICE_CALL stream volume: " + + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + + " (max=" + maxVoiceCallVolume + ")"); + } + } + } + + public void stop() { + Logging.d(TAG, "stop" + WebRtcAudioUtils.getThreadInfo()); + if (timer != null) { + timer.cancel(); + timer = null; + } + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java new file mode 100644 index 0000000000..a9ff1011b6 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java @@ -0,0 +1,227 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.audio; + +import android.media.audiofx.AcousticEchoCanceler; +import android.media.audiofx.AudioEffect; +import android.media.audiofx.AudioEffect.Descriptor; +import android.media.audiofx.NoiseSuppressor; +import android.os.Build; +import androidx.annotation.Nullable; +import java.util.UUID; +import org.webrtc.Logging; + +// This class wraps control of two different platform effects. Supported +// effects are: AcousticEchoCanceler (AEC) and NoiseSuppressor (NS). +// Calling enable() will activate all effects that are +// supported by the device if the corresponding `shouldEnableXXX` member is set. +class WebRtcAudioEffects { + private static final boolean DEBUG = false; + + private static final String TAG = "WebRtcAudioEffectsExternal"; + + // UUIDs for Software Audio Effects that we want to avoid using.
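 // (They are matched against Descriptor.uuid in isEffectTypeAvailable() below: if the first
 // effect found for a type carries one of these UUIDs, the type is reported as unsupported.)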
+ // The implementor field will be set to "The Android Open Source Project". + private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER = + UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b"); + private static final UUID AOSP_NOISE_SUPPRESSOR = + UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b"); + + // Contains the available effect descriptors returned from the + // AudioEffect.queryEffects() call. This result is cached to avoid doing the + // slow OS call multiple times. + private static @Nullable Descriptor[] cachedEffects; + + // Contains the audio effect objects. Created in enable() and destroyed + // in release(). + private @Nullable AcousticEchoCanceler aec; + private @Nullable NoiseSuppressor ns; + + // Affects the final state given to the setEnabled() method on each effect. + // The default state is set to "disabled" but each effect can also be enabled + // by calling setAEC() and setNS(). + private boolean shouldEnableAec; + private boolean shouldEnableNs; + + // Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are + // fulfilled. + public static boolean isAcousticEchoCancelerSupported() { + return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER); + } + + // Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled. + public static boolean isNoiseSuppressorSupported() { + return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR); + } + + public WebRtcAudioEffects() { + Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); + } + + // Call this method to enable or disable the platform AEC. It modifies + // `shouldEnableAec` which is used in enable() where the actual state + // of the AEC effect is modified. Returns true if HW AEC is supported and + // false otherwise. + public boolean setAEC(boolean enable) { + Logging.d(TAG, "setAEC(" + enable + ")"); + if (!isAcousticEchoCancelerSupported()) { + Logging.w(TAG, "Platform AEC is not supported"); + shouldEnableAec = false; + return false; + } + if (aec != null && (enable != shouldEnableAec)) { + Logging.e(TAG, "Platform AEC state can't be modified while recording"); + return false; + } + shouldEnableAec = enable; + return true; + } + + // Call this method to enable or disable the platform NS. It modifies + // `shouldEnableNs` which is used in enable() where the actual state + // of the NS effect is modified. Returns true if HW NS is supported and + // false otherwise. + public boolean setNS(boolean enable) { + Logging.d(TAG, "setNS(" + enable + ")"); + if (!isNoiseSuppressorSupported()) { + Logging.w(TAG, "Platform NS is not supported"); + shouldEnableNs = false; + return false; + } + if (ns != null && (enable != shouldEnableNs)) { + Logging.e(TAG, "Platform NS state can't be modified while recording"); + return false; + } + shouldEnableNs = enable; + return true; + } + + public void enable(int audioSession) { + Logging.d(TAG, "enable(audioSession=" + audioSession + ")"); + assertTrue(aec == null); + assertTrue(ns == null); + + if (DEBUG) { + // Add logging of supported effects, restricted to the "VoIP effects", i.e., + // AEC, AGC and NS. Avoid calling AudioEffect.queryEffects() unless the + // DEBUG flag is set since we have seen crashes in this API.
+ for (Descriptor d : AudioEffect.queryEffects()) { + if (effectTypeIsVoIP(d.type)) { + Logging.d(TAG, + "name: " + d.name + ", " + + "mode: " + d.connectMode + ", " + + "implementor: " + d.implementor + ", " + + "UUID: " + d.uuid); + } + } + } + + if (isAcousticEchoCancelerSupported()) { + // Create an AcousticEchoCanceler and attach it to the AudioRecord on + // the specified audio session. + aec = AcousticEchoCanceler.create(audioSession); + if (aec != null) { + boolean enabled = aec.getEnabled(); + boolean enable = shouldEnableAec && isAcousticEchoCancelerSupported(); + if (aec.setEnabled(enable) != AudioEffect.SUCCESS) { + Logging.e(TAG, "Failed to set the AcousticEchoCanceler state"); + } + Logging.d(TAG, + "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled") + ", enable: " + + enable + ", is now: " + (aec.getEnabled() ? "enabled" : "disabled")); + } else { + Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance"); + } + } + + if (isNoiseSuppressorSupported()) { + // Create a NoiseSuppressor and attach it to the AudioRecord on the + // specified audio session. + ns = NoiseSuppressor.create(audioSession); + if (ns != null) { + boolean enabled = ns.getEnabled(); + boolean enable = shouldEnableNs && isNoiseSuppressorSupported(); + if (ns.setEnabled(enable) != AudioEffect.SUCCESS) { + Logging.e(TAG, "Failed to set the NoiseSuppressor state"); + } + Logging.d(TAG, + "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable + + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled")); + } else { + Logging.e(TAG, "Failed to create the NoiseSuppressor instance"); + } + } + } + + // Releases all native audio effect resources. It is a good practice to + // release the effect engine when not in use, so that control can be returned + // to other applications and the native resources released. + public void release() { + Logging.d(TAG, "release"); + if (aec != null) { + aec.release(); + aec = null; + } + if (ns != null) { + ns.release(); + ns = null; + } + } + + // Returns true for effect types in `type` that are of "VoIP" types: + // Acoustic Echo Canceler (AEC) or Automatic Gain Control (AGC) or + // Noise Suppressor (NS). Note that an extra check for support is needed + // in each comparison since some devices include effects in the + // AudioEffect.Descriptor array that are actually not available on the device. + // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but + // AutomaticGainControl.isAvailable() returns false. + private boolean effectTypeIsVoIP(UUID type) { + return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported()) + || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported()); + } + + // Helper method which throws an exception when an assertion has failed. + private static void assertTrue(boolean condition) { + if (!condition) { + throw new AssertionError("Expected condition to be true"); + } + } + + // Returns the cached copy of the audio effects array, if available, or + // queries the operating system for the list of effects. + private static @Nullable Descriptor[] getAvailableEffects() { + if (cachedEffects != null) { + return cachedEffects; + } + // The caching is best effort only - if this method is called from several + // threads in parallel, they may end up doing the underlying OS call + // multiple times. It's normally only called on one thread so there's no + // real need to optimize for the multiple threads case.
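    // (The race is benign: concurrent callers may each perform the query, but they assign
    // equivalent results, and the reference write below is atomic in Java.)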
+ cachedEffects = AudioEffect.queryEffects(); + return cachedEffects; + } + + // Returns true if an effect of the specified type is available. Functionally + // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but + // faster as it avoids the expensive OS call to enumerate effects. + private static boolean isEffectTypeAvailable(UUID effectType, UUID blockListedUuid) { + Descriptor[] effects = getAvailableEffects(); + if (effects == null) { + return false; + } + for (Descriptor d : effects) { + if (d.type.equals(effectType)) { + return !d.uuid.equals(blockListedUuid); + } + } + return false; + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java new file mode 100644 index 0000000000..f398602a28 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java @@ -0,0 +1,122 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.audio; + +import android.content.Context; +import android.content.pm.PackageManager; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioRecord; +import android.media.AudioTrack; +import android.os.Build; +import org.webrtc.CalledByNative; +import org.webrtc.Logging; + +/** + * This class contains static functions to query sample rate and input/output audio buffer sizes. + */ +class WebRtcAudioManager { + private static final String TAG = "WebRtcAudioManagerExternal"; + + private static final int DEFAULT_SAMPLE_RATE_HZ = 16000; + + // Default audio data format is PCM 16 bit per sample. + // Guaranteed to be supported by all devices. + private static final int BITS_PER_SAMPLE = 16; + + private static final int DEFAULT_FRAME_PER_BUFFER = 256; + + @CalledByNative + static AudioManager getAudioManager(Context context) { + return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); + } + + @CalledByNative + static int getOutputBufferSize( + Context context, AudioManager audioManager, int sampleRate, int numberOfOutputChannels) { + return isLowLatencyOutputSupported(context) + ? getLowLatencyFramesPerBuffer(audioManager) + : getMinOutputFrameSize(sampleRate, numberOfOutputChannels); + } + + @CalledByNative + static int getInputBufferSize( + Context context, AudioManager audioManager, int sampleRate, int numberOfInputChannels) { + return isLowLatencyInputSupported(context) + ? getLowLatencyFramesPerBuffer(audioManager) + : getMinInputFrameSize(sampleRate, numberOfInputChannels); + } + + private static boolean isLowLatencyOutputSupported(Context context) { + return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY); + } + + private static boolean isLowLatencyInputSupported(Context context) { + // TODO(henrika): investigate if some sort of device list is needed here + // as well. The NDK doc states that: "As of API level 21, lower latency + // audio input is supported on select devices. To take advantage of this + // feature, first confirm that lower latency output is available".
+ return isLowLatencyOutputSupported(context); + } + + /** + * Returns the native input/output sample rate for this device's output stream. + */ + @CalledByNative + static int getSampleRate(AudioManager audioManager) { + // Override this if we're running on an old emulator image which only + // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE. + if (WebRtcAudioUtils.runningOnEmulator()) { + Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz."); + return 8000; + } + // Deliver the best possible estimate based on default Android AudioManager APIs. + final int sampleRateHz = getSampleRateForApiLevel(audioManager); + Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz"); + return sampleRateHz; + } + + private static int getSampleRateForApiLevel(AudioManager audioManager) { + String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE); + return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString); + } + + // Returns the native output buffer size for low-latency output streams. + private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) { + String framesPerBuffer = + audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER); + return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer); + } + + // Returns the minimum output buffer size for Java-based audio (AudioTrack). + // This size can also be used for OpenSL ES implementations on devices that + // lack support for low-latency output. + private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) { + final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8); + final int channelConfig = + (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO); + return AudioTrack.getMinBufferSize( + sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) + / bytesPerFrame; + } + + // Returns the minimum input buffer size for Java-based audio (AudioRecord). + // This size can also be used for OpenSL ES implementations on devices that + // lack support for low-latency input. + private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) { + final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8); + final int channelConfig = + (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO); + return AudioRecord.getMinBufferSize( + sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) + / bytesPerFrame; + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java new file mode 100644 index 0000000000..6647e5fcbb --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -0,0 +1,743 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +package org.webrtc.audio; + +import android.annotation.TargetApi; +import android.content.Context; +import android.media.AudioDeviceInfo; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioRecord; +import android.media.AudioRecordingConfiguration; +import android.media.AudioTimestamp; +import android.media.MediaRecorder.AudioSource; +import android.os.Build; +import android.os.Process; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import java.lang.System; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import org.webrtc.CalledByNative; +import org.webrtc.Logging; +import org.webrtc.ThreadUtils; +import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback; +import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode; +import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback; +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; + +class WebRtcAudioRecord { + private static final String TAG = "WebRtcAudioRecordExternal"; + + // Requested size of each recorded buffer provided to the client. + private static final int CALLBACK_BUFFER_SIZE_MS = 10; + + // Average number of callbacks per second. + private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS; + + // We ask for a native buffer size of BUFFER_SIZE_FACTOR * (minimum required + // buffer size). The extra space is allocated to guard against glitches under + // high load. + private static final int BUFFER_SIZE_FACTOR = 2; + + // The AudioRecordJavaThread is allowed to wait for a successful call to join(), + // but the wait times out after this amount of time. + private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000; + + public static final int DEFAULT_AUDIO_SOURCE = AudioSource.VOICE_COMMUNICATION; + + // Default audio data format is PCM 16 bit per sample. + // Guaranteed to be supported by all devices. + public static final int DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; + + // Indicates AudioRecord has started recording audio. + private static final int AUDIO_RECORD_START = 0; + + // Indicates AudioRecord has stopped recording audio. + private static final int AUDIO_RECORD_STOP = 1; + + // Time to wait before checking recording status after start has been called. Tests have + // shown that the result can sometimes be invalid (our own status might be missing) if we check + // directly after start.
+ private static final int CHECK_REC_STATUS_DELAY_MS = 100; + + private final Context context; + private final AudioManager audioManager; + private final int audioSource; + private final int audioFormat; + + private long nativeAudioRecord; + + private final WebRtcAudioEffects effects = new WebRtcAudioEffects(); + + private @Nullable ByteBuffer byteBuffer; + + private @Nullable AudioRecord audioRecord; + private @Nullable AudioRecordThread audioThread; + private @Nullable AudioDeviceInfo preferredDevice; + + private final ScheduledExecutorService executor; + private @Nullable ScheduledFuture<String> future; + + private volatile boolean microphoneMute; + private final AtomicReference<Boolean> audioSourceMatchesRecordingSessionRef = + new AtomicReference<>(); + private byte[] emptyBytes; + + private final @Nullable AudioRecordErrorCallback errorCallback; + private final @Nullable AudioRecordStateCallback stateCallback; + private final @Nullable SamplesReadyCallback audioSamplesReadyCallback; + private final boolean isAcousticEchoCancelerSupported; + private final boolean isNoiseSuppressorSupported; + + /** + * Audio thread which keeps calling AudioRecord.read(), waiting for audio + * to be recorded. Feeds recorded data to the native counterpart as a + * periodic sequence of callbacks using DataIsRecorded(). + * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority. + */ + private class AudioRecordThread extends Thread { + private volatile boolean keepAlive = true; + + public AudioRecordThread(String name) { + super(name); + } + + @Override + public void run() { + Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); + Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo()); + assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING); + + // Audio recording has started and the client is informed about it. + doAudioRecordStateCallback(AUDIO_RECORD_START); + + long lastTime = System.nanoTime(); + AudioTimestamp audioTimestamp = null; + if (Build.VERSION.SDK_INT >= 24) { + audioTimestamp = new AudioTimestamp(); + } + while (keepAlive) { + int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity()); + if (bytesRead == byteBuffer.capacity()) { + if (microphoneMute) { + byteBuffer.clear(); + byteBuffer.put(emptyBytes); + } + // It's possible we've been shut down during the read, and stopRecording() tried and + // failed to join this thread. To be a bit safer, try to avoid calling any native methods + // in case they've been unregistered after stopRecording() returned. + if (keepAlive) { + long captureTimeNs = 0; + if (Build.VERSION.SDK_INT >= 24) { + if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC) + == AudioRecord.SUCCESS) { + captureTimeNs = audioTimestamp.nanoTime; + } + } + nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); + } + if (audioSamplesReadyCallback != null) { + // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily + // at index 0.
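            // (For a direct buffer with a backing array, arrayOffset() is the offset of the
            // buffer's first element within that array, so the region copied below is
            // [arrayOffset(), arrayOffset() + capacity()).)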
+ byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), + byteBuffer.capacity() + byteBuffer.arrayOffset()); + audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady( + new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(), + audioRecord.getChannelCount(), audioRecord.getSampleRate(), data)); + } + } else { + String errorMessage = "AudioRecord.read failed: " + bytesRead; + Logging.e(TAG, errorMessage); + if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) { + keepAlive = false; + reportWebRtcAudioRecordError(errorMessage); + } + } + } + + try { + if (audioRecord != null) { + audioRecord.stop(); + doAudioRecordStateCallback(AUDIO_RECORD_STOP); + } + } catch (IllegalStateException e) { + Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage()); + } + } + + // Stops the inner thread loop and also calls AudioRecord.stop(). + // Does not block the calling thread. + public void stopThread() { + Logging.d(TAG, "stopThread"); + keepAlive = false; + } + } + + @CalledByNative + WebRtcAudioRecord(Context context, AudioManager audioManager) { + this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE, + DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, + null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), + WebRtcAudioEffects.isNoiseSuppressorSupported()); + } + + public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, + AudioManager audioManager, int audioSource, int audioFormat, + @Nullable AudioRecordErrorCallback errorCallback, + @Nullable AudioRecordStateCallback stateCallback, + @Nullable SamplesReadyCallback audioSamplesReadyCallback, + boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { + if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) { + throw new IllegalArgumentException("HW AEC not supported"); + } + if (isNoiseSuppressorSupported && !WebRtcAudioEffects.isNoiseSuppressorSupported()) { + throw new IllegalArgumentException("HW NS not supported"); + } + this.context = context; + this.executor = scheduler; + this.audioManager = audioManager; + this.audioSource = audioSource; + this.audioFormat = audioFormat; + this.errorCallback = errorCallback; + this.stateCallback = stateCallback; + this.audioSamplesReadyCallback = audioSamplesReadyCallback; + this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported; + this.isNoiseSuppressorSupported = isNoiseSuppressorSupported; + Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); + } + + @CalledByNative + public void setNativeAudioRecord(long nativeAudioRecord) { + this.nativeAudioRecord = nativeAudioRecord; + } + + @CalledByNative + boolean isAcousticEchoCancelerSupported() { + return isAcousticEchoCancelerSupported; + } + + @CalledByNative + boolean isNoiseSuppressorSupported() { + return isNoiseSuppressorSupported; + } + + // Returns true if a valid call to verifyAudioConfig() has been done. Should always be + // checked before using the returned value of isAudioSourceMatchingRecordingSession(). + @CalledByNative + boolean isAudioConfigVerified() { + return audioSourceMatchesRecordingSessionRef.get() != null; + } + + // Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay when + // startRecording() has been called. Hence, should preferably be called in combination with + // stopRecording() to ensure that it has been set properly. 
Checking `isAudioConfigVerified` first + // ensures that the returned value is valid. + @CalledByNative + boolean isAudioSourceMatchingRecordingSession() { + Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get(); + if (audioSourceMatchesRecordingSession == null) { + Logging.w(TAG, "Audio configuration has not yet been verified"); + return false; + } + return audioSourceMatchesRecordingSession; + } + + @CalledByNative + private boolean enableBuiltInAEC(boolean enable) { + Logging.d(TAG, "enableBuiltInAEC(" + enable + ")"); + return effects.setAEC(enable); + } + + @CalledByNative + private boolean enableBuiltInNS(boolean enable) { + Logging.d(TAG, "enableBuiltInNS(" + enable + ")"); + return effects.setNS(enable); + } + + @CalledByNative + private int initRecording(int sampleRate, int channels) { + Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")"); + if (audioRecord != null) { + reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); + return -1; + } + final int bytesPerFrame = channels * getBytesPerSample(audioFormat); + final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND; + byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); + if (!(byteBuffer.hasArray())) { + reportWebRtcAudioRecordInitError("ByteBuffer does not have backing array."); + return -1; + } + Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity()); + emptyBytes = new byte[byteBuffer.capacity()]; + // Rather than passing the ByteBuffer with every callback (requiring + // the potentially expensive GetDirectBufferAddress) we simply have + // the native class cache the address to the memory once. + nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); + + // Get the minimum buffer size required for the successful creation of + // an AudioRecord object, in byte units. + // Note that this size doesn't guarantee a smooth recording under load. + final int channelConfig = channelCountToConfiguration(channels); + int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat); + if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) { + reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize); + return -1; + } + Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize); + + // Use a larger buffer size than the minimum required when creating the + // AudioRecord instance to ensure smooth recording under load. It has been + // verified that it does not increase the actual recording latency. + int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity()); + Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes); + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + // Use the AudioRecord.Builder class on Android M (23) and above. + // Throws IllegalArgumentException. + audioRecord = createAudioRecordOnMOrHigher( + audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + if (preferredDevice != null) { + setPreferredDevice(preferredDevice); + } + } else { + // Use the old AudioRecord constructor for API levels below 23. + // Throws UnsupportedOperationException.
+ audioRecord = createAudioRecordOnLowerThanM( + audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + } + } catch (IllegalArgumentException | UnsupportedOperationException e) { + // Report of exception message is sufficient. Example: "Cannot create AudioRecord". + reportWebRtcAudioRecordInitError(e.getMessage()); + releaseAudioResources(); + return -1; + } + if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { + reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed."); + releaseAudioResources(); + return -1; + } + effects.enable(audioRecord.getAudioSessionId()); + logMainParameters(); + logMainParametersExtended(); + // Check number of active recording sessions. Should be zero but we have seen conflict cases + // and adding a log for it can help us figure out details about conflicting sessions. + final int numActiveRecordingSessions = + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); + if (numActiveRecordingSessions != 0) { + // Log the conflict as a warning since initialization did in fact succeed. Most likely, the + // upcoming call to startRecording() will fail under these conditions. + Logging.w( + TAG, "Potential microphone conflict. Active sessions: " + numActiveRecordingSessions); + } + return framesPerBuffer; + } + + /** + * Prefer a specific {@link AudioDeviceInfo} device for recording. Calling after recording starts + * is valid but may cause a temporary interruption if the audio routing changes. + */ + @RequiresApi(Build.VERSION_CODES.M) + @TargetApi(Build.VERSION_CODES.M) + void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) { + Logging.d( + TAG, "setPreferredDevice " + (preferredDevice != null ? preferredDevice.getId() : null)); + this.preferredDevice = preferredDevice; + if (audioRecord != null) { + if (!audioRecord.setPreferredDevice(preferredDevice)) { + Logging.e(TAG, "setPreferredDevice failed"); + } + } + } + + @CalledByNative + private boolean startRecording() { + Logging.d(TAG, "startRecording"); + assertTrue(audioRecord != null); + assertTrue(audioThread == null); + try { + audioRecord.startRecording(); + } catch (IllegalStateException e) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, + "AudioRecord.startRecording failed: " + e.getMessage()); + return false; + } + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, + "AudioRecord.startRecording failed - incorrect state: " + + audioRecord.getRecordingState()); + return false; + } + audioThread = new AudioRecordThread("AudioRecordJavaThread"); + audioThread.start(); + scheduleLogRecordingConfigurationsTask(audioRecord); + return true; + } + + @CalledByNative + private boolean stopRecording() { + Logging.d(TAG, "stopRecording"); + assertTrue(audioThread != null); + if (future != null) { + if (!future.isDone()) { + // Might be needed if the client calls startRecording(), stopRecording() back-to-back. 
+ future.cancel(true /* mayInterruptIfRunning */); + } + future = null; + } + audioThread.stopThread(); + if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { + Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + } + audioThread = null; + effects.release(); + releaseAudioResources(); + return true; + } + + @TargetApi(Build.VERSION_CODES.M) + private static AudioRecord createAudioRecordOnMOrHigher( + int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) { + Logging.d(TAG, "createAudioRecordOnMOrHigher"); + return new AudioRecord.Builder() + .setAudioSource(audioSource) + .setAudioFormat(new AudioFormat.Builder() + .setEncoding(audioFormat) + .setSampleRate(sampleRate) + .setChannelMask(channelConfig) + .build()) + .setBufferSizeInBytes(bufferSizeInBytes) + .build(); + } + + private static AudioRecord createAudioRecordOnLowerThanM( + int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) { + Logging.d(TAG, "createAudioRecordOnLowerThanM"); + return new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + } + + private void logMainParameters() { + Logging.d(TAG, + "AudioRecord: " + + "session ID: " + audioRecord.getAudioSessionId() + ", " + + "channels: " + audioRecord.getChannelCount() + ", " + + "sample rate: " + audioRecord.getSampleRate()); + } + + @TargetApi(Build.VERSION_CODES.M) + private void logMainParametersExtended() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + Logging.d(TAG, + "AudioRecord: " + // The frame count of the native AudioRecord buffer. + + "buffer size in frames: " + audioRecord.getBufferSizeInFrames()); + } + } + + @TargetApi(Build.VERSION_CODES.N) + // Checks the number of active recording sessions and logs the states of all active sessions. + // Returns the number of active sessions. Note that this can be called on an arbitrary thread. + private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { + Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher"); + return 0; + } + if (audioRecord == null) { + return 0; + } + + // Get a list of the currently active audio recording configurations of the device (can be more + // than one). An empty list indicates there is no recording active when queried. + List<AudioRecordingConfiguration> configs = audioManager.getActiveRecordingConfigurations(); + final int numActiveRecordingSessions = configs.size(); + Logging.d(TAG, "Number of active recording sessions: " + numActiveRecordingSessions); + if (numActiveRecordingSessions > 0) { + logActiveRecordingConfigs(audioRecord.getAudioSessionId(), configs); + if (verifyAudioConfig) { + // Run an extra check to verify that the existing audio source doing the recording (tied + // to the AudioRecord instance) is matching what the audio recording configuration lists + // as its client parameters. If these do not match, recording might work but under invalid + // conditions. + audioSourceMatchesRecordingSessionRef.set( + verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(), + audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs)); + } + } + return numActiveRecordingSessions; + } + + // Helper method which throws an exception when an assertion has failed.
+ private static void assertTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected condition to be true");
+ }
+ }
+
+ private int channelCountToConfiguration(int channels) {
+ return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+ }
+
+ private native void nativeCacheDirectBufferAddress(
+ long nativeAudioRecordJni, ByteBuffer byteBuffer);
+ private native void nativeDataIsRecorded(
+ long nativeAudioRecordJni, int bytes, long captureTimestampNs);
+
+ // Sets all recorded samples to zero if `mute` is true, i.e., ensures that
+ // the microphone is muted.
+ public void setMicrophoneMute(boolean mute) {
+ Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
+ microphoneMute = mute;
+ }
+
+ // Releases the native AudioRecord resources.
+ private void releaseAudioResources() {
+ Logging.d(TAG, "releaseAudioResources");
+ if (audioRecord != null) {
+ audioRecord.release();
+ audioRecord = null;
+ }
+ audioSourceMatchesRecordingSessionRef.set(null);
+ }
+
+ private void reportWebRtcAudioRecordInitError(String errorMessage) {
+ Logging.e(TAG, "Init recording error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordInitError(errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioRecordStartError(
+ AudioRecordStartErrorCode errorCode, String errorMessage) {
+ Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage);
+ }
+ }
+
+ private void reportWebRtcAudioRecordError(String errorMessage) {
+ Logging.e(TAG, "Run-time recording error: " + errorMessage);
+ WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
+ if (errorCallback != null) {
+ errorCallback.onWebRtcAudioRecordError(errorMessage);
+ }
+ }
+
+ private void doAudioRecordStateCallback(int audioState) {
+ Logging.d(TAG, "doAudioRecordStateCallback: " + audioStateToString(audioState));
+ if (stateCallback != null) {
+ if (audioState == WebRtcAudioRecord.AUDIO_RECORD_START) {
+ stateCallback.onWebRtcAudioRecordStart();
+ } else if (audioState == WebRtcAudioRecord.AUDIO_RECORD_STOP) {
+ stateCallback.onWebRtcAudioRecordStop();
+ } else {
+ Logging.e(TAG, "Invalid audio state");
+ }
+ }
+ }
+
+ // Matches Android's AudioFormat.getBytesPerSample(): bytes per sample = bits per sample / 8.
+ // The default audio data format is PCM, 16 bits per sample, which is
+ // guaranteed to be supported by all devices.
+ private static int getBytesPerSample(int audioFormat) {
+ switch (audioFormat) {
+ case AudioFormat.ENCODING_PCM_8BIT:
+ return 1;
+ case AudioFormat.ENCODING_PCM_16BIT:
+ case AudioFormat.ENCODING_IEC61937:
+ case AudioFormat.ENCODING_DEFAULT:
+ return 2;
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ return 4;
+ case AudioFormat.ENCODING_INVALID:
+ default:
+ throw new IllegalArgumentException("Bad audio format " + audioFormat);
+ }
+ }
+
+ // Use an ExecutorService to schedule a task after a given delay where the task consists of
+ // checking (by logging) the current status of active recording sessions.
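+ // Shape of the call (names as used below):
+ //   future = executor.schedule(callable, CHECK_REC_STATUS_DELAY_MS, TimeUnit.MILLISECONDS);
+ // where `callable` re-runs logRecordingConfigurations() with verifyAudioConfig enabled.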
+ private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) {
+ Logging.d(TAG, "scheduleLogRecordingConfigurationsTask");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
+ return;
+ }
+
+ Callable<String> callable = () -> {
+ if (this.audioRecord == audioRecord) {
+ logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */);
+ } else {
+ Logging.d(TAG, "audio record has changed");
+ }
+ return "Scheduled task is done";
+ };
+
+ if (future != null && !future.isDone()) {
+ future.cancel(true /* mayInterruptIfRunning */);
+ }
+ // Schedule a call to logRecordingConfigurations() from the executor thread after a fixed delay.
+ future = executor.schedule(callable, CHECK_REC_STATUS_DELAY_MS, TimeUnit.MILLISECONDS);
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ private static boolean logActiveRecordingConfigs(
+ int session, List<AudioRecordingConfiguration> configs) {
+ assertTrue(!configs.isEmpty());
+ final Iterator<AudioRecordingConfiguration> it = configs.iterator();
+ Logging.d(TAG, "AudioRecordingConfigurations: ");
+ while (it.hasNext()) {
+ final AudioRecordingConfiguration config = it.next();
+ StringBuilder conf = new StringBuilder();
+ // The audio source selected by the client.
+ final int audioSource = config.getClientAudioSource();
+ conf.append(" client audio source=")
+ .append(WebRtcAudioUtils.audioSourceToString(audioSource))
+ .append(", client session id=")
+ .append(config.getClientAudioSessionId())
+ // Compare with our own id (based on AudioRecord#getAudioSessionId()).
+ .append(" (")
+ .append(session)
+ .append(")")
+ .append("\n");
+ // Audio format at which audio is recorded on this Android device. Note that it may differ
+ // from the client application recording format (see getClientFormat()).
+ AudioFormat format = config.getFormat();
+ conf.append(" Device AudioFormat: ")
+ .append("channel count=")
+ .append(format.getChannelCount())
+ .append(", channel index mask=")
+ .append(format.getChannelIndexMask())
+ // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices.
+ .append(", channel mask=")
+ .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask()))
+ .append(", encoding=")
+ .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding()))
+ .append(", sample rate=")
+ .append(format.getSampleRate())
+ .append("\n");
+ // Audio format at which the client application is recording audio.
+ format = config.getClientFormat();
+ conf.append(" Client AudioFormat: ")
+ .append("channel count=")
+ .append(format.getChannelCount())
+ .append(", channel index mask=")
+ .append(format.getChannelIndexMask())
+ // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices.
+ .append(", channel mask=")
+ .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask()))
+ .append(", encoding=")
+ .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding()))
+ .append(", sample rate=")
+ .append(format.getSampleRate())
+ .append("\n");
+ // Audio input device used for this recording session.
+ final AudioDeviceInfo device = config.getAudioDevice();
+ if (device != null) {
+ assertTrue(device.isSource());
+ conf.append(" AudioDevice: ")
+ .append("type=")
+ .append(WebRtcAudioUtils.deviceTypeToString(device.getType()))
+ .append(", id=")
+ .append(device.getId());
+ }
+ Logging.d(TAG, conf.toString());
+ }
+ return true;
+ }
+
+ // Verify that the client audio configuration (device and format) matches the requested
+ // configuration (same as AudioRecord's).
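+ // A configuration counts as a match only when every client-side parameter the
+ // platform recorded equals ours, e.g. (illustrative values):
+ //   source=VOICE_COMMUNICATION, session=42, encoding=PCM_16BIT,
+ //   sampleRate=48000, channelMask=CHANNEL_IN_MONO, and the same routed device.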
+ @TargetApi(Build.VERSION_CODES.N) + private static boolean verifyAudioConfig(int source, int session, AudioFormat format, + AudioDeviceInfo device, List<AudioRecordingConfiguration> configs) { + assertTrue(!configs.isEmpty()); + final Iterator<AudioRecordingConfiguration> it = configs.iterator(); + while (it.hasNext()) { + final AudioRecordingConfiguration config = it.next(); + final AudioDeviceInfo configDevice = config.getAudioDevice(); + if (configDevice == null) { + continue; + } + if ((config.getClientAudioSource() == source) + && (config.getClientAudioSessionId() == session) + // Check the client format (should match the format of the AudioRecord instance). + && (config.getClientFormat().getEncoding() == format.getEncoding()) + && (config.getClientFormat().getSampleRate() == format.getSampleRate()) + && (config.getClientFormat().getChannelMask() == format.getChannelMask()) + && (config.getClientFormat().getChannelIndexMask() == format.getChannelIndexMask()) + // Ensure that the device format is properly configured. + && (config.getFormat().getEncoding() != AudioFormat.ENCODING_INVALID) + && (config.getFormat().getSampleRate() > 0) + // For the channel mask, either the position or index-based value must be valid. + && ((config.getFormat().getChannelMask() != AudioFormat.CHANNEL_INVALID) + || (config.getFormat().getChannelIndexMask() != AudioFormat.CHANNEL_INVALID)) + && checkDeviceMatch(configDevice, device)) { + Logging.d(TAG, "verifyAudioConfig: PASS"); + return true; + } + } + Logging.e(TAG, "verifyAudioConfig: FAILED"); + return false; + } + + @TargetApi(Build.VERSION_CODES.N) + // Returns true if device A parameters matches those of device B. + // TODO(henrika): can be improved by adding AudioDeviceInfo#getAddress() but it requires API 29. + private static boolean checkDeviceMatch(AudioDeviceInfo devA, AudioDeviceInfo devB) { + return ((devA.getId() == devB.getId() && (devA.getType() == devB.getType()))); + } + + private static String audioStateToString(int state) { + switch (state) { + case WebRtcAudioRecord.AUDIO_RECORD_START: + return "START"; + case WebRtcAudioRecord.AUDIO_RECORD_STOP: + return "STOP"; + default: + return "INVALID"; + } + } + + private static final AtomicInteger nextSchedulerId = new AtomicInteger(0); + + static ScheduledExecutorService newDefaultScheduler() { + AtomicInteger nextThreadId = new AtomicInteger(0); + return Executors.newScheduledThreadPool(0, new ThreadFactory() { + /** + * Constructs a new {@code Thread} + */ + @Override + public Thread newThread(Runnable r) { + Thread thread = Executors.defaultThreadFactory().newThread(r); + thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s", + nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement())); + return thread; + } + }); + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java new file mode 100644 index 0000000000..2b34e34013 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -0,0 +1,585 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+package org.webrtc.audio;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
+import android.os.Process;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.CalledByNative;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback;
+import org.webrtc.audio.LowLatencyAudioBufferManager;
+
+class WebRtcAudioTrack {
+ private static final String TAG = "WebRtcAudioTrackExternal";
+
+ // Default audio data format is PCM 16 bit per sample.
+ // Guaranteed to be supported by all devices.
+ private static final int BITS_PER_SAMPLE = 16;
+
+ // Requested size of each played buffer provided to the client.
+ private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+ // Average number of callbacks per second.
+ private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+ // The AudioTrackThread is allowed to wait for a successful call to join()
+ // but the wait times out after this amount of time.
+ private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+ // By default, WebRTC creates audio tracks with a usage attribute
+ // corresponding to voice communications, such as telephony or VoIP.
+ private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;
+
+ // Indicates the AudioTrack has started playing audio.
+ private static final int AUDIO_TRACK_START = 0;
+
+ // Indicates the AudioTrack has stopped playing audio.
+ private static final int AUDIO_TRACK_STOP = 1;
+
+ private long nativeAudioTrack;
+ private final Context context;
+ private final AudioManager audioManager;
+ private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+ private ByteBuffer byteBuffer;
+
+ private @Nullable final AudioAttributes audioAttributes;
+ private @Nullable AudioTrack audioTrack;
+ private @Nullable AudioTrackThread audioThread;
+ private final VolumeLogger volumeLogger;
+
+ // Samples to be played are replaced by zeros if `speakerMute` is set to true.
+ // Can be used to ensure that the speaker is fully muted.
+ private volatile boolean speakerMute;
+ private byte[] emptyBytes;
+ private boolean useLowLatency;
+ private int initialBufferSizeInFrames;
+
+ private final @Nullable AudioTrackErrorCallback errorCallback;
+ private final @Nullable AudioTrackStateCallback stateCallback;
+
+ /**
+ * Audio thread which keeps calling AudioTrack.write() to stream audio.
+ * Data is periodically acquired from the native WebRTC layer using the
+ * nativeGetPlayoutData callback function.
+ * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
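+ * Rough per-iteration flow (sketch): nativeGetPlayoutData() fills byteBuffer
+ * with 10 ms of PCM data, AudioTrack.write() drains it in blocking mode, and
+ * the buffer is rewound for the next round.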
+ */
+ private class AudioTrackThread extends Thread {
+ private volatile boolean keepAlive = true;
+ private LowLatencyAudioBufferManager bufferManager;
+
+ public AudioTrackThread(String name) {
+ super(name);
+ bufferManager = new LowLatencyAudioBufferManager();
+ }
+
+ @Override
+ public void run() {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+ Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
+ assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
+
+ // Audio playout has started and the client is informed about it.
+ doAudioTrackStateCallback(AUDIO_TRACK_START);
+
+ // Fixed size in bytes of each 10ms block of audio data that we ask for
+ // using callbacks to the native WebRTC client.
+ final int sizeInBytes = byteBuffer.capacity();
+
+ while (keepAlive) {
+ // Get 10ms of PCM data from the native WebRTC client. Audio data is
+ // written into the common ByteBuffer using the address that was
+ // cached at construction.
+ nativeGetPlayoutData(nativeAudioTrack, sizeInBytes);
+ // Write data until all data has been written to the audio sink.
+ // Upon return, the buffer position will have been advanced to reflect
+ // the amount of data that was successfully written to the AudioTrack.
+ assertTrue(sizeInBytes <= byteBuffer.remaining());
+ if (speakerMute) {
+ byteBuffer.clear();
+ byteBuffer.put(emptyBytes);
+ byteBuffer.position(0);
+ }
+ int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+ if (bytesWritten != sizeInBytes) {
+ Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
+ // If a write() returns a negative value, an error has occurred.
+ // Stop playing and report an error in this case.
+ if (bytesWritten < 0) {
+ keepAlive = false;
+ reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten);
+ }
+ }
+ if (useLowLatency) {
+ bufferManager.maybeAdjustBufferSize(audioTrack);
+ }
+ // The byte buffer must be rewound since byteBuffer.position() is
+ // increased at each call to AudioTrack.write(). If we don't do this,
+ // the next call to AudioTrack.write() will fail.
+ byteBuffer.rewind();
+
+ // TODO(henrika): it is possible to create a delay estimate here by
+ // counting the number of written frames and subtracting the result from
+ // audioTrack.getPlaybackHeadPosition().
+ }
+ }
+
+ // Stops the inner thread loop which results in calling AudioTrack.stop().
+ // Does not block the calling thread.
+ public void stopThread() {
+ Logging.d(TAG, "stopThread");
+ keepAlive = false;
+ }
+ }
+
+ @CalledByNative
+ WebRtcAudioTrack(Context context, AudioManager audioManager) {
+ this(context, audioManager, null /* audioAttributes */, null /* errorCallback */,
+ null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */);
+ }
+
+ WebRtcAudioTrack(Context context, AudioManager audioManager,
+ @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback,
+ @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency,
+ boolean enableVolumeLogger) {
+ threadChecker.detachThread();
+ this.context = context;
+ this.audioManager = audioManager;
+ this.audioAttributes = audioAttributes;
+ this.errorCallback = errorCallback;
+ this.stateCallback = stateCallback;
+ this.volumeLogger = enableVolumeLogger ?
new VolumeLogger(audioManager) : null;
+ this.useLowLatency = useLowLatency;
+ Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+ }
+
+ @CalledByNative
+ public void setNativeAudioTrack(long nativeAudioTrack) {
+ this.nativeAudioTrack = nativeAudioTrack;
+ }
+
+ @CalledByNative
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG,
+ "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
+ + ", bufferSizeFactor=" + bufferSizeFactor + ")");
+ final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
+ byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
+ Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+ emptyBytes = new byte[byteBuffer.capacity()];
+ // Rather than passing the ByteBuffer with every callback (requiring
+ // the potentially expensive GetDirectBufferAddress) we simply have the
+ // native class cache the address to the memory once.
+ nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer);
+
+ // Get the minimum buffer size required for the successful creation of an
+ // AudioTrack object in MODE_STREAM mode.
+ // Note that this size doesn't guarantee smooth playback under load.
+ final int channelConfig = channelCountToConfiguration(channels);
+ final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig,
+ AudioFormat.ENCODING_PCM_16BIT)
+ * bufferSizeFactor);
+ Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes);
+ // For the streaming mode, data must be written to the audio sink in
+ // chunks of size (given by byteBuffer.capacity()) less than or equal
+ // to the total buffer size `minBufferSizeInBytes`. But, we have seen
+ // reports of "getMinBufferSize(): error querying hardware". Hence, it
+ // can happen that `minBufferSizeInBytes` contains an invalid value.
+ if (minBufferSizeInBytes < byteBuffer.capacity()) {
+ reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
+ return -1;
+ }
+
+ // Don't use low-latency mode when a bufferSizeFactor > 1 is used. When bufferSizeFactor > 1
+ // we want to use a larger buffer to prevent underruns. However, low-latency mode would
+ // decrease the buffer size, which makes the bufferSizeFactor have no effect.
+ if (bufferSizeFactor > 1.0) {
+ useLowLatency = false;
+ }
+
+ // Ensure that the previous audio session was stopped correctly before trying
+ // to create a new AudioTrack.
+ if (audioTrack != null) {
+ reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
+ return -1;
+ }
+ try {
+ // Create an AudioTrack object and initialize its associated audio buffer.
+ // The size of this buffer determines how long an AudioTrack can play
+ // before running out of data.
+ if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ // On API level 26 or higher, we can use a low latency mode.
+ audioTrack = createAudioTrackOnOreoOrHigher(
+ sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+ } else {
+ // As we are on API level 21 or higher, it is possible to use a special AudioTrack
+ // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
+ // supersede the notion of stream types for defining the behavior of audio playback,
+ // and to allow certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
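+ // In essence the pre-O path reduces to (illustrative):
+ //   new AudioTrack(attributes, format, minBufferSizeInBytes,
+ //       AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE)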
+ audioTrack = createAudioTrackBeforeOreo( + sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes); + } + } catch (IllegalArgumentException e) { + reportWebRtcAudioTrackInitError(e.getMessage()); + releaseAudioResources(); + return -1; + } + + // It can happen that an AudioTrack is created but it was not successfully + // initialized upon creation. Seems to be the case e.g. when the maximum + // number of globally available audio tracks is exceeded. + if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) { + reportWebRtcAudioTrackInitError("Initialization of audio track failed."); + releaseAudioResources(); + return -1; + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + initialBufferSizeInFrames = audioTrack.getBufferSizeInFrames(); + } else { + initialBufferSizeInFrames = -1; + } + logMainParameters(); + logMainParametersExtended(); + return minBufferSizeInBytes; + } + + @CalledByNative + private boolean startPlayout() { + threadChecker.checkIsOnValidThread(); + if (volumeLogger != null) { + volumeLogger.start(); + } + Logging.d(TAG, "startPlayout"); + assertTrue(audioTrack != null); + assertTrue(audioThread == null); + + // Starts playing an audio track. + try { + audioTrack.play(); + } catch (IllegalStateException e) { + reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION, + "AudioTrack.play failed: " + e.getMessage()); + releaseAudioResources(); + return false; + } + if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) { + reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH, + "AudioTrack.play failed - incorrect state :" + audioTrack.getPlayState()); + releaseAudioResources(); + return false; + } + + // Create and start new high-priority thread which calls AudioTrack.write() + // and where we also call the native nativeGetPlayoutData() callback to + // request decoded audio from WebRTC. + audioThread = new AudioTrackThread("AudioTrackJavaThread"); + audioThread.start(); + return true; + } + + @CalledByNative + private boolean stopPlayout() { + threadChecker.checkIsOnValidThread(); + if (volumeLogger != null) { + volumeLogger.stop(); + } + Logging.d(TAG, "stopPlayout"); + assertTrue(audioThread != null); + logUnderrunCount(); + audioThread.stopThread(); + + Logging.d(TAG, "Stopping the AudioTrackThread..."); + audioThread.interrupt(); + if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) { + Logging.e(TAG, "Join of AudioTrackThread timed out."); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + } + Logging.d(TAG, "AudioTrackThread has now been stopped."); + audioThread = null; + if (audioTrack != null) { + Logging.d(TAG, "Calling AudioTrack.stop..."); + try { + audioTrack.stop(); + Logging.d(TAG, "AudioTrack.stop is done."); + doAudioTrackStateCallback(AUDIO_TRACK_STOP); + } catch (IllegalStateException e) { + Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage()); + } + } + releaseAudioResources(); + return true; + } + + // Get max possible volume index for a phone call audio stream. + @CalledByNative + private int getStreamMaxVolume() { + threadChecker.checkIsOnValidThread(); + Logging.d(TAG, "getStreamMaxVolume"); + return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL); + } + + // Set current volume level for a phone call audio stream. 
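+ // Illustrative caller-side usage (the valid index range is 0..getStreamMaxVolume()):
+ //   setStreamVolume(Math.min(requestedVolume, getStreamMaxVolume()));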
+ @CalledByNative
+ private boolean setStreamVolume(int volume) {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "setStreamVolume(" + volume + ")");
+ if (audioManager.isVolumeFixed()) {
+ Logging.e(TAG, "The device implements a fixed volume policy.");
+ return false;
+ }
+ audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
+ return true;
+ }
+
+ /** Get current volume level for a phone call audio stream. */
+ @CalledByNative
+ private int getStreamVolume() {
+ threadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "getStreamVolume");
+ return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ @CalledByNative
+ private int GetPlayoutUnderrunCount() {
+ if (Build.VERSION.SDK_INT >= 24) {
+ if (audioTrack != null) {
+ return audioTrack.getUnderrunCount();
+ } else {
+ return -1;
+ }
+ } else {
+ return -2;
+ }
+ }
+
+ private void logMainParameters() {
+ Logging.d(TAG,
+ "AudioTrack: "
+ + "session ID: " + audioTrack.getAudioSessionId() + ", "
+ + "channels: " + audioTrack.getChannelCount() + ", "
+ + "sample rate: " + audioTrack.getSampleRate()
+ + ", "
+ // Gain (>=1.0) expressed as linear multiplier on sample values.
+ + "max gain: " + AudioTrack.getMaxVolume());
+ }
+
+ private static void logNativeOutputSampleRate(int requestedSampleRateInHz) {
+ final int nativeOutputSampleRate =
+ AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
+ Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
+ if (requestedSampleRateInHz != nativeOutputSampleRate) {
+ Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
+ }
+ }
+
+ private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes overrideAttributes) {
+ AudioAttributes.Builder attributesBuilder =
+ new AudioAttributes.Builder()
+ .setUsage(DEFAULT_USAGE)
+ .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
+
+ if (overrideAttributes != null) {
+ if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) {
+ attributesBuilder.setUsage(overrideAttributes.getUsage());
+ }
+ if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) {
+ attributesBuilder.setContentType(overrideAttributes.getContentType());
+ }
+
+ attributesBuilder.setFlags(overrideAttributes.getFlags());
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ attributesBuilder = applyAttributesOnQOrHigher(attributesBuilder, overrideAttributes);
+ }
+ }
+ return attributesBuilder.build();
+ }
+
+ // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+ // It allows certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
+ private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig,
+ int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+ Logging.d(TAG, "createAudioTrackBeforeOreo");
+ logNativeOutputSampleRate(sampleRateInHz);
+
+ // Create an audio track where the audio usage is for VoIP and the content type is speech.
+ return new AudioTrack(getAudioAttributes(overrideAttributes),
+ new AudioFormat.Builder()
+ .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ .setSampleRate(sampleRateInHz)
+ .setChannelMask(channelConfig)
+ .build(),
+ bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
+ }
+
+ // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+ // Use the low-latency mode to improve audio latency.
Note that the low-latency mode may + // prevent effects (such as AEC) from working. Assuming AEC is working, the delay changes + // that happen in low-latency mode during the call will cause the AEC to perform worse. + // The behavior of the low-latency mode may be device dependent, use at your own risk. + @TargetApi(Build.VERSION_CODES.O) + private static AudioTrack createAudioTrackOnOreoOrHigher(int sampleRateInHz, int channelConfig, + int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) { + Logging.d(TAG, "createAudioTrackOnOreoOrHigher"); + logNativeOutputSampleRate(sampleRateInHz); + + // Create an audio track where the audio usage is for VoIP and the content type is speech. + return new AudioTrack.Builder() + .setAudioAttributes(getAudioAttributes(overrideAttributes)) + .setAudioFormat(new AudioFormat.Builder() + .setEncoding(AudioFormat.ENCODING_PCM_16BIT) + .setSampleRate(sampleRateInHz) + .setChannelMask(channelConfig) + .build()) + .setBufferSizeInBytes(bufferSizeInBytes) + .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY) + .setTransferMode(AudioTrack.MODE_STREAM) + .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE) + .build(); + } + + @TargetApi(Build.VERSION_CODES.Q) + private static AudioAttributes.Builder applyAttributesOnQOrHigher( + AudioAttributes.Builder builder, AudioAttributes overrideAttributes) { + return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy()); + } + + private void logBufferSizeInFrames() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + Logging.d(TAG, + "AudioTrack: " + // The effective size of the AudioTrack buffer that the app writes to. + + "buffer size in frames: " + audioTrack.getBufferSizeInFrames()); + } + } + + @CalledByNative + private int getBufferSizeInFrames() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return audioTrack.getBufferSizeInFrames(); + } + return -1; + } + + @CalledByNative + private int getInitialBufferSizeInFrames() { + return initialBufferSizeInFrames; + } + + private void logBufferCapacityInFrames() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + Logging.d(TAG, + "AudioTrack: " + // Maximum size of the AudioTrack buffer in frames. + + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames()); + } + } + + private void logMainParametersExtended() { + logBufferSizeInFrames(); + logBufferCapacityInFrames(); + } + + // Prints the number of underrun occurrences in the application-level write + // buffer since the AudioTrack was created. An underrun occurs if the app does + // not write audio data quickly enough, causing the buffer to underflow and a + // potential audio glitch. + // TODO(henrika): keep track of this value in the field and possibly add new + // UMA stat if needed. + private void logUnderrunCount() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount()); + } + } + + // Helper method which throws an exception when an assertion has failed. + private static void assertTrue(boolean condition) { + if (!condition) { + throw new AssertionError("Expected condition to be true"); + } + } + + private int channelCountToConfiguration(int channels) { + return (channels == 1 ? 
AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO); + } + + private static native void nativeCacheDirectBufferAddress( + long nativeAudioTrackJni, ByteBuffer byteBuffer); + private static native void nativeGetPlayoutData(long nativeAudioTrackJni, int bytes); + + // Sets all samples to be played out to zero if `mute` is true, i.e., + // ensures that the speaker is muted. + public void setSpeakerMute(boolean mute) { + Logging.w(TAG, "setSpeakerMute(" + mute + ")"); + speakerMute = mute; + } + + // Releases the native AudioTrack resources. + private void releaseAudioResources() { + Logging.d(TAG, "releaseAudioResources"); + if (audioTrack != null) { + audioTrack.release(); + audioTrack = null; + } + } + + private void reportWebRtcAudioTrackInitError(String errorMessage) { + Logging.e(TAG, "Init playout error: " + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + if (errorCallback != null) { + errorCallback.onWebRtcAudioTrackInitError(errorMessage); + } + } + + private void reportWebRtcAudioTrackStartError( + AudioTrackStartErrorCode errorCode, String errorMessage) { + Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + if (errorCallback != null) { + errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage); + } + } + + private void reportWebRtcAudioTrackError(String errorMessage) { + Logging.e(TAG, "Run-time playback error: " + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + if (errorCallback != null) { + errorCallback.onWebRtcAudioTrackError(errorMessage); + } + } + + private void doAudioTrackStateCallback(int audioState) { + Logging.d(TAG, "doAudioTrackStateCallback: " + audioState); + if (stateCallback != null) { + if (audioState == WebRtcAudioTrack.AUDIO_TRACK_START) { + stateCallback.onWebRtcAudioTrackStart(); + } else if (audioState == WebRtcAudioTrack.AUDIO_TRACK_STOP) { + stateCallback.onWebRtcAudioTrackStop(); + } else { + Logging.e(TAG, "Invalid audio state"); + } + } + } +} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java new file mode 100644 index 0000000000..7b4b809ab1 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java @@ -0,0 +1,308 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.audio; + +import static android.media.AudioManager.MODE_IN_CALL; +import static android.media.AudioManager.MODE_IN_COMMUNICATION; +import static android.media.AudioManager.MODE_NORMAL; +import static android.media.AudioManager.MODE_RINGTONE; + +import android.annotation.SuppressLint; +import android.annotation.TargetApi; +import android.content.Context; +import android.content.pm.PackageManager; +import android.media.AudioDeviceInfo; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.MediaRecorder.AudioSource; +import android.os.Build; +import java.lang.Thread; +import java.util.Arrays; +import org.webrtc.Logging; + +final class WebRtcAudioUtils { + private static final String TAG = "WebRtcAudioUtilsExternal"; + + // Helper method for building a string of thread information. + public static String getThreadInfo() { + return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId() + + "]"; + } + + // Returns true if we're running on emulator. + public static boolean runningOnEmulator() { + return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_"); + } + + // Information about the current build, taken from system properties. + static void logDeviceInfo(String tag) { + Logging.d(tag, + "Android SDK: " + Build.VERSION.SDK_INT + ", " + + "Release: " + Build.VERSION.RELEASE + ", " + + "Brand: " + Build.BRAND + ", " + + "Device: " + Build.DEVICE + ", " + + "Id: " + Build.ID + ", " + + "Hardware: " + Build.HARDWARE + ", " + + "Manufacturer: " + Build.MANUFACTURER + ", " + + "Model: " + Build.MODEL + ", " + + "Product: " + Build.PRODUCT); + } + + // Logs information about the current audio state. The idea is to call this + // method when errors are detected to log under what conditions the error + // occurred. Hopefully it will provide clues to what might be the root cause. + static void logAudioState(String tag, Context context, AudioManager audioManager) { + logDeviceInfo(tag); + logAudioStateBasic(tag, context, audioManager); + logAudioStateVolume(tag, audioManager); + logAudioDeviceInfo(tag, audioManager); + } + + // Converts AudioDeviceInfo types to local string representation. 
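+ // For example, deviceTypeToString(AudioDeviceInfo.TYPE_WIRED_HEADSET)
+ // returns "TYPE_WIRED_HEADSET"; unmapped values fall back to "TYPE_UNKNOWN".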
+ static String deviceTypeToString(int type) { + switch (type) { + case AudioDeviceInfo.TYPE_UNKNOWN: + return "TYPE_UNKNOWN"; + case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE: + return "TYPE_BUILTIN_EARPIECE"; + case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER: + return "TYPE_BUILTIN_SPEAKER"; + case AudioDeviceInfo.TYPE_WIRED_HEADSET: + return "TYPE_WIRED_HEADSET"; + case AudioDeviceInfo.TYPE_WIRED_HEADPHONES: + return "TYPE_WIRED_HEADPHONES"; + case AudioDeviceInfo.TYPE_LINE_ANALOG: + return "TYPE_LINE_ANALOG"; + case AudioDeviceInfo.TYPE_LINE_DIGITAL: + return "TYPE_LINE_DIGITAL"; + case AudioDeviceInfo.TYPE_BLUETOOTH_SCO: + return "TYPE_BLUETOOTH_SCO"; + case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP: + return "TYPE_BLUETOOTH_A2DP"; + case AudioDeviceInfo.TYPE_HDMI: + return "TYPE_HDMI"; + case AudioDeviceInfo.TYPE_HDMI_ARC: + return "TYPE_HDMI_ARC"; + case AudioDeviceInfo.TYPE_USB_DEVICE: + return "TYPE_USB_DEVICE"; + case AudioDeviceInfo.TYPE_USB_ACCESSORY: + return "TYPE_USB_ACCESSORY"; + case AudioDeviceInfo.TYPE_DOCK: + return "TYPE_DOCK"; + case AudioDeviceInfo.TYPE_FM: + return "TYPE_FM"; + case AudioDeviceInfo.TYPE_BUILTIN_MIC: + return "TYPE_BUILTIN_MIC"; + case AudioDeviceInfo.TYPE_FM_TUNER: + return "TYPE_FM_TUNER"; + case AudioDeviceInfo.TYPE_TV_TUNER: + return "TYPE_TV_TUNER"; + case AudioDeviceInfo.TYPE_TELEPHONY: + return "TYPE_TELEPHONY"; + case AudioDeviceInfo.TYPE_AUX_LINE: + return "TYPE_AUX_LINE"; + case AudioDeviceInfo.TYPE_IP: + return "TYPE_IP"; + case AudioDeviceInfo.TYPE_BUS: + return "TYPE_BUS"; + case AudioDeviceInfo.TYPE_USB_HEADSET: + return "TYPE_USB_HEADSET"; + default: + return "TYPE_UNKNOWN"; + } + } + + @TargetApi(Build.VERSION_CODES.N) + public static String audioSourceToString(int source) { + // AudioSource.UNPROCESSED requires API level 29. Use local define instead. + final int VOICE_PERFORMANCE = 10; + switch (source) { + case AudioSource.DEFAULT: + return "DEFAULT"; + case AudioSource.MIC: + return "MIC"; + case AudioSource.VOICE_UPLINK: + return "VOICE_UPLINK"; + case AudioSource.VOICE_DOWNLINK: + return "VOICE_DOWNLINK"; + case AudioSource.VOICE_CALL: + return "VOICE_CALL"; + case AudioSource.CAMCORDER: + return "CAMCORDER"; + case AudioSource.VOICE_RECOGNITION: + return "VOICE_RECOGNITION"; + case AudioSource.VOICE_COMMUNICATION: + return "VOICE_COMMUNICATION"; + case AudioSource.UNPROCESSED: + return "UNPROCESSED"; + case VOICE_PERFORMANCE: + return "VOICE_PERFORMANCE"; + default: + return "INVALID"; + } + } + + public static String channelMaskToString(int mask) { + // For input or AudioRecord, the mask should be AudioFormat#CHANNEL_IN_MONO or + // AudioFormat#CHANNEL_IN_STEREO. AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all + // devices. 
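+ // For example, channelMaskToString(AudioFormat.CHANNEL_IN_MONO) returns
+ // "IN_MONO"; anything other than the two input masks maps to "INVALID".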
+ switch (mask) {
+ case AudioFormat.CHANNEL_IN_STEREO:
+ return "IN_STEREO";
+ case AudioFormat.CHANNEL_IN_MONO:
+ return "IN_MONO";
+ default:
+ return "INVALID";
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.N)
+ public static String audioEncodingToString(int enc) {
+ switch (enc) {
+ case AudioFormat.ENCODING_INVALID:
+ return "INVALID";
+ case AudioFormat.ENCODING_PCM_16BIT:
+ return "PCM_16BIT";
+ case AudioFormat.ENCODING_PCM_8BIT:
+ return "PCM_8BIT";
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ return "PCM_FLOAT";
+ case AudioFormat.ENCODING_AC3:
+ return "AC3";
+ case AudioFormat.ENCODING_E_AC3:
+ return "E_AC3";
+ case AudioFormat.ENCODING_DTS:
+ return "DTS";
+ case AudioFormat.ENCODING_DTS_HD:
+ return "DTS_HD";
+ case AudioFormat.ENCODING_MP3:
+ return "MP3";
+ default:
+ return "Invalid encoding: " + enc;
+ }
+ }
+
+ // Reports basic audio statistics.
+ private static void logAudioStateBasic(String tag, Context context, AudioManager audioManager) {
+ Logging.d(tag,
+ "Audio State: "
+ + "audio mode: " + modeToString(audioManager.getMode()) + ", "
+ + "has mic: " + hasMicrophone(context) + ", "
+ + "mic muted: " + audioManager.isMicrophoneMute() + ", "
+ + "music active: " + audioManager.isMusicActive() + ", "
+ + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", "
+ + "BT SCO: " + audioManager.isBluetoothScoOn());
+ }
+
+ // Adds volume information for all possible stream types.
+ private static void logAudioStateVolume(String tag, AudioManager audioManager) {
+ final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
+ AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
+ AudioManager.STREAM_SYSTEM};
+ Logging.d(tag, "Audio State: ");
+ // Some devices may not have volume controls and might use a fixed volume.
+ boolean fixedVolume = audioManager.isVolumeFixed();
+ Logging.d(tag, " fixed volume=" + fixedVolume);
+ if (!fixedVolume) {
+ for (int stream : streams) {
+ StringBuilder info = new StringBuilder();
+ info.append(" " + streamTypeToString(stream) + ": ");
+ info.append("volume=").append(audioManager.getStreamVolume(stream));
+ info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
+ logIsStreamMute(tag, audioManager, stream, info);
+ Logging.d(tag, info.toString());
+ }
+ }
+ }
+
+ private static void logIsStreamMute(
+ String tag, AudioManager audioManager, int stream, StringBuilder info) {
+ if (Build.VERSION.SDK_INT >= 23) {
+ info.append(", muted=").append(audioManager.isStreamMute(stream));
+ }
+ }
+
+ // Moz linting complains even though AudioManager.GET_DEVICES_ALL is
+ // listed in the docs here:
+ // https://developer.android.com/reference/android/media/AudioManager#GET_DEVICES_ALL
+ @SuppressLint("WrongConstant")
+ private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
+ if (Build.VERSION.SDK_INT < 23) {
+ return;
+ }
+ final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ if (devices.length == 0) {
+ return;
+ }
+ Logging.d(tag, "Audio Devices: ");
+ for (AudioDeviceInfo device : devices) {
+ StringBuilder info = new StringBuilder();
+ info.append(" ").append(deviceTypeToString(device.getType()));
+ info.append(device.isSource() ? "(in): " : "(out): ");
+ // An empty array indicates that the device supports arbitrary channel counts.
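+ // Example of a resulting log line (values illustrative):
+ //   TYPE_BUILTIN_MIC(in): channels=[1, 2], encodings=[2], sample rates=[48000], id=4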
+ if (device.getChannelCounts().length > 0) { + info.append("channels=").append(Arrays.toString(device.getChannelCounts())); + info.append(", "); + } + if (device.getEncodings().length > 0) { + // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4. + info.append("encodings=").append(Arrays.toString(device.getEncodings())); + info.append(", "); + } + if (device.getSampleRates().length > 0) { + info.append("sample rates=").append(Arrays.toString(device.getSampleRates())); + info.append(", "); + } + info.append("id=").append(device.getId()); + Logging.d(tag, info.toString()); + } + } + + // Converts media.AudioManager modes into local string representation. + static String modeToString(int mode) { + switch (mode) { + case MODE_IN_CALL: + return "MODE_IN_CALL"; + case MODE_IN_COMMUNICATION: + return "MODE_IN_COMMUNICATION"; + case MODE_NORMAL: + return "MODE_NORMAL"; + case MODE_RINGTONE: + return "MODE_RINGTONE"; + default: + return "MODE_INVALID"; + } + } + + private static String streamTypeToString(int stream) { + switch (stream) { + case AudioManager.STREAM_VOICE_CALL: + return "STREAM_VOICE_CALL"; + case AudioManager.STREAM_MUSIC: + return "STREAM_MUSIC"; + case AudioManager.STREAM_RING: + return "STREAM_RING"; + case AudioManager.STREAM_ALARM: + return "STREAM_ALARM"; + case AudioManager.STREAM_NOTIFICATION: + return "STREAM_NOTIFICATION"; + case AudioManager.STREAM_SYSTEM: + return "STREAM_SYSTEM"; + default: + return "STREAM_INVALID"; + } + } + + // Returns true if the device can record audio via a microphone. + private static boolean hasMicrophone(Context context) { + return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE); + } +} diff --git a/third_party/libwebrtc/sdk/android/src/jni/DEPS b/third_party/libwebrtc/sdk/android/src/jni/DEPS new file mode 100644 index 0000000000..ae33fa6830 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/DEPS @@ -0,0 +1,15 @@ +include_rules = [ + "+third_party/libyuv", + "+call/callfactoryinterface.h", + "+common_video", + "+logging/rtc_event_log/rtc_event_log_factory.h", + "+media/base", + "+media/engine", + "+modules/audio_device/include/audio_device.h", + "+modules/audio_processing/include/audio_processing.h", + "+modules/include", + "+modules/utility/include/jvm_android.h", + "+modules/video_coding", + "+pc", + "+system_wrappers/include", +] diff --git a/third_party/libwebrtc/sdk/android/src/jni/OWNERS b/third_party/libwebrtc/sdk/android/src/jni/OWNERS new file mode 100644 index 0000000000..557373424b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/OWNERS @@ -0,0 +1,4 @@ +per-file androidhistogram.cc=xalep@webrtc.org +per-file androidmetrics.cc=xalep@webrtc.org +per-file androidvideotracksource.*=xalep@webrtc.org +per-file androidvideotracksource.cc=xalep@webrtc.org diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc b/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc new file mode 100644 index 0000000000..498f143743 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc @@ -0,0 +1,50 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include <map> +#include <memory> + +#include "sdk/android/generated_base_jni/Histogram_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "system_wrappers/include/metrics.h" + +// Enables collection of native histograms and creating them. +namespace webrtc { +namespace jni { + +static jlong JNI_Histogram_CreateCounts(JNIEnv* jni, + const JavaParamRef<jstring>& j_name, + jint min, + jint max, + jint buckets) { + std::string name = JavaToStdString(jni, j_name); + return jlongFromPointer( + metrics::HistogramFactoryGetCounts(name, min, max, buckets)); +} + +static jlong JNI_Histogram_CreateEnumeration( + JNIEnv* jni, + const JavaParamRef<jstring>& j_name, + jint max) { + std::string name = JavaToStdString(jni, j_name); + return jlongFromPointer(metrics::HistogramFactoryGetEnumeration(name, max)); +} + +static void JNI_Histogram_AddSample(JNIEnv* jni, + jlong histogram, + jint sample) { + if (histogram) { + HistogramAdd(reinterpret_cast<metrics::Histogram*>(histogram), sample); + } +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc b/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc new file mode 100644 index 0000000000..01398cc77f --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc @@ -0,0 +1,53 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <map> +#include <memory> + +#include "rtc_base/string_utils.h" +#include "sdk/android/generated_metrics_jni/Metrics_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "system_wrappers/include/metrics.h" + +// Enables collection of native histograms and creating them. +namespace webrtc { +namespace jni { + +static void JNI_Metrics_Enable(JNIEnv* jni) { + metrics::Enable(); +} + +// Gets and clears native histograms. +static ScopedJavaLocalRef<jobject> JNI_Metrics_GetAndReset(JNIEnv* jni) { + ScopedJavaLocalRef<jobject> j_metrics = Java_Metrics_Constructor(jni); + + std::map<std::string, std::unique_ptr<metrics::SampleInfo>, + rtc::AbslStringViewCmp> + histograms; + metrics::GetAndReset(&histograms); + for (const auto& kv : histograms) { + // Create and add samples to `HistogramInfo`. + ScopedJavaLocalRef<jobject> j_info = Java_HistogramInfo_Constructor( + jni, kv.second->min, kv.second->max, + static_cast<int>(kv.second->bucket_count)); + for (const auto& sample : kv.second->samples) { + Java_HistogramInfo_addSample(jni, j_info, sample.first, sample.second); + } + // Add `HistogramInfo` to `Metrics`. 
+ ScopedJavaLocalRef<jstring> j_name = NativeToJavaString(jni, kv.first); + Java_Metrics_add(jni, j_metrics, j_name, j_info); + } + CHECK_EXCEPTION(jni); + return j_metrics; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc new file mode 100644 index 0000000000..539d41487e --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc @@ -0,0 +1,686 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/android_network_monitor.h" + +#include <dlfcn.h> + +#include "absl/strings/string_view.h" +#ifndef RTLD_NOLOAD +// This was added in Lollipop to dlfcn.h +#define RTLD_NOLOAD 4 +#endif + +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "sdk/android/generated_base_jni/NetworkChangeDetector_jni.h" +#include "sdk/android/generated_base_jni/NetworkMonitor_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +namespace { + +const char* NetworkTypeToString(NetworkType type) { + switch (type) { + case NETWORK_UNKNOWN: + return "UNKNOWN"; + case NETWORK_ETHERNET: + return "ETHERNET"; + case NETWORK_WIFI: + return "WIFI"; + case NETWORK_5G: + return "5G"; + case NETWORK_4G: + return "4G"; + case NETWORK_3G: + return "3G"; + case NETWORK_2G: + return "2G"; + case NETWORK_UNKNOWN_CELLULAR: + return "UNKNOWN_CELLULAR"; + case NETWORK_BLUETOOTH: + return "BLUETOOTH"; + case NETWORK_VPN: + return "VPN"; + case NETWORK_NONE: + return "NONE"; + } +} + +} // namespace + +enum AndroidSdkVersion { + SDK_VERSION_LOLLIPOP = 21, + SDK_VERSION_MARSHMALLOW = 23 +}; + +static NetworkType GetNetworkTypeFromJava( + JNIEnv* jni, + const JavaRef<jobject>& j_network_type) { + std::string enum_name = GetJavaEnumName(jni, j_network_type); + if (enum_name == "CONNECTION_UNKNOWN") { + return NetworkType::NETWORK_UNKNOWN; + } + if (enum_name == "CONNECTION_ETHERNET") { + return NetworkType::NETWORK_ETHERNET; + } + if (enum_name == "CONNECTION_WIFI") { + return NetworkType::NETWORK_WIFI; + } + if (enum_name == "CONNECTION_5G") { + return NetworkType::NETWORK_5G; + } + if (enum_name == "CONNECTION_4G") { + return NetworkType::NETWORK_4G; + } + if (enum_name == "CONNECTION_3G") { + return NetworkType::NETWORK_3G; + } + if (enum_name == "CONNECTION_2G") { + return NetworkType::NETWORK_2G; + } + if (enum_name == "CONNECTION_UNKNOWN_CELLULAR") { + return NetworkType::NETWORK_UNKNOWN_CELLULAR; + } + if (enum_name == "CONNECTION_BLUETOOTH") { + return NetworkType::NETWORK_BLUETOOTH; + } + if (enum_name == "CONNECTION_VPN") { + return NetworkType::NETWORK_VPN; + } + if (enum_name == "CONNECTION_NONE") { + return NetworkType::NETWORK_NONE; + } + RTC_DCHECK_NOTREACHED(); + return NetworkType::NETWORK_UNKNOWN; +} + +static rtc::AdapterType AdapterTypeFromNetworkType( + NetworkType network_type, + bool surface_cellular_types) { + switch (network_type) { + 
case NETWORK_UNKNOWN:
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ case NETWORK_ETHERNET:
+ return rtc::ADAPTER_TYPE_ETHERNET;
+ case NETWORK_WIFI:
+ return rtc::ADAPTER_TYPE_WIFI;
+ case NETWORK_5G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_5G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_4G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_4G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_3G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_3G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_2G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_2G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_UNKNOWN_CELLULAR:
+ return rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_VPN:
+ return rtc::ADAPTER_TYPE_VPN;
+ case NETWORK_BLUETOOTH:
+ // There is no corresponding mapping for bluetooth networks.
+ // Map it to UNKNOWN for now.
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ case NETWORK_NONE:
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ }
+
+ RTC_DCHECK_NOTREACHED() << "Invalid network type " << network_type;
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+}
+
+static rtc::IPAddress JavaToNativeIpAddress(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ip_address) {
+ std::vector<int8_t> address =
+ JavaToNativeByteArray(jni, Java_IPAddress_getAddress(jni, j_ip_address));
+ size_t address_length = address.size();
+ if (address_length == 4) {
+ // IPv4.
+ struct in_addr ip4_addr;
+ memcpy(&ip4_addr.s_addr, address.data(), 4);
+ return rtc::IPAddress(ip4_addr);
+ }
+ // IPv6.
+ RTC_CHECK(address_length == 16);
+ struct in6_addr ip6_addr;
+ memcpy(ip6_addr.s6_addr, address.data(), address_length);
+ return rtc::IPAddress(ip6_addr);
+}
+
+static NetworkInformation GetNetworkInformationFromJava(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_info) {
+ NetworkInformation network_info;
+ network_info.interface_name = JavaToStdString(
+ jni, Java_NetworkInformation_getName(jni, j_network_info));
+ network_info.handle = static_cast<NetworkHandle>(
+ Java_NetworkInformation_getHandle(jni, j_network_info));
+ network_info.type = GetNetworkTypeFromJava(
+ jni, Java_NetworkInformation_getConnectionType(jni, j_network_info));
+ network_info.underlying_type_for_vpn = GetNetworkTypeFromJava(
+ jni, Java_NetworkInformation_getUnderlyingConnectionTypeForVpn(
+ jni, j_network_info));
+ ScopedJavaLocalRef<jobjectArray> j_ip_addresses =
+ Java_NetworkInformation_getIpAddresses(jni, j_network_info);
+ network_info.ip_addresses = JavaToNativeVector<rtc::IPAddress>(
+ jni, j_ip_addresses, &JavaToNativeIpAddress);
+ return network_info;
+}
+
+static bool AddressMatch(const rtc::IPAddress& ip1, const rtc::IPAddress& ip2) {
+ if (ip1.family() != ip2.family()) {
+ return false;
+ }
+ if (ip1.family() == AF_INET) {
+ return ip1.ipv4_address().s_addr == ip2.ipv4_address().s_addr;
+ }
+ if (ip1.family() == AF_INET6) {
+ // The last 64 bits of an IPv6 address form the temporary part and can
+ // change over time, so we only compare the first 64 bits.
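+ // E.g. 2001:db8:1:2:aaaa:bbbb:cccc:dddd and 2001:db8:1:2:1111:2222:3333:4444
+ // compare equal here, since only the /64 prefix (2001:db8:1:2) is checked.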
+ return memcmp(ip1.ipv6_address().s6_addr, ip2.ipv6_address().s6_addr, + sizeof(in6_addr) / 2) == 0; + } + return false; +} + +NetworkInformation::NetworkInformation() = default; + +NetworkInformation::NetworkInformation(const NetworkInformation&) = default; + +NetworkInformation::NetworkInformation(NetworkInformation&&) = default; + +NetworkInformation::~NetworkInformation() = default; + +NetworkInformation& NetworkInformation::operator=(const NetworkInformation&) = + default; + +NetworkInformation& NetworkInformation::operator=(NetworkInformation&&) = + default; + +std::string NetworkInformation::ToString() const { + rtc::StringBuilder ss; + ss << "NetInfo[name " << interface_name << "; handle " << handle << "; type " + << type; + if (type == NETWORK_VPN) { + ss << "; underlying_type_for_vpn " << underlying_type_for_vpn; + } + ss << "]"; + return ss.Release(); +} + +AndroidNetworkMonitor::AndroidNetworkMonitor( + JNIEnv* env, + const JavaRef<jobject>& j_application_context, + const FieldTrialsView& field_trials) + : android_sdk_int_(Java_NetworkMonitor_androidSdkInt(env)), + j_application_context_(env, j_application_context), + j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)), + network_thread_(rtc::Thread::Current()), + field_trials_(field_trials) {} + +AndroidNetworkMonitor::~AndroidNetworkMonitor() { + RTC_DCHECK(!started_); +} + +void AndroidNetworkMonitor::Start() { + RTC_DCHECK_RUN_ON(network_thread_); + if (started_) { + return; + } + reset(); + started_ = true; + surface_cellular_types_ = + field_trials_.IsEnabled("WebRTC-SurfaceCellularTypes"); + find_network_handle_without_ipv6_temporary_part_ = field_trials_.IsEnabled( + "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart"); + bind_using_ifname_ = + !field_trials_.IsDisabled("WebRTC-BindUsingInterfaceName"); + disable_is_adapter_available_ = field_trials_.IsDisabled( + "WebRTC-AndroidNetworkMonitor-IsAdapterAvailable"); + + // This pointer is also accessed by the methods called from java threads. + // Assigning it here is safe, because the java monitor is in a stopped state, + // and will not make any callbacks. + safety_flag_ = PendingTaskSafetyFlag::Create(); + + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_NetworkMonitor_startMonitoring( + env, j_network_monitor_, j_application_context_, jlongFromPointer(this), + NativeToJavaString( + env, field_trials_.Lookup("WebRTC-NetworkMonitorAutoDetect"))); +} + +void AndroidNetworkMonitor::reset() { + RTC_DCHECK_RUN_ON(network_thread_); + network_handle_by_address_.clear(); + network_handle_by_if_name_.clear(); + network_info_by_handle_.clear(); + network_preference_by_adapter_type_.clear(); +} + +void AndroidNetworkMonitor::Stop() { + RTC_DCHECK_RUN_ON(network_thread_); + if (!started_) { + return; + } + started_ = false; + find_network_handle_without_ipv6_temporary_part_ = false; + + // Cancel any pending tasks. We should not call + // `InvokeNetworksChangedCallback()` when the monitor is stopped. 
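+ // `safety_flag_` is the PendingTaskSafetyFlag created in Start(); SetNotAlive()
+ // below turns any still-queued task into a no-op, so late callbacks from the
+ // Java monitor can no longer fire into a stopped monitor.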
+ safety_flag_->SetNotAlive(); + + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_NetworkMonitor_stopMonitoring(env, j_network_monitor_, + jlongFromPointer(this)); + + reset(); +} + +// The implementation is largely taken from UDPSocketPosix::BindToNetwork in +// https://cs.chromium.org/chromium/src/net/udp/udp_socket_posix.cc +rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( + int socket_fd, + const rtc::IPAddress& address, + absl::string_view if_name) { + RTC_DCHECK_RUN_ON(network_thread_); + + // Android prior to Lollipop didn't have support for binding sockets to + // networks. This may also occur if there is no connectivity manager + // service. + JNIEnv* env = AttachCurrentThreadIfNeeded(); + const bool network_binding_supported = + Java_NetworkMonitor_networkBindingSupported(env, j_network_monitor_); + if (!network_binding_supported) { + RTC_LOG(LS_WARNING) + << "BindSocketToNetwork is not supported on this platform " + "(Android SDK: " + << android_sdk_int_ << ")"; + return rtc::NetworkBindingResult::NOT_IMPLEMENTED; + } + + absl::optional<NetworkHandle> network_handle = + FindNetworkHandleFromAddressOrName(address, if_name); + if (!network_handle) { + RTC_LOG(LS_WARNING) + << "BindSocketToNetwork unable to find network handle for" + << " addr: " << address.ToSensitiveString() << " ifname: " << if_name; + return rtc::NetworkBindingResult::ADDRESS_NOT_FOUND; + } + + if (*network_handle == 0 /* NETWORK_UNSPECIFIED */) { + RTC_LOG(LS_WARNING) << "BindSocketToNetwork 0 network handle for" + << " addr: " << address.ToSensitiveString() + << " ifname: " << if_name; + return rtc::NetworkBindingResult::NOT_IMPLEMENTED; + } + + int rv = 0; + if (android_sdk_int_ >= SDK_VERSION_MARSHMALLOW) { + // See declaration of android_setsocknetwork() here: + // http://androidxref.com/6.0.0_r1/xref/development/ndk/platforms/android-M/include/android/multinetwork.h#65 + // Function cannot be called directly as it will cause app to fail to load + // on pre-marshmallow devices. + typedef int (*MarshmallowSetNetworkForSocket)(NetworkHandle net, + int socket); + static MarshmallowSetNetworkForSocket marshmallowSetNetworkForSocket; + // This is not thread-safe, but we are running this only on the worker + // thread. + if (!marshmallowSetNetworkForSocket) { + const std::string android_native_lib_path = "libandroid.so"; + void* lib = dlopen(android_native_lib_path.c_str(), RTLD_NOW); + if (lib == nullptr) { + RTC_LOG(LS_ERROR) << "Library " << android_native_lib_path + << " not found!"; + return rtc::NetworkBindingResult::NOT_IMPLEMENTED; + } + marshmallowSetNetworkForSocket = + reinterpret_cast<MarshmallowSetNetworkForSocket>( + dlsym(lib, "android_setsocknetwork")); + } + if (!marshmallowSetNetworkForSocket) { + RTC_LOG(LS_ERROR) << "Symbol marshmallowSetNetworkForSocket is not found"; + return rtc::NetworkBindingResult::NOT_IMPLEMENTED; + } + rv = marshmallowSetNetworkForSocket(*network_handle, socket_fd); + } else { + // NOTE: This relies on Android implementation details, but it won't + // change because Lollipop is already released. + typedef int (*LollipopSetNetworkForSocket)(unsigned net, int socket); + static LollipopSetNetworkForSocket lollipopSetNetworkForSocket; + // This is not threadsafe, but we are running this only on the worker + // thread. + if (!lollipopSetNetworkForSocket) { + // Android's netd client library should always be loaded in our address + // space as it shims libc functions like connect(). 
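+      //
+      // Both SDK branches follow the same lazily-resolved function-pointer
+      // shape; reduced to a sketch ("libfoo.so" and "foo" are placeholder
+      // names):
+      //
+      //   typedef int (*FooFn)(int);
+      //   static FooFn foo_fn;
+      //   if (!foo_fn) {
+      //     if (void* lib = dlopen("libfoo.so", RTLD_NOW)) {
+      //       foo_fn = reinterpret_cast<FooFn>(dlsym(lib, "foo"));
+      //     }
+      //   }
+      //   if (foo_fn) foo_fn(42);  // only call after the symbol resolves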
+      const std::string net_library_path = "libnetd_client.so";
+      // Use RTLD_NOW to match Android's prior loading of the library:
+      // http://androidxref.com/6.0.0_r5/xref/bionic/libc/bionic/NetdClient.cpp#37
+      // Use RTLD_NOLOAD to assert that the library is already loaded and
+      // avoid doing any disk IO.
+      void* lib = dlopen(net_library_path.c_str(), RTLD_NOW | RTLD_NOLOAD);
+      if (lib == nullptr) {
+        RTC_LOG(LS_ERROR) << "Library " << net_library_path << " not found!";
+        return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+      }
+      lollipopSetNetworkForSocket =
+          reinterpret_cast<LollipopSetNetworkForSocket>(
+              dlsym(lib, "setNetworkForSocket"));
+    }
+    if (!lollipopSetNetworkForSocket) {
+      RTC_LOG(LS_ERROR) << "Symbol lollipopSetNetworkForSocket is not found";
+      return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+    }
+    rv = lollipopSetNetworkForSocket(*network_handle, socket_fd);
+  }
+
+  // If `network` has since disconnected, `rv` will be ENONET. Surface this as
+  // ERR_NETWORK_CHANGED, rather than MapSystemError(ENONET) which gives back
+  // the less descriptive ERR_FAILED.
+  if (rv == 0) {
+    RTC_LOG(LS_VERBOSE) << "BindSocketToNetwork bound network handle for"
+                        << " addr: " << address.ToSensitiveString()
+                        << " ifname: " << if_name;
+    return rtc::NetworkBindingResult::SUCCESS;
+  }
+
+  RTC_LOG(LS_WARNING) << "BindSocketToNetwork got error: " << rv
+                      << " addr: " << address.ToSensitiveString()
+                      << " ifname: " << if_name;
+  if (rv == ENONET) {
+    return rtc::NetworkBindingResult::NETWORK_CHANGED;
+  }
+
+  return rtc::NetworkBindingResult::FAILURE;
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected_n(
+    const NetworkInformation& network_info) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_LOG(LS_INFO) << "Network connected: " << network_info.ToString();
+  network_info_by_handle_[network_info.handle] = network_info;
+  for (const rtc::IPAddress& address : network_info.ip_addresses) {
+    network_handle_by_address_[address] = network_info.handle;
+  }
+  network_handle_by_if_name_[network_info.interface_name] =
+      network_info.handle;
+  RTC_CHECK(network_info_by_handle_.size() >=
+            network_handle_by_if_name_.size());
+  InvokeNetworksChangedCallback();
+}
+
+absl::optional<NetworkHandle>
+AndroidNetworkMonitor::FindNetworkHandleFromAddressOrName(
+    const rtc::IPAddress& ip_address,
+    absl::string_view if_name) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_LOG(LS_INFO) << "Find network handle.";
+  if (find_network_handle_without_ipv6_temporary_part_) {
+    for (auto const& iter : network_info_by_handle_) {
+      const std::vector<rtc::IPAddress>& addresses = iter.second.ip_addresses;
+      auto address_it = std::find_if(addresses.begin(), addresses.end(),
+                                     [ip_address](rtc::IPAddress address) {
+                                       return AddressMatch(ip_address, address);
+                                     });
+      if (address_it != addresses.end()) {
+        return absl::make_optional(iter.first);
+      }
+    }
+  } else {
+    auto iter = network_handle_by_address_.find(ip_address);
+    if (iter != network_handle_by_address_.end()) {
+      return absl::make_optional(iter->second);
+    }
+  }
+
+  return FindNetworkHandleFromIfname(if_name);
+}
+
+absl::optional<NetworkHandle>
+AndroidNetworkMonitor::FindNetworkHandleFromIfname(
+    absl::string_view if_name) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  auto iter = network_handle_by_if_name_.find(if_name);
+  if (iter != network_handle_by_if_name_.end()) {
+    return iter->second;
+  }
+
+  if (bind_using_ifname_) {
+    for (auto const& iter : network_handle_by_if_name_) {
+      // Use substring match so that e.g. if_name="v4-wlan0" is matched
+      // against iter="wlan0".
+      if (if_name.find(iter.first) != absl::string_view::npos) {
+        return absl::make_optional(iter.second);
+      }
+    }
+  }
+
+  return absl::nullopt;
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_LOG(LS_INFO) << "Network disconnected for handle " << handle;
+  auto iter = network_info_by_handle_.find(handle);
+  if (iter == network_info_by_handle_.end()) {
+    return;
+  }
+
+  for (const rtc::IPAddress& address : iter->second.ip_addresses) {
+    network_handle_by_address_.erase(address);
+  }
+
+  // We have discovered that the if_name is not always unique, i.e. there can
+  // be several networks connected with the same if_name.
+  //
+  // This is handled in the following way:
+  // 1) OnNetworkConnected_n overwrites any previous "owner" of an interface
+  // name ("owner" == entry in network_handle_by_if_name_).
+  // 2) In OnNetworkDisconnected_n, we scan and see if there is any remaining
+  // connected network with the interface name, and if so set it as owner.
+  //
+  // This means that network_info_by_handle can have more entries than
+  // network_handle_by_if_name_.
+
+  // Check if we are registered as "owner" of if_name.
+  const auto& if_name = iter->second.interface_name;
+  auto iter2 = network_handle_by_if_name_.find(if_name);
+  RTC_DCHECK(iter2 != network_handle_by_if_name_.end());
+  if (iter2 != network_handle_by_if_name_.end() && iter2->second == handle) {
+    // We are the owner...
+    // Check if there is someone else we can set as owner.
+    bool found = false;
+    for (const auto& info : network_info_by_handle_) {
+      if (info.first == handle) {
+        continue;
+      }
+      if (info.second.interface_name == if_name) {
+        found = true;
+        network_handle_by_if_name_[if_name] = info.first;
+        break;
+      }
+    }
+    if (!found) {
+      // No new owner...
+      network_handle_by_if_name_.erase(iter2);
+    }
+  } else {
+    // We are not the owner... don't do anything.
+#if RTC_DCHECK_IS_ON
+    auto owner_handle = FindNetworkHandleFromIfname(if_name);
+    RTC_DCHECK(owner_handle && *owner_handle != handle);
+#endif
+  }
+
+  network_info_by_handle_.erase(iter);
+}
+
+void AndroidNetworkMonitor::OnNetworkPreference_n(
+    NetworkType type,
+    rtc::NetworkPreference preference) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_LOG(LS_INFO) << "Android network monitor preference for "
+                   << NetworkTypeToString(type) << " changed to "
+                   << rtc::NetworkPreferenceToString(preference);
+  auto adapter_type =
+      AdapterTypeFromNetworkType(type, surface_cellular_types_);
+  network_preference_by_adapter_type_[adapter_type] = preference;
+  InvokeNetworksChangedCallback();
+}
+
+void AndroidNetworkMonitor::SetNetworkInfos(
+    const std::vector<NetworkInformation>& network_infos) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  // We expect this method to be called once directly after startMonitoring.
+  // All the caches should be empty.
+  RTC_DCHECK(network_handle_by_if_name_.empty());
+  RTC_DCHECK(network_handle_by_address_.empty());
+  RTC_DCHECK(network_info_by_handle_.empty());
+  RTC_DCHECK(network_preference_by_adapter_type_.empty());
+
+  // ...but reset just in case.
+ reset(); + RTC_LOG(LS_INFO) << "Android network monitor found " << network_infos.size() + << " networks"; + for (const NetworkInformation& network : network_infos) { + OnNetworkConnected_n(network); + } +} + +rtc::NetworkMonitorInterface::InterfaceInfo +AndroidNetworkMonitor::GetInterfaceInfo(absl::string_view if_name) { + RTC_DCHECK_RUN_ON(network_thread_); + auto handle = FindNetworkHandleFromIfname(if_name); + if (!handle) { + return { + .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN, + .available = (disable_is_adapter_available_ ? true : false), + }; + } + auto iter = network_info_by_handle_.find(*handle); + RTC_DCHECK(iter != network_info_by_handle_.end()); + if (iter == network_info_by_handle_.end()) { + return { + .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN, + .available = (disable_is_adapter_available_ ? true : false), + }; + } + + auto type = + AdapterTypeFromNetworkType(iter->second.type, surface_cellular_types_); + auto vpn_type = + (type == rtc::ADAPTER_TYPE_VPN) + ? AdapterTypeFromNetworkType(iter->second.underlying_type_for_vpn, + surface_cellular_types_) + : rtc::ADAPTER_TYPE_UNKNOWN; + return { + .adapter_type = type, + .underlying_type_for_vpn = vpn_type, + .network_preference = GetNetworkPreference(type), + .available = true, + }; +} + +rtc::NetworkPreference AndroidNetworkMonitor::GetNetworkPreference( + rtc::AdapterType adapter_type) const { + RTC_DCHECK_RUN_ON(network_thread_); + auto preference_iter = network_preference_by_adapter_type_.find(adapter_type); + if (preference_iter == network_preference_by_adapter_type_.end()) { + return rtc::NetworkPreference::NEUTRAL; + } + + return preference_iter->second; +} + +AndroidNetworkMonitorFactory::AndroidNetworkMonitorFactory() + : j_application_context_(nullptr) {} + +AndroidNetworkMonitorFactory::AndroidNetworkMonitorFactory( + JNIEnv* env, + const JavaRef<jobject>& j_application_context) + : j_application_context_(env, j_application_context) {} + +AndroidNetworkMonitorFactory::~AndroidNetworkMonitorFactory() = default; + +rtc::NetworkMonitorInterface* +AndroidNetworkMonitorFactory::CreateNetworkMonitor( + const FieldTrialsView& field_trials) { + return new AndroidNetworkMonitor(AttachCurrentThreadIfNeeded(), + j_application_context_, field_trials); +} + +void AndroidNetworkMonitor::NotifyConnectionTypeChanged( + JNIEnv* env, + const JavaRef<jobject>& j_caller) { + network_thread_->PostTask(SafeTask(safety_flag_, [this] { + RTC_LOG(LS_INFO) + << "Android network monitor detected connection type change."; + InvokeNetworksChangedCallback(); + })); +} + +void AndroidNetworkMonitor::NotifyOfActiveNetworkList( + JNIEnv* env, + const JavaRef<jobject>& j_caller, + const JavaRef<jobjectArray>& j_network_infos) { + std::vector<NetworkInformation> network_infos = + JavaToNativeVector<NetworkInformation>(env, j_network_infos, + &GetNetworkInformationFromJava); + SetNetworkInfos(network_infos); +} + +void AndroidNetworkMonitor::NotifyOfNetworkConnect( + JNIEnv* env, + const JavaRef<jobject>& j_caller, + const JavaRef<jobject>& j_network_info) { + NetworkInformation network_info = + GetNetworkInformationFromJava(env, j_network_info); + network_thread_->PostTask( + SafeTask(safety_flag_, [this, network_info = std::move(network_info)] { + OnNetworkConnected_n(network_info); + })); +} + +void AndroidNetworkMonitor::NotifyOfNetworkDisconnect( + JNIEnv* env, + const JavaRef<jobject>& j_caller, + jlong network_handle) { + network_thread_->PostTask(SafeTask(safety_flag_, [this, network_handle] { + 
OnNetworkDisconnected_n(static_cast<NetworkHandle>(network_handle)); + })); +} + +void AndroidNetworkMonitor::NotifyOfNetworkPreference( + JNIEnv* env, + const JavaRef<jobject>& j_caller, + const JavaRef<jobject>& j_connection_type, + jint jpreference) { + NetworkType type = GetNetworkTypeFromJava(env, j_connection_type); + rtc::NetworkPreference preference = + static_cast<rtc::NetworkPreference>(jpreference); + + network_thread_->PostTask(SafeTask(safety_flag_, [this, type, preference] { + OnNetworkPreference_n(type, preference); + })); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h new file mode 100644 index 0000000000..d0aad5ea76 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h @@ -0,0 +1,198 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_ +#define SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_ + +#include <stdint.h> + +#include <map> +#include <string> +#include <vector> + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "rtc_base/network_monitor.h" +#include "rtc_base/network_monitor_factory.h" +#include "rtc_base/string_utils.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace test { +class AndroidNetworkMonitorTest; +} // namespace test + +namespace jni { + +typedef int64_t NetworkHandle; + +// c++ equivalent of java NetworkChangeDetector.ConnectionType. +enum NetworkType { + NETWORK_UNKNOWN, + NETWORK_ETHERNET, + NETWORK_WIFI, + NETWORK_5G, + NETWORK_4G, + NETWORK_3G, + NETWORK_2G, + NETWORK_UNKNOWN_CELLULAR, + NETWORK_BLUETOOTH, + NETWORK_VPN, + NETWORK_NONE +}; + +// The information is collected from Android OS so that the native code can get +// the network type and handle (Android network ID) for each interface. +struct NetworkInformation { + std::string interface_name; + NetworkHandle handle; + NetworkType type; + NetworkType underlying_type_for_vpn; + std::vector<rtc::IPAddress> ip_addresses; + + NetworkInformation(); + NetworkInformation(const NetworkInformation&); + NetworkInformation(NetworkInformation&&); + ~NetworkInformation(); + NetworkInformation& operator=(const NetworkInformation&); + NetworkInformation& operator=(NetworkInformation&&); + + std::string ToString() const; +}; + +class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface { + public: + AndroidNetworkMonitor(JNIEnv* env, + const JavaRef<jobject>& j_application_context, + const FieldTrialsView& field_trials); + ~AndroidNetworkMonitor() override; + + // TODO(sakal): Remove once down stream dependencies have been updated. + static void SetAndroidContext(JNIEnv* jni, jobject context) {} + + void Start() override; + void Stop() override; + + // Does `this` NetworkMonitorInterface implement BindSocketToNetwork? + // Only Android returns true. 
+  bool SupportsBindSocketToNetwork() const override { return true; }
+
+  rtc::NetworkBindingResult BindSocketToNetwork(
+      int socket_fd,
+      const rtc::IPAddress& address,
+      absl::string_view if_name) override;
+
+  InterfaceInfo GetInterfaceInfo(absl::string_view if_name) override;
+
+  // Always expected to be called on the network thread.
+  void SetNetworkInfos(const std::vector<NetworkInformation>& network_infos);
+
+  void NotifyConnectionTypeChanged(JNIEnv* env,
+                                   const JavaRef<jobject>& j_caller);
+  void NotifyOfNetworkConnect(JNIEnv* env,
+                              const JavaRef<jobject>& j_caller,
+                              const JavaRef<jobject>& j_network_info);
+  void NotifyOfNetworkDisconnect(JNIEnv* env,
+                                 const JavaRef<jobject>& j_caller,
+                                 jlong network_handle);
+  void NotifyOfActiveNetworkList(JNIEnv* env,
+                                 const JavaRef<jobject>& j_caller,
+                                 const JavaRef<jobjectArray>& j_network_infos);
+  void NotifyOfNetworkPreference(JNIEnv* env,
+                                 const JavaRef<jobject>& j_caller,
+                                 const JavaRef<jobject>& j_connection_type,
+                                 jint preference);
+
+  // Visible for testing.
+  absl::optional<NetworkHandle> FindNetworkHandleFromAddressOrName(
+      const rtc::IPAddress& address,
+      absl::string_view ifname) const;
+
+ private:
+  void reset();
+  void OnNetworkConnected_n(const NetworkInformation& network_info);
+  void OnNetworkDisconnected_n(NetworkHandle network_handle);
+  void OnNetworkPreference_n(NetworkType type,
+                             rtc::NetworkPreference preference);
+
+  rtc::NetworkPreference GetNetworkPreference(rtc::AdapterType) const;
+  absl::optional<NetworkHandle> FindNetworkHandleFromIfname(
+      absl::string_view ifname) const;
+
+  const int android_sdk_int_;
+  ScopedJavaGlobalRef<jobject> j_application_context_;
+  ScopedJavaGlobalRef<jobject> j_network_monitor_;
+  rtc::Thread* const network_thread_;
+  bool started_ RTC_GUARDED_BY(network_thread_) = false;
+  std::map<std::string, NetworkHandle, rtc::AbslStringViewCmp>
+      network_handle_by_if_name_ RTC_GUARDED_BY(network_thread_);
+  std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_
+      RTC_GUARDED_BY(network_thread_);
+  std::map<NetworkHandle, NetworkInformation> network_info_by_handle_
+      RTC_GUARDED_BY(network_thread_);
+  std::map<rtc::AdapterType, rtc::NetworkPreference>
+      network_preference_by_adapter_type_ RTC_GUARDED_BY(network_thread_);
+  bool find_network_handle_without_ipv6_temporary_part_
+      RTC_GUARDED_BY(network_thread_) = false;
+  bool surface_cellular_types_ RTC_GUARDED_BY(network_thread_) = false;
+
+  // NOTE: If bind_using_ifname_ is true, the adapter name is used with
+  // substring matching as follows: an adapter name reported by Android as
+  // 'wlan0' will be matched with 'v4-wlan0' ("v4-wlan0".find("wlan0") != npos).
+  // This applies to adapter_type_by_name_, vpn_underlying_adapter_type_by_name_
+  // and FindNetworkHandleFromIfname.
+  bool bind_using_ifname_ RTC_GUARDED_BY(network_thread_) = true;
+
+  // NOTE: disable_is_adapter_available_ is a kill switch for the impl.
+  // of IsAdapterAvailable().
+  bool disable_is_adapter_available_ RTC_GUARDED_BY(network_thread_) = false;
+
+  rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag_
+      RTC_PT_GUARDED_BY(network_thread_) = nullptr;
+
+  const FieldTrialsView& field_trials_;
+
+  friend class webrtc::test::AndroidNetworkMonitorTest;
+};
+
+class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+  // Deprecated. Pass in application context to this class.
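+  //
+  // How this factory is typically wired in, as a sketch (the dependency
+  // field is from api/peer_connection_interface.h; `env` and `context` are
+  // placeholders):
+  //
+  //   PeerConnectionFactoryDependencies deps;
+  //   deps.network_monitor_factory =
+  //       std::make_unique<AndroidNetworkMonitorFactory>(env, context);
+  //
+  // The no-argument constructor below is the deprecated variant.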
+ AndroidNetworkMonitorFactory(); + + AndroidNetworkMonitorFactory(JNIEnv* env, + const JavaRef<jobject>& j_application_context); + + ~AndroidNetworkMonitorFactory() override; + + rtc::NetworkMonitorInterface* CreateNetworkMonitor( + const FieldTrialsView& field_trials) override; + + private: + ScopedJavaGlobalRef<jobject> j_application_context_; +}; + +} // namespace jni +} // namespace webrtc + +// TODO(magjed): Remove once external clients are updated. +namespace webrtc_jni { + +using webrtc::jni::AndroidNetworkMonitor; +using webrtc::jni::AndroidNetworkMonitorFactory; + +} // namespace webrtc_jni + +#endif // SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc new file mode 100644 index 0000000000..4f3152dc6f --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/android_video_track_source.h" + +#include <utility> + +#include "rtc_base/logging.h" +#include "sdk/android/generated_video_jni/NativeAndroidVideoTrackSource_jni.h" +#include "sdk/android/src/jni/video_frame.h" + +namespace webrtc { +namespace jni { + +namespace { +// MediaCodec wants resolution to be divisible by 2. +const int kRequiredResolutionAlignment = 2; + +VideoRotation jintToVideoRotation(jint rotation) { + RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || + rotation == 270); + return static_cast<VideoRotation>(rotation); +} + +absl::optional<std::pair<int, int>> OptionalAspectRatio(jint j_width, + jint j_height) { + if (j_width > 0 && j_height > 0) + return std::pair<int, int>(j_width, j_height); + return absl::nullopt; +} + +} // namespace + +AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread, + JNIEnv* jni, + bool is_screencast, + bool align_timestamps) + : AdaptedVideoTrackSource(kRequiredResolutionAlignment), + signaling_thread_(signaling_thread), + is_screencast_(is_screencast), + align_timestamps_(align_timestamps) { + RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor"; +} +AndroidVideoTrackSource::~AndroidVideoTrackSource() = default; + +bool AndroidVideoTrackSource::is_screencast() const { + return is_screencast_.load(); +} + +absl::optional<bool> AndroidVideoTrackSource::needs_denoising() const { + return false; +} + +void AndroidVideoTrackSource::SetState(JNIEnv* env, + jboolean j_is_live) { + const SourceState state = j_is_live ? kLive : kEnded; + if (state_.exchange(state) != state) { + if (rtc::Thread::Current() == signaling_thread_) { + FireOnChanged(); + } else { + // TODO(sakal): Is this even necessary, does FireOnChanged have to be + // called from signaling thread? 
+ signaling_thread_->PostTask([this] { FireOnChanged(); }); + } + } +} + +AndroidVideoTrackSource::SourceState AndroidVideoTrackSource::state() const { + return state_.load(); +} + +bool AndroidVideoTrackSource::remote() const { + return false; +} + +void AndroidVideoTrackSource::SetIsScreencast(JNIEnv* env, + jboolean j_is_screencast) { + is_screencast_.store(j_is_screencast); +} + +ScopedJavaLocalRef<jobject> AndroidVideoTrackSource::AdaptFrame( + JNIEnv* env, + jint j_width, + jint j_height, + jint j_rotation, + jlong j_timestamp_ns) { + const VideoRotation rotation = jintToVideoRotation(j_rotation); + + const int64_t camera_time_us = j_timestamp_ns / rtc::kNumNanosecsPerMicrosec; + const int64_t aligned_timestamp_ns = + align_timestamps_ ? rtc::kNumNanosecsPerMicrosec * + timestamp_aligner_.TranslateTimestamp( + camera_time_us, rtc::TimeMicros()) + : j_timestamp_ns; + + int adapted_width = 0; + int adapted_height = 0; + int crop_width = 0; + int crop_height = 0; + int crop_x = 0; + int crop_y = 0; + bool drop; + + // TODO(magjed): Move this logic to users of NativeAndroidVideoTrackSource + // instead, in order to keep this native wrapping layer as thin as possible. + if (rotation % 180 == 0) { + drop = !rtc::AdaptedVideoTrackSource::AdaptFrame( + j_width, j_height, camera_time_us, &adapted_width, &adapted_height, + &crop_width, &crop_height, &crop_x, &crop_y); + } else { + // Swap all width/height and x/y. + drop = !rtc::AdaptedVideoTrackSource::AdaptFrame( + j_height, j_width, camera_time_us, &adapted_height, &adapted_width, + &crop_height, &crop_width, &crop_y, &crop_x); + } + + return Java_NativeAndroidVideoTrackSource_createFrameAdaptationParameters( + env, crop_x, crop_y, crop_width, crop_height, adapted_width, + adapted_height, aligned_timestamp_ns, drop); +} + +void AndroidVideoTrackSource::OnFrameCaptured( + JNIEnv* env, + jint j_rotation, + jlong j_timestamp_ns, + const JavaRef<jobject>& j_video_frame_buffer) { + rtc::scoped_refptr<VideoFrameBuffer> buffer = + JavaToNativeFrameBuffer(env, j_video_frame_buffer); + const VideoRotation rotation = jintToVideoRotation(j_rotation); + + // AdaptedVideoTrackSource handles applying rotation for I420 frames. + if (apply_rotation() && rotation != kVideoRotation_0) + buffer = buffer->ToI420(); + + OnFrame(VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_rotation(rotation) + .set_timestamp_us(j_timestamp_ns / rtc::kNumNanosecsPerMicrosec) + .build()); +} + +void AndroidVideoTrackSource::AdaptOutputFormat( + JNIEnv* env, + jint j_landscape_width, + jint j_landscape_height, + const JavaRef<jobject>& j_max_landscape_pixel_count, + jint j_portrait_width, + jint j_portrait_height, + const JavaRef<jobject>& j_max_portrait_pixel_count, + const JavaRef<jobject>& j_max_fps) { + video_adapter()->OnOutputFormatRequest( + OptionalAspectRatio(j_landscape_width, j_landscape_height), + JavaToNativeOptionalInt(env, j_max_landscape_pixel_count), + OptionalAspectRatio(j_portrait_width, j_portrait_height), + JavaToNativeOptionalInt(env, j_max_portrait_pixel_count), + JavaToNativeOptionalInt(env, j_max_fps)); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h new file mode 100644 index 0000000000..625633b90b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
+#define API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
+
+#include <jni.h>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "media/base/adapted_video_track_source.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/timestamp_aligner.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// This class needs to be used in conjunction with the corresponding Java
+// class NativeAndroidVideoTrackSource. This class is thread safe and methods
+// can be called from any thread, but if frames A, B, ..., are sent to
+// adaptFrame(), the adapted frames adaptedA, adaptedB, ..., need to be passed
+// in the same order to onFrameCaptured().
+class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
+ public:
+  AndroidVideoTrackSource(rtc::Thread* signaling_thread,
+                          JNIEnv* jni,
+                          bool is_screencast,
+                          bool align_timestamps);
+  ~AndroidVideoTrackSource() override;
+
+  bool is_screencast() const override;
+
+  // Indicates that the encoder should denoise video before encoding it.
+  // If it is not set, the default configuration is used which is different
+  // depending on video codec.
+  absl::optional<bool> needs_denoising() const override;
+
+  void SetState(SourceState state);
+
+  SourceState state() const override;
+
+  bool remote() const override;
+
+  // This function should be called before delivering any frame to determine
+  // if the frame should be dropped or what the cropping and scaling
+  // parameters should be. This function is thread safe and can be called
+  // from any thread. This function returns
+  // NativeAndroidVideoTrackSource.FrameAdaptationParameters, or null if the
+  // frame should be dropped.
+  ScopedJavaLocalRef<jobject> AdaptFrame(JNIEnv* env,
+                                         jint j_width,
+                                         jint j_height,
+                                         jint j_rotation,
+                                         jlong j_timestamp_ns);
+
+  // This function converts and passes the frame on to the rest of the C++
+  // WebRTC layer. Note that GetFrameAdaptationParameters() is expected to be
+  // called first and that the delivered frame conforms to those parameters.
+  // This function is thread safe and can be called from any thread.
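+  //
+  // Expected call sequence from the Java side, sketched in pseudo-Java
+  // (names follow the class comment above; `buffer` is a placeholder):
+  //
+  //   params = adaptFrame(width, height, rotation, timestampNs);
+  //   if (params != null) {                  // null means "drop the frame"
+  //     onFrameCaptured(rotation, timestampNs, buffer);  // same frame order
+  //   }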
+ void OnFrameCaptured(JNIEnv* env, + jint j_rotation, + jlong j_timestamp_ns, + const JavaRef<jobject>& j_video_frame_buffer); + + void SetState(JNIEnv* env, + jboolean j_is_live); + + void AdaptOutputFormat(JNIEnv* env, + jint j_landscape_width, + jint j_landscape_height, + const JavaRef<jobject>& j_max_landscape_pixel_count, + jint j_portrait_width, + jint j_portrait_height, + const JavaRef<jobject>& j_max_portrait_pixel_count, + const JavaRef<jobject>& j_max_fps); + + void SetIsScreencast(JNIEnv* env, jboolean j_is_screencast); + + private: + rtc::Thread* signaling_thread_; + std::atomic<SourceState> state_; + std::atomic<bool> is_screencast_; + rtc::TimestampAligner timestamp_aligner_; + const bool align_timestamps_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS b/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS new file mode 100644 index 0000000000..9a3adee687 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS @@ -0,0 +1,4 @@ +include_rules = [ + "+base/android/jni_android.h", + "+modules/audio_device", +] diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS b/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS new file mode 100644 index 0000000000..95662c195c --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS @@ -0,0 +1 @@ +henrika@webrtc.org diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc new file mode 100644 index 0000000000..ae8fcb9613 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc @@ -0,0 +1,247 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "sdk/android/src/jni/audio_device/aaudio_player.h"
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+namespace jni {
+
+enum AudioDeviceMessageType : uint32_t {
+  kMessageOutputStreamDisconnected,
+};
+
+AAudioPlayer::AAudioPlayer(const AudioParameters& audio_parameters)
+    : main_thread_(rtc::Thread::Current()),
+      aaudio_(audio_parameters, AAUDIO_DIRECTION_OUTPUT, this) {
+  RTC_LOG(LS_INFO) << "ctor";
+  thread_checker_aaudio_.Detach();
+}
+
+AAudioPlayer::~AAudioPlayer() {
+  RTC_LOG(LS_INFO) << "dtor";
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  Terminate();
+  RTC_LOG(LS_INFO) << "#detected underruns: " << underrun_count_;
+}
+
+int AAudioPlayer::Init() {
+  RTC_LOG(LS_INFO) << "Init";
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  if (aaudio_.audio_parameters().channels() == 2) {
+    RTC_DLOG(LS_WARNING) << "Stereo mode is enabled";
+  }
+  return 0;
+}
+
+int AAudioPlayer::Terminate() {
+  RTC_LOG(LS_INFO) << "Terminate";
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  StopPlayout();
+  return 0;
+}
+
+int AAudioPlayer::InitPlayout() {
+  RTC_LOG(LS_INFO) << "InitPlayout";
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  RTC_DCHECK(!initialized_);
+  RTC_DCHECK(!playing_);
+  if (!aaudio_.Init()) {
+    return -1;
+  }
+  initialized_ = true;
+  return 0;
+}
+
+bool AAudioPlayer::PlayoutIsInitialized() const {
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  return initialized_;
+}
+
+int AAudioPlayer::StartPlayout() {
+  RTC_LOG(LS_INFO) << "StartPlayout";
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  RTC_DCHECK(!playing_);
+  if (!initialized_) {
+    RTC_DLOG(LS_WARNING)
+        << "Playout cannot start since InitPlayout must succeed first";
+    return 0;
+  }
+  if (fine_audio_buffer_) {
+    fine_audio_buffer_->ResetPlayout();
+  }
+  if (!aaudio_.Start()) {
+    return -1;
+  }
+  underrun_count_ = aaudio_.xrun_count();
+  first_data_callback_ = true;
+  playing_ = true;
+  return 0;
+}
+
+int AAudioPlayer::StopPlayout() {
+  RTC_LOG(LS_INFO) << "StopPlayout";
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  if (!initialized_ || !playing_) {
+    return 0;
+  }
+  if (!aaudio_.Stop()) {
+    RTC_LOG(LS_ERROR) << "StopPlayout failed";
+    return -1;
+  }
+  thread_checker_aaudio_.Detach();
+  initialized_ = false;
+  playing_ = false;
+  return 0;
+}
+
+bool AAudioPlayer::Playing() const {
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  return playing_;
+}
+
+void AAudioPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+  RTC_DLOG(LS_INFO) << "AttachAudioBuffer";
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  audio_device_buffer_ = audioBuffer;
+  const AudioParameters audio_parameters = aaudio_.audio_parameters();
+  audio_device_buffer_->SetPlayoutSampleRate(audio_parameters.sample_rate());
+  audio_device_buffer_->SetPlayoutChannels(audio_parameters.channels());
+  RTC_CHECK(audio_device_buffer_);
+  // Create a modified audio buffer class which allows us to ask for any
+  // number of samples (and not only multiples of 10 ms) to match the optimal
+  // buffer size per callback used by AAudio.
+  fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+}
+
+bool AAudioPlayer::SpeakerVolumeIsAvailable() {
+  return false;
+}
+
+int AAudioPlayer::SetSpeakerVolume(uint32_t volume) {
+  return -1;
+}
+
+absl::optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
+  return absl::nullopt;
+}
+
+absl::optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
+  return absl::nullopt;
+}
+
+absl::optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
+  return absl::nullopt;
+}
+
+void AAudioPlayer::OnErrorCallback(aaudio_result_t error) {
+  RTC_LOG(LS_ERROR) << "OnErrorCallback: " << AAudio_convertResultToText(error);
+  // TODO(henrika): investigate if we can use a thread checker here. Initial
+  // tests show that this callback can sometimes be called on a unique thread
+  // but according to the documentation it should be on the same thread as the
+  // data callback.
+  // RTC_DCHECK_RUN_ON(&thread_checker_aaudio_);
+  if (aaudio_.stream_state() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    // The stream is disconnected and any attempt to use it will return
+    // AAUDIO_ERROR_DISCONNECTED.
+    RTC_LOG(LS_WARNING) << "Output stream disconnected";
+    // AAudio documentation states: "You should not close or reopen the stream
+    // from the callback, use another thread instead". A message is therefore
+    // sent to the main thread to do the restart operation.
+    RTC_DCHECK(main_thread_);
+    main_thread_->Post(RTC_FROM_HERE, this, kMessageOutputStreamDisconnected);
+  }
+}
+
+aaudio_data_callback_result_t AAudioPlayer::OnDataCallback(void* audio_data,
+                                                           int32_t num_frames) {
+  RTC_DCHECK_RUN_ON(&thread_checker_aaudio_);
+  // Log device id in first data callback to ensure that a valid device is
+  // utilized.
+  if (first_data_callback_) {
+    RTC_LOG(LS_INFO) << "--- First output data callback: "
+                        "device id="
+                     << aaudio_.device_id();
+    first_data_callback_ = false;
+  }
+
+  // Check if the underrun count has increased. If it has, increase the buffer
+  // size by adding the size of a burst. It will reduce the risk of underruns
+  // at the expense of an increased latency.
+  // TODO(henrika): enable possibility to disable and/or tune the algorithm.
+  const int32_t underrun_count = aaudio_.xrun_count();
+  if (underrun_count > underrun_count_) {
+    RTC_LOG(LS_ERROR) << "Underrun detected: " << underrun_count;
+    underrun_count_ = underrun_count;
+    aaudio_.IncreaseOutputBufferSize();
+  }
+
+  // Estimate latency between writing an audio frame to the output stream and
+  // the time that same frame is played out on the output audio device.
+  latency_millis_ = aaudio_.EstimateLatencyMillis();
+  // TODO(henrika): use for development only.
+  if (aaudio_.frames_written() % (1000 * aaudio_.frames_per_burst()) == 0) {
+    RTC_DLOG(LS_INFO) << "output latency: " << latency_millis_
+                      << ", num_frames: " << num_frames;
+  }
+
+  // Read audio data from the WebRTC source using the FineAudioBuffer object
+  // and write that data into `audio_data` to be played out by AAudio.
+  // Prime output with zeros during a short initial phase to avoid distortion.
+  // TODO(henrika): do more work to figure out if the initial forced silence
+  // period is really needed.
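+  //
+  // Rough arithmetic for that silence window, as a sketch (assuming the
+  // typical 48 kHz rate and 192-frame burst quoted in aaudio_player.h):
+  //
+  //   50 bursts * 192 frames / 48000 frames/s = 0.2 s of primed silence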
+  if (aaudio_.frames_written() < 50 * aaudio_.frames_per_burst()) {
+    const size_t num_bytes =
+        sizeof(int16_t) * aaudio_.samples_per_frame() * num_frames;
+    memset(audio_data, 0, num_bytes);
+  } else {
+    fine_audio_buffer_->GetPlayoutData(
+        rtc::MakeArrayView(static_cast<int16_t*>(audio_data),
+                           aaudio_.samples_per_frame() * num_frames),
+        static_cast<int>(latency_millis_ + 0.5));
+  }
+
+  // TODO(henrika): possibly add trace here to be included in systrace.
+  // See https://developer.android.com/studio/profile/systrace-commandline.html.
+  return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+void AAudioPlayer::OnMessage(rtc::Message* msg) {
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  switch (msg->message_id) {
+    case kMessageOutputStreamDisconnected:
+      HandleStreamDisconnected();
+      break;
+  }
+}
+
+void AAudioPlayer::HandleStreamDisconnected() {
+  RTC_DCHECK_RUN_ON(&main_thread_checker_);
+  RTC_DLOG(LS_INFO) << "HandleStreamDisconnected";
+  if (!initialized_ || !playing_) {
+    return;
+  }
+  // Perform a restart by first closing the disconnected stream and then
+  // starting a new stream, this time using the new (preferred) audio output
+  // device.
+  StopPlayout();
+  InitPlayout();
+  StartPlayout();
+}
+
+}  // namespace jni
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h
new file mode 100644
index 0000000000..9e775ecfa3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
+
+#include <aaudio/AAudio.h>
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+class AudioDeviceBuffer;
+class FineAudioBuffer;
+
+namespace jni {
+
+// Implements low-latency 16-bit mono PCM audio output support for Android
+// using the C based AAudio API.
+//
+// An instance must be created and destroyed on one and the same thread.
+// All public methods must also be called on the same thread. A thread checker
+// will DCHECK if any method is called on an invalid thread. Audio buffers
+// are requested on a dedicated high-priority thread owned by AAudio.
+//
+// The existing design forces the user to call InitPlayout() after
+// StopPlayout() to be able to call StartPlayout() again. This is in line with
+// how the Java-based implementation works.
+//
+// An audio stream can be disconnected, e.g. when an audio device is removed.
+// This implementation will restart the audio stream using the new preferred
+// device if such an event happens.
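+//
+// Back-of-the-envelope cadence sketch (example figures only, taken from the
+// FineAudioBuffer comment further down): at 48 kHz with a 192-frame burst,
+// AAudio requests data every 192 / 48000 s = 4 ms, while WebRTC produces
+// 10 ms chunks of 480 frames, so fresh data is only needed from WebRTC about
+// every 2.5 callbacks; the rest is served from cached audio.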
+//
+// Also supports automatic buffer-size adjustment based on underrun detections
+// where the internal AAudio buffer can be increased when needed. It will
+// reduce the risk of underruns (~glitches) at the expense of an increased
+// latency.
+class AAudioPlayer final : public AudioOutput,
+                           public AAudioObserverInterface,
+                           public rtc::MessageHandler {
+ public:
+  explicit AAudioPlayer(const AudioParameters& audio_parameters);
+  ~AAudioPlayer() override;
+
+  int Init() override;
+  int Terminate() override;
+
+  int InitPlayout() override;
+  bool PlayoutIsInitialized() const override;
+
+  int StartPlayout() override;
+  int StopPlayout() override;
+  bool Playing() const override;
+
+  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+  // Not implemented in AAudio.
+  bool SpeakerVolumeIsAvailable() override;
+  int SetSpeakerVolume(uint32_t volume) override;
+  absl::optional<uint32_t> SpeakerVolume() const override;
+  absl::optional<uint32_t> MaxSpeakerVolume() const override;
+  absl::optional<uint32_t> MinSpeakerVolume() const override;
+
+ protected:
+  // AAudioObserverInterface implementation.
+
+  // For an output stream, this function should render and write `num_frames`
+  // of data in the stream's current data format to the `audio_data` buffer.
+  // Called on a real-time thread owned by AAudio.
+  aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+                                               int32_t num_frames) override;
+  // AAudio calls this function if any error occurs on a callback thread.
+  // Called on a real-time thread owned by AAudio.
+  void OnErrorCallback(aaudio_result_t error) override;
+
+  // rtc::MessageHandler used for restart messages from the error-callback
+  // thread to the main (creating) thread.
+  void OnMessage(rtc::Message* msg) override;
+
+ private:
+  // Closes the existing stream and starts a new stream.
+  void HandleStreamDisconnected();
+
+  // Ensures that methods are called from the same thread as this object is
+  // created on.
+  SequenceChecker main_thread_checker_;
+
+  // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a
+  // real-time thread owned by AAudio. Detached during construction of this
+  // object.
+  SequenceChecker thread_checker_aaudio_;
+
+  // The thread on which this object is created.
+  rtc::Thread* main_thread_;
+
+  // Wraps all AAudio resources. Contains an output stream using the default
+  // output audio device. Can be accessed on both the main thread and the
+  // real-time thread owned by AAudio. See separate AAudio documentation about
+  // thread safety.
+  AAudioWrapper aaudio_;
+
+  // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data
+  // in chunks of 10ms. It then allows for this data to be pulled in
+  // a finer or coarser granularity. I.e. interacting with this class instead
+  // of directly with the AudioDeviceBuffer one can ask for any number of
+  // audio data samples.
+  // Example: native buffer size can be 192 audio frames at 48kHz sample rate.
+  // WebRTC will provide 480 audio frames per 10ms but AAudio asks for 192
+  // in each callback (once every 4th ms). This class can then ask for 192 and
+  // the FineAudioBuffer will ask WebRTC for new data approximately only every
+  // second callback and also cache non-utilized audio.
+  std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+  // Counts number of detected underrun events reported by AAudio.
+  int32_t underrun_count_ = 0;
+
+  // True only for the first data callback in each audio session.
+  bool first_data_callback_ = true;
+
+  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+  // AudioDeviceModuleImpl class and set by AudioDeviceModule::Create().
+  AudioDeviceBuffer* audio_device_buffer_ RTC_GUARDED_BY(main_thread_checker_) =
+      nullptr;
+
+  bool initialized_ RTC_GUARDED_BY(main_thread_checker_) = false;
+  bool playing_ RTC_GUARDED_BY(main_thread_checker_) = false;
+
+  // Estimated latency between writing an audio frame to the output stream and
+  // the time that same frame is played out on the output audio device.
+  double latency_millis_ RTC_GUARDED_BY(thread_checker_aaudio_) = 0;
+};
+
+}  // namespace jni
+
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc
new file mode 100644
index 0000000000..d66c1d0235
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc
@@ -0,0 +1,234 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/aaudio_recorder.h"
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+
+namespace jni {
+
+enum AudioDeviceMessageType : uint32_t {
+  kMessageInputStreamDisconnected,
+};
+
+AAudioRecorder::AAudioRecorder(const AudioParameters& audio_parameters)
+    : main_thread_(rtc::Thread::Current()),
+      aaudio_(audio_parameters, AAUDIO_DIRECTION_INPUT, this) {
+  RTC_LOG(LS_INFO) << "ctor";
+  thread_checker_aaudio_.Detach();
+}
+
+AAudioRecorder::~AAudioRecorder() {
+  RTC_LOG(LS_INFO) << "dtor";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  Terminate();
+  RTC_LOG(LS_INFO) << "detected overflows: " << overflow_count_;
+}
+
+int AAudioRecorder::Init() {
+  RTC_LOG(LS_INFO) << "Init";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  if (aaudio_.audio_parameters().channels() == 2) {
+    RTC_DLOG(LS_WARNING) << "Stereo mode is enabled";
+  }
+  return 0;
+}
+
+int AAudioRecorder::Terminate() {
+  RTC_LOG(LS_INFO) << "Terminate";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  StopRecording();
+  return 0;
+}
+
+int AAudioRecorder::InitRecording() {
+  RTC_LOG(LS_INFO) << "InitRecording";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  RTC_DCHECK(!initialized_);
+  RTC_DCHECK(!recording_);
+  if (!aaudio_.Init()) {
+    return -1;
+  }
+  initialized_ = true;
+  return 0;
+}
+
+bool AAudioRecorder::RecordingIsInitialized() const {
+  return initialized_;
+}
+
+int AAudioRecorder::StartRecording() {
+  RTC_LOG(LS_INFO) << "StartRecording";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  RTC_DCHECK(initialized_);
+  RTC_DCHECK(!recording_);
+  if (fine_audio_buffer_) {
+    fine_audio_buffer_->ResetRecord();
+  }
+  if (!aaudio_.Start()) {
+    return -1;
+  }
+  overflow_count_ = aaudio_.xrun_count();
+  first_data_callback_ = true;
+  recording_ = true;
+  return 0;
+}
+
+int AAudioRecorder::StopRecording() {
+  RTC_LOG(LS_INFO) << "StopRecording";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  if (!initialized_ || !recording_) {
+    return 0;
+  }
+  if (!aaudio_.Stop()) {
+    return -1;
+  }
+  thread_checker_aaudio_.Detach();
+  initialized_ = false;
+  recording_ = false;
+  return 0;
+}
+
+bool AAudioRecorder::Recording() const {
+  return recording_;
+}
+
+void AAudioRecorder::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+  RTC_LOG(LS_INFO) << "AttachAudioBuffer";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  audio_device_buffer_ = audioBuffer;
+  const AudioParameters audio_parameters = aaudio_.audio_parameters();
+  audio_device_buffer_->SetRecordingSampleRate(audio_parameters.sample_rate());
+  audio_device_buffer_->SetRecordingChannels(audio_parameters.channels());
+  RTC_CHECK(audio_device_buffer_);
+  // Create a modified audio buffer class which allows us to deliver any
+  // number of samples (and not only multiples of 10 ms, which WebRTC uses) to
+  // match the native AAudio buffer size.
+  fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+}
+
+bool AAudioRecorder::IsAcousticEchoCancelerSupported() const {
+  return false;
+}
+
+bool AAudioRecorder::IsNoiseSuppressorSupported() const {
+  return false;
+}
+
+int AAudioRecorder::EnableBuiltInAEC(bool enable) {
+  RTC_LOG(LS_INFO) << "EnableBuiltInAEC: " << enable;
+  RTC_LOG(LS_ERROR) << "Not implemented";
+  return -1;
+}
+
+int AAudioRecorder::EnableBuiltInNS(bool enable) {
+  RTC_LOG(LS_INFO) << "EnableBuiltInNS: " << enable;
+  RTC_LOG(LS_ERROR) << "Not implemented";
+  return -1;
+}
+
+void AAudioRecorder::OnErrorCallback(aaudio_result_t error) {
+  RTC_LOG(LS_ERROR) << "OnErrorCallback: " << AAudio_convertResultToText(error);
+  // RTC_DCHECK(thread_checker_aaudio_.IsCurrent());
+  if (aaudio_.stream_state() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    // The stream is disconnected and any attempt to use it will return
+    // AAUDIO_ERROR_DISCONNECTED.
+    RTC_LOG(LS_WARNING) << "Input stream disconnected => restart is required";
+    // AAudio documentation states: "You should not close or reopen the stream
+    // from the callback, use another thread instead". A message is therefore
+    // sent to the main thread to do the restart operation.
+    RTC_DCHECK(main_thread_);
+    main_thread_->Post(RTC_FROM_HERE, this, kMessageInputStreamDisconnected);
+  }
+}
+
+// Read and process `num_frames` of data from the `audio_data` buffer.
+// TODO(henrika): possibly add trace here to be included in systrace.
+// See https://developer.android.com/studio/profile/systrace-commandline.html.
+aaudio_data_callback_result_t AAudioRecorder::OnDataCallback(
+    void* audio_data,
+    int32_t num_frames) {
+  // TODO(henrika): figure out why we sometimes hit this one.
+  // RTC_DCHECK(thread_checker_aaudio_.IsCurrent());
+  // RTC_LOG(LS_INFO) << "OnDataCallback: " << num_frames;
+  // Drain the input buffer at first callback to ensure that it does not
+  // contain any old data. Will also ensure that the lowest possible latency
+  // is obtained.
+  if (first_data_callback_) {
+    RTC_LOG(LS_INFO) << "--- First input data callback: "
+                        "device id="
+                     << aaudio_.device_id();
+    aaudio_.ClearInputStream(audio_data, num_frames);
+    first_data_callback_ = false;
+  }
+  // Check if the overflow counter has increased and if so log a warning.
+  // TODO(henrika): possibly add a UMA stat or capacity extension.
+  const int32_t overflow_count = aaudio_.xrun_count();
+  if (overflow_count > overflow_count_) {
+    RTC_LOG(LS_ERROR) << "Overflow detected: " << overflow_count;
+    overflow_count_ = overflow_count;
+  }
+  // Estimated time between when an audio frame was recorded by the input
+  // device and when it can be read on the input stream.
+  latency_millis_ = aaudio_.EstimateLatencyMillis();
+  // TODO(henrika): use for development only.
+  if (aaudio_.frames_read() % (1000 * aaudio_.frames_per_burst()) == 0) {
+    RTC_DLOG(LS_INFO) << "input latency: " << latency_millis_
+                      << ", num_frames: " << num_frames;
+  }
+  // Copy recorded audio in `audio_data` to the WebRTC sink using the
+  // FineAudioBuffer object.
+  fine_audio_buffer_->DeliverRecordedData(
+      rtc::MakeArrayView(static_cast<const int16_t*>(audio_data),
+                         aaudio_.samples_per_frame() * num_frames),
+      static_cast<int>(latency_millis_ + 0.5));
+
+  return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+void AAudioRecorder::OnMessage(rtc::Message* msg) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  switch (msg->message_id) {
+    case kMessageInputStreamDisconnected:
+      HandleStreamDisconnected();
+      break;
+    default:
+      RTC_LOG(LS_ERROR) << "Invalid message id: " << msg->message_id;
+      break;
+  }
+}
+
+void AAudioRecorder::HandleStreamDisconnected() {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  RTC_LOG(LS_INFO) << "HandleStreamDisconnected";
+  if (!initialized_ || !recording_) {
+    return;
+  }
+  // Perform a restart by first closing the disconnected stream and then
+  // starting a new stream, this time using the new (preferred) audio input
+  // device.
+  // TODO(henrika): resolve issue where one restart attempt leads to a long
+  // sequence of new calls to OnErrorCallback().
+  // See b/73148976 for details.
+  StopRecording();
+  InitRecording();
+  StartRecording();
+}
+
+}  // namespace jni
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h
new file mode 100644
index 0000000000..a911577bfe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_
+
+#include <aaudio/AAudio.h>
+
+#include <memory>
+
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+class FineAudioBuffer;
+class AudioDeviceBuffer;
+
+namespace jni {
+
+// Implements low-latency 16-bit mono PCM audio input support for Android
+// using the C based AAudio API.
+//
+// An instance must be created and destroyed on one and the same thread.
+// All public methods must also be called on the same thread. A thread checker
+// will RTC_DCHECK if any method is called on an invalid thread. Audio buffers
+// are delivered on a dedicated high-priority thread owned by AAudio.
+//
+// The existing design forces the user to call InitRecording() after
+// StopRecording() to be able to call StartRecording() again. This is in line
+// with how the Java-based implementation works.
+//
+// TODO(henrika): add comments about device changes and adaptive buffer
+// management.
+class AAudioRecorder : public AudioInput,
+                       public AAudioObserverInterface,
+                       public rtc::MessageHandler {
+ public:
+  explicit AAudioRecorder(const AudioParameters& audio_parameters);
+  ~AAudioRecorder() override;
+
+  int Init() override;
+  int Terminate() override;
+
+  int InitRecording() override;
+  bool RecordingIsInitialized() const override;
+
+  int StartRecording() override;
+  int StopRecording() override;
+  bool Recording() const override;
+
+  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+  // TODO(henrika): add support using AAudio APIs when available.
+  bool IsAcousticEchoCancelerSupported() const override;
+  bool IsNoiseSuppressorSupported() const override;
+  int EnableBuiltInAEC(bool enable) override;
+  int EnableBuiltInNS(bool enable) override;
+
+ protected:
+  // AAudioObserverInterface implementation.
+
+  // For an input stream, this function should read `num_frames` of recorded
+  // data, in the stream's current data format, from the `audio_data` buffer.
+  // Called on a real-time thread owned by AAudio.
+  aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+                                               int32_t num_frames) override;
+
+  // AAudio calls this function if any error occurs on a callback thread.
+  // Called on a real-time thread owned by AAudio.
+  void OnErrorCallback(aaudio_result_t error) override;
+
+  // rtc::MessageHandler used for restart messages.
+  void OnMessage(rtc::Message* msg) override;
+
+ private:
+  // Closes the existing stream and starts a new stream.
+  void HandleStreamDisconnected();
+
+  // Ensures that methods are called from the same thread as this object is
+  // created on.
+  SequenceChecker thread_checker_;
+
+  // Stores thread ID in first call to AAudioRecorder::OnDataCallback from a
+  // real-time thread owned by AAudio. Detached during construction of this
+  // object.
+  SequenceChecker thread_checker_aaudio_;
+
+  // The thread on which this object is created.
+  rtc::Thread* main_thread_;
+
+  // Wraps all AAudio resources. Contains an input stream using the default
+  // input audio device.
+  AAudioWrapper aaudio_;
+
+  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+  // AudioDeviceModuleImpl class and set by AudioDeviceModule::Create().
+  AudioDeviceBuffer* audio_device_buffer_ = nullptr;
+
+  bool initialized_ = false;
+  bool recording_ = false;
+
+  // Consumes audio of native buffer size and feeds the WebRTC layer with 10ms
+  // chunks of audio.
+  std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+  // Counts number of detected overflow events reported by AAudio.
+  int32_t overflow_count_ = 0;
+
+  // Estimated time between when an audio frame was recorded by the input
+  // device and when it can be read on the input stream.
+  double latency_millis_ = 0;
+
+  // True only for the first data callback in each audio session.
+ bool first_data_callback_ = true; +}; + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc new file mode 100644 index 0000000000..6c20703108 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc @@ -0,0 +1,501 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h" + +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/time_utils.h" + +#define LOG_ON_ERROR(op) \ + do { \ + aaudio_result_t result = (op); \ + if (result != AAUDIO_OK) { \ + RTC_LOG(LS_ERROR) << #op << ": " << AAudio_convertResultToText(result); \ + } \ + } while (0) + +#define RETURN_ON_ERROR(op, ...) \ + do { \ + aaudio_result_t result = (op); \ + if (result != AAUDIO_OK) { \ + RTC_LOG(LS_ERROR) << #op << ": " << AAudio_convertResultToText(result); \ + return __VA_ARGS__; \ + } \ + } while (0) + +namespace webrtc { + +namespace jni { + +namespace { + +const char* DirectionToString(aaudio_direction_t direction) { + switch (direction) { + case AAUDIO_DIRECTION_OUTPUT: + return "OUTPUT"; + case AAUDIO_DIRECTION_INPUT: + return "INPUT"; + default: + return "UNKNOWN"; + } +} + +const char* SharingModeToString(aaudio_sharing_mode_t mode) { + switch (mode) { + case AAUDIO_SHARING_MODE_EXCLUSIVE: + return "EXCLUSIVE"; + case AAUDIO_SHARING_MODE_SHARED: + return "SHARED"; + default: + return "UNKNOWN"; + } +} + +const char* PerformanceModeToString(aaudio_performance_mode_t mode) { + switch (mode) { + case AAUDIO_PERFORMANCE_MODE_NONE: + return "NONE"; + case AAUDIO_PERFORMANCE_MODE_POWER_SAVING: + return "POWER_SAVING"; + case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY: + return "LOW_LATENCY"; + default: + return "UNKNOWN"; + } +} + +const char* FormatToString(int32_t id) { + switch (id) { + case AAUDIO_FORMAT_INVALID: + return "INVALID"; + case AAUDIO_FORMAT_UNSPECIFIED: + return "UNSPECIFIED"; + case AAUDIO_FORMAT_PCM_I16: + return "PCM_I16"; + case AAUDIO_FORMAT_PCM_FLOAT: + return "FLOAT"; + default: + return "UNKNOWN"; + } +} + +void ErrorCallback(AAudioStream* stream, + void* user_data, + aaudio_result_t error) { + RTC_DCHECK(user_data); + AAudioWrapper* aaudio_wrapper = reinterpret_cast<AAudioWrapper*>(user_data); + RTC_LOG(LS_WARNING) << "ErrorCallback: " + << DirectionToString(aaudio_wrapper->direction()); + RTC_DCHECK(aaudio_wrapper->observer()); + aaudio_wrapper->observer()->OnErrorCallback(error); +} + +aaudio_data_callback_result_t DataCallback(AAudioStream* stream, + void* user_data, + void* audio_data, + int32_t num_frames) { + RTC_DCHECK(user_data); + RTC_DCHECK(audio_data); + AAudioWrapper* aaudio_wrapper = reinterpret_cast<AAudioWrapper*>(user_data); + RTC_DCHECK(aaudio_wrapper->observer()); + return aaudio_wrapper->observer()->OnDataCallback(audio_data, num_frames); +} + +// Wraps the stream builder object to ensure that it is released properly when +// the stream builder goes out of scope. 
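+// For illustration, a rough sketch of the manual pattern this RAII helper
+// avoids (hypothetical code, not part of this file):
+//
+//   AAudioStreamBuilder* builder = nullptr;
+//   AAudio_createStreamBuilder(&builder);
+//   ...                                  // Any early return here leaks it.
+//   AAudioStreamBuilder_delete(builder);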
+class ScopedStreamBuilder { + public: + ScopedStreamBuilder() { + LOG_ON_ERROR(AAudio_createStreamBuilder(&builder_)); + RTC_DCHECK(builder_); + } + ~ScopedStreamBuilder() { + if (builder_) { + LOG_ON_ERROR(AAudioStreamBuilder_delete(builder_)); + } + } + + AAudioStreamBuilder* get() const { return builder_; } + + private: + AAudioStreamBuilder* builder_ = nullptr; +}; + +} // namespace + +AAudioWrapper::AAudioWrapper(const AudioParameters& audio_parameters, + aaudio_direction_t direction, + AAudioObserverInterface* observer) + : audio_parameters_(audio_parameters), + direction_(direction), + observer_(observer) { + RTC_LOG(LS_INFO) << "ctor"; + RTC_DCHECK(observer_); + aaudio_thread_checker_.Detach(); + RTC_LOG(LS_INFO) << audio_parameters_.ToString(); +} + +AAudioWrapper::~AAudioWrapper() { + RTC_LOG(LS_INFO) << "dtor"; + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(!stream_); +} + +bool AAudioWrapper::Init() { + RTC_LOG(LS_INFO) << "Init"; + RTC_DCHECK(thread_checker_.IsCurrent()); + // Creates a stream builder which can be used to open an audio stream. + ScopedStreamBuilder builder; + // Configures the stream builder using audio parameters given at construction. + SetStreamConfiguration(builder.get()); + // Opens a stream based on options in the stream builder. + if (!OpenStream(builder.get())) { + return false; + } + // Ensures that the opened stream could activate the requested settings. + if (!VerifyStreamConfiguration()) { + return false; + } + // Optimizes the buffer scheme for lowest possible latency and creates + // additional buffer logic to match the 10ms buffer size used in WebRTC. + if (!OptimizeBuffers()) { + return false; + } + LogStreamState(); + return true; +} + +bool AAudioWrapper::Start() { + RTC_LOG(LS_INFO) << "Start"; + RTC_DCHECK(thread_checker_.IsCurrent()); + // TODO(henrika): this state check might not be needed. + aaudio_stream_state_t current_state = AAudioStream_getState(stream_); + if (current_state != AAUDIO_STREAM_STATE_OPEN) { + RTC_LOG(LS_ERROR) << "Invalid state: " + << AAudio_convertStreamStateToText(current_state); + return false; + } + // Asynchronous request for the stream to start. + RETURN_ON_ERROR(AAudioStream_requestStart(stream_), false); + LogStreamState(); + return true; +} + +bool AAudioWrapper::Stop() { + RTC_LOG(LS_INFO) << "Stop: " << DirectionToString(direction()); + RTC_DCHECK(thread_checker_.IsCurrent()); + // Asynchronous request for the stream to stop. + RETURN_ON_ERROR(AAudioStream_requestStop(stream_), false); + CloseStream(); + aaudio_thread_checker_.Detach(); + return true; +} + +double AAudioWrapper::EstimateLatencyMillis() const { + RTC_DCHECK(stream_); + double latency_millis = 0.0; + if (direction() == AAUDIO_DIRECTION_INPUT) { + // For input streams. Best guess we can do is to use the current burst size + // as delay estimate. + latency_millis = static_cast<double>(frames_per_burst()) / sample_rate() * + rtc::kNumMillisecsPerSec; + } else { + int64_t existing_frame_index; + int64_t existing_frame_presentation_time; + // Get the time at which a particular frame was presented to audio hardware. + aaudio_result_t result = AAudioStream_getTimestamp( + stream_, CLOCK_MONOTONIC, &existing_frame_index, + &existing_frame_presentation_time); + // Results are only valid when the stream is in AAUDIO_STREAM_STATE_STARTED. + if (result == AAUDIO_OK) { + // Get write index for next audio frame. + int64_t next_frame_index = frames_written(); + // Number of frames between next frame and the existing frame. 
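+      // Worked example (illustrative numbers only): at 48000 Hz, a delta of
+      // 960 frames corresponds to 960 * 1e9 / 48000 nanoseconds = 20 ms, so
+      // the next frame should be presented roughly 20 ms after
+      // `existing_frame_presentation_time`.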
+      int64_t frame_index_delta = next_frame_index - existing_frame_index;
+      // Assume the next frame will be written now.
+      int64_t next_frame_write_time = rtc::TimeNanos();
+      // Calculate time when next frame will be presented to the hardware
+      // taking sample rate into account.
+      int64_t frame_time_delta =
+          (frame_index_delta * rtc::kNumNanosecsPerSec) / sample_rate();
+      int64_t next_frame_presentation_time =
+          existing_frame_presentation_time + frame_time_delta;
+      // Derive a latency estimate given results above.
+      latency_millis = static_cast<double>(next_frame_presentation_time -
+                                           next_frame_write_time) /
+                       rtc::kNumNanosecsPerMillisec;
+    }
+  }
+  return latency_millis;
+}
+
+// Returns true if the buffer size was increased by one burst; otherwise
+// returns false and the buffer size is left unchanged.
+bool AAudioWrapper::IncreaseOutputBufferSize() {
+  RTC_LOG(LS_INFO) << "IncreaseBufferSize";
+  RTC_DCHECK(stream_);
+  RTC_DCHECK(aaudio_thread_checker_.IsCurrent());
+  RTC_DCHECK_EQ(direction(), AAUDIO_DIRECTION_OUTPUT);
+  aaudio_result_t buffer_size = AAudioStream_getBufferSizeInFrames(stream_);
+  // Try to increase the buffer size by one burst to reduce the risk of
+  // underrun.
+  buffer_size += frames_per_burst();
+  // Verify that the new buffer size is not larger than max capacity.
+  // TODO(henrika): keep track of case when we reach the capacity limit.
+  const int32_t max_buffer_size = buffer_capacity_in_frames();
+  if (buffer_size > max_buffer_size) {
+    RTC_LOG(LS_ERROR) << "Required buffer size (" << buffer_size
+                      << ") is higher than max: " << max_buffer_size;
+    return false;
+  }
+  RTC_LOG(LS_INFO) << "Updating buffer size to: " << buffer_size
+                   << " (max=" << max_buffer_size << ")";
+  buffer_size = AAudioStream_setBufferSizeInFrames(stream_, buffer_size);
+  if (buffer_size < 0) {
+    RTC_LOG(LS_ERROR) << "Failed to change buffer size: "
+                      << AAudio_convertResultToText(buffer_size);
+    return false;
+  }
+  RTC_LOG(LS_INFO) << "Buffer size changed to: " << buffer_size;
+  return true;
+}
+
+void AAudioWrapper::ClearInputStream(void* audio_data, int32_t num_frames) {
+  RTC_LOG(LS_INFO) << "ClearInputStream";
+  RTC_DCHECK(stream_);
+  RTC_DCHECK(aaudio_thread_checker_.IsCurrent());
+  RTC_DCHECK_EQ(direction(), AAUDIO_DIRECTION_INPUT);
+  aaudio_result_t cleared_frames = 0;
+  do {
+    cleared_frames = AAudioStream_read(stream_, audio_data, num_frames, 0);
+  } while (cleared_frames > 0);
+}
+
+AAudioObserverInterface* AAudioWrapper::observer() const {
+  return observer_;
+}
+
+AudioParameters AAudioWrapper::audio_parameters() const {
+  return audio_parameters_;
+}
+
+int32_t AAudioWrapper::samples_per_frame() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getSamplesPerFrame(stream_);
+}
+
+int32_t AAudioWrapper::buffer_size_in_frames() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getBufferSizeInFrames(stream_);
+}
+
+int32_t AAudioWrapper::buffer_capacity_in_frames() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getBufferCapacityInFrames(stream_);
+}
+
+int32_t AAudioWrapper::device_id() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getDeviceId(stream_);
+}
+
+int32_t AAudioWrapper::xrun_count() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getXRunCount(stream_);
+}
+
+int32_t AAudioWrapper::format() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getFormat(stream_);
+}
+
+int32_t AAudioWrapper::sample_rate() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getSampleRate(stream_);
+}
+
+int32_t AAudioWrapper::channel_count() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getChannelCount(stream_);
+}
+
+int32_t AAudioWrapper::frames_per_callback() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getFramesPerDataCallback(stream_);
+}
+
+aaudio_sharing_mode_t AAudioWrapper::sharing_mode() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getSharingMode(stream_);
+}
+
+aaudio_performance_mode_t AAudioWrapper::performance_mode() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getPerformanceMode(stream_);
+}
+
+aaudio_stream_state_t AAudioWrapper::stream_state() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getState(stream_);
+}
+
+int64_t AAudioWrapper::frames_written() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getFramesWritten(stream_);
+}
+
+int64_t AAudioWrapper::frames_read() const {
+  RTC_DCHECK(stream_);
+  return AAudioStream_getFramesRead(stream_);
+}
+
+void AAudioWrapper::SetStreamConfiguration(AAudioStreamBuilder* builder) {
+  RTC_LOG(LS_INFO) << "SetStreamConfiguration";
+  RTC_DCHECK(builder);
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  // Request usage of default primary output/input device.
+  // TODO(henrika): verify that default device follows Java APIs.
+  // https://developer.android.com/reference/android/media/AudioDeviceInfo.html.
+  AAudioStreamBuilder_setDeviceId(builder, AAUDIO_UNSPECIFIED);
+  // Use preferred sample rate given by the audio parameters.
+  AAudioStreamBuilder_setSampleRate(builder, audio_parameters().sample_rate());
+  // Use preferred channel configuration given by the audio parameters.
+  AAudioStreamBuilder_setChannelCount(builder, audio_parameters().channels());
+  // Always use 16-bit PCM audio sample format.
+  AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_I16);
+  // TODO(henrika): investigate effect of using AAUDIO_SHARING_MODE_EXCLUSIVE.
+  // Exclusive mode would give the lowest possible latency, but a request for
+  // it can be refused; for now this implementation asks for shared mode.
+  AAudioStreamBuilder_setSharingMode(builder, AAUDIO_SHARING_MODE_SHARED);
+  // Use the direction that was given at construction.
+  AAudioStreamBuilder_setDirection(builder, direction_);
+  // TODO(henrika): investigate performance using different performance modes.
+  AAudioStreamBuilder_setPerformanceMode(builder,
+                                         AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+  // Given that WebRTC applications require low latency, our audio stream uses
+  // an asynchronous callback function to transfer data to and from the
+  // application. AAudio executes the callback in a higher-priority thread that
+  // has better performance.
+  AAudioStreamBuilder_setDataCallback(builder, DataCallback, this);
+  // Request that AAudio call this function if any error occurs on a callback
+  // thread.
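+  // A typical observer implementation defers the actual recovery work to its
+  // main thread instead of acting in the real-time callback (hand-written
+  // sketch; the actual handling lives in the stream owners, e.g.
+  // AAudioRecorder):
+  //
+  //   void OnErrorCallback(aaudio_result_t error) override {
+  //     // Do not reopen the stream from this real-time thread.
+  //     main_thread_->Post(RTC_FROM_HERE, this,
+  //                        kMessageInputStreamDisconnected);
+  //   }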
+ AAudioStreamBuilder_setErrorCallback(builder, ErrorCallback, this); +} + +bool AAudioWrapper::OpenStream(AAudioStreamBuilder* builder) { + RTC_LOG(LS_INFO) << "OpenStream"; + RTC_DCHECK(builder); + AAudioStream* stream = nullptr; + RETURN_ON_ERROR(AAudioStreamBuilder_openStream(builder, &stream), false); + stream_ = stream; + LogStreamConfiguration(); + return true; +} + +void AAudioWrapper::CloseStream() { + RTC_LOG(LS_INFO) << "CloseStream"; + RTC_DCHECK(stream_); + LOG_ON_ERROR(AAudioStream_close(stream_)); + stream_ = nullptr; +} + +void AAudioWrapper::LogStreamConfiguration() { + RTC_DCHECK(stream_); + char ss_buf[1024]; + rtc::SimpleStringBuilder ss(ss_buf); + ss << "Stream Configuration: "; + ss << "sample rate=" << sample_rate() << ", channels=" << channel_count(); + ss << ", samples per frame=" << samples_per_frame(); + ss << ", format=" << FormatToString(format()); + ss << ", sharing mode=" << SharingModeToString(sharing_mode()); + ss << ", performance mode=" << PerformanceModeToString(performance_mode()); + ss << ", direction=" << DirectionToString(direction()); + ss << ", device id=" << AAudioStream_getDeviceId(stream_); + ss << ", frames per callback=" << frames_per_callback(); + RTC_LOG(LS_INFO) << ss.str(); +} + +void AAudioWrapper::LogStreamState() { + RTC_LOG(LS_INFO) << "AAudio stream state: " + << AAudio_convertStreamStateToText(stream_state()); +} + +bool AAudioWrapper::VerifyStreamConfiguration() { + RTC_LOG(LS_INFO) << "VerifyStreamConfiguration"; + RTC_DCHECK(stream_); + // TODO(henrika): should we verify device ID as well? + if (AAudioStream_getSampleRate(stream_) != audio_parameters().sample_rate()) { + RTC_LOG(LS_ERROR) << "Stream unable to use requested sample rate"; + return false; + } + if (AAudioStream_getChannelCount(stream_) != + static_cast<int32_t>(audio_parameters().channels())) { + RTC_LOG(LS_ERROR) << "Stream unable to use requested channel count"; + return false; + } + if (AAudioStream_getFormat(stream_) != AAUDIO_FORMAT_PCM_I16) { + RTC_LOG(LS_ERROR) << "Stream unable to use requested format"; + return false; + } + if (AAudioStream_getSharingMode(stream_) != AAUDIO_SHARING_MODE_SHARED) { + RTC_LOG(LS_ERROR) << "Stream unable to use requested sharing mode"; + return false; + } + if (AAudioStream_getPerformanceMode(stream_) != + AAUDIO_PERFORMANCE_MODE_LOW_LATENCY) { + RTC_LOG(LS_ERROR) << "Stream unable to use requested performance mode"; + return false; + } + if (AAudioStream_getDirection(stream_) != direction()) { + RTC_LOG(LS_ERROR) << "Stream direction could not be set"; + return false; + } + if (AAudioStream_getSamplesPerFrame(stream_) != + static_cast<int32_t>(audio_parameters().channels())) { + RTC_LOG(LS_ERROR) << "Invalid number of samples per frame"; + return false; + } + return true; +} + +bool AAudioWrapper::OptimizeBuffers() { + RTC_LOG(LS_INFO) << "OptimizeBuffers"; + RTC_DCHECK(stream_); + // Maximum number of frames that can be filled without blocking. + RTC_LOG(LS_INFO) << "max buffer capacity in frames: " + << buffer_capacity_in_frames(); + // Query the number of frames that the application should read or write at + // one time for optimal performance. 
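+  // For example (illustrative, device-dependent numbers): a 48000 Hz stream
+  // with a burst size of 192 frames yields 4 ms callbacks, while WebRTC
+  // consumes audio in 10 ms chunks (480 frames at 48000 Hz); a FineAudioBuffer
+  // is used by the callers of this class (see AAudioRecorder) to bridge that
+  // mismatch.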
+  int32_t frames_per_burst = AAudioStream_getFramesPerBurst(stream_);
+  RTC_LOG(LS_INFO) << "frames per burst for optimal performance: "
+                   << frames_per_burst;
+  frames_per_burst_ = frames_per_burst;
+  if (direction() == AAUDIO_DIRECTION_INPUT) {
+    // There is no point in calling setBufferSizeInFrames() for input streams
+    // since it has no effect on the performance (latency in this case).
+    return true;
+  }
+  // Set the buffer size to the same as the burst size to guarantee the lowest
+  // possible latency. This size might change for output streams if underruns
+  // are detected and automatic buffer adjustment is enabled.
+  AAudioStream_setBufferSizeInFrames(stream_, frames_per_burst);
+  int32_t buffer_size = AAudioStream_getBufferSizeInFrames(stream_);
+  if (buffer_size != frames_per_burst) {
+    RTC_LOG(LS_ERROR) << "Failed to use optimal buffer burst size";
+    return false;
+  }
+  // Maximum number of frames that can be filled without blocking.
+  RTC_LOG(LS_INFO) << "buffer burst size in frames: " << buffer_size;
+  return true;
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h
new file mode 100644
index 0000000000..cbc78a0a25
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_
+
+#include <aaudio/AAudio.h>
+
+#include "api/sequence_checker.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// AAudio callback interface for audio transport to/from the AAudio stream.
+// The interface also contains an error callback method for notifications of
+// e.g. device changes.
+class AAudioObserverInterface {
+ public:
+  // Audio data will be passed in or out of this function depending on the
+  // direction of the audio stream. This callback function will be called on a
+  // real-time thread owned by AAudio.
+  virtual aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+                                                       int32_t num_frames) = 0;
+  // AAudio will call this function if any error occurs on a callback thread.
+  // In response, this function could signal or launch another thread to reopen
+  // a stream on another device. Do not reopen the stream in this callback.
+  virtual void OnErrorCallback(aaudio_result_t error) = 0;
+
+ protected:
+  virtual ~AAudioObserverInterface() {}
+};
+
+// Utility class which wraps the C-based AAudio API into a more handy C++ class
+// where the underlying resources (AAudioStreamBuilder and AAudioStream) are
+// encapsulated. User must set the direction (in or out) at construction since
+// it defines the stream type and the direction of the data flow in the
+// AAudioObserverInterface.
+//
+// AAudio is a new Android C API introduced in the Android O (26) release.
+// It is designed for high-performance audio applications that require low
+// latency.
Applications communicate with AAudio by reading and writing data +// to streams. +// +// Each stream is attached to a single audio device, where each audio device +// has a unique ID. The ID can be used to bind an audio stream to a specific +// audio device but this implementation lets AAudio choose the default primary +// device instead (device selection takes place in Java). A stream can only +// move data in one direction. When a stream is opened, Android checks to +// ensure that the audio device and stream direction agree. +class AAudioWrapper { + public: + AAudioWrapper(const AudioParameters& audio_parameters, + aaudio_direction_t direction, + AAudioObserverInterface* observer); + ~AAudioWrapper(); + + bool Init(); + bool Start(); + bool Stop(); + + // For output streams: estimates latency between writing an audio frame to + // the output stream and the time that same frame is played out on the output + // audio device. + // For input streams: estimates latency between reading an audio frame from + // the input stream and the time that same frame was recorded on the input + // audio device. + double EstimateLatencyMillis() const; + + // Increases the internal buffer size for output streams by one burst size to + // reduce the risk of underruns. Can be used while a stream is active. + bool IncreaseOutputBufferSize(); + + // Drains the recording stream of any existing data by reading from it until + // it's empty. Can be used to clear out old data before starting a new audio + // session. + void ClearInputStream(void* audio_data, int32_t num_frames); + + AAudioObserverInterface* observer() const; + AudioParameters audio_parameters() const; + int32_t samples_per_frame() const; + int32_t buffer_size_in_frames() const; + int32_t buffer_capacity_in_frames() const; + int32_t device_id() const; + int32_t xrun_count() const; + int32_t format() const; + int32_t sample_rate() const; + int32_t channel_count() const; + int32_t frames_per_callback() const; + aaudio_sharing_mode_t sharing_mode() const; + aaudio_performance_mode_t performance_mode() const; + aaudio_stream_state_t stream_state() const; + int64_t frames_written() const; + int64_t frames_read() const; + aaudio_direction_t direction() const { return direction_; } + AAudioStream* stream() const { return stream_; } + int32_t frames_per_burst() const { return frames_per_burst_; } + + private: + void SetStreamConfiguration(AAudioStreamBuilder* builder); + bool OpenStream(AAudioStreamBuilder* builder); + void CloseStream(); + void LogStreamConfiguration(); + void LogStreamState(); + bool VerifyStreamConfiguration(); + bool OptimizeBuffers(); + + SequenceChecker thread_checker_; + SequenceChecker aaudio_thread_checker_; + const AudioParameters audio_parameters_; + const aaudio_direction_t direction_; + AAudioObserverInterface* observer_ = nullptr; + AAudioStream* stream_ = nullptr; + int32_t frames_per_burst_ = 0; +}; + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h new file mode 100644 index 0000000000..fdecf384c9 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_ + +namespace webrtc { + +namespace jni { + +const int kDefaultSampleRate = 44100; +// Delay estimates for the two different supported modes. These values are based +// on real-time round-trip delay estimates on a large set of devices and they +// are lower bounds since the filter length is 128 ms, so the AEC works for +// delays in the range [50, ~170] ms and [150, ~270] ms. Note that, in most +// cases, the lowest delay estimate will not be utilized since devices that +// support low-latency output audio often supports HW AEC as well. +const int kLowLatencyModeDelayEstimateInMilliseconds = 50; +const int kHighLatencyModeDelayEstimateInMilliseconds = 150; + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc new file mode 100644 index 0000000000..7c59d3e432 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc @@ -0,0 +1,650 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/audio_device/audio_device_module.h" + +#include <memory> +#include <utility> + +#include "api/make_ref_counted.h" +#include "api/sequence_checker.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/task_queue/task_queue_factory.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "sdk/android/generated_audio_device_module_base_jni/WebRtcAudioManager_jni.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { +namespace jni { + +namespace { + +// This class combines a generic instance of an AudioInput and a generic +// instance of an AudioOutput to create an AudioDeviceModule. This is mostly +// done by delegating to the audio input/output with some glue code. This class +// also directly implements some of the AudioDeviceModule methods with dummy +// implementations. +// +// An instance can be created on any thread, but must then be used on one and +// the same thread. All public methods must also be called on the same thread. +// A thread checker will RTC_DCHECK if any method is called on an invalid +// thread. +// TODO(henrika): it might be useful to also support a scenario where the ADM +// is constructed on thread T1, used on thread T2 and destructed on T2 or T3. +// If so, care must be taken to ensure that only T2 is a COM thread. +class AndroidAudioDeviceModule : public AudioDeviceModule { + public: + // For use with UMA logging. 
Must be kept in sync with histograms.xml in + // Chrome, located at + // https://cs.chromium.org/chromium/src/tools/metrics/histograms/histograms.xml + enum class InitStatus { + OK = 0, + PLAYOUT_ERROR = 1, + RECORDING_ERROR = 2, + OTHER_ERROR = 3, + NUM_STATUSES = 4 + }; + + AndroidAudioDeviceModule(AudioDeviceModule::AudioLayer audio_layer, + bool is_stereo_playout_supported, + bool is_stereo_record_supported, + uint16_t playout_delay_ms, + std::unique_ptr<AudioInput> audio_input, + std::unique_ptr<AudioOutput> audio_output) + : audio_layer_(audio_layer), + is_stereo_playout_supported_(is_stereo_playout_supported), + is_stereo_record_supported_(is_stereo_record_supported), + playout_delay_ms_(playout_delay_ms), + task_queue_factory_(CreateDefaultTaskQueueFactory()), + input_(std::move(audio_input)), + output_(std::move(audio_output)), + initialized_(false) { + RTC_CHECK(input_); + RTC_CHECK(output_); + RTC_DLOG(LS_INFO) << __FUNCTION__; + thread_checker_.Detach(); + } + + ~AndroidAudioDeviceModule() override { RTC_DLOG(LS_INFO) << __FUNCTION__; } + + int32_t ActiveAudioLayer( + AudioDeviceModule::AudioLayer* audioLayer) const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + *audioLayer = audio_layer_; + return 0; + } + + int32_t RegisterAudioCallback(AudioTransport* audioCallback) override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return audio_device_buffer_->RegisterAudioCallback(audioCallback); + } + + int32_t Init() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_DCHECK(thread_checker_.IsCurrent()); + audio_device_buffer_ = + std::make_unique<AudioDeviceBuffer>(task_queue_factory_.get()); + AttachAudioBuffer(); + if (initialized_) { + return 0; + } + InitStatus status; + if (output_->Init() != 0) { + status = InitStatus::PLAYOUT_ERROR; + } else if (input_->Init() != 0) { + output_->Terminate(); + status = InitStatus::RECORDING_ERROR; + } else { + initialized_ = true; + status = InitStatus::OK; + } + RTC_HISTOGRAM_ENUMERATION("WebRTC.Audio.InitializationResult", + static_cast<int>(status), + static_cast<int>(InitStatus::NUM_STATUSES)); + if (status != InitStatus::OK) { + RTC_LOG(LS_ERROR) << "Audio device initialization failed."; + return -1; + } + return 0; + } + + int32_t Terminate() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return 0; + RTC_DCHECK(thread_checker_.IsCurrent()); + int32_t err = input_->Terminate(); + err |= output_->Terminate(); + initialized_ = false; + thread_checker_.Detach(); + audio_device_buffer_.reset(nullptr); + RTC_DCHECK_EQ(err, 0); + return err; + } + + bool Initialized() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__ << ":" << initialized_; + return initialized_; + } + + int16_t PlayoutDevices() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_LOG(LS_INFO) << "output: " << 1; + return 1; + } + + int16_t RecordingDevices() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_DLOG(LS_INFO) << "output: " << 1; + return 1; + } + + int32_t PlayoutDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) override { + RTC_CHECK_NOTREACHED(); + } + + int32_t RecordingDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) override { + RTC_CHECK_NOTREACHED(); + } + + int32_t SetPlayoutDevice(uint16_t index) override { + // OK to use but it has no effect currently since device selection is + // done using Andoid APIs instead. 
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")"; + return 0; + } + + int32_t SetPlayoutDevice( + AudioDeviceModule::WindowsDeviceType device) override { + RTC_CHECK_NOTREACHED(); + } + + int32_t SetRecordingDevice(uint16_t index) override { + // OK to use but it has no effect currently since device selection is + // done using Andoid APIs instead. + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")"; + return 0; + } + + int32_t SetRecordingDevice( + AudioDeviceModule::WindowsDeviceType device) override { + RTC_CHECK_NOTREACHED(); + } + + int32_t PlayoutIsAvailable(bool* available) override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + *available = true; + RTC_DLOG(LS_INFO) << "output: " << *available; + return 0; + } + + int32_t InitPlayout() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + if (PlayoutIsInitialized()) { + return 0; + } + int32_t result = output_->InitPlayout(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess", + static_cast<int>(result == 0)); + return result; + } + + bool PlayoutIsInitialized() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return output_->PlayoutIsInitialized(); + } + + int32_t RecordingIsAvailable(bool* available) override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + *available = true; + RTC_DLOG(LS_INFO) << "output: " << *available; + return 0; + } + + int32_t InitRecording() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + if (RecordingIsInitialized()) { + return 0; + } + int32_t result = input_->InitRecording(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess", + static_cast<int>(result == 0)); + return result; + } + + bool RecordingIsInitialized() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return input_->RecordingIsInitialized(); + } + + int32_t StartPlayout() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + if (Playing()) { + return 0; + } + int32_t result = output_->StartPlayout(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess", + static_cast<int>(result == 0)); + if (result == 0) { + // Only start playing the audio device buffer if starting the audio + // output succeeded. + audio_device_buffer_->StartPlayout(); + } + return result; + } + + int32_t StopPlayout() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + if (!Playing()) + return 0; + RTC_LOG(LS_INFO) << __FUNCTION__; + audio_device_buffer_->StopPlayout(); + int32_t result = output_->StopPlayout(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess", + static_cast<int>(result == 0)); + return result; + } + + bool Playing() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return output_->Playing(); + } + + int32_t StartRecording() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + if (Recording()) { + return 0; + } + int32_t result = input_->StartRecording(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess", + static_cast<int>(result == 0)); + if (result == 0) { + // Only start recording the audio device buffer if starting the audio + // input succeeded. 
+ audio_device_buffer_->StartRecording(); + } + return result; + } + + int32_t StopRecording() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + if (!Recording()) + return 0; + audio_device_buffer_->StopRecording(); + int32_t result = input_->StopRecording(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess", + static_cast<int>(result == 0)); + return result; + } + + bool Recording() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return input_->Recording(); + } + + int32_t InitSpeaker() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return initialized_ ? 0 : -1; + } + + bool SpeakerIsInitialized() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return initialized_; + } + + int32_t InitMicrophone() override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return initialized_ ? 0 : -1; + } + + bool MicrophoneIsInitialized() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return initialized_; + } + + int32_t SpeakerVolumeIsAvailable(bool* available) override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + *available = output_->SpeakerVolumeIsAvailable(); + RTC_DLOG(LS_INFO) << "output: " << *available; + return 0; + } + + int32_t SetSpeakerVolume(uint32_t volume) override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + return output_->SetSpeakerVolume(volume); + } + + int32_t SpeakerVolume(uint32_t* output_volume) const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + absl::optional<uint32_t> volume = output_->SpeakerVolume(); + if (!volume) + return -1; + *output_volume = *volume; + RTC_DLOG(LS_INFO) << "output: " << *volume; + return 0; + } + + int32_t MaxSpeakerVolume(uint32_t* output_max_volume) const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + absl::optional<uint32_t> max_volume = output_->MaxSpeakerVolume(); + if (!max_volume) + return -1; + *output_max_volume = *max_volume; + return 0; + } + + int32_t MinSpeakerVolume(uint32_t* output_min_volume) const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return -1; + absl::optional<uint32_t> min_volume = output_->MinSpeakerVolume(); + if (!min_volume) + return -1; + *output_min_volume = *min_volume; + return 0; + } + + int32_t MicrophoneVolumeIsAvailable(bool* available) override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + *available = false; + RTC_DLOG(LS_INFO) << "output: " << *available; + return -1; + } + + int32_t SetMicrophoneVolume(uint32_t volume) override { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")"; + RTC_CHECK_NOTREACHED(); + } + + int32_t MicrophoneVolume(uint32_t* volume) const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_CHECK_NOTREACHED(); + } + + int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_CHECK_NOTREACHED(); + } + + int32_t MinMicrophoneVolume(uint32_t* minVolume) const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_CHECK_NOTREACHED(); + } + + int32_t SpeakerMuteIsAvailable(bool* available) override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_CHECK_NOTREACHED(); + } + + int32_t SetSpeakerMute(bool enable) override { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_CHECK_NOTREACHED(); + } + + int32_t SpeakerMute(bool* enabled) const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_CHECK_NOTREACHED(); + } + + int32_t MicrophoneMuteIsAvailable(bool* 
available) override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__;
+    RTC_CHECK_NOTREACHED();
+  }
+
+  int32_t SetMicrophoneMute(bool enable) override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+    RTC_CHECK_NOTREACHED();
+  }
+
+  int32_t MicrophoneMute(bool* enabled) const override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__;
+    RTC_CHECK_NOTREACHED();
+  }
+
+  int32_t StereoPlayoutIsAvailable(bool* available) const override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__;
+    *available = is_stereo_playout_supported_;
+    RTC_DLOG(LS_INFO) << "output: " << *available;
+    return 0;
+  }
+
+  int32_t SetStereoPlayout(bool enable) override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+    // Android does not support changes between mono and stereo on the fly.
+    // The use of stereo or mono is determined by the audio layer. It is
+    // allowed to call this method as long as that state is not modified.
+    bool available = is_stereo_playout_supported_;
+    if (enable != available) {
+      RTC_LOG(LS_WARNING) << "changing stereo playout not supported";
+      return -1;
+    }
+    return 0;
+  }
+
+  int32_t StereoPlayout(bool* enabled) const override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__;
+    *enabled = is_stereo_playout_supported_;
+    RTC_DLOG(LS_INFO) << "output: " << *enabled;
+    return 0;
+  }
+
+  int32_t StereoRecordingIsAvailable(bool* available) const override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__;
+    *available = is_stereo_record_supported_;
+    RTC_DLOG(LS_INFO) << "output: " << *available;
+    return 0;
+  }
+
+  int32_t SetStereoRecording(bool enable) override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+    // Android does not support changes between mono and stereo on the fly.
+    // The use of stereo or mono is determined by the audio layer. It is
+    // allowed to call this method as long as that state is not modified.
+    bool available = is_stereo_record_supported_;
+    if (enable != available) {
+      RTC_LOG(LS_WARNING) << "changing stereo recording not supported";
+      return -1;
+    }
+    return 0;
+  }
+
+  int32_t StereoRecording(bool* enabled) const override {
+    RTC_DLOG(LS_INFO) << __FUNCTION__;
+    *enabled = is_stereo_record_supported_;
+    RTC_DLOG(LS_INFO) << "output: " << *enabled;
+    return 0;
+  }
+
+  int32_t PlayoutDelay(uint16_t* delay_ms) const override {
+    // The best guess we can make is to use half of the estimated total delay.
+    *delay_ms = playout_delay_ms_ / 2;
+    RTC_DCHECK_GT(*delay_ms, 0);
+    return 0;
+  }
+
+  // Returns true if the device both supports built-in AEC and is not
+  // blocklisted.
+  // Currently, if OpenSL ES is used in both directions, this method will still
+  // report the correct value and it has the correct effect. As an example:
+  // a device supports built-in AEC and this method returns true. Libjingle
+  // will then disable the WebRTC-based AEC and that will work for all devices
+  // (mainly Nexus) even when OpenSL ES is used for input since our current
+  // implementation will enable built-in AEC by default also for OpenSL ES.
+  // The only "bad" thing that happens today is that when Libjingle calls
+  // OpenSLESRecorder::EnableBuiltInAEC() it will not have any real effect and
+  // a "Not Implemented" message will be logged. This non-perfect state will
+  // remain until I have added full support for audio effects based on OpenSL
+  // ES APIs.
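+  // Hypothetical caller-side sketch (assumed usage, not code from this
+  // change): a client would gate the software AEC on this query, e.g.:
+  //
+  //   if (adm->BuiltInAECIsAvailable()) {
+  //     adm->EnableBuiltInAEC(true);  // Use the hardware effect...
+  //     // ...and disable the software AEC in the audio processing module.
+  //   }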
+ bool BuiltInAECIsAvailable() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return false; + bool isAvailable = input_->IsAcousticEchoCancelerSupported(); + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return isAvailable; + } + + // Not implemented for any input device on Android. + bool BuiltInAGCIsAvailable() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + RTC_DLOG(LS_INFO) << "output: " << false; + return false; + } + + // Returns true if the device both supports built in NS and the device + // is not blocklisted. + // TODO(henrika): add implementation for OpenSL ES based audio as well. + // In addition, see comments for BuiltInAECIsAvailable(). + bool BuiltInNSIsAvailable() const override { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) + return false; + bool isAvailable = input_->IsNoiseSuppressorSupported(); + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return isAvailable; + } + + // TODO(henrika): add implementation for OpenSL ES based audio as well. + int32_t EnableBuiltInAEC(bool enable) override { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + if (!initialized_) + return -1; + RTC_CHECK(BuiltInAECIsAvailable()) << "HW AEC is not available"; + int32_t result = input_->EnableBuiltInAEC(enable); + RTC_DLOG(LS_INFO) << "output: " << result; + return result; + } + + int32_t EnableBuiltInAGC(bool enable) override { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_CHECK_NOTREACHED(); + } + + // TODO(henrika): add implementation for OpenSL ES based audio as well. + int32_t EnableBuiltInNS(bool enable) override { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + if (!initialized_) + return -1; + RTC_CHECK(BuiltInNSIsAvailable()) << "HW NS is not available"; + int32_t result = input_->EnableBuiltInNS(enable); + RTC_DLOG(LS_INFO) << "output: " << result; + return result; + } + + int32_t GetPlayoutUnderrunCount() const override { + if (!initialized_) + return -1; + return output_->GetPlayoutUnderrunCount(); + } + + int32_t AttachAudioBuffer() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + output_->AttachAudioBuffer(audio_device_buffer_.get()); + input_->AttachAudioBuffer(audio_device_buffer_.get()); + return 0; + } + + private: + SequenceChecker thread_checker_; + + const AudioDeviceModule::AudioLayer audio_layer_; + const bool is_stereo_playout_supported_; + const bool is_stereo_record_supported_; + const uint16_t playout_delay_ms_; + const std::unique_ptr<TaskQueueFactory> task_queue_factory_; + const std::unique_ptr<AudioInput> input_; + const std::unique_ptr<AudioOutput> output_; + std::unique_ptr<AudioDeviceBuffer> audio_device_buffer_; + + bool initialized_; +}; + +} // namespace + +ScopedJavaLocalRef<jobject> GetAudioManager(JNIEnv* env, + const JavaRef<jobject>& j_context) { + return Java_WebRtcAudioManager_getAudioManager(env, j_context); +} + +int GetDefaultSampleRate(JNIEnv* env, const JavaRef<jobject>& j_audio_manager) { + return Java_WebRtcAudioManager_getSampleRate(env, j_audio_manager); +} + +void GetAudioParameters(JNIEnv* env, + const JavaRef<jobject>& j_context, + const JavaRef<jobject>& j_audio_manager, + int input_sample_rate, + int output_sample_rate, + bool use_stereo_input, + bool use_stereo_output, + AudioParameters* input_parameters, + AudioParameters* output_parameters) { + const int output_channels = use_stereo_output ? 2 : 1; + const int input_channels = use_stereo_input ? 
2 : 1; + const size_t output_buffer_size = Java_WebRtcAudioManager_getOutputBufferSize( + env, j_context, j_audio_manager, output_sample_rate, output_channels); + const size_t input_buffer_size = Java_WebRtcAudioManager_getInputBufferSize( + env, j_context, j_audio_manager, input_sample_rate, input_channels); + output_parameters->reset(output_sample_rate, + static_cast<size_t>(output_channels), + static_cast<size_t>(output_buffer_size)); + input_parameters->reset(input_sample_rate, + static_cast<size_t>(input_channels), + static_cast<size_t>(input_buffer_size)); + RTC_CHECK(input_parameters->is_valid()); + RTC_CHECK(output_parameters->is_valid()); +} + +rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput( + AudioDeviceModule::AudioLayer audio_layer, + bool is_stereo_playout_supported, + bool is_stereo_record_supported, + uint16_t playout_delay_ms, + std::unique_ptr<AudioInput> audio_input, + std::unique_ptr<AudioOutput> audio_output) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return rtc::make_ref_counted<AndroidAudioDeviceModule>( + audio_layer, is_stereo_playout_supported, is_stereo_record_supported, + playout_delay_ms, std::move(audio_input), std::move(audio_output)); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h new file mode 100644 index 0000000000..1918336c5a --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_ + +#include <memory> + +#include "absl/types/optional.h" +#include "modules/audio_device/include/audio_device.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { + +class AudioDeviceBuffer; + +namespace jni { + +class AudioInput { + public: + virtual ~AudioInput() {} + + virtual int32_t Init() = 0; + virtual int32_t Terminate() = 0; + + virtual int32_t InitRecording() = 0; + virtual bool RecordingIsInitialized() const = 0; + + virtual int32_t StartRecording() = 0; + virtual int32_t StopRecording() = 0; + virtual bool Recording() const = 0; + + virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; + + // Returns true if the audio input supports built-in audio effects for AEC and + // NS. 
+ virtual bool IsAcousticEchoCancelerSupported() const = 0; + virtual bool IsNoiseSuppressorSupported() const = 0; + + virtual int32_t EnableBuiltInAEC(bool enable) = 0; + virtual int32_t EnableBuiltInNS(bool enable) = 0; +}; + +class AudioOutput { + public: + virtual ~AudioOutput() {} + + virtual int32_t Init() = 0; + virtual int32_t Terminate() = 0; + virtual int32_t InitPlayout() = 0; + virtual bool PlayoutIsInitialized() const = 0; + virtual int32_t StartPlayout() = 0; + virtual int32_t StopPlayout() = 0; + virtual bool Playing() const = 0; + virtual bool SpeakerVolumeIsAvailable() = 0; + virtual int SetSpeakerVolume(uint32_t volume) = 0; + virtual absl::optional<uint32_t> SpeakerVolume() const = 0; + virtual absl::optional<uint32_t> MaxSpeakerVolume() const = 0; + virtual absl::optional<uint32_t> MinSpeakerVolume() const = 0; + virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; + virtual int GetPlayoutUnderrunCount() = 0; +}; + +// Extract an android.media.AudioManager from an android.content.Context. +ScopedJavaLocalRef<jobject> GetAudioManager(JNIEnv* env, + const JavaRef<jobject>& j_context); + +// Get default audio sample rate by querying an android.media.AudioManager. +int GetDefaultSampleRate(JNIEnv* env, const JavaRef<jobject>& j_audio_manager); + +// Get audio input and output parameters based on a number of settings. +void GetAudioParameters(JNIEnv* env, + const JavaRef<jobject>& j_context, + const JavaRef<jobject>& j_audio_manager, + int input_sample_rate, + int output_sample_rate, + bool use_stereo_input, + bool use_stereo_output, + AudioParameters* input_parameters, + AudioParameters* output_parameters); + +// Glue together an audio input and audio output to get an AudioDeviceModule. +rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput( + AudioDeviceModule::AudioLayer audio_layer, + bool is_stereo_playout_supported, + bool is_stereo_record_supported, + uint16_t playout_delay_ms, + std::unique_ptr<AudioInput> audio_input, + std::unique_ptr<AudioOutput> audio_output); + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc new file mode 100644 index 0000000000..d206297001 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc @@ -0,0 +1,267 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/audio_device/audio_record_jni.h" + +#include <string> +#include <utility> + +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/time_utils.h" +#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioRecord_jni.h" +#include "sdk/android/src/jni/audio_device/audio_common.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +namespace jni { + +namespace { +// Scoped class which logs its time of life as a UMA statistic. It generates +// a histogram which measures the time it takes for a method/scope to execute. +class ScopedHistogramTimer { + public: + explicit ScopedHistogramTimer(const std::string& name) + : histogram_name_(name), start_time_ms_(rtc::TimeMillis()) {} + ~ScopedHistogramTimer() { + const int64_t life_time_ms = rtc::TimeSince(start_time_ms_); + RTC_HISTOGRAM_COUNTS_1000(histogram_name_, life_time_ms); + RTC_LOG(LS_INFO) << histogram_name_ << ": " << life_time_ms; + } + + private: + const std::string histogram_name_; + int64_t start_time_ms_; +}; + +} // namespace + +ScopedJavaLocalRef<jobject> AudioRecordJni::CreateJavaWebRtcAudioRecord( + JNIEnv* env, + const JavaRef<jobject>& j_context, + const JavaRef<jobject>& j_audio_manager) { + return Java_WebRtcAudioRecord_Constructor(env, j_context, j_audio_manager); +} + +AudioRecordJni::AudioRecordJni(JNIEnv* env, + const AudioParameters& audio_parameters, + int total_delay_ms, + const JavaRef<jobject>& j_audio_record) + : j_audio_record_(env, j_audio_record), + audio_parameters_(audio_parameters), + total_delay_ms_(total_delay_ms), + direct_buffer_address_(nullptr), + direct_buffer_capacity_in_bytes_(0), + frames_per_buffer_(0), + initialized_(false), + recording_(false), + audio_device_buffer_(nullptr) { + RTC_LOG(LS_INFO) << "ctor"; + RTC_DCHECK(audio_parameters_.is_valid()); + Java_WebRtcAudioRecord_setNativeAudioRecord(env, j_audio_record_, + jni::jlongFromPointer(this)); + // Detach from this thread since construction is allowed to happen on a + // different thread. + thread_checker_.Detach(); + thread_checker_java_.Detach(); +} + +AudioRecordJni::~AudioRecordJni() { + RTC_LOG(LS_INFO) << "dtor"; + RTC_DCHECK(thread_checker_.IsCurrent()); + Terminate(); +} + +int32_t AudioRecordJni::Init() { + RTC_LOG(LS_INFO) << "Init"; + env_ = AttachCurrentThreadIfNeeded(); + RTC_DCHECK(thread_checker_.IsCurrent()); + return 0; +} + +int32_t AudioRecordJni::Terminate() { + RTC_LOG(LS_INFO) << "Terminate"; + RTC_DCHECK(thread_checker_.IsCurrent()); + StopRecording(); + thread_checker_.Detach(); + return 0; +} + +int32_t AudioRecordJni::InitRecording() { + RTC_LOG(LS_INFO) << "InitRecording"; + RTC_DCHECK(thread_checker_.IsCurrent()); + if (initialized_) { + // Already initialized. 
+    return 0;
+  }
+  RTC_DCHECK(!recording_);
+  ScopedHistogramTimer timer("WebRTC.Audio.InitRecordingDurationMs");
+
+  int frames_per_buffer = Java_WebRtcAudioRecord_initRecording(
+      env_, j_audio_record_, audio_parameters_.sample_rate(),
+      static_cast<int>(audio_parameters_.channels()));
+  if (frames_per_buffer < 0) {
+    direct_buffer_address_ = nullptr;
+    RTC_LOG(LS_ERROR) << "InitRecording failed";
+    return -1;
+  }
+  frames_per_buffer_ = static_cast<size_t>(frames_per_buffer);
+  RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_;
+  const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
+  RTC_CHECK_EQ(direct_buffer_capacity_in_bytes_,
+               frames_per_buffer_ * bytes_per_frame);
+  RTC_CHECK_EQ(frames_per_buffer_, audio_parameters_.frames_per_10ms_buffer());
+  initialized_ = true;
+  return 0;
+}
+
+bool AudioRecordJni::RecordingIsInitialized() const {
+  return initialized_;
+}
+
+int32_t AudioRecordJni::StartRecording() {
+  RTC_LOG(LS_INFO) << "StartRecording";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  if (recording_) {
+    // Already recording.
+    return 0;
+  }
+  if (!initialized_) {
+    RTC_DLOG(LS_WARNING)
+        << "Recording cannot start since InitRecording must succeed first";
+    return 0;
+  }
+  ScopedHistogramTimer timer("WebRTC.Audio.StartRecordingDurationMs");
+  if (!Java_WebRtcAudioRecord_startRecording(env_, j_audio_record_)) {
+    RTC_LOG(LS_ERROR) << "StartRecording failed";
+    return -1;
+  }
+  recording_ = true;
+  return 0;
+}
+
+int32_t AudioRecordJni::StopRecording() {
+  RTC_LOG(LS_INFO) << "StopRecording";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  if (!initialized_ || !recording_) {
+    return 0;
+  }
+  // Check if the audio source matched the activated recording session, but
+  // only if a valid result exists, to avoid invalid statistics.
+  if (Java_WebRtcAudioRecord_isAudioConfigVerified(env_, j_audio_record_)) {
+    const bool session_was_ok =
+        Java_WebRtcAudioRecord_isAudioSourceMatchingRecordingSession(
+            env_, j_audio_record_);
+    RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.SourceMatchesRecordingSession",
+                          session_was_ok);
+    RTC_LOG(LS_INFO)
+        << "HISTOGRAM(WebRTC.Audio.SourceMatchesRecordingSession): "
+        << session_was_ok;
+  }
+  if (!Java_WebRtcAudioRecord_stopRecording(env_, j_audio_record_)) {
+    RTC_LOG(LS_ERROR) << "StopRecording failed";
+    return -1;
+  }
+  // If we don't detach here, we will hit an RTC_DCHECK in OnDataIsRecorded()
+  // next time StartRecording() is called since it will create a new Java
+  // thread.
+  thread_checker_java_.Detach();
+  initialized_ = false;
+  recording_ = false;
+  direct_buffer_address_ = nullptr;
+  return 0;
+}
+
+bool AudioRecordJni::Recording() const {
+  return recording_;
+}
+
+void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+  RTC_LOG(LS_INFO) << "AttachAudioBuffer";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  audio_device_buffer_ = audioBuffer;
+  const int sample_rate_hz = audio_parameters_.sample_rate();
+  RTC_LOG(LS_INFO) << "SetRecordingSampleRate(" << sample_rate_hz << ")";
+  audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz);
+  const size_t channels = audio_parameters_.channels();
+  RTC_LOG(LS_INFO) << "SetRecordingChannels(" << channels << ")";
+  audio_device_buffer_->SetRecordingChannels(channels);
+}
+
+bool AudioRecordJni::IsAcousticEchoCancelerSupported() const {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  return Java_WebRtcAudioRecord_isAcousticEchoCancelerSupported(
+      env_, j_audio_record_);
+}
+
+bool AudioRecordJni::IsNoiseSuppressorSupported() const {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  return Java_WebRtcAudioRecord_isNoiseSuppressorSupported(env_,
+                                                           j_audio_record_);
+}
+
+int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) {
+  RTC_LOG(LS_INFO) << "EnableBuiltInAEC(" << enable << ")";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  return Java_WebRtcAudioRecord_enableBuiltInAEC(env_, j_audio_record_, enable)
+             ? 0
+             : -1;
+}
+
+int32_t AudioRecordJni::EnableBuiltInNS(bool enable) {
+  RTC_LOG(LS_INFO) << "EnableBuiltInNS(" << enable << ")";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  return Java_WebRtcAudioRecord_enableBuiltInNS(env_, j_audio_record_, enable)
+             ? 0
+             : -1;
+}
+
+void AudioRecordJni::CacheDirectBufferAddress(
+    JNIEnv* env,
+    const JavaParamRef<jobject>& j_caller,
+    const JavaParamRef<jobject>& byte_buffer) {
+  RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress";
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  RTC_DCHECK(!direct_buffer_address_);
+  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
+  jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
+  RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity;
+  direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
+}
+
+// This method is called on a high-priority thread from Java. The name of
+// the thread is 'AudioRecordThread'.
+void AudioRecordJni::DataIsRecorded(JNIEnv* env,
+                                    const JavaParamRef<jobject>& j_caller,
+                                    int length,
+                                    int64_t capture_timestamp_ns) {
+  RTC_DCHECK(thread_checker_java_.IsCurrent());
+  if (!audio_device_buffer_) {
+    RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
+    return;
+  }
+  audio_device_buffer_->SetRecordedBuffer(
+      direct_buffer_address_, frames_per_buffer_, capture_timestamp_ns);
+  // We provide one (combined) fixed delay estimate for the APM and use the
+  // `playDelayMs` parameter only. Components like the AEC only see the sum
+  // of `playDelayMs` and `recDelayMs`, hence the distribution does not matter.
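+  // For example, with low-latency output, `total_delay_ms_` would be
+  // kLowLatencyModeDelayEstimateInMilliseconds (50, see audio_common.h),
+  // making this call effectively SetVQEData(50, 0).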
+ audio_device_buffer_->SetVQEData(total_delay_ms_, 0); + if (audio_device_buffer_->DeliverRecordedData() == -1) { + RTC_LOG(LS_INFO) << "AudioDeviceBuffer::DeliverRecordedData failed"; + } +} + +} // namespace jni + +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h new file mode 100644 index 0000000000..49c905daaf --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_ + +#include <jni.h> + +#include <memory> + +#include "api/sequence_checker.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_device/include/audio_device_defines.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" + +namespace webrtc { + +namespace jni { + +// Implements 16-bit mono PCM audio input support for Android using the Java +// AudioRecord interface. Most of the work is done by its Java counterpart in +// WebRtcAudioRecord.java. This class is created and lives on a thread in +// C++-land, but recorded audio buffers are delivered on a high-priority +// thread managed by the Java class. +// +// The Java class makes use of AudioEffect features (mainly AEC) which are +// first available in Jelly Bean. If it is instantiated running against earlier +// SDKs, the AEC provided by the APM in WebRTC must be used and enabled +// separately instead. +// +// An instance can be created on any thread, but must then be used on one and +// the same thread. All public methods must also be called on the same thread. A +// thread checker will RTC_DCHECK if any method is called on an invalid thread. +// +// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed. +// Additional thread checking guarantees that no other (possibly non-attached) +// thread is used. +class AudioRecordJni : public AudioInput { + public: + static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioRecord( + JNIEnv* env, + const JavaRef<jobject>& j_context, + const JavaRef<jobject>& j_audio_manager); + + AudioRecordJni(JNIEnv* env, + const AudioParameters& audio_parameters, + int total_delay_ms, + const JavaRef<jobject>& j_webrtc_audio_record); + ~AudioRecordJni() override; + + int32_t Init() override; + int32_t Terminate() override; + + int32_t InitRecording() override; + bool RecordingIsInitialized() const override; + + int32_t StartRecording() override; + int32_t StopRecording() override; + bool Recording() const override; + + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; + + bool IsAcousticEchoCancelerSupported() const override; + bool IsNoiseSuppressorSupported() const override; + + int32_t EnableBuiltInAEC(bool enable) override; + int32_t EnableBuiltInNS(bool enable) override; + + // Called from Java side so we can cache the address of the Java-managed + // `byte_buffer` in `direct_buffer_address_`.
The size of the buffer + // is also stored in `direct_buffer_capacity_in_bytes_`. + // This method will be called by the WebRtcAudioRecord constructor, i.e., + // on the same thread that this object is created on. + void CacheDirectBufferAddress(JNIEnv* env, + const JavaParamRef<jobject>& j_caller, + const JavaParamRef<jobject>& byte_buffer); + + // Called periodically by the Java based WebRtcAudioRecord object when + // recording has started. Each call indicates that there are `length` new + // bytes recorded in the memory area `direct_buffer_address_` and it is + // now time to send these to the consumer. + // This method is called on a high-priority thread from Java. The name of + // the thread is 'AudioRecordThread'. + void DataIsRecorded(JNIEnv* env, + const JavaParamRef<jobject>& j_caller, + int length, + int64_t capture_timestamp_ns); + + private: + // Stores thread ID in constructor. + SequenceChecker thread_checker_; + + // Stores thread ID in first call to OnDataIsRecorded() from high-priority + // thread in Java. Detached during construction of this object. + SequenceChecker thread_checker_java_; + + // Wraps the Java specific parts of the AudioRecordJni class. + JNIEnv* env_ = nullptr; + ScopedJavaGlobalRef<jobject> j_audio_record_; + + const AudioParameters audio_parameters_; + + // Delay estimate of the total round-trip delay (input + output). + // Fixed value set once in AttachAudioBuffer() and it can take one out of two + // possible values. See audio_common.h for details. + const int total_delay_ms_; + + // Cached copy of address to direct audio buffer owned by `j_audio_record_`. + void* direct_buffer_address_; + + // Number of bytes in the direct audio buffer owned by `j_audio_record_`. + size_t direct_buffer_capacity_in_bytes_; + + // Number of audio frames per audio buffer. Each audio frame corresponds to + // one sample of PCM mono data at 16 bits per sample. Hence, each audio + // frame contains 2 bytes (given that the Java layer only supports mono). + // Example: 480 for 48000 Hz or 441 for 44100 Hz. + size_t frames_per_buffer_; + + bool initialized_; + + bool recording_; + + // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the + // AudioDeviceModuleImpl class and created in AudioDeviceModule::Create(). + AudioDeviceBuffer* audio_device_buffer_; +}; + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc new file mode 100644 index 0000000000..c1ff4c30e2 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc @@ -0,0 +1,271 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "sdk/android/src/jni/audio_device/audio_track_jni.h" + +#include <utility> + +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/platform_thread.h" +#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "system_wrappers/include/field_trial.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +namespace jni { + +ScopedJavaLocalRef<jobject> AudioTrackJni::CreateJavaWebRtcAudioTrack( + JNIEnv* env, + const JavaRef<jobject>& j_context, + const JavaRef<jobject>& j_audio_manager) { + return Java_WebRtcAudioTrack_Constructor(env, j_context, j_audio_manager); +} + +AudioTrackJni::AudioTrackJni(JNIEnv* env, + const AudioParameters& audio_parameters, + const JavaRef<jobject>& j_webrtc_audio_track) + : j_audio_track_(env, j_webrtc_audio_track), + audio_parameters_(audio_parameters), + direct_buffer_address_(nullptr), + direct_buffer_capacity_in_bytes_(0), + frames_per_buffer_(0), + initialized_(false), + playing_(false), + audio_device_buffer_(nullptr) { + RTC_LOG(LS_INFO) << "ctor"; + RTC_DCHECK(audio_parameters_.is_valid()); + Java_WebRtcAudioTrack_setNativeAudioTrack(env, j_audio_track_, + jni::jlongFromPointer(this)); + // Detach from this thread since construction is allowed to happen on a + // different thread. + thread_checker_.Detach(); + thread_checker_java_.Detach(); +} + +AudioTrackJni::~AudioTrackJni() { + RTC_LOG(LS_INFO) << "dtor"; + RTC_DCHECK(thread_checker_.IsCurrent()); + Terminate(); +} + +int32_t AudioTrackJni::Init() { + RTC_LOG(LS_INFO) << "Init"; + env_ = AttachCurrentThreadIfNeeded(); + RTC_DCHECK(thread_checker_.IsCurrent()); + return 0; +} + +int32_t AudioTrackJni::Terminate() { + RTC_LOG(LS_INFO) << "Terminate"; + RTC_DCHECK(thread_checker_.IsCurrent()); + StopPlayout(); + thread_checker_.Detach(); + return 0; +} + +int32_t AudioTrackJni::InitPlayout() { + RTC_LOG(LS_INFO) << "InitPlayout"; + RTC_DCHECK(thread_checker_.IsCurrent()); + if (initialized_) { + // Already initialized. + return 0; + } + RTC_DCHECK(!playing_); + double buffer_size_factor = + strtod(webrtc::field_trial::FindFullName( + "WebRTC-AudioDevicePlayoutBufferSizeFactor") + .c_str(), + nullptr); + if (buffer_size_factor == 0) + buffer_size_factor = 1.0; + int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout( + env_, j_audio_track_, audio_parameters_.sample_rate(), + static_cast<int>(audio_parameters_.channels()), buffer_size_factor); + if (requested_buffer_size_bytes < 0) { + RTC_LOG(LS_ERROR) << "InitPlayout failed"; + return -1; + } + // Update UMA histograms for both the requested and actual buffer size. + // To avoid division by zero, we assume the sample rate is 48k if an invalid + // value is found. + const int sample_rate = audio_parameters_.sample_rate() <= 0 + ? 48000 + : audio_parameters_.sample_rate(); + // This calculation assumes that audio is mono. 
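For a concrete check of the conversion performed next, here is a standalone sketch (the byte count is illustrative; the factor 2 is the size in bytes of one 16-bit mono frame):

```cpp
#include <cstdio>

// Convert a byte count of 16-bit mono PCM into milliseconds:
// bytes / 2 = frames, and frames * 1000 / rate = milliseconds.
int BufferSizeMs(int buffer_size_bytes, int sample_rate_hz) {
  return (buffer_size_bytes * 1000) / (2 * sample_rate_hz);
}

int main() {
  // A 7680-byte buffer at 48000 Hz holds 3840 frames, i.e. 80 ms.
  printf("%d ms\n", BufferSizeMs(7680, 48000));
  return 0;
}
```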
+ const int requested_buffer_size_ms = + (requested_buffer_size_bytes * 1000) / (2 * sample_rate); + RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs", + requested_buffer_size_ms, 0, 1000, 100); + int actual_buffer_size_frames = + Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_); + if (actual_buffer_size_frames >= 0) { + const int actual_buffer_size_ms = + actual_buffer_size_frames * 1000 / sample_rate; + RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs", + actual_buffer_size_ms, 0, 1000, 100); + } + + initialized_ = true; + return 0; +} + +bool AudioTrackJni::PlayoutIsInitialized() const { + return initialized_; +} + +int32_t AudioTrackJni::StartPlayout() { + RTC_LOG(LS_INFO) << "StartPlayout"; + RTC_DCHECK(thread_checker_.IsCurrent()); + if (playing_) { + // Already playing. + return 0; + } + if (!initialized_) { + RTC_DLOG(LS_WARNING) + << "Playout cannot start since InitPlayout must succeed first"; + return 0; + } + if (!Java_WebRtcAudioTrack_startPlayout(env_, j_audio_track_)) { + RTC_LOG(LS_ERROR) << "StartPlayout failed"; + return -1; + } + playing_ = true; + return 0; +} + +int32_t AudioTrackJni::StopPlayout() { + RTC_LOG(LS_INFO) << "StopPlayout"; + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!initialized_ || !playing_) { + return 0; + } + // Log the difference between the initial and current buffer levels. + const int current_buffer_size_frames = + Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_); + const int initial_buffer_size_frames = + Java_WebRtcAudioTrack_getInitialBufferSizeInFrames(env_, j_audio_track_); + const int sample_rate_hz = audio_parameters_.sample_rate(); + RTC_HISTOGRAM_COUNTS( + "WebRTC.Audio.AndroidNativeAudioBufferSizeDifferenceFromInitialMs", + (current_buffer_size_frames - initial_buffer_size_frames) * 1000 / + sample_rate_hz, + -500, 100, 100); + + if (!Java_WebRtcAudioTrack_stopPlayout(env_, j_audio_track_)) { + RTC_LOG(LS_ERROR) << "StopPlayout failed"; + return -1; + } + // If we don't detach here, we will hit a RTC_DCHECK next time StartPlayout() + // is called since it will create a new Java thread. + thread_checker_java_.Detach(); + initialized_ = false; + playing_ = false; + direct_buffer_address_ = nullptr; + return 0; +} + +bool AudioTrackJni::Playing() const { + return playing_; +} + +bool AudioTrackJni::SpeakerVolumeIsAvailable() { + return true; +} + +int AudioTrackJni::SetSpeakerVolume(uint32_t volume) { + RTC_LOG(LS_INFO) << "SetSpeakerVolume(" << volume << ")"; + RTC_DCHECK(thread_checker_.IsCurrent()); + return Java_WebRtcAudioTrack_setStreamVolume(env_, j_audio_track_, + static_cast<int>(volume)) + ? 0 + : -1; +} + +absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const { + RTC_DCHECK(thread_checker_.IsCurrent()); + return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_); +} + +absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const { + RTC_DCHECK(thread_checker_.IsCurrent()); + return 0; +} + +absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const { + RTC_DCHECK(thread_checker_.IsCurrent()); + const uint32_t volume = + Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_); + RTC_LOG(LS_INFO) << "SpeakerVolume: " << volume; + return volume; +} + +int AudioTrackJni::GetPlayoutUnderrunCount() { + return Java_WebRtcAudioTrack_GetPlayoutUnderrunCount(env_, j_audio_track_); +} + +// TODO(henrika): possibly add stereo support.
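The byte/frame bookkeeping used by AttachAudioBuffer(), CacheDirectBufferAddress() and GetPlayoutData() below reduces to a small amount of arithmetic; a standalone sketch with illustrative values:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  // With mono 16-bit PCM, one audio frame is sizeof(int16_t) = 2 bytes.
  const size_t channels = 1;  // the Java layer delivers mono
  const size_t bytes_per_frame = channels * sizeof(int16_t);
  // Illustrative: a direct ByteBuffer sized for 10 ms at 48000 Hz.
  const size_t direct_buffer_capacity_in_bytes = 960;
  const size_t frames_per_buffer =
      direct_buffer_capacity_in_bytes / bytes_per_frame;
  printf("frames_per_buffer: %zu\n", frames_per_buffer);  // prints 480
  return 0;
}
```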
+void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { + RTC_LOG(LS_INFO) << "AttachAudioBuffer"; + RTC_DCHECK(thread_checker_.IsCurrent()); + audio_device_buffer_ = audioBuffer; + const int sample_rate_hz = audio_parameters_.sample_rate(); + RTC_LOG(LS_INFO) << "SetPlayoutSampleRate(" << sample_rate_hz << ")"; + audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz); + const size_t channels = audio_parameters_.channels(); + RTC_LOG(LS_INFO) << "SetPlayoutChannels(" << channels << ")"; + audio_device_buffer_->SetPlayoutChannels(channels); +} + +void AudioTrackJni::CacheDirectBufferAddress( + JNIEnv* env, + const JavaParamRef<jobject>& byte_buffer) { + RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress"; + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(!direct_buffer_address_); + direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj()); + jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj()); + RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity; + direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity); + const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t); + frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / bytes_per_frame; + RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_; +} + +// This method is called on a high-priority thread from Java. The name of +// the thread is 'AudioTrackThread'. +void AudioTrackJni::GetPlayoutData(JNIEnv* env, + size_t length) { + RTC_DCHECK(thread_checker_java_.IsCurrent()); + const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t); + RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame); + if (!audio_device_buffer_) { + RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called"; + return; + } + // Pull decoded data (in 16-bit PCM format) from jitter buffer. + int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_); + if (samples <= 0) { + RTC_LOG(LS_ERROR) << "AudioDeviceBuffer::RequestPlayoutData failed"; + return; + } + RTC_DCHECK_EQ(samples, frames_per_buffer_); + // Copy decoded data into common byte buffer to ensure that it can be + // written to the Java based audio track. + samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_); + RTC_DCHECK_EQ(length, bytes_per_frame * samples); +} + +} // namespace jni + +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h new file mode 100644 index 0000000000..5ca907c42f --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_ + +#include <jni.h> + +#include <memory> + +#include "absl/types/optional.h" +#include "api/sequence_checker.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_device/include/audio_device_defines.h" +#include "sdk/android/src/jni/audio_device/audio_common.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" + +namespace webrtc { + +namespace jni { + +// Implements 16-bit mono PCM audio output support for Android using the Java +// AudioTrack interface. Most of the work is done by its Java counterpart in +// WebRtcAudioTrack.java. This class is created and lives on a thread in +// C++-land, but decoded audio buffers are requested on a high-priority +// thread managed by the Java class. +// +// An instance can be created on any thread, but must then be used on one and +// the same thread. All public methods must also be called on the same thread. A +// thread checker will RTC_DCHECK if any method is called on an invalid thread. +// +// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed. +// Additional thread checking guarantees that no other (possibly non-attached) +// thread is used. +class AudioTrackJni : public AudioOutput { + public: + static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioTrack( + JNIEnv* env, + const JavaRef<jobject>& j_context, + const JavaRef<jobject>& j_audio_manager); + + AudioTrackJni(JNIEnv* env, + const AudioParameters& audio_parameters, + const JavaRef<jobject>& j_webrtc_audio_track); + ~AudioTrackJni() override; + + int32_t Init() override; + int32_t Terminate() override; + + int32_t InitPlayout() override; + bool PlayoutIsInitialized() const override; + + int32_t StartPlayout() override; + int32_t StopPlayout() override; + bool Playing() const override; + + bool SpeakerVolumeIsAvailable() override; + int SetSpeakerVolume(uint32_t volume) override; + absl::optional<uint32_t> SpeakerVolume() const override; + absl::optional<uint32_t> MaxSpeakerVolume() const override; + absl::optional<uint32_t> MinSpeakerVolume() const override; + int GetPlayoutUnderrunCount() override; + + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; + + // Called from Java side so we can cache the address of the Java-managed + // `byte_buffer` in `direct_buffer_address_`. The size of the buffer + // is also stored in `direct_buffer_capacity_in_bytes_`. + // Called on the same thread as the creating thread. + void CacheDirectBufferAddress(JNIEnv* env, + const JavaParamRef<jobject>& byte_buffer); + // Called periodically by the Java based WebRtcAudioTrack object when + // playout has started. Each call indicates that `length` new bytes should + // be written to the memory area `direct_buffer_address_` for playout. + // This method is called on a high-priority thread from Java. The name of + // the thread is 'AudioTrackThread'. + void GetPlayoutData(JNIEnv* env, size_t length); + + private: + // Stores thread ID in constructor. + SequenceChecker thread_checker_; + + // Stores thread ID in first call to OnGetPlayoutData() from high-priority + // thread in Java. Detached during construction of this object. + SequenceChecker thread_checker_java_; + + // Wraps the Java specific parts of the AudioTrackJni class. + JNIEnv* env_ = nullptr; + ScopedJavaGlobalRef<jobject> j_audio_track_; + + // Contains audio parameters provided to this class at construction by the + // AudioManager.
+ const AudioParameters audio_parameters_; + + // Cached copy of address to direct audio buffer owned by `j_audio_track_`. + void* direct_buffer_address_; + + // Number of bytes in the direct audio buffer owned by `j_audio_track_`. + size_t direct_buffer_capacity_in_bytes_; + + // Number of audio frames per audio buffer. Each audio frame corresponds to + // one sample of PCM mono data at 16 bits per sample. Hence, each audio + // frame contains 2 bytes (given that the Java layer only supports mono). + // Example: 480 for 48000 Hz or 441 for 44100 Hz. + size_t frames_per_buffer_; + + bool initialized_; + + bool playing_; + + // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the + // AudioDeviceModuleImpl class and created in AudioDeviceModule::Create(). + // The AudioDeviceBuffer is a member of the AudioDeviceModuleImpl instance + // and therefore outlives this object. + AudioDeviceBuffer* audio_device_buffer_; +}; + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc new file mode 100644 index 0000000000..1c3cbe4bbe --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include <memory> + +#include "sdk/android/generated_java_audio_jni/JavaAudioDeviceModule_jni.h" +#include "sdk/android/src/jni/audio_device/audio_record_jni.h" +#include "sdk/android/src/jni/audio_device/audio_track_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_JavaAudioDeviceModule_CreateAudioDeviceModule( + JNIEnv* env, + const JavaParamRef<jobject>& j_context, + const JavaParamRef<jobject>& j_audio_manager, + const JavaParamRef<jobject>& j_webrtc_audio_record, + const JavaParamRef<jobject>& j_webrtc_audio_track, + int input_sample_rate, + int output_sample_rate, + jboolean j_use_stereo_input, + jboolean j_use_stereo_output) { + AudioParameters input_parameters; + AudioParameters output_parameters; + GetAudioParameters(env, j_context, j_audio_manager, input_sample_rate, + output_sample_rate, j_use_stereo_input, + j_use_stereo_output, &input_parameters, + &output_parameters); + auto audio_input = std::make_unique<AudioRecordJni>( + env, input_parameters, kHighLatencyModeDelayEstimateInMilliseconds, + j_webrtc_audio_record); + auto audio_output = std::make_unique<AudioTrackJni>(env, output_parameters, + j_webrtc_audio_track); + return jlongFromPointer(CreateAudioDeviceModuleFromInputAndOutput( + AudioDeviceModule::kAndroidJavaAudio, + j_use_stereo_input, j_use_stereo_output, + kHighLatencyModeDelayEstimateInMilliseconds, + std::move(audio_input), std::move(audio_output)) + .release()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc new file mode 100644 index 0000000000..300019a161 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/audio_device/opensles_common.h" + +#include <SLES/OpenSLES.h> + +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace jni { + +// Returns a string representation given an integer SL_RESULT_XXX code. +// The mapping can be found in <SLES/OpenSLES.h>. 
+const char* GetSLErrorString(size_t code) { + static const char* sl_error_strings[] = { + "SL_RESULT_SUCCESS", // 0 + "SL_RESULT_PRECONDITIONS_VIOLATED", // 1 + "SL_RESULT_PARAMETER_INVALID", // 2 + "SL_RESULT_MEMORY_FAILURE", // 3 + "SL_RESULT_RESOURCE_ERROR", // 4 + "SL_RESULT_RESOURCE_LOST", // 5 + "SL_RESULT_IO_ERROR", // 6 + "SL_RESULT_BUFFER_INSUFFICIENT", // 7 + "SL_RESULT_CONTENT_CORRUPTED", // 8 + "SL_RESULT_CONTENT_UNSUPPORTED", // 9 + "SL_RESULT_CONTENT_NOT_FOUND", // 10 + "SL_RESULT_PERMISSION_DENIED", // 11 + "SL_RESULT_FEATURE_UNSUPPORTED", // 12 + "SL_RESULT_INTERNAL_ERROR", // 13 + "SL_RESULT_UNKNOWN_ERROR", // 14 + "SL_RESULT_OPERATION_ABORTED", // 15 + "SL_RESULT_CONTROL_LOST", // 16 + }; + + if (code >= arraysize(sl_error_strings)) { + return "SL_RESULT_UNKNOWN_ERROR"; + } + return sl_error_strings[code]; +} + +SLDataFormat_PCM CreatePCMConfiguration(size_t channels, + int sample_rate, + size_t bits_per_sample) { + RTC_CHECK_EQ(bits_per_sample, SL_PCMSAMPLEFORMAT_FIXED_16); + SLDataFormat_PCM format; + format.formatType = SL_DATAFORMAT_PCM; + format.numChannels = static_cast<SLuint32>(channels); + // Note that the sample rate is expressed in milliHertz, not Hertz. + switch (sample_rate) { + case 8000: + format.samplesPerSec = SL_SAMPLINGRATE_8; + break; + case 16000: + format.samplesPerSec = SL_SAMPLINGRATE_16; + break; + case 22050: + format.samplesPerSec = SL_SAMPLINGRATE_22_05; + break; + case 32000: + format.samplesPerSec = SL_SAMPLINGRATE_32; + break; + case 44100: + format.samplesPerSec = SL_SAMPLINGRATE_44_1; + break; + case 48000: + format.samplesPerSec = SL_SAMPLINGRATE_48; + break; + case 64000: + format.samplesPerSec = SL_SAMPLINGRATE_64; + break; + case 88200: + format.samplesPerSec = SL_SAMPLINGRATE_88_2; + break; + case 96000: + format.samplesPerSec = SL_SAMPLINGRATE_96; + break; + default: + RTC_CHECK(false) << "Unsupported sample rate: " << sample_rate; + break; + } + format.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16; + format.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16; + format.endianness = SL_BYTEORDER_LITTLEENDIAN; + if (format.numChannels == 1) { + format.channelMask = SL_SPEAKER_FRONT_CENTER; + } else if (format.numChannels == 2) { + format.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT; + } else { + RTC_CHECK(false) << "Unsupported number of channels: " + << format.numChannels; + } + return format; +} + +OpenSLEngineManager::OpenSLEngineManager() { + thread_checker_.Detach(); +} + +SLObjectItf OpenSLEngineManager::GetOpenSLEngine() { + RTC_LOG(LS_INFO) << "GetOpenSLEngine"; + RTC_DCHECK(thread_checker_.IsCurrent()); + // OpenSL ES for Android only supports a single engine per application. + // If one has already been created, return the existing object instead of + // creating a new one. + if (engine_object_.Get() != nullptr) { + RTC_LOG(LS_WARNING) + << "The OpenSL ES engine object has already been created"; + return engine_object_.Get(); + } + // Create the engine object in thread safe mode. + const SLEngineOption option[] = { + {SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE)}}; + SLresult result = + slCreateEngine(engine_object_.Receive(), 1, option, 0, NULL, NULL); + if (result != SL_RESULT_SUCCESS) { + RTC_LOG(LS_ERROR) << "slCreateEngine() failed: " + << GetSLErrorString(result); + engine_object_.Reset(); + return nullptr; + } + // Realize the SL Engine in synchronous mode.
+ result = engine_object_->Realize(engine_object_.Get(), SL_BOOLEAN_FALSE); + if (result != SL_RESULT_SUCCESS) { + RTC_LOG(LS_ERROR) << "Realize() failed: " << GetSLErrorString(result); + engine_object_.Reset(); + return nullptr; + } + // Finally return the SLObjectItf interface of the engine object. + return engine_object_.Get(); +} + +} // namespace jni + +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h new file mode 100644 index 0000000000..9dd1e0f7d7 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_ + +#include <SLES/OpenSLES.h> +#include <stddef.h> + +#include "api/ref_counted_base.h" +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace jni { + +// Returns a string representation given an integer SL_RESULT_XXX code. +// The mapping can be found in <SLES/OpenSLES.h>. +const char* GetSLErrorString(size_t code); + +// Configures an SL_DATAFORMAT_PCM structure based on native audio parameters. +SLDataFormat_PCM CreatePCMConfiguration(size_t channels, + int sample_rate, + size_t bits_per_sample); + +// Helper class for using SLObjectItf interfaces. +template <typename SLType, typename SLDerefType> +class ScopedSLObject { + public: + ScopedSLObject() : obj_(nullptr) {} + + ~ScopedSLObject() { Reset(); } + + SLType* Receive() { + RTC_DCHECK(!obj_); + return &obj_; + } + + SLDerefType operator->() { return *obj_; } + + SLType Get() const { return obj_; } + + void Reset() { + if (obj_) { + (*obj_)->Destroy(obj_); + obj_ = nullptr; + } + } + + private: + SLType obj_; +}; + +typedef ScopedSLObject<SLObjectItf, const SLObjectItf_*> ScopedSLObjectItf; + +// Creates and realizes the main (global) Open SL engine object and returns +// a reference to it. The engine object is only created at the first call +// since OpenSL ES for Android only supports a single engine per application. +// Subsequent calls return the already created engine. +// Note: This class must be used single-threaded, and this is enforced by a +// thread checker. +class OpenSLEngineManager + : public rtc::RefCountedNonVirtual<OpenSLEngineManager> { + public: + OpenSLEngineManager(); + ~OpenSLEngineManager() = default; + SLObjectItf GetOpenSLEngine(); + + private: + SequenceChecker thread_checker_; + // This object is the global entry point of the OpenSL ES API. + // After creating the engine object, the application can obtain this object's + // SLEngineItf interface. This interface contains creation methods for all + // the other object types in the API. None of these interfaces are realized + // by this class. It only provides access to the global engine object.
+ ScopedSLObjectItf engine_object_; +}; + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc new file mode 100644 index 0000000000..6300a3abe1 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc @@ -0,0 +1,446 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/audio_device/opensles_player.h" + +#include <android/log.h> + +#include <memory> + +#include "api/array_view.h" +#include "modules/audio_device/fine_audio_buffer.h" +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/time_utils.h" +#include "sdk/android/src/jni/audio_device/audio_common.h" + +#define TAG "OpenSLESPlayer" +#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__) +#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__) +#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__) +#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__) +#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__) + +#define RETURN_ON_ERROR(op, ...) \ + do { \ + SLresult err = (op); \ + if (err != SL_RESULT_SUCCESS) { \ + ALOGE("%s failed: %s", #op, GetSLErrorString(err)); \ + return __VA_ARGS__; \ + } \ + } while (0) + +namespace webrtc { + +namespace jni { + +OpenSLESPlayer::OpenSLESPlayer( + const AudioParameters& audio_parameters, + rtc::scoped_refptr<OpenSLEngineManager> engine_manager) + : audio_parameters_(audio_parameters), + audio_device_buffer_(nullptr), + initialized_(false), + playing_(false), + buffer_index_(0), + engine_manager_(std::move(engine_manager)), + engine_(nullptr), + player_(nullptr), + simple_buffer_queue_(nullptr), + volume_(nullptr), + last_play_time_(0) { + ALOGD("ctor[tid=%d]", rtc::CurrentThreadId()); + // Use native audio output parameters provided by the audio manager and + // define the PCM format structure. + pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(), + audio_parameters_.sample_rate(), + audio_parameters_.bits_per_sample()); + // Detach from this thread since we want to use the checker to verify calls + // from the internal audio thread. 
+ thread_checker_opensles_.Detach(); +} + +OpenSLESPlayer::~OpenSLESPlayer() { + ALOGD("dtor[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + Terminate(); + DestroyAudioPlayer(); + DestroyMix(); + engine_ = nullptr; + RTC_DCHECK(!engine_); + RTC_DCHECK(!output_mix_.Get()); + RTC_DCHECK(!player_); + RTC_DCHECK(!simple_buffer_queue_); + RTC_DCHECK(!volume_); +} + +int OpenSLESPlayer::Init() { + ALOGD("Init[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + if (audio_parameters_.channels() == 2) { + ALOGW("Stereo mode is enabled"); + } + return 0; +} + +int OpenSLESPlayer::Terminate() { + ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + StopPlayout(); + return 0; +} + +int OpenSLESPlayer::InitPlayout() { + ALOGD("InitPlayout[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(!initialized_); + RTC_DCHECK(!playing_); + if (!ObtainEngineInterface()) { + ALOGE("Failed to obtain SL Engine interface"); + return -1; + } + CreateMix(); + initialized_ = true; + buffer_index_ = 0; + return 0; +} + +bool OpenSLESPlayer::PlayoutIsInitialized() const { + return initialized_; +} + +int OpenSLESPlayer::StartPlayout() { + ALOGD("StartPlayout[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(initialized_); + RTC_DCHECK(!playing_); + if (fine_audio_buffer_) { + fine_audio_buffer_->ResetPlayout(); + } + // The number of lower latency audio players is limited, hence we create the + // audio player in Start() and destroy it in Stop(). + CreateAudioPlayer(); + // Fill up audio buffers to avoid initial glitch and to ensure that playback + // starts when mode is later changed to SL_PLAYSTATE_PLAYING. + // TODO(henrika): we can save some delay by only making one call to + // EnqueuePlayoutData. Most likely not worth the risk of adding a glitch. + last_play_time_ = rtc::Time(); + for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) { + EnqueuePlayoutData(true); + } + // Start streaming data by setting the play state to SL_PLAYSTATE_PLAYING. + // For a player object, when the object is in the SL_PLAYSTATE_PLAYING + // state, adding buffers will implicitly start playback. + RETURN_ON_ERROR((*player_)->SetPlayState(player_, SL_PLAYSTATE_PLAYING), -1); + playing_ = (GetPlayState() == SL_PLAYSTATE_PLAYING); + RTC_DCHECK(playing_); + return 0; +} + +int OpenSLESPlayer::StopPlayout() { + ALOGD("StopPlayout[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!initialized_ || !playing_) { + return 0; + } + // Stop playing by setting the play state to SL_PLAYSTATE_STOPPED. + RETURN_ON_ERROR((*player_)->SetPlayState(player_, SL_PLAYSTATE_STOPPED), -1); + // Clear the buffer queue to flush out any remaining data. + RETURN_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_), -1); +#if RTC_DCHECK_IS_ON + // Verify that the buffer queue is in fact cleared as it should. + SLAndroidSimpleBufferQueueState buffer_queue_state; + (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &buffer_queue_state); + RTC_DCHECK_EQ(0, buffer_queue_state.count); + RTC_DCHECK_EQ(0, buffer_queue_state.index); +#endif + // The number of lower latency audio players is limited, hence we create the + // audio player in Start() and destroy it in Stop(). 
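The create-in-Start/destroy-in-Stop design noted above gives the class a strict lifecycle. A hypothetical caller-side sketch of that contract (driver code for illustration only, not part of this patch):

```cpp
#include "sdk/android/src/jni/audio_device/opensles_player.h"

// Sketch: the scarce low-latency player object only exists between
// StartPlayout() and StopPlayout(), and InitPlayout() must be called again
// before the next StartPlayout().
void RunPlayoutOnce(webrtc::jni::OpenSLESPlayer& player) {
  player.InitPlayout();   // obtains the engine interface, creates the mix
  player.StartPlayout();  // creates the audio player, primes the buffers
  // ... audio is rendered on the OpenSL ES callback thread ...
  player.StopPlayout();   // flushes the queue, destroys the audio player
}
```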
+ DestroyAudioPlayer(); + thread_checker_opensles_.Detach(); + initialized_ = false; + playing_ = false; + return 0; +} + +bool OpenSLESPlayer::Playing() const { + return playing_; +} + +bool OpenSLESPlayer::SpeakerVolumeIsAvailable() { + return false; +} + +int OpenSLESPlayer::SetSpeakerVolume(uint32_t volume) { + return -1; +} + +absl::optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const { + return absl::nullopt; +} + +absl::optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const { + return absl::nullopt; +} + +absl::optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const { + return absl::nullopt; +} + +void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { + ALOGD("AttachAudioBuffer"); + RTC_DCHECK(thread_checker_.IsCurrent()); + audio_device_buffer_ = audioBuffer; + const int sample_rate_hz = audio_parameters_.sample_rate(); + ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz); + audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz); + const size_t channels = audio_parameters_.channels(); + ALOGD("SetPlayoutChannels(%zu)", channels); + audio_device_buffer_->SetPlayoutChannels(channels); + RTC_CHECK(audio_device_buffer_); + AllocateDataBuffers(); +} + +void OpenSLESPlayer::AllocateDataBuffers() { + ALOGD("AllocateDataBuffers"); + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(!simple_buffer_queue_); + RTC_CHECK(audio_device_buffer_); + // Create a modified audio buffer class which allows us to ask for any number + // of samples (and not only multiple of 10ms) to match the native OpenSL ES + // buffer size. The native buffer size corresponds to the + // PROPERTY_OUTPUT_FRAMES_PER_BUFFER property which is the number of audio + // frames that the HAL (Hardware Abstraction Layer) buffer can hold. It is + // recommended to construct audio buffers so that they contain an exact + // multiple of this number. If so, callbacks will occur at regular intervals, + // which reduces jitter. + const size_t buffer_size_in_samples = + audio_parameters_.frames_per_buffer() * audio_parameters_.channels(); + ALOGD("native buffer size: %zu", buffer_size_in_samples); + ALOGD("native buffer size in ms: %.2f", + audio_parameters_.GetBufferSizeInMilliseconds()); + fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_); + // Allocate memory for audio buffers. + for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) { + audio_buffers_[i].reset(new SLint16[buffer_size_in_samples]); + } +} + +bool OpenSLESPlayer::ObtainEngineInterface() { + ALOGD("ObtainEngineInterface"); + RTC_DCHECK(thread_checker_.IsCurrent()); + if (engine_) + return true; + // Get access to (or create if not already existing) the global OpenSL Engine + // object. + SLObjectItf engine_object = engine_manager_->GetOpenSLEngine(); + if (engine_object == nullptr) { + ALOGE("Failed to access the global OpenSL engine"); + return false; + } + // Get the SL Engine Interface which is implicit. + RETURN_ON_ERROR( + (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine_), + false); + return true; +} + +bool OpenSLESPlayer::CreateMix() { + ALOGD("CreateMix"); + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(engine_); + if (output_mix_.Get()) + return true; + + // Create the output mix on the engine object. No interfaces will be used.
+ RETURN_ON_ERROR((*engine_)->CreateOutputMix(engine_, output_mix_.Receive(), 0, + nullptr, nullptr), + false); + RETURN_ON_ERROR(output_mix_->Realize(output_mix_.Get(), SL_BOOLEAN_FALSE), + false); + return true; +} + +void OpenSLESPlayer::DestroyMix() { + ALOGD("DestroyMix"); + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!output_mix_.Get()) + return; + output_mix_.Reset(); +} + +bool OpenSLESPlayer::CreateAudioPlayer() { + ALOGD("CreateAudioPlayer"); + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(output_mix_.Get()); + if (player_object_.Get()) + return true; + RTC_DCHECK(!player_); + RTC_DCHECK(!simple_buffer_queue_); + RTC_DCHECK(!volume_); + + // Source: the Android Simple Buffer Queue data locator. + SLDataLocator_AndroidSimpleBufferQueue simple_buffer_queue = { + SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, + static_cast<SLuint32>(kNumOfOpenSLESBuffers)}; + SLDataSource audio_source = {&simple_buffer_queue, &pcm_format_}; + + // Sink: the OutputMix-based data sink. + SLDataLocator_OutputMix locator_output_mix = {SL_DATALOCATOR_OUTPUTMIX, + output_mix_.Get()}; + SLDataSink audio_sink = {&locator_output_mix, nullptr}; + + // Define interfaces that we intend to use and realize. + const SLInterfaceID interface_ids[] = {SL_IID_ANDROIDCONFIGURATION, + SL_IID_BUFFERQUEUE, SL_IID_VOLUME}; + const SLboolean interface_required[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, + SL_BOOLEAN_TRUE}; + + // Create the audio player on the engine interface. + RETURN_ON_ERROR( + (*engine_)->CreateAudioPlayer( + engine_, player_object_.Receive(), &audio_source, &audio_sink, + arraysize(interface_ids), interface_ids, interface_required), + false); + + // Use the Android configuration interface to set platform-specific + // parameters. Should be done before player is realized. + SLAndroidConfigurationItf player_config; + RETURN_ON_ERROR( + player_object_->GetInterface(player_object_.Get(), + SL_IID_ANDROIDCONFIGURATION, &player_config), + false); + // Set audio player configuration to SL_ANDROID_STREAM_VOICE which + // corresponds to android.media.AudioManager.STREAM_VOICE_CALL. + SLint32 stream_type = SL_ANDROID_STREAM_VOICE; + RETURN_ON_ERROR( + (*player_config) + ->SetConfiguration(player_config, SL_ANDROID_KEY_STREAM_TYPE, + &stream_type, sizeof(SLint32)), + false); + + // Realize the audio player object after configuration has been set. + RETURN_ON_ERROR( + player_object_->Realize(player_object_.Get(), SL_BOOLEAN_FALSE), false); + + // Get the SLPlayItf interface on the audio player. + RETURN_ON_ERROR( + player_object_->GetInterface(player_object_.Get(), SL_IID_PLAY, &player_), + false); + + // Get the SLAndroidSimpleBufferQueueItf interface on the audio player. + RETURN_ON_ERROR( + player_object_->GetInterface(player_object_.Get(), SL_IID_BUFFERQUEUE, + &simple_buffer_queue_), + false); + + // Register callback method for the Android Simple Buffer Queue interface. + // This method will be called when the native audio layer needs audio data. + RETURN_ON_ERROR((*simple_buffer_queue_) + ->RegisterCallback(simple_buffer_queue_, + SimpleBufferQueueCallback, this), + false); + + // Get the SLVolumeItf interface on the audio player. + RETURN_ON_ERROR(player_object_->GetInterface(player_object_.Get(), + SL_IID_VOLUME, &volume_), + false); + + // TODO(henrika): might not be required to set volume to max here since it + // seems to be default on most devices. Might be required for unit tests.
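For reference, a sketch of what explicit volume control through the SLVolumeItf fetched above would look like, assuming the `volume_` member initialized in this function (OpenSL ES volume levels are in millibels, where 0 mB means unattenuated full volume):

```cpp
// Query the supported range, then set full (unattenuated) volume.
SLmillibel max_level = SL_MILLIBEL_MIN;
if ((*volume_)->GetMaxVolumeLevel(volume_, &max_level) == SL_RESULT_SUCCESS) {
  // 0 mB is always a valid level; max_level may allow amplification above it.
  (*volume_)->SetVolumeLevel(volume_, 0);
}
```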
+ // RETURN_ON_ERROR((*volume_)->SetVolumeLevel(volume_, 0), false); + + return true; +} + +void OpenSLESPlayer::DestroyAudioPlayer() { + ALOGD("DestroyAudioPlayer"); + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!player_object_.Get()) + return; + (*simple_buffer_queue_) + ->RegisterCallback(simple_buffer_queue_, nullptr, nullptr); + player_object_.Reset(); + player_ = nullptr; + simple_buffer_queue_ = nullptr; + volume_ = nullptr; +} + +// static +void OpenSLESPlayer::SimpleBufferQueueCallback( + SLAndroidSimpleBufferQueueItf caller, + void* context) { + OpenSLESPlayer* stream = reinterpret_cast<OpenSLESPlayer*>(context); + stream->FillBufferQueue(); +} + +void OpenSLESPlayer::FillBufferQueue() { + RTC_DCHECK(thread_checker_opensles_.IsCurrent()); + SLuint32 state = GetPlayState(); + if (state != SL_PLAYSTATE_PLAYING) { + ALOGW("Buffer callback in non-playing state!"); + return; + } + EnqueuePlayoutData(false); +} + +void OpenSLESPlayer::EnqueuePlayoutData(bool silence) { + // Check delta time between two successive callbacks and provide a warning + // if it becomes very large. + // TODO(henrika): using 150ms as upper limit but this value is rather random. + const uint32_t current_time = rtc::Time(); + const uint32_t diff = current_time - last_play_time_; + if (diff > 150) { + ALOGW("Bad OpenSL ES playout timing, dT=%u [ms]", diff); + } + last_play_time_ = current_time; + SLint8* audio_ptr8 = + reinterpret_cast<SLint8*>(audio_buffers_[buffer_index_].get()); + if (silence) { + RTC_DCHECK(thread_checker_.IsCurrent()); + // Avoid acquiring real audio data from WebRTC and fill the buffer with + // zeros instead. Used to prime the buffer with silence and to avoid asking + // for audio data from two different threads. + memset(audio_ptr8, 0, audio_parameters_.GetBytesPerBuffer()); + } else { + RTC_DCHECK(thread_checker_opensles_.IsCurrent()); + // Read audio data from the WebRTC source using the FineAudioBuffer object + // to adjust for differences in buffer size between WebRTC (10ms) and native + // OpenSL ES. Use hardcoded delay estimate since OpenSL ES does not support + // delay estimation. + fine_audio_buffer_->GetPlayoutData( + rtc::ArrayView<int16_t>(audio_buffers_[buffer_index_].get(), + audio_parameters_.frames_per_buffer() * + audio_parameters_.channels()), + 25); + } + // Enqueue the decoded audio buffer for playback. + SLresult err = (*simple_buffer_queue_) + ->Enqueue(simple_buffer_queue_, audio_ptr8, + audio_parameters_.GetBytesPerBuffer()); + if (SL_RESULT_SUCCESS != err) { + ALOGE("Enqueue failed: %d", err); + } + buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers; +} + +SLuint32 OpenSLESPlayer::GetPlayState() const { + RTC_DCHECK(player_); + SLuint32 state; + SLresult err = (*player_)->GetPlayState(player_, &state); + if (SL_RESULT_SUCCESS != err) { + ALOGE("GetPlayState failed: %d", err); + } + return state; +} + +} // namespace jni + +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h new file mode 100644 index 0000000000..8a22432309 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h @@ -0,0 +1,199 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_ + +#include <SLES/OpenSLES.h> +#include <SLES/OpenSLES_Android.h> +#include <SLES/OpenSLES_AndroidConfiguration.h> + +#include <memory> + +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_device/fine_audio_buffer.h" +#include "modules/audio_device/include/audio_device_defines.h" +#include "sdk/android/src/jni/audio_device/audio_common.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" +#include "sdk/android/src/jni/audio_device/opensles_common.h" + +namespace webrtc { + +class FineAudioBuffer; + +namespace jni { + +// Implements 16-bit mono PCM audio output support for Android using the +// C based OpenSL ES API. No JNI calls from C/C++ to Java are made. +// +// An instance can be created on any thread, but must then be used on one and +// the same thread. All public methods must also be called on the same thread. A +// thread checker will RTC_DCHECK if any method is called on an invalid thread. +// Decoded audio buffers are requested on a dedicated internal thread managed by +// the OpenSL ES layer. +// +// The existing design forces the user to call InitPlayout() after StopPlayout() +// to be able to call StartPlayout() again. This is in line with how the Java- +// based implementation works. +// +// OpenSL ES is a native C API which has no Dalvik-related overhead such as +// garbage collection pauses and it supports reduced audio output latency. +// If the device doesn't claim this feature but supports API level 9 (Android +// platform version 2.3) or later, then we can still use the OpenSL ES APIs but +// the output latency may be higher. +class OpenSLESPlayer : public AudioOutput { + public: + // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is + // required for lower latency. Beginning with API level 18 (Android 4.3), a + // buffer count of 1 is sufficient for lower latency. In addition, the buffer + // size and sample rate must be compatible with the device's native output + // configuration provided via the audio manager at construction. + // TODO(henrika): perhaps set this value dynamically based on OS version. + static const int kNumOfOpenSLESBuffers = 2; + + OpenSLESPlayer(const AudioParameters& audio_parameters, + rtc::scoped_refptr<OpenSLEngineManager> engine_manager); + ~OpenSLESPlayer() override; + + int Init() override; + int Terminate() override; + + int InitPlayout() override; + bool PlayoutIsInitialized() const override; + + int StartPlayout() override; + int StopPlayout() override; + bool Playing() const override; + + bool SpeakerVolumeIsAvailable() override; + int SetSpeakerVolume(uint32_t volume) override; + absl::optional<uint32_t> SpeakerVolume() const override; + absl::optional<uint32_t> MaxSpeakerVolume() const override; + absl::optional<uint32_t> MinSpeakerVolume() const override; + + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; + + int GetPlayoutUnderrunCount() override { return -1; } + + private: + // These callback methods are called when data is required for playout.
+ // They are both called from an internal "OpenSL ES thread" which is not + // attached to the Dalvik VM. + static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller, + void* context); + void FillBufferQueue(); + // Reads audio data in PCM format using the AudioDeviceBuffer. + // Can be called both on the main thread (during Start()) and from the + // internal audio thread while output streaming is active. + // If the `silence` flag is set, the audio is filled with zeros instead of + // asking the WebRTC layer for real audio data. This procedure is also known + // as audio priming. + void EnqueuePlayoutData(bool silence); + + // Allocate memory for audio buffers which will be used to render audio + // via the SLAndroidSimpleBufferQueueItf interface. + void AllocateDataBuffers(); + + // Obtains the SL Engine Interface from the existing global Engine object. + // The interface exposes creation methods of all the OpenSL ES object types. + // This method defines the `engine_` member variable. + bool ObtainEngineInterface(); + + // Creates/destroys the output mix object. + bool CreateMix(); + void DestroyMix(); + + // Creates/destroys the audio player and the simple-buffer object. + // Also creates the volume object. + bool CreateAudioPlayer(); + void DestroyAudioPlayer(); + + SLuint32 GetPlayState() const; + + // Ensures that methods are called from the same thread as this object is + // created on. + SequenceChecker thread_checker_; + + // Stores thread ID in first call to SimpleBufferQueueCallback() from internal + // non-application thread which is not attached to the Dalvik JVM. + // Detached during construction of this object. + SequenceChecker thread_checker_opensles_; + + const AudioParameters audio_parameters_; + + // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the + // AudioDeviceModuleImpl class and created in AudioDeviceModule::Create(). + AudioDeviceBuffer* audio_device_buffer_; + + bool initialized_; + bool playing_; + + // PCM-type format definition. + // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if + // 32-bit float representation is needed. + SLDataFormat_PCM pcm_format_; + + // Queue of audio buffers to be used by the player object for rendering + // audio. + std::unique_ptr<SLint16[]> audio_buffers_[kNumOfOpenSLESBuffers]; + + // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data + // in chunks of 10ms. It then allows for this data to be pulled in + // a finer or coarser granularity. That is, by interacting with this class + // instead of directly with the AudioDeviceBuffer, one can ask for any number + // of audio samples. + // Example: native buffer size can be 192 audio frames at 48kHz sample rate. + // WebRTC will provide 480 audio frames per 10ms but OpenSL ES asks for 192 + // in each callback (one every 4th ms). This class can then ask for 192 and + // the FineAudioBuffer will ask WebRTC for new data only approximately every + // second callback and also cache non-utilized audio. + std::unique_ptr<FineAudioBuffer> fine_audio_buffer_; + + // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue. + // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ... + int buffer_index_; + + const rtc::scoped_refptr<OpenSLEngineManager> engine_manager_; + // This interface exposes creation methods for all the OpenSL ES object types. + // It is the OpenSL ES API entry point. + SLEngineItf engine_; + + // Output mix object to be used by the player object.
+ ScopedSLObjectItf output_mix_; + + // The audio player media object plays out audio to the speakers. It also + // supports volume control. + ScopedSLObjectItf player_object_; + + // This interface is supported on the audio player and it controls the state + // of the audio player. + SLPlayItf player_; + + // The Android Simple Buffer Queue interface is supported on the audio player + // and it provides methods to send audio data from the source to the audio + // player for rendering. + SLAndroidSimpleBufferQueueItf simple_buffer_queue_; + + // This interface exposes controls for manipulating the object's audio volume + // properties. This interface is supported on the Audio Player object. + SLVolumeItf volume_; + + // Last time the OpenSL ES layer asked for audio data to play out. + uint32_t last_play_time_; +}; + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc new file mode 100644 index 0000000000..c426a8d92b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc @@ -0,0 +1,445 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/audio_device/opensles_recorder.h" + +#include <android/log.h> + +#include <memory> + +#include "api/array_view.h" +#include "modules/audio_device/fine_audio_buffer.h" +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/time_utils.h" +#include "sdk/android/src/jni/audio_device/audio_common.h" + +#define TAG "OpenSLESRecorder" +#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__) +#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__) +#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__) +#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__) +#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__) + +#define LOG_ON_ERROR(op) \ + [](SLresult err) { \ + if (err != SL_RESULT_SUCCESS) { \ + ALOGE("%s:%d %s failed: %s", __FILE__, __LINE__, #op, \ + GetSLErrorString(err)); \ + return true; \ + } \ + return false; \ + }(op) + +namespace webrtc { + +namespace jni { + +OpenSLESRecorder::OpenSLESRecorder( + const AudioParameters& audio_parameters, + rtc::scoped_refptr<OpenSLEngineManager> engine_manager) + : audio_parameters_(audio_parameters), + audio_device_buffer_(nullptr), + initialized_(false), + recording_(false), + engine_manager_(std::move(engine_manager)), + engine_(nullptr), + recorder_(nullptr), + simple_buffer_queue_(nullptr), + buffer_index_(0), + last_rec_time_(0) { + ALOGD("ctor[tid=%d]", rtc::CurrentThreadId()); + // Detach from this thread since we want to use the checker to verify calls + // from the internal audio thread. + thread_checker_opensles_.Detach(); + // Use native audio input parameters provided by the audio manager and + // define the PCM format structure.
+ pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(), + audio_parameters_.sample_rate(), + audio_parameters_.bits_per_sample()); +} + +OpenSLESRecorder::~OpenSLESRecorder() { + ALOGD("dtor[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + Terminate(); + DestroyAudioRecorder(); + engine_ = nullptr; + RTC_DCHECK(!engine_); + RTC_DCHECK(!recorder_); + RTC_DCHECK(!simple_buffer_queue_); +} + +int OpenSLESRecorder::Init() { + ALOGD("Init[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + if (audio_parameters_.channels() == 2) { + ALOGD("Stereo mode is enabled"); + } + return 0; +} + +int OpenSLESRecorder::Terminate() { + ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + StopRecording(); + return 0; +} + +int OpenSLESRecorder::InitRecording() { + ALOGD("InitRecording[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(!initialized_); + RTC_DCHECK(!recording_); + if (!ObtainEngineInterface()) { + ALOGE("Failed to obtain SL Engine interface"); + return -1; + } + CreateAudioRecorder(); + initialized_ = true; + buffer_index_ = 0; + return 0; +} + +bool OpenSLESRecorder::RecordingIsInitialized() const { + return initialized_; +} + +int OpenSLESRecorder::StartRecording() { + ALOGD("StartRecording[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(initialized_); + RTC_DCHECK(!recording_); + if (fine_audio_buffer_) { + fine_audio_buffer_->ResetRecord(); + } + // Add buffers to the queue before changing state to SL_RECORDSTATE_RECORDING + // to ensure that recording starts as soon as the state is modified. On some + // devices, SLAndroidSimpleBufferQueue::Clear() used in Stop() does not flush + // the buffers as intended and we therefore check the number of buffers + // already queued first. Enqueue() can return SL_RESULT_BUFFER_INSUFFICIENT + // otherwise. + int num_buffers_in_queue = GetBufferCount(); + for (int i = 0; i < kNumOfOpenSLESBuffers - num_buffers_in_queue; ++i) { + if (!EnqueueAudioBuffer()) { + recording_ = false; + return -1; + } + } + num_buffers_in_queue = GetBufferCount(); + RTC_DCHECK_EQ(num_buffers_in_queue, kNumOfOpenSLESBuffers); + LogBufferState(); + // Start audio recording by changing the state to SL_RECORDSTATE_RECORDING. + // Given that buffers are already enqueued, recording should start at once. + // The macro returns -1 if recording fails to start. + last_rec_time_ = rtc::Time(); + if (LOG_ON_ERROR( + (*recorder_)->SetRecordState(recorder_, SL_RECORDSTATE_RECORDING))) { + return -1; + } + recording_ = (GetRecordState() == SL_RECORDSTATE_RECORDING); + RTC_DCHECK(recording_); + return 0; +} + +int OpenSLESRecorder::StopRecording() { + ALOGD("StopRecording[tid=%d]", rtc::CurrentThreadId()); + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!initialized_ || !recording_) { + return 0; + } + // Stop recording by setting the record state to SL_RECORDSTATE_STOPPED. + if (LOG_ON_ERROR( + (*recorder_)->SetRecordState(recorder_, SL_RECORDSTATE_STOPPED))) { + return -1; + } + // Clear the buffer queue to get rid of old data when resuming recording. 
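Clearing matters because of the restart contract documented in opensles_recorder.h: InitRecording() must be called again after StopRecording(). As a usage sketch (`recorder` is assumed to be a fully constructed OpenSLESRecorder):

    recorder.StopRecording();   // SL_RECORDSTATE_STOPPED, then the queue is cleared.
    recorder.InitRecording();   // Required again before the next start, by design.
    recorder.StartRecording();  // Re-enqueues all buffers, then starts recording.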
+  if (LOG_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_))) {
+    return -1;
+  }
+  thread_checker_opensles_.Detach();
+  initialized_ = false;
+  recording_ = false;
+  return 0;
+}
+
+bool OpenSLESRecorder::Recording() const {
+  return recording_;
+}
+
+void OpenSLESRecorder::AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) {
+  ALOGD("AttachAudioBuffer");
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  RTC_CHECK(audio_buffer);
+  audio_device_buffer_ = audio_buffer;
+  // Ensure that the audio device buffer is informed about the native sample
+  // rate used on the recording side.
+  const int sample_rate_hz = audio_parameters_.sample_rate();
+  ALOGD("SetRecordingSampleRate(%d)", sample_rate_hz);
+  audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz);
+  // Ensure that the audio device buffer is informed about the number of
+  // channels preferred by the OS on the recording side.
+  const size_t channels = audio_parameters_.channels();
+  ALOGD("SetRecordingChannels(%zu)", channels);
+  audio_device_buffer_->SetRecordingChannels(channels);
+  // Allocate memory for internal data buffers given the existing audio
+  // parameters.
+  AllocateDataBuffers();
+}
+
+bool OpenSLESRecorder::IsAcousticEchoCancelerSupported() const {
+  return false;
+}
+
+bool OpenSLESRecorder::IsNoiseSuppressorSupported() const {
+  return false;
+}
+
+int OpenSLESRecorder::EnableBuiltInAEC(bool enable) {
+  ALOGD("EnableBuiltInAEC(%d)", enable);
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  ALOGE("Not implemented");
+  return 0;
+}
+
+int OpenSLESRecorder::EnableBuiltInNS(bool enable) {
+  ALOGD("EnableBuiltInNS(%d)", enable);
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  ALOGE("Not implemented");
+  return 0;
+}
+
+bool OpenSLESRecorder::ObtainEngineInterface() {
+  ALOGD("ObtainEngineInterface");
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  if (engine_)
+    return true;
+  // Get access to (or create if not already existing) the global OpenSL Engine
+  // object.
+  SLObjectItf engine_object = engine_manager_->GetOpenSLEngine();
+  if (engine_object == nullptr) {
+    ALOGE("Failed to access the global OpenSL engine");
+    return false;
+  }
+  // Get the SL Engine Interface which is implicit.
+  if (LOG_ON_ERROR(
+          (*engine_object)
+              ->GetInterface(engine_object, SL_IID_ENGINE, &engine_))) {
+    return false;
+  }
+  return true;
+}
+
+bool OpenSLESRecorder::CreateAudioRecorder() {
+  ALOGD("CreateAudioRecorder");
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  if (recorder_object_.Get())
+    return true;
+  RTC_DCHECK(!recorder_);
+  RTC_DCHECK(!simple_buffer_queue_);
+
+  // Audio source configuration.
+  SLDataLocator_IODevice mic_locator = {SL_DATALOCATOR_IODEVICE,
+                                        SL_IODEVICE_AUDIOINPUT,
+                                        SL_DEFAULTDEVICEID_AUDIOINPUT, NULL};
+  SLDataSource audio_source = {&mic_locator, NULL};
+
+  // Audio sink configuration.
+  SLDataLocator_AndroidSimpleBufferQueue buffer_queue = {
+      SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
+      static_cast<SLuint32>(kNumOfOpenSLESBuffers)};
+  SLDataSink audio_sink = {&buffer_queue, &pcm_format_};
+
+  // Create the audio recorder object (requires the RECORD_AUDIO permission).
+  // Do not realize the recorder yet. Set the configuration first.
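The ordering matters because an OpenSL ES object follows a fixed lifecycle: create (unrealized), configure, realize, then get interfaces. Condensed sketch of the sequence the code below performs (error checks elided; `obj` stands in for the recorder object):

    SLObjectItf obj = nullptr;  // Obtained from (*engine_)->CreateAudioRecorder(...).
    SLAndroidConfigurationItf config;
    (*obj)->GetInterface(obj, SL_IID_ANDROIDCONFIGURATION, &config);
    SLint32 preset = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
    (*config)->SetConfiguration(config, SL_ANDROID_KEY_RECORDING_PRESET,
                                &preset, sizeof(preset));
    (*obj)->Realize(obj, SL_BOOLEAN_FALSE);  // The preset is fixed after this.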
+ const SLInterfaceID interface_id[] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE, + SL_IID_ANDROIDCONFIGURATION}; + const SLboolean interface_required[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE}; + if (LOG_ON_ERROR((*engine_)->CreateAudioRecorder( + engine_, recorder_object_.Receive(), &audio_source, &audio_sink, + arraysize(interface_id), interface_id, interface_required))) { + return false; + } + + // Configure the audio recorder (before it is realized). + SLAndroidConfigurationItf recorder_config; + if (LOG_ON_ERROR((recorder_object_->GetInterface(recorder_object_.Get(), + SL_IID_ANDROIDCONFIGURATION, + &recorder_config)))) { + return false; + } + + // Uses the default microphone tuned for audio communication. + // Note that, SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION leads to a fast + // track but also excludes usage of required effects like AEC, AGC and NS. + // SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION + SLint32 stream_type = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION; + if (LOG_ON_ERROR(((*recorder_config) + ->SetConfiguration(recorder_config, + SL_ANDROID_KEY_RECORDING_PRESET, + &stream_type, sizeof(SLint32))))) { + return false; + } + + // The audio recorder can now be realized (in synchronous mode). + if (LOG_ON_ERROR((recorder_object_->Realize(recorder_object_.Get(), + SL_BOOLEAN_FALSE)))) { + return false; + } + + // Get the implicit recorder interface (SL_IID_RECORD). + if (LOG_ON_ERROR((recorder_object_->GetInterface( + recorder_object_.Get(), SL_IID_RECORD, &recorder_)))) { + return false; + } + + // Get the simple buffer queue interface (SL_IID_ANDROIDSIMPLEBUFFERQUEUE). + // It was explicitly requested. + if (LOG_ON_ERROR((recorder_object_->GetInterface( + recorder_object_.Get(), SL_IID_ANDROIDSIMPLEBUFFERQUEUE, + &simple_buffer_queue_)))) { + return false; + } + + // Register the input callback for the simple buffer queue. + // This callback will be called when receiving new data from the device. + if (LOG_ON_ERROR(((*simple_buffer_queue_) + ->RegisterCallback(simple_buffer_queue_, + SimpleBufferQueueCallback, this)))) { + return false; + } + return true; +} + +void OpenSLESRecorder::DestroyAudioRecorder() { + ALOGD("DestroyAudioRecorder"); + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!recorder_object_.Get()) + return; + (*simple_buffer_queue_) + ->RegisterCallback(simple_buffer_queue_, nullptr, nullptr); + recorder_object_.Reset(); + recorder_ = nullptr; + simple_buffer_queue_ = nullptr; +} + +void OpenSLESRecorder::SimpleBufferQueueCallback( + SLAndroidSimpleBufferQueueItf buffer_queue, + void* context) { + OpenSLESRecorder* stream = static_cast<OpenSLESRecorder*>(context); + stream->ReadBufferQueue(); +} + +void OpenSLESRecorder::AllocateDataBuffers() { + ALOGD("AllocateDataBuffers"); + RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK(!simple_buffer_queue_); + RTC_CHECK(audio_device_buffer_); + // Create a modified audio buffer class which allows us to deliver any number + // of samples (and not only multiple of 10ms) to match the native audio unit + // buffer size. + ALOGD("frames per native buffer: %zu", audio_parameters_.frames_per_buffer()); + ALOGD("frames per 10ms buffer: %zu", + audio_parameters_.frames_per_10ms_buffer()); + ALOGD("bytes per native buffer: %zu", audio_parameters_.GetBytesPerBuffer()); + ALOGD("native sample rate: %d", audio_parameters_.sample_rate()); + RTC_DCHECK(audio_device_buffer_); + fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_); + // Allocate queue of audio buffers that stores recorded audio samples. 
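A worked example of the allocation that follows, under the illustrative assumption of a 192-frame native buffer and mono audio:

    const size_t frames_per_buffer = 192;                 // Native buffer size (assumed).
    const size_t channels = 1;
    const size_t samples = frames_per_buffer * channels;  // 192 SLint16 values per buffer.
    const size_t bytes = samples * sizeof(SLint16);       // 384 bytes enqueued per callback.
    // kNumOfOpenSLESBuffers (= 2) such buffers are cycled round-robin.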
+  const int buffer_size_samples =
+      audio_parameters_.frames_per_buffer() * audio_parameters_.channels();
+  audio_buffers_.reset(new std::unique_ptr<SLint16[]>[kNumOfOpenSLESBuffers]);
+  for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
+    audio_buffers_[i].reset(new SLint16[buffer_size_samples]);
+  }
+}
+
+void OpenSLESRecorder::ReadBufferQueue() {
+  RTC_DCHECK(thread_checker_opensles_.IsCurrent());
+  SLuint32 state = GetRecordState();
+  if (state != SL_RECORDSTATE_RECORDING) {
+    ALOGW("Buffer callback in non-recording state!");
+    return;
+  }
+  // Check the delta time between two successive callbacks and provide a
+  // warning if it becomes very large.
+  // TODO(henrika): using 150ms as upper limit but this value is rather random.
+  const uint32_t current_time = rtc::Time();
+  const uint32_t diff = current_time - last_rec_time_;
+  if (diff > 150) {
+    ALOGW("Bad OpenSL ES record timing, dT=%u [ms]", diff);
+  }
+  last_rec_time_ = current_time;
+  // Send recorded audio data to the WebRTC sink.
+  // TODO(henrika): fix delay estimates. It is OK to use fixed values for now
+  // since there is no support to turn off built-in EC in combination with
+  // OpenSL ES anyhow. Hence, as is, the WebRTC based AEC (which would use
+  // these estimates) will never be active.
+  fine_audio_buffer_->DeliverRecordedData(
+      rtc::ArrayView<const int16_t>(
+          audio_buffers_[buffer_index_].get(),
+          audio_parameters_.frames_per_buffer() *
+              audio_parameters_.channels()),
+      25);
+  // Enqueue the utilized audio buffer and use it for recording again.
+  EnqueueAudioBuffer();
+}
+
+bool OpenSLESRecorder::EnqueueAudioBuffer() {
+  SLresult err =
+      (*simple_buffer_queue_)
+          ->Enqueue(
+              simple_buffer_queue_,
+              reinterpret_cast<SLint8*>(audio_buffers_[buffer_index_].get()),
+              audio_parameters_.GetBytesPerBuffer());
+  if (SL_RESULT_SUCCESS != err) {
+    ALOGE("Enqueue failed: %s", GetSLErrorString(err));
+    return false;
+  }
+  buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers;
+  return true;
+}
+
+SLuint32 OpenSLESRecorder::GetRecordState() const {
+  RTC_DCHECK(recorder_);
+  SLuint32 state;
+  SLresult err = (*recorder_)->GetRecordState(recorder_, &state);
+  if (SL_RESULT_SUCCESS != err) {
+    ALOGE("GetRecordState failed: %s", GetSLErrorString(err));
+  }
+  return state;
+}
+
+SLAndroidSimpleBufferQueueState OpenSLESRecorder::GetBufferQueueState() const {
+  RTC_DCHECK(simple_buffer_queue_);
+  // state.count: Number of buffers currently in the queue.
+  // state.index: Index of the currently filling buffer. This is a linear index
+  // that keeps a cumulative count of the number of buffers recorded.
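A plausible reading of those two fields during a session (illustrative; exact device behavior may vary):

    SLAndroidSimpleBufferQueueState s;
    (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &s);
    // Just after StartRecording(): s.count == 2 (both buffers queued, none
    // filled yet) and s.index == 0. After the first callback has consumed and
    // re-enqueued one buffer: s.count == 2 again, while s.index == 1.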
+  SLAndroidSimpleBufferQueueState state;
+  SLresult err =
+      (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &state);
+  if (SL_RESULT_SUCCESS != err) {
+    ALOGE("GetState failed: %s", GetSLErrorString(err));
+  }
+  return state;
+}
+
+void OpenSLESRecorder::LogBufferState() const {
+  SLAndroidSimpleBufferQueueState state = GetBufferQueueState();
+  ALOGD("state.count:%d state.index:%d", state.count, state.index);
+}
+
+SLuint32 OpenSLESRecorder::GetBufferCount() {
+  SLAndroidSimpleBufferQueueState state = GetBufferQueueState();
+  return state.count;
+}
+
+}  // namespace jni
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h
new file mode 100644
index 0000000000..93c4e4eec9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h
@@ -0,0 +1,193 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <SLES/OpenSLES_AndroidConfiguration.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+
+namespace webrtc {
+
+class FineAudioBuffer;
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio input support for Android using the
+// C based OpenSL ES API. No calls from C/C++ to Java using JNI are made.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread.
+// A thread checker will RTC_DCHECK if any method is called on an invalid
+// thread. Recorded audio buffers are provided on a dedicated internal thread
+// managed by the OpenSL ES layer.
+//
+// The existing design forces the user to call InitRecording() after
+// StopRecording() to be able to call StartRecording() again. This is in line
+// with how the Java-based implementation works.
+//
+// As of API level 21, lower latency audio input is supported on select
+// devices. To take advantage of this feature, first confirm that lower
+// latency output is available. The capability for lower latency output is a
+// prerequisite for the lower latency input feature. Then, create an
+// AudioRecorder with the same sample rate and buffer size as would be used
+// for output. OpenSL ES interfaces for input effects preclude the lower
+// latency path.
+// See https://developer.android.com/ndk/guides/audio/opensl-prog-notes.html
+// for more details.
+class OpenSLESRecorder : public AudioInput {
+ public:
+  // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
+  // required for lower latency. Beginning with API level 18 (Android 4.3), a
+  // buffer count of 1 is sufficient for lower latency. In addition, the buffer
+  // size and sample rate must be compatible with the device's native input
+  // configuration provided via the audio manager at construction.
+  // TODO(henrika): perhaps set this value dynamically based on OS version.
+  static const int kNumOfOpenSLESBuffers = 2;
+
+  OpenSLESRecorder(const AudioParameters& audio_parameters,
+                   rtc::scoped_refptr<OpenSLEngineManager> engine_manager);
+  ~OpenSLESRecorder() override;
+
+  int Init() override;
+  int Terminate() override;
+
+  int InitRecording() override;
+  bool RecordingIsInitialized() const override;
+
+  int StartRecording() override;
+  int StopRecording() override;
+  bool Recording() const override;
+
+  void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) override;
+
+  // TODO(henrika): add support using OpenSL ES APIs when available.
+  bool IsAcousticEchoCancelerSupported() const override;
+  bool IsNoiseSuppressorSupported() const override;
+  int EnableBuiltInAEC(bool enable) override;
+  int EnableBuiltInNS(bool enable) override;
+
+ private:
+  // Obtains the SL Engine Interface from the existing global Engine object.
+  // The interface exposes creation methods of all the OpenSL ES object types.
+  // This method defines the `engine_` member variable.
+  bool ObtainEngineInterface();
+
+  // Creates/destroys the audio recorder and the simple-buffer queue object.
+  bool CreateAudioRecorder();
+  void DestroyAudioRecorder();
+
+  // Allocate memory for audio buffers which will be used to capture audio
+  // via the SLAndroidSimpleBufferQueueItf interface.
+  void AllocateDataBuffers();
+
+  // These callback methods are called when data has been written to the input
+  // buffer queue. They are both called from an internal "OpenSL ES thread"
+  // which is not attached to the Dalvik VM.
+  static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller,
+                                        void* context);
+  void ReadBufferQueue();
+
+  // Wraps calls to SLAndroidSimpleBufferQueueItf::Enqueue(). Can be called
+  // both on the main thread (but before recording has started) and from the
+  // internal audio thread while input streaming is active. It uses
+  // `simple_buffer_queue_` but no lock is needed since the initial calls from
+  // the main thread and the native callback thread are mutually exclusive.
+  bool EnqueueAudioBuffer();
+
+  // Returns the current recorder state.
+  SLuint32 GetRecordState() const;
+
+  // Returns the current buffer queue state.
+  SLAndroidSimpleBufferQueueState GetBufferQueueState() const;
+
+  // Number of buffers currently in the queue.
+  SLuint32 GetBufferCount();
+
+  // Prints a log message of the current queue state. Can be used for debugging
+  // purposes.
+  void LogBufferState() const;
+
+  // Ensures that methods are called from the same thread as this object is
+  // created on.
+  SequenceChecker thread_checker_;
+
+  // Stores the thread ID in the first call to SimpleBufferQueueCallback()
+  // from the internal non-application thread which is not attached to the
+  // Dalvik JVM. Detached during construction of this object.
+  SequenceChecker thread_checker_opensles_;
+
+  const AudioParameters audio_parameters_;
+
+  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+  // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create().
+  AudioDeviceBuffer* audio_device_buffer_;
+
+  // PCM-type format definition.
+ // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if + // 32-bit float representation is needed. + SLDataFormat_PCM pcm_format_; + + bool initialized_; + bool recording_; + + const rtc::scoped_refptr<OpenSLEngineManager> engine_manager_; + // This interface exposes creation methods for all the OpenSL ES object types. + // It is the OpenSL ES API entry point. + SLEngineItf engine_; + + // The audio recorder media object records audio to the destination specified + // by the data sink capturing it from the input specified by the data source. + ScopedSLObjectItf recorder_object_; + + // This interface is supported on the audio recorder object and it controls + // the state of the audio recorder. + SLRecordItf recorder_; + + // The Android Simple Buffer Queue interface is supported on the audio + // recorder. For recording, an app should enqueue empty buffers. When a + // registered callback sends notification that the system has finished writing + // data to the buffer, the app can read the buffer. + SLAndroidSimpleBufferQueueItf simple_buffer_queue_; + + // Consumes audio of native buffer size and feeds the WebRTC layer with 10ms + // chunks of audio. + std::unique_ptr<FineAudioBuffer> fine_audio_buffer_; + + // Queue of audio buffers to be used by the recorder object for capturing + // audio. They will be used in a Round-robin way and the size of each buffer + // is given by AudioParameters::frames_per_buffer(), i.e., it corresponds to + // the native OpenSL ES buffer size. + std::unique_ptr<std::unique_ptr<SLint16[]>[]> audio_buffers_; + + // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue. + // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ... + int buffer_index_; + + // Last time the OpenSL ES layer delivered recorded audio data. + uint32_t last_rec_time_; +}; + +} // namespace jni + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc new file mode 100644 index 0000000000..d445cc754e --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc @@ -0,0 +1,27 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/generated_builtin_audio_codecs_jni/BuiltinAudioDecoderFactoryFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +#include "api/audio_codecs/builtin_audio_decoder_factory.h" + +namespace webrtc { +namespace jni { + +static jlong +JNI_BuiltinAudioDecoderFactoryFactory_CreateBuiltinAudioDecoderFactory( + JNIEnv* env) { + return NativeToJavaPointer(CreateBuiltinAudioDecoderFactory().release()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc new file mode 100644 index 0000000000..e5a4b10eee --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc @@ -0,0 +1,27 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/generated_builtin_audio_codecs_jni/BuiltinAudioEncoderFactoryFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +#include "api/audio_codecs/builtin_audio_encoder_factory.h" + +namespace webrtc { +namespace jni { + +static jlong +JNI_BuiltinAudioEncoderFactoryFactory_CreateBuiltinAudioEncoderFactory( + JNIEnv* env) { + return NativeToJavaPointer(CreateBuiltinAudioEncoderFactory().release()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc new file mode 100644 index 0000000000..1246d88c0b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc @@ -0,0 +1,25 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> + +#include "modules/video_coding/codecs/av1/dav1d_decoder.h" +#include "sdk/android/generated_dav1d_jni/Dav1dDecoder_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_Dav1dDecoder_CreateDecoder(JNIEnv* jni) { + return jlongFromPointer(webrtc::CreateDav1dDecoder().release()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc b/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc new file mode 100644 index 0000000000..1bbc7031a0 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc @@ -0,0 +1,23 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <EGL/egl.h> + +#include "sdk/android/generated_video_egl_jni/EglBase10Impl_jni.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_EglBase10Impl_GetCurrentNativeEGLContext(JNIEnv* jni) { + return reinterpret_cast<jlong>(eglGetCurrentContext()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc new file mode 100644 index 0000000000..9bd73a4a51 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc @@ -0,0 +1,117 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/encoded_image.h" + +#include "api/video/encoded_image.h" +#include "rtc_base/time_utils.h" +#include "sdk/android/generated_video_jni/EncodedImage_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/scoped_java_ref_counted.h" + +namespace webrtc { +namespace jni { + +namespace { + +class JavaEncodedImageBuffer : public EncodedImageBufferInterface { + public: + JavaEncodedImageBuffer(JNIEnv* env, + const JavaRef<jobject>& j_encoded_image, + const uint8_t* payload, + size_t size) + : j_encoded_image_(ScopedJavaRefCounted::Retain(env, j_encoded_image)), + data_(const_cast<uint8_t*>(payload)), + size_(size) {} + + const uint8_t* data() const override { return data_; } + uint8_t* data() override { return data_; } + size_t size() const override { return size_; } + + private: + // The Java object owning the buffer. + const ScopedJavaRefCounted j_encoded_image_; + + // TODO(bugs.webrtc.org/9378): Make const, and delete above const_cast. + uint8_t* const data_; + size_t const size_; +}; +} // namespace + +ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env, + VideoFrameType frame_type) { + return Java_FrameType_fromNativeIndex(env, static_cast<int>(frame_type)); +} + +ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage( + JNIEnv* jni, + const EncodedImage& image) { + ScopedJavaLocalRef<jobject> buffer = NewDirectByteBuffer( + jni, const_cast<uint8_t*>(image.data()), image.size()); + ScopedJavaLocalRef<jobject> frame_type = + NativeToJavaFrameType(jni, image._frameType); + ScopedJavaLocalRef<jobject> qp; + if (image.qp_ != -1) + qp = NativeToJavaInteger(jni, image.qp_); + // TODO(bugs.webrtc.org/9378): Keep a reference to the C++ EncodedImage data, + // and use the releaseCallback to manage lifetime. 
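One possible shape for that TODO, sketched: pin the ref-counted payload explicitly, since the direct ByteBuffer created above aliases image.data() without copying (EncodedImage::GetEncodedData() is the accessor for the payload; the release-callback plumbing itself is not shown):

    // Keeps the bytes alive for as long as `pinned` is in scope; a release
    // callback handed to Java could drop such a reference once consumed.
    rtc::scoped_refptr<webrtc::EncodedImageBufferInterface> pinned =
        image.GetEncodedData();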
+ return Java_EncodedImage_Constructor( + jni, buffer, + /*releaseCallback=*/ScopedJavaGlobalRef<jobject>(nullptr), + static_cast<int>(image._encodedWidth), + static_cast<int>(image._encodedHeight), + image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec, frame_type, + static_cast<jint>(image.rotation_), qp); +} + +ScopedJavaLocalRef<jobjectArray> NativeToJavaFrameTypeArray( + JNIEnv* env, + const std::vector<VideoFrameType>& frame_types) { + return NativeToJavaObjectArray( + env, frame_types, org_webrtc_EncodedImage_00024FrameType_clazz(env), + &NativeToJavaFrameType); +} + +EncodedImage JavaToNativeEncodedImage(JNIEnv* env, + const JavaRef<jobject>& j_encoded_image) { + const JavaRef<jobject>& j_buffer = + Java_EncodedImage_getBuffer(env, j_encoded_image); + const uint8_t* buffer = + static_cast<uint8_t*>(env->GetDirectBufferAddress(j_buffer.obj())); + const size_t buffer_size = env->GetDirectBufferCapacity(j_buffer.obj()); + + EncodedImage frame; + frame.SetEncodedData(rtc::make_ref_counted<JavaEncodedImageBuffer>( + env, j_encoded_image, buffer, buffer_size)); + + frame._encodedWidth = Java_EncodedImage_getEncodedWidth(env, j_encoded_image); + frame._encodedHeight = + Java_EncodedImage_getEncodedHeight(env, j_encoded_image); + frame.rotation_ = + (VideoRotation)Java_EncodedImage_getRotation(env, j_encoded_image); + + frame.qp_ = JavaToNativeOptionalInt( + env, Java_EncodedImage_getQp(env, j_encoded_image)) + .value_or(-1); + + frame._frameType = + (VideoFrameType)Java_EncodedImage_getFrameType(env, j_encoded_image); + return frame; +} + +int64_t GetJavaEncodedImageCaptureTimeNs( + JNIEnv* env, + const JavaRef<jobject>& j_encoded_image) { + return Java_EncodedImage_getCaptureTimeNs(env, j_encoded_image); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h new file mode 100644 index 0000000000..fc6d06243c --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h @@ -0,0 +1,45 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_ +#define SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_ + +#include <jni.h> +#include <vector> + +#include "api/video/video_frame_type.h" + +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { + +class EncodedImage; + +namespace jni { + +ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env, + VideoFrameType frame_type); +ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage(JNIEnv* jni, + const EncodedImage& image); +ScopedJavaLocalRef<jobjectArray> NativeToJavaFrameTypeArray( + JNIEnv* env, + const std::vector<VideoFrameType>& frame_types); + +EncodedImage JavaToNativeEncodedImage(JNIEnv* env, + const JavaRef<jobject>& j_encoded_image); + +int64_t GetJavaEncodedImageCaptureTimeNs( + JNIEnv* jni, + const JavaRef<jobject>& j_encoded_image); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc b/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc new file mode 100644 index 0000000000..882df95b82 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc @@ -0,0 +1,27 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/h264_profile_level_id.h" +#include "sdk/android/generated_video_jni/H264Utils_jni.h" +#include "sdk/android/src/jni/video_codec_info.h" + +namespace webrtc { +namespace jni { + +static jboolean JNI_H264Utils_IsSameH264Profile( + JNIEnv* env, + const JavaParamRef<jobject>& params1, + const JavaParamRef<jobject>& params2) { + return H264IsSameProfile(JavaToNativeStringMap(env, params1), + JavaToNativeStringMap(env, params2)); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc new file mode 100644 index 0000000000..95dcd66bb5 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc @@ -0,0 +1,63 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/generated_video_jni/JavaI420Buffer_jni.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +namespace webrtc { +namespace jni { + +static void JNI_JavaI420Buffer_CropAndScaleI420( + JNIEnv* jni, + const JavaParamRef<jobject>& j_src_y, + jint src_stride_y, + const JavaParamRef<jobject>& j_src_u, + jint src_stride_u, + const JavaParamRef<jobject>& j_src_v, + jint src_stride_v, + jint crop_x, + jint crop_y, + jint crop_width, + jint crop_height, + const JavaParamRef<jobject>& j_dst_y, + jint dst_stride_y, + const JavaParamRef<jobject>& j_dst_u, + jint dst_stride_u, + const JavaParamRef<jobject>& j_dst_v, + jint dst_stride_v, + jint scale_width, + jint scale_height) { + uint8_t const* src_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj())); + uint8_t const* src_u = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj())); + uint8_t const* src_v = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj())); + uint8_t* dst_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj())); + uint8_t* dst_u = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj())); + uint8_t* dst_v = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj())); + + // Perform cropping using pointer arithmetic. + src_y += crop_x + crop_y * src_stride_y; + src_u += crop_x / 2 + crop_y / 2 * src_stride_u; + src_v += crop_x / 2 + crop_y / 2 * src_stride_v; + + bool ret = libyuv::I420Scale( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width, + crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, scale_width, scale_height, libyuv::kFilterBox); + RTC_DCHECK_EQ(ret, 0) << "I420Scale failed"; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc new file mode 100644 index 0000000000..3764f8deeb --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc @@ -0,0 +1,45 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "rtc_base/ref_count.h"
+#include "sdk/android/generated_base_jni/JniCommon_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_JniCommon_AddRef(JNIEnv* jni,
+                                 jlong j_native_ref_counted_pointer) {
+  reinterpret_cast<rtc::RefCountInterface*>(j_native_ref_counted_pointer)
+      ->AddRef();
+}
+
+static void JNI_JniCommon_ReleaseRef(JNIEnv* jni,
+                                     jlong j_native_ref_counted_pointer) {
+  reinterpret_cast<rtc::RefCountInterface*>(j_native_ref_counted_pointer)
+      ->Release();
+}
+
+static ScopedJavaLocalRef<jobject> JNI_JniCommon_AllocateByteBuffer(
+    JNIEnv* jni,
+    jint size) {
+  void* new_data = ::operator new(size);
+  return NewDirectByteBuffer(jni, new_data, size);
+}
+
+static void JNI_JniCommon_FreeByteBuffer(
+    JNIEnv* jni,
+    const JavaParamRef<jobject>& byte_buffer) {
+  void* data = jni->GetDirectBufferAddress(byte_buffer.obj());
+  ::operator delete(data);
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc
new file mode 100644
index 0000000000..dc34849d1b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/jni_generator_helper.h"
+
+#include "sdk/android/native_api/jni/class_loader.h"
+
+namespace webrtc {
+
+// If `atomic_class_id` is set, it'll return immediately. Otherwise, it will
+// look up the class and store it. If there's a race, we take care to only
+// store one global reference (and the duplicated effort will happen only
+// once).
+jclass LazyGetClass(JNIEnv* env,
+                    const char* class_name,
+                    std::atomic<jclass>* atomic_class_id) {
+  const jclass value = std::atomic_load(atomic_class_id);
+  if (value)
+    return value;
+  webrtc::ScopedJavaGlobalRef<jclass> clazz(webrtc::GetClass(env, class_name));
+  RTC_CHECK(!clazz.is_null()) << class_name;
+  jclass cas_result = nullptr;
+  if (std::atomic_compare_exchange_strong(atomic_class_id, &cas_result,
+                                          clazz.obj())) {
+    // We successfully stored `clazz` in `atomic_class_id`, so we are
+    // intentionally leaking the global ref since it's now stored there.
+    return clazz.Release();
+  } else {
+    // Some other thread came before us and stored a global pointer in
+    // `atomic_class_id`. Release our global ref and return the ref from the
+    // other thread.
+    return cas_result;
+  }
+}
+
+// If `atomic_method_id` is set, it'll return immediately. Otherwise, it will
+// look up the method id and store it. If there's a race, it's ok since the
+// values are the same (and the duplicated effort will happen only once).
+template <MethodID::Type type>
+jmethodID MethodID::LazyGet(JNIEnv* env,
+                            jclass clazz,
+                            const char* method_name,
+                            const char* jni_signature,
+                            std::atomic<jmethodID>* atomic_method_id) {
+  const jmethodID value = std::atomic_load(atomic_method_id);
+  if (value)
+    return value;
+  auto get_method_ptr = type == MethodID::TYPE_STATIC
+                            ?
&JNIEnv::GetStaticMethodID + : &JNIEnv::GetMethodID; + jmethodID id = (env->*get_method_ptr)(clazz, method_name, jni_signature); + CHECK_EXCEPTION(env) << "error during GetMethodID: " << method_name << ", " + << jni_signature; + RTC_CHECK(id) << method_name << ", " << jni_signature; + std::atomic_store(atomic_method_id, id); + return id; +} + +// Various template instantiations. +template jmethodID MethodID::LazyGet<MethodID::TYPE_STATIC>( + JNIEnv* env, + jclass clazz, + const char* method_name, + const char* jni_signature, + std::atomic<jmethodID>* atomic_method_id); + +template jmethodID MethodID::LazyGet<MethodID::TYPE_INSTANCE>( + JNIEnv* env, + jclass clazz, + const char* method_name, + const char* jni_signature, + std::atomic<jmethodID>* atomic_method_id); + +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h new file mode 100644 index 0000000000..23695ca8c7 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h @@ -0,0 +1,168 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +// Do not include this file directly. It's intended to be used only by the JNI +// generation script. We are exporting types in strange namespaces in order to +// be compatible with the generated code targeted for Chromium. + +#ifndef SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_ +#define SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_ + +#include <jni.h> +#include <atomic> + +#include "rtc_base/checks.h" +#include "sdk/android/native_api/jni/jni_int_wrapper.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +#define CHECK_CLAZZ(env, jcaller, clazz, ...) RTC_DCHECK(clazz); +#define CHECK_NATIVE_PTR(env, jcaller, native_ptr, method_name, ...) \ + RTC_DCHECK(native_ptr) << method_name; + +#define BASE_EXPORT +#define JNI_REGISTRATION_EXPORT __attribute__((visibility("default"))) + +#if defined(WEBRTC_ARCH_X86) +// Dalvik JIT generated code doesn't guarantee 16-byte stack alignment on +// x86 - use force_align_arg_pointer to realign the stack at the JNI +// boundary. crbug.com/655248 +#define JNI_GENERATOR_EXPORT \ + __attribute__((force_align_arg_pointer)) extern "C" JNIEXPORT JNICALL +#else +#define JNI_GENERATOR_EXPORT extern "C" JNIEXPORT JNICALL +#endif + +#define CHECK_EXCEPTION(jni) \ + RTC_CHECK(!jni->ExceptionCheck()) \ + << (jni->ExceptionDescribe(), jni->ExceptionClear(), "") + +namespace webrtc { + +// This function will initialize `atomic_class_id` to contain a global ref to +// the given class, and will return that ref on subsequent calls. The caller is +// responsible to zero-initialize `atomic_class_id`. It's fine to +// simultaneously call this on multiple threads referencing the same +// `atomic_method_id`. +jclass LazyGetClass(JNIEnv* env, + const char* class_name, + std::atomic<jclass>* atomic_class_id); + +// This class is a wrapper for JNIEnv Get(Static)MethodID. +class MethodID { + public: + enum Type { + TYPE_STATIC, + TYPE_INSTANCE, + }; + + // This function will initialize `atomic_method_id` to contain a ref to + // the given method, and will return that ref on subsequent calls. 
The caller + // is responsible to zero-initialize `atomic_method_id`. It's fine to + // simultaneously call this on multiple threads referencing the same + // `atomic_method_id`. + template <Type type> + static jmethodID LazyGet(JNIEnv* env, + jclass clazz, + const char* method_name, + const char* jni_signature, + std::atomic<jmethodID>* atomic_method_id); +}; + +} // namespace webrtc + +// Re-export relevant classes into the namespaces the script expects. +namespace base { +namespace android { + +using webrtc::JavaParamRef; +using webrtc::JavaRef; +using webrtc::ScopedJavaLocalRef; +using webrtc::LazyGetClass; +using webrtc::MethodID; + +} // namespace android +} // namespace base + +namespace jni_generator { +inline void CheckException(JNIEnv* env) { + CHECK_EXCEPTION(env); +} + +// A 32 bit number could be an address on stack. Random 64 bit marker on the +// stack is much less likely to be present on stack. +constexpr uint64_t kJniStackMarkerValue = 0xbdbdef1bebcade1b; + +// Context about the JNI call with exception checked to be stored in stack. +struct BASE_EXPORT JniJavaCallContextUnchecked { + inline JniJavaCallContextUnchecked() { +// TODO(ssid): Implement for other architectures. +#if defined(__arm__) || defined(__aarch64__) + // This assumes that this method does not increment the stack pointer. + asm volatile("mov %0, sp" : "=r"(sp)); +#else + sp = 0; +#endif + } + + // Force no inline to reduce code size. + template <base::android::MethodID::Type type> + void Init(JNIEnv* env, + jclass clazz, + const char* method_name, + const char* jni_signature, + std::atomic<jmethodID>* atomic_method_id) { + env1 = env; + + // Make sure compiler doesn't optimize out the assignment. + memcpy(&marker, &kJniStackMarkerValue, sizeof(kJniStackMarkerValue)); + // Gets PC of the calling function. + pc = reinterpret_cast<uintptr_t>(__builtin_return_address(0)); + + method_id = base::android::MethodID::LazyGet<type>( + env, clazz, method_name, jni_signature, atomic_method_id); + } + + ~JniJavaCallContextUnchecked() { + // Reset so that spurious marker finds are avoided. + memset(&marker, 0, sizeof(marker)); + } + + uint64_t marker; + uintptr_t sp; + uintptr_t pc; + + JNIEnv* env1; + jmethodID method_id; +}; + +// Context about the JNI call with exception unchecked to be stored in stack. +struct BASE_EXPORT JniJavaCallContextChecked { + // Force no inline to reduce code size. + template <base::android::MethodID::Type type> + void Init(JNIEnv* env, + jclass clazz, + const char* method_name, + const char* jni_signature, + std::atomic<jmethodID>* atomic_method_id) { + base.Init<type>(env, clazz, method_name, jni_signature, atomic_method_id); + // Reset `pc` to correct caller. + base.pc = reinterpret_cast<uintptr_t>(__builtin_return_address(0)); + } + + ~JniJavaCallContextChecked() { jni_generator::CheckException(base.env1); } + + JniJavaCallContextUnchecked base; +}; + +static_assert(sizeof(JniJavaCallContextChecked) == + sizeof(JniJavaCallContextUnchecked), + "Stack unwinder cannot work with structs of different sizes."); +} // namespace jni_generator + +#endif // SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc new file mode 100644 index 0000000000..53399abab1 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc @@ -0,0 +1,51 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. 
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/src/jni/jni_helpers.h"
+
+#include <vector>
+
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NewDirectByteBuffer(JNIEnv* env,
+                                                void* address,
+                                                jlong capacity) {
+  ScopedJavaLocalRef<jobject> buffer(
+      env, env->NewDirectByteBuffer(address, capacity));
+  CHECK_EXCEPTION(env) << "error NewDirectByteBuffer";
+  return buffer;
+}
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o) {
+  jobject ret = jni->NewGlobalRef(o);
+  CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
+  RTC_CHECK(ret);
+  return ret;
+}
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o) {
+  jni->DeleteGlobalRef(o);
+  CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
+}
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+  RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
+}
+ScopedLocalRefFrame::~ScopedLocalRefFrame() {
+  jni_->PopLocalFrame(nullptr);
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h
new file mode 100644
index 0000000000..7a2f27b99d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains convenience functions and classes for JNI.
+// Before using any of the methods, InitGlobalJniVariables must be called.
+
+#ifndef SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
+#define SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
+
+#include <jni.h>
+#include <string>
+
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/jvm.h"
+
+// Convenience macro defining JNI-accessible methods in the org.webrtc package.
+// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
+#if defined(WEBRTC_ARCH_X86)
+// Dalvik JIT generated code doesn't guarantee 16-byte stack alignment on
+// x86 - use force_align_arg_pointer to realign the stack at the JNI
+// boundary. crbug.com/655248
+#define JNI_FUNCTION_DECLARATION(rettype, name, ...)                    \
+  __attribute__((force_align_arg_pointer)) extern "C" JNIEXPORT rettype \
+      JNICALL Java_org_webrtc_##name(__VA_ARGS__)
+#else
+#define JNI_FUNCTION_DECLARATION(rettype, name, ...) \
+  extern "C" JNIEXPORT rettype JNICALL Java_org_webrtc_##name(__VA_ARGS__)
+#endif
+
+namespace webrtc {
+namespace jni {
+
+// TODO(sakal): Remove once clients have migrated.
+using ::webrtc::JavaToStdMapStrings;
+
+// Deprecated, use NativeToJavaPointer.
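For new code, the direct form is preferable: NativeToJavaPointer() (from sdk/android/native_api/jni/java_types.h, included above) already returns a jlong and avoids the plain `long` round-trip of the shim below. Sketch (`native_ptr` is an illustrative void*):

    jlong j_handle = webrtc::NativeToJavaPointer(native_ptr);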
+inline long jlongFromPointer(void* ptr) { + return NativeToJavaPointer(ptr); +} + +ScopedJavaLocalRef<jobject> NewDirectByteBuffer(JNIEnv* env, + void* address, + jlong capacity); + +jobject NewGlobalRef(JNIEnv* jni, jobject o); + +void DeleteGlobalRef(JNIEnv* jni, jobject o); + +// Scope Java local references to the lifetime of this object. Use in all C++ +// callbacks (i.e. entry points that don't originate in a Java callstack +// through a "native" method call). +class ScopedLocalRefFrame { + public: + explicit ScopedLocalRefFrame(JNIEnv* jni); + ~ScopedLocalRefFrame(); + + private: + JNIEnv* jni_; +}; + +} // namespace jni +} // namespace webrtc + +// TODO(magjed): Remove once external clients are updated. +namespace webrtc_jni { + +using webrtc::AttachCurrentThreadIfNeeded; +using webrtc::jni::InitGlobalJniVariables; + +} // namespace webrtc_jni + +#endif // SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc new file mode 100644 index 0000000000..a1829ad0b1 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> +#undef JNIEXPORT +#define JNIEXPORT __attribute__((visibility("default"))) + +#include "rtc_base/ssl_adapter.h" +#include "sdk/android/native_api/jni/class_loader.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) { + jint ret = InitGlobalJniVariables(jvm); + RTC_DCHECK_GE(ret, 0); + if (ret < 0) + return -1; + + RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; + webrtc::InitClassLoader(GetEnv()); + + return ret; +} + +extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) { + RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()"; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/jvm.cc b/third_party/libwebrtc/sdk/android/src/jni/jvm.cc new file mode 100644 index 0000000000..4cf1aa5e8e --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/jvm.cc @@ -0,0 +1,133 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/jvm.h" + +#include <asm/unistd.h> +#include <pthread.h> +#include <sys/prctl.h> +#include <sys/syscall.h> +#include <unistd.h> + +#include <string> + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace jni { + +static JavaVM* g_jvm = nullptr; + +static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT; + +// Key for per-thread JNIEnv* data. 
Non-NULL in threads attached to `g_jvm` by +// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that +// were attached by the JVM because of a Java->native call. +static pthread_key_t g_jni_ptr; + +JavaVM* GetJVM() { + RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?"; + return g_jvm; +} + +// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached. +JNIEnv* GetEnv() { + void* env = nullptr; + jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6); + RTC_CHECK(((env != nullptr) && (status == JNI_OK)) || + ((env == nullptr) && (status == JNI_EDETACHED))) + << "Unexpected GetEnv return: " << status << ":" << env; + return reinterpret_cast<JNIEnv*>(env); +} + +static void ThreadDestructor(void* prev_jni_ptr) { + // This function only runs on threads where `g_jni_ptr` is non-NULL, meaning + // we were responsible for originally attaching the thread, so are responsible + // for detaching it now. However, because some JVM implementations (notably + // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism, + // the JVMs accounting info for this thread may already be wiped out by the + // time this is called. Thus it may appear we are already detached even though + // it was our responsibility to detach! Oh well. + if (!GetEnv()) + return; + + RTC_CHECK(GetEnv() == prev_jni_ptr) + << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv(); + jint status = g_jvm->DetachCurrentThread(); + RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status; + RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???"; +} + +static void CreateJNIPtrKey() { + RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor)) + << "pthread_key_create"; +} + +jint InitGlobalJniVariables(JavaVM* jvm) { + RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!"; + g_jvm = jvm; + RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?"; + + RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once"; + + JNIEnv* jni = nullptr; + if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK) + return -1; + + return JNI_VERSION_1_6; +} + +// Return thread ID as a string. +static std::string GetThreadId() { + char buf[21]; // Big enough to hold a kuint64max plus terminating NULL. + RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld", + static_cast<long>(syscall(__NR_gettid))), + sizeof(buf)) + << "Thread id is bigger than uint64??"; + return std::string(buf); +} + +// Return the current thread's name. +static std::string GetThreadName() { + char name[17] = {0}; + if (prctl(PR_GET_NAME, name) != 0) + return std::string("<noname>"); + return std::string(name); +} + +// Return a |JNIEnv*| usable on this thread. Attaches to `g_jvm` if necessary. +JNIEnv* AttachCurrentThreadIfNeeded() { + JNIEnv* jni = GetEnv(); + if (jni) + return jni; + RTC_CHECK(!pthread_getspecific(g_jni_ptr)) + << "TLS has a JNIEnv* but not attached?"; + + std::string name(GetThreadName() + " - " + GetThreadId()); + JavaVMAttachArgs args; + args.version = JNI_VERSION_1_6; + args.name = &name[0]; + args.group = nullptr; +// Deal with difference in signatures between Oracle's jni.h and Android's. +#ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec! 
+ void* env = nullptr; +#else + JNIEnv* env = nullptr; +#endif + RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args)) + << "Failed to attach thread"; + RTC_CHECK(env) << "AttachCurrentThread handed back NULL!"; + jni = reinterpret_cast<JNIEnv*>(env); + RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific"; + return jni; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/jvm.h b/third_party/libwebrtc/sdk/android/src/jni/jvm.h new file mode 100644 index 0000000000..296a7fee1d --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/jvm.h @@ -0,0 +1,32 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_JVM_H_ +#define SDK_ANDROID_SRC_JNI_JVM_H_ + +#include <jni.h> + +namespace webrtc { +namespace jni { + +jint InitGlobalJniVariables(JavaVM* jvm); + +// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached. +JNIEnv* GetEnv(); + +JavaVM* GetJVM(); + +// Return a |JNIEnv*| usable on this thread. Attaches to `g_jvm` if necessary. +JNIEnv* AttachCurrentThreadIfNeeded(); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_JVM_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc new file mode 100644 index 0000000000..143055f79b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc @@ -0,0 +1,29 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> + +#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" +#include "sdk/android/generated_libaom_av1_decoder_if_supported_jni/LibaomAv1Decoder_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_LibaomAv1Decoder_CreateDecoder(JNIEnv* jni) { + return jlongFromPointer(webrtc::CreateLibaomAv1Decoder().release()); +} + +static jboolean JNI_LibaomAv1Decoder_IsSupported(JNIEnv* jni) { + return webrtc::kIsLibaomAv1DecoderSupported; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc new file mode 100644 index 0000000000..400c3124fe --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc @@ -0,0 +1,25 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include <jni.h> + +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include "sdk/android/generated_libaom_av1_encoder_jni/LibaomAv1Encoder_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) { + return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc new file mode 100644 index 0000000000..84394d8ee5 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc @@ -0,0 +1,42 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "sdk/android/src/jni/logging/log_sink.h" + +#include "absl/strings/string_view.h" +#include "sdk/android/generated_logging_jni/JNILogging_jni.h" + +namespace webrtc { +namespace jni { + +JNILogSink::JNILogSink(JNIEnv* env, const JavaRef<jobject>& j_logging) + : j_logging_(env, j_logging) {} +JNILogSink::~JNILogSink() = default; + +void JNILogSink::OnLogMessage(const std::string& msg) { + RTC_DCHECK_NOTREACHED(); +} + +void JNILogSink::OnLogMessage(const std::string& msg, + rtc::LoggingSeverity severity, + const char* tag) { + OnLogMessage(absl::string_view{msg}, severity, tag); +} + +void JNILogSink::OnLogMessage(absl::string_view msg, + rtc::LoggingSeverity severity, + const char* tag) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_JNILogging_logToInjectable( + env, j_logging_, NativeToJavaString(env, std::string(msg)), + NativeToJavaInteger(env, severity), NativeToJavaString(env, tag)); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h new file mode 100644 index 0000000000..8e681ac3ea --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h @@ -0,0 +1,43 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_ +#define SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_ + +#include <string> + +#include "absl/strings/string_view.h" +#include "rtc_base/logging.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +class JNILogSink : public rtc::LogSink { + public: + JNILogSink(JNIEnv* env, const JavaRef<jobject>& j_logging); + ~JNILogSink() override; + + void OnLogMessage(const std::string& msg) override; + void OnLogMessage(const std::string& msg, + rtc::LoggingSeverity severity, + const char* tag) override; + void OnLogMessage(absl::string_view msg, + rtc::LoggingSeverity severity, + const char* tag) override; + + private: + const ScopedJavaGlobalRef<jobject> j_logging_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc new file mode 100644 index 0000000000..f8eb48422b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/native_capturer_observer.h" + +#include "rtc_base/logging.h" +#include "sdk/android/generated_video_jni/NativeCapturerObserver_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/android_video_track_source.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver( + JNIEnv* env, + rtc::scoped_refptr<AndroidVideoTrackSource> native_source) { + return Java_NativeCapturerObserver_Constructor( + env, NativeToJavaPointer(native_source.release())); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h new file mode 100644 index 0000000000..51acf41f03 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_ +#define SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_ + +#include <jni.h> + +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/android_video_track_source.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver( + JNIEnv* env, + rtc::scoped_refptr<AndroidVideoTrackSource> native_source); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc new file mode 100644 index 0000000000..d0e7972446 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc @@ -0,0 +1,80 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> +#include <vector> + +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +#include "rtc_base/checks.h" +#include "sdk/android/generated_video_jni/NV12Buffer_jni.h" + +namespace webrtc { +namespace jni { + +static void JNI_NV12Buffer_CropAndScale(JNIEnv* jni, + jint crop_x, + jint crop_y, + jint crop_width, + jint crop_height, + jint scale_width, + jint scale_height, + const JavaParamRef<jobject>& j_src, + jint src_width, + jint src_height, + jint src_stride, + jint src_slice_height, + const JavaParamRef<jobject>& j_dst_y, + jint dst_stride_y, + const JavaParamRef<jobject>& j_dst_u, + jint dst_stride_u, + const JavaParamRef<jobject>& j_dst_v, + jint dst_stride_v) { + const int src_stride_y = src_stride; + const int src_stride_uv = src_stride; + const int crop_chroma_x = crop_x / 2; + const int crop_chroma_y = crop_y / 2; + const int crop_chroma_width = (crop_width + 1) / 2; + const int crop_chroma_height = (crop_height + 1) / 2; + const int tmp_stride_u = crop_chroma_width; + const int tmp_stride_v = crop_chroma_width; + const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v); + + uint8_t const* src_y = + static_cast<uint8_t const*>(jni->GetDirectBufferAddress(j_src.obj())); + uint8_t const* src_uv = src_y + src_slice_height * src_stride_y; + + uint8_t* dst_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj())); + uint8_t* dst_u = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj())); + uint8_t* dst_v = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj())); + + // Crop using pointer arithmetic. 
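+  // NV12 layout: a full-size Y plane (src_slice_height rows) followed by a
+  // single interleaved UV plane at half resolution in both dimensions; the
+  // chroma x-offset below is doubled because each UV pair occupies two bytes.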
+ src_y += crop_x + crop_y * src_stride_y; + src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv; + + std::vector<uint8_t> tmp_buffer(tmp_size); + uint8_t* tmp_u = tmp_buffer.data(); + uint8_t* tmp_v = tmp_u + crop_chroma_height * tmp_stride_u; + + libyuv::SplitUVPlane(src_uv, src_stride_uv, tmp_u, tmp_stride_u, tmp_v, + tmp_stride_v, crop_chroma_width, crop_chroma_height); + + libyuv::I420Scale(src_y, src_stride_y, tmp_u, tmp_stride_u, tmp_v, + tmp_stride_v, crop_width, crop_height, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, scale_width, + scale_height, libyuv::kFilterBox); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc new file mode 100644 index 0000000000..10e3316f33 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc @@ -0,0 +1,72 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> +#include <vector> + +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "rtc_base/checks.h" +#include "sdk/android/generated_video_jni/NV21Buffer_jni.h" + +namespace webrtc { +namespace jni { + +static void JNI_NV21Buffer_CropAndScale(JNIEnv* jni, + jint crop_x, + jint crop_y, + jint crop_width, + jint crop_height, + jint scale_width, + jint scale_height, + const JavaParamRef<jbyteArray>& j_src, + jint src_width, + jint src_height, + const JavaParamRef<jobject>& j_dst_y, + jint dst_stride_y, + const JavaParamRef<jobject>& j_dst_u, + jint dst_stride_u, + const JavaParamRef<jobject>& j_dst_v, + jint dst_stride_v) { + const int src_stride_y = src_width; + const int src_stride_uv = src_width; + const int crop_chroma_x = crop_x / 2; + const int crop_chroma_y = crop_y / 2; + + jboolean was_copy; + jbyte* src_bytes = jni->GetByteArrayElements(j_src.obj(), &was_copy); + RTC_DCHECK(!was_copy); + uint8_t const* src_y = reinterpret_cast<uint8_t const*>(src_bytes); + uint8_t const* src_uv = src_y + src_height * src_stride_y; + + uint8_t* dst_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj())); + uint8_t* dst_u = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj())); + uint8_t* dst_v = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj())); + + // Crop using pointer arithmetic. + src_y += crop_x + crop_y * src_stride_y; + src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv; + + NV12ToI420Scaler scaler; + // U- and V-planes are swapped because this is NV21 not NV12. 
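+  // NV21 interleaves chroma as VU, so pointing the scaler's U output at dst_v
+  // and its V output at dst_u lands each plane in the right I420 destination.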
+ scaler.NV12ToI420Scale(src_y, src_stride_y, src_uv, src_stride_uv, crop_width, + crop_height, dst_y, dst_stride_y, dst_v, dst_stride_v, + dst_u, dst_stride_u, scale_width, scale_height); + + jni->ReleaseByteArrayElements(j_src.obj(), src_bytes, JNI_ABORT); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc new file mode 100644 index 0000000000..7f3dddbb28 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/add_ice_candidate_observer.h" + +#include <utility> + +#include "sdk/android/generated_peerconnection_jni/AddIceObserver_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/media_constraints.h" + +namespace webrtc { +namespace jni { + +AddIceCandidateObserverJni::AddIceCandidateObserverJni( + JNIEnv* env, + const JavaRef<jobject>& j_observer) + : j_observer_global_(env, j_observer) {} + +void AddIceCandidateObserverJni::OnComplete(webrtc::RTCError error) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + if (error.ok()) { + Java_AddIceObserver_onAddSuccess(env, j_observer_global_); + } else { + Java_AddIceObserver_onAddFailure(env, j_observer_global_, + NativeToJavaString(env, error.message())); + } +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h new file mode 100644 index 0000000000..1128385389 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h @@ -0,0 +1,38 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
+
+#include <memory>
+#include <string>
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class AddIceCandidateObserverJni final
+    : public rtc::RefCountedNonVirtual<AddIceCandidateObserverJni> {
+ public:
+  AddIceCandidateObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
+  ~AddIceCandidateObserverJni() = default;
+
+  void OnComplete(RTCError error);
+
+ private:
+  const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+}  // namespace jni
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h b/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h
new file mode 100644
index 0000000000..609c1b056e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(sakal): Remove this file once clients have updated to the native API.
+#include "sdk/android/src/jni/android_network_monitor.h"
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc
new file mode 100644
index 0000000000..74c8b5547a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/audio.h"
+
+#include "modules/audio_processing/include/audio_processing.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing() {
+  return AudioProcessingBuilder().Create();
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h
new file mode 100644
index 0000000000..7a79bed986
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
+#define SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
+
+#include "api/scoped_refptr.h"
+// Adding 'nogncheck' to disable the gn include headers check.
+// We don't want this target to depend on audio-related targets.
+#include "modules/audio_processing/include/audio_processing.h"  // nogncheck
+
+namespace webrtc {
+namespace jni {
+
+rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing();
+
+}  // namespace jni
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc
new file mode 100644
index 0000000000..b00287eaae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_peerconnection_jni/AudioTrack_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_AudioTrack_SetVolume(JNIEnv*,
+                                     jlong j_p,
+                                     jdouble volume) {
+  rtc::scoped_refptr<AudioSourceInterface> source(
+      reinterpret_cast<AudioTrackInterface*>(j_p)->GetSource());
+  source->SetVolume(volume);
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
new file mode 100644
index 0000000000..b937a0d03a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/log_sinks.h"
+#include "sdk/android/generated_peerconnection_jni/CallSessionFileRotatingLogSink_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_CallSessionFileRotatingLogSink_AddSink(
+    JNIEnv* jni,
+    const JavaParamRef<jstring>& j_dirPath,
+    jint j_maxFileSize,
+    jint j_severity) {
+  std::string dir_path = JavaToStdString(jni, j_dirPath);
+  rtc::CallSessionFileRotatingLogSink* sink =
+      new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
+  if (!sink->Init()) {
+    RTC_LOG_V(rtc::LoggingSeverity::LS_WARNING)
+        << "Failed to init CallSessionFileRotatingLogSink for path "
+        << dir_path;
+    delete sink;
+    return 0;
+  }
+  rtc::LogMessage::AddLogToStream(
+      sink, static_cast<rtc::LoggingSeverity>(j_severity));
+  return jlongFromPointer(sink);
+}
+
+static void JNI_CallSessionFileRotatingLogSink_DeleteSink(
+    JNIEnv* jni,
+    jlong j_sink) {
+  rtc::CallSessionFileRotatingLogSink* sink =
+      reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
+  rtc::LogMessage::RemoveLogToStream(sink);
+  delete sink;
+}
+
+static ScopedJavaLocalRef<jbyteArray>
+JNI_CallSessionFileRotatingLogSink_GetLogData(
+    JNIEnv* jni,
+    const JavaParamRef<jstring>& j_dirPath) {
+  std::string dir_path = JavaToStdString(jni, j_dirPath);
+  rtc::CallSessionFileRotatingStreamReader file_reader(dir_path);
+  size_t log_size = file_reader.GetSize();
+  if (log_size == 0) {
+    RTC_LOG_V(rtc::LoggingSeverity::LS_WARNING)
+        << "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
+    return ScopedJavaLocalRef<jbyteArray>(jni, jni->NewByteArray(0));
+  }
+
+  // TODO(nisse, sakal): To avoid copying, change api to use ByteBuffer.
+  std::unique_ptr<jbyte[]> buffer(new jbyte[log_size]);
+  size_t read = file_reader.ReadAll(buffer.get(), log_size);
+
+  ScopedJavaLocalRef<jbyteArray> result =
+      ScopedJavaLocalRef<jbyteArray>(jni, jni->NewByteArray(read));
+  jni->SetByteArrayRegion(result.obj(), 0, read, buffer.get());
+
+  return result;
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc
new file mode 100644
index 0000000000..af5f195d98
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "sdk/android/src/jni/pc/crypto_options.h" + +#include "sdk/android/generated_peerconnection_jni/CryptoOptions_jni.h" + +namespace webrtc { +namespace jni { + +absl::optional<CryptoOptions> JavaToNativeOptionalCryptoOptions( + JNIEnv* jni, + const JavaRef<jobject>& j_crypto_options) { + if (j_crypto_options.is_null()) { + return absl::nullopt; + } + + ScopedJavaLocalRef<jobject> j_srtp = + Java_CryptoOptions_getSrtp(jni, j_crypto_options); + ScopedJavaLocalRef<jobject> j_sframe = + Java_CryptoOptions_getSFrame(jni, j_crypto_options); + + CryptoOptions native_crypto_options; + native_crypto_options.srtp.enable_gcm_crypto_suites = + Java_Srtp_getEnableGcmCryptoSuites(jni, j_srtp); + native_crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = + Java_Srtp_getEnableAes128Sha1_32CryptoCipher(jni, j_srtp); + native_crypto_options.srtp.enable_encrypted_rtp_header_extensions = + Java_Srtp_getEnableEncryptedRtpHeaderExtensions(jni, j_srtp); + native_crypto_options.sframe.require_frame_encryption = + Java_SFrame_getRequireFrameEncryption(jni, j_sframe); + return absl::optional<CryptoOptions>(native_crypto_options); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h new file mode 100644 index 0000000000..a9c8f2609a --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h @@ -0,0 +1,30 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_ +#define SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_ + +#include <jni.h> + +#include "absl/types/optional.h" +#include "api/crypto/crypto_options.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +absl::optional<CryptoOptions> JavaToNativeOptionalCryptoOptions( + JNIEnv* jni, + const JavaRef<jobject>& j_crypto_options); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc new file mode 100644 index 0000000000..3552974443 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc @@ -0,0 +1,155 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include <memory> + +#include <limits> + +#include "api/data_channel_interface.h" +#include "rtc_base/logging.h" +#include "sdk/android/generated_peerconnection_jni/DataChannel_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/data_channel.h" + +namespace webrtc { +namespace jni { + +namespace { +// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver +// and dispatching the callback from C++ back to Java. +class DataChannelObserverJni : public DataChannelObserver { + public: + DataChannelObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer); + ~DataChannelObserverJni() override {} + + void OnBufferedAmountChange(uint64_t previous_amount) override; + void OnStateChange() override; + void OnMessage(const DataBuffer& buffer) override; + + private: + const ScopedJavaGlobalRef<jobject> j_observer_global_; +}; + +DataChannelObserverJni::DataChannelObserverJni( + JNIEnv* jni, + const JavaRef<jobject>& j_observer) + : j_observer_global_(jni, j_observer) {} + +void DataChannelObserverJni::OnBufferedAmountChange(uint64_t previous_amount) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onBufferedAmountChange(env, j_observer_global_, + previous_amount); +} + +void DataChannelObserverJni::OnStateChange() { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onStateChange(env, j_observer_global_); +} + +void DataChannelObserverJni::OnMessage(const DataBuffer& buffer) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> byte_buffer = NewDirectByteBuffer( + env, const_cast<char*>(buffer.data.data<char>()), buffer.data.size()); + ScopedJavaLocalRef<jobject> j_buffer = + Java_Buffer_Constructor(env, byte_buffer, buffer.binary); + Java_Observer_onMessage(env, j_observer_global_, j_buffer); +} + +DataChannelInterface* ExtractNativeDC(JNIEnv* jni, + const JavaParamRef<jobject>& j_dc) { + return reinterpret_cast<DataChannelInterface*>( + Java_DataChannel_getNativeDataChannel(jni, j_dc)); +} + +} // namespace + +DataChannelInit JavaToNativeDataChannelInit(JNIEnv* env, + const JavaRef<jobject>& j_init) { + DataChannelInit init; + init.ordered = Java_Init_getOrdered(env, j_init); + init.maxRetransmitTime = Java_Init_getMaxRetransmitTimeMs(env, j_init); + init.maxRetransmits = Java_Init_getMaxRetransmits(env, j_init); + init.protocol = JavaToStdString(env, Java_Init_getProtocol(env, j_init)); + init.negotiated = Java_Init_getNegotiated(env, j_init); + init.id = Java_Init_getId(env, j_init); + return init; +} + +ScopedJavaLocalRef<jobject> WrapNativeDataChannel( + JNIEnv* env, + rtc::scoped_refptr<DataChannelInterface> channel) { + if (!channel) + return nullptr; + // Channel is now owned by Java object, and will be freed from there. 
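+  // release() hands the reference owned by `channel` over to the jlong that
+  // the Java wrapper stores, rather than dropping it when this scope ends.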
+ return Java_DataChannel_Constructor(env, jlongFromPointer(channel.release())); +} + +static jlong JNI_DataChannel_RegisterObserver( + JNIEnv* jni, + const JavaParamRef<jobject>& j_dc, + const JavaParamRef<jobject>& j_observer) { + auto observer = std::make_unique<DataChannelObserverJni>(jni, j_observer); + ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get()); + return jlongFromPointer(observer.release()); +} + +static void JNI_DataChannel_UnregisterObserver( + JNIEnv* jni, + const JavaParamRef<jobject>& j_dc, + jlong native_observer) { + ExtractNativeDC(jni, j_dc)->UnregisterObserver(); + delete reinterpret_cast<DataChannelObserverJni*>(native_observer); +} + +static ScopedJavaLocalRef<jstring> JNI_DataChannel_Label( + JNIEnv* jni, + const JavaParamRef<jobject>& j_dc) { + return NativeToJavaString(jni, ExtractNativeDC(jni, j_dc)->label()); +} + +static jint JNI_DataChannel_Id(JNIEnv* jni, const JavaParamRef<jobject>& j_dc) { + int id = ExtractNativeDC(jni, j_dc)->id(); + RTC_CHECK_LE(id, std::numeric_limits<int32_t>::max()) + << "id overflowed jint!"; + return static_cast<jint>(id); +} + +static ScopedJavaLocalRef<jobject> JNI_DataChannel_State( + JNIEnv* jni, + const JavaParamRef<jobject>& j_dc) { + return Java_State_fromNativeIndex(jni, ExtractNativeDC(jni, j_dc)->state()); +} + +static jlong JNI_DataChannel_BufferedAmount(JNIEnv* jni, + const JavaParamRef<jobject>& j_dc) { + uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount(); + RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max()) + << "buffered_amount overflowed jlong!"; + return static_cast<jlong>(buffered_amount); +} + +static void JNI_DataChannel_Close(JNIEnv* jni, + const JavaParamRef<jobject>& j_dc) { + ExtractNativeDC(jni, j_dc)->Close(); +} + +static jboolean JNI_DataChannel_Send(JNIEnv* jni, + const JavaParamRef<jobject>& j_dc, + const JavaParamRef<jbyteArray>& data, + jboolean binary) { + std::vector<int8_t> buffer = JavaToNativeByteArray(jni, data); + bool ret = ExtractNativeDC(jni, j_dc)->Send( + DataBuffer(rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()), binary)); + return ret; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h new file mode 100644 index 0000000000..9da1b67dae --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h @@ -0,0 +1,27 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
+#define SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
+
+#include <jni.h>
+
+#include "api/data_channel_interface.h"
+#include "api/scoped_refptr.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+DataChannelInit JavaToNativeDataChannelInit(JNIEnv* env,
+                                            const JavaRef<jobject>& j_init);
+
+ScopedJavaLocalRef<jobject> WrapNativeDataChannel(
+    JNIEnv* env,
+    rtc::scoped_refptr<DataChannelInterface> channel);
+
+}  // namespace jni
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc
new file mode 100644
index 0000000000..13cb027f6d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/dtmf_sender_interface.h"
+#include "sdk/android/generated_peerconnection_jni/DtmfSender_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jboolean JNI_DtmfSender_CanInsertDtmf(JNIEnv* jni,
+                                             jlong j_dtmf_sender_pointer) {
+  return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+      ->CanInsertDtmf();
+}
+
+static jboolean JNI_DtmfSender_InsertDtmf(JNIEnv* jni,
+                                          jlong j_dtmf_sender_pointer,
+                                          const JavaParamRef<jstring>& tones,
+                                          jint duration,
+                                          jint inter_tone_gap) {
+  return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+      ->InsertDtmf(JavaToStdString(jni, tones), duration, inter_tone_gap);
+}
+
+static ScopedJavaLocalRef<jstring> JNI_DtmfSender_Tones(
+    JNIEnv* jni,
+    jlong j_dtmf_sender_pointer) {
+  return NativeToJavaString(
+      jni,
+      reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)->tones());
+}
+
+static jint JNI_DtmfSender_Duration(JNIEnv* jni,
+                                    jlong j_dtmf_sender_pointer) {
+  return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+      ->duration();
+}
+
+static jint JNI_DtmfSender_InterToneGap(JNIEnv* jni,
+                                        jlong j_dtmf_sender_pointer) {
+  return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+      ->inter_tone_gap();
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc
new file mode 100644
index 0000000000..af92ff8e89
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+
+#include <string>
+
+#include "pc/webrtc_sdp.h"
+#include "sdk/android/generated_peerconnection_jni/IceCandidate_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/peer_connection.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> CreateJavaIceCandidate(JNIEnv* env,
+                                                   const std::string& sdp_mid,
+                                                   int sdp_mline_index,
+                                                   const std::string& sdp,
+                                                   const std::string& server_url,
+                                                   int adapterType) {
+  return Java_IceCandidate_Constructor(
+      env, NativeToJavaString(env, sdp_mid), sdp_mline_index,
+      NativeToJavaString(env, sdp), NativeToJavaString(env, server_url),
+      NativeToJavaAdapterType(env, adapterType));
+}
+
+}  // namespace
+
+cricket::Candidate JavaToNativeCandidate(JNIEnv* jni,
+                                         const JavaRef<jobject>& j_candidate) {
+  std::string sdp_mid =
+      JavaToStdString(jni, Java_IceCandidate_getSdpMid(jni, j_candidate));
+  std::string sdp =
+      JavaToStdString(jni, Java_IceCandidate_getSdp(jni, j_candidate));
+  cricket::Candidate candidate;
+  if (!SdpDeserializeCandidate(sdp_mid, sdp, &candidate, NULL)) {
+    RTC_LOG(LS_ERROR) << "SdpDeserializeCandidate failed with sdp " << sdp;
+  }
+  return candidate;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaCandidate(
+    JNIEnv* env,
+    const cricket::Candidate& candidate) {
+  std::string sdp = SdpSerializeCandidate(candidate);
+  RTC_CHECK(!sdp.empty()) << "got an empty ICE candidate";
+  // sdp_mline_index is not used, pass an invalid value -1.
+  return CreateJavaIceCandidate(env, candidate.transport_name(),
+                                -1 /* sdp_mline_index */, sdp,
+                                "" /* server_url */, candidate.network_type());
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaIceCandidate(
+    JNIEnv* env,
+    const IceCandidateInterface& candidate) {
+  std::string sdp;
+  RTC_CHECK(candidate.ToString(&sdp)) << "got so far: " << sdp;
+  return CreateJavaIceCandidate(env, candidate.sdp_mid(),
+                                candidate.sdp_mline_index(), sdp,
+                                candidate.candidate().url(), 0);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaCandidateArray(
+    JNIEnv* jni,
+    const std::vector<cricket::Candidate>& candidates) {
+  return NativeToJavaObjectArray(jni, candidates,
+                                 org_webrtc_IceCandidate_clazz(jni),
+                                 &NativeToJavaCandidate);
+}
+
+PeerConnectionInterface::IceTransportsType JavaToNativeIceTransportsType(
+    JNIEnv* jni,
+    const JavaRef<jobject>& j_ice_transports_type) {
+  std::string enum_name = GetJavaEnumName(jni, j_ice_transports_type);
+
+  if (enum_name == "ALL")
+    return PeerConnectionInterface::kAll;
+
+  if (enum_name == "RELAY")
+    return PeerConnectionInterface::kRelay;
+
+  if (enum_name == "NOHOST")
+    return PeerConnectionInterface::kNoHost;
+
+  if (enum_name == "NONE")
+    return PeerConnectionInterface::kNone;
+
+  RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
+  return PeerConnectionInterface::kAll;
+}
+
+PeerConnectionInterface::BundlePolicy JavaToNativeBundlePolicy(
+    JNIEnv* jni,
+    const JavaRef<jobject>& j_bundle_policy) {
+  std::string enum_name = GetJavaEnumName(jni, j_bundle_policy);
+
+  if (enum_name == "BALANCED")
+    return PeerConnectionInterface::kBundlePolicyBalanced;
+
+  if (enum_name == "MAXBUNDLE")
+    return PeerConnectionInterface::kBundlePolicyMaxBundle;
+
+  if (enum_name == "MAXCOMPAT")
+    return PeerConnectionInterface::kBundlePolicyMaxCompat;
+
+  RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kBundlePolicyBalanced;
+}
+
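+// Like the converters above, the mapping below matches on the Java enum
+// constant's name, so it has to stay in sync with the corresponding enum
+// declared in PeerConnection.java.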
+PeerConnectionInterface::RtcpMuxPolicy JavaToNativeRtcpMuxPolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_rtcp_mux_policy) { + std::string enum_name = GetJavaEnumName(jni, j_rtcp_mux_policy); + + if (enum_name == "NEGOTIATE") + return PeerConnectionInterface::kRtcpMuxPolicyNegotiate; + + if (enum_name == "REQUIRE") + return PeerConnectionInterface::kRtcpMuxPolicyRequire; + + RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name; + return PeerConnectionInterface::kRtcpMuxPolicyNegotiate; +} + +PeerConnectionInterface::TcpCandidatePolicy JavaToNativeTcpCandidatePolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_tcp_candidate_policy) { + std::string enum_name = GetJavaEnumName(jni, j_tcp_candidate_policy); + + if (enum_name == "ENABLED") + return PeerConnectionInterface::kTcpCandidatePolicyEnabled; + + if (enum_name == "DISABLED") + return PeerConnectionInterface::kTcpCandidatePolicyDisabled; + + RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name; + return PeerConnectionInterface::kTcpCandidatePolicyEnabled; +} + +PeerConnectionInterface::CandidateNetworkPolicy +JavaToNativeCandidateNetworkPolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_candidate_network_policy) { + std::string enum_name = GetJavaEnumName(jni, j_candidate_network_policy); + + if (enum_name == "ALL") + return PeerConnectionInterface::kCandidateNetworkPolicyAll; + + if (enum_name == "LOW_COST") + return PeerConnectionInterface::kCandidateNetworkPolicyLowCost; + + RTC_CHECK(false) << "Unexpected CandidateNetworkPolicy enum_name " + << enum_name; + return PeerConnectionInterface::kCandidateNetworkPolicyAll; +} + +rtc::KeyType JavaToNativeKeyType(JNIEnv* jni, + const JavaRef<jobject>& j_key_type) { + std::string enum_name = GetJavaEnumName(jni, j_key_type); + + if (enum_name == "RSA") + return rtc::KT_RSA; + if (enum_name == "ECDSA") + return rtc::KT_ECDSA; + + RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name; + return rtc::KT_ECDSA; +} + +PeerConnectionInterface::ContinualGatheringPolicy +JavaToNativeContinualGatheringPolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_gathering_policy) { + std::string enum_name = GetJavaEnumName(jni, j_gathering_policy); + if (enum_name == "GATHER_ONCE") + return PeerConnectionInterface::GATHER_ONCE; + + if (enum_name == "GATHER_CONTINUALLY") + return PeerConnectionInterface::GATHER_CONTINUALLY; + + RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name " + << enum_name; + return PeerConnectionInterface::GATHER_ONCE; +} + +webrtc::PortPrunePolicy JavaToNativePortPrunePolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_port_prune_policy) { + std::string enum_name = GetJavaEnumName(jni, j_port_prune_policy); + if (enum_name == "NO_PRUNE") { + return webrtc::NO_PRUNE; + } + if (enum_name == "PRUNE_BASED_ON_PRIORITY") { + return webrtc::PRUNE_BASED_ON_PRIORITY; + } + if (enum_name == "KEEP_FIRST_READY") { + return webrtc::KEEP_FIRST_READY; + } + + RTC_CHECK(false) << " Unexpected PortPrunePolicy enum name " << enum_name; + + return webrtc::NO_PRUNE; +} + +PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_ice_server_tls_cert_policy) { + std::string enum_name = GetJavaEnumName(jni, j_ice_server_tls_cert_policy); + + if (enum_name == "TLS_CERT_POLICY_SECURE") + return PeerConnectionInterface::kTlsCertPolicySecure; + + if (enum_name == "TLS_CERT_POLICY_INSECURE_NO_CHECK") + return PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck; + + RTC_CHECK(false) << "Unexpected 
TlsCertPolicy enum_name " << enum_name; + return PeerConnectionInterface::kTlsCertPolicySecure; +} + +absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference( + JNIEnv* jni, + const JavaRef<jobject>& j_network_preference) { + std::string enum_name = GetJavaEnumName(jni, j_network_preference); + + if (enum_name == "UNKNOWN") + return absl::nullopt; + + if (enum_name == "ETHERNET") + return rtc::ADAPTER_TYPE_ETHERNET; + + if (enum_name == "WIFI") + return rtc::ADAPTER_TYPE_WIFI; + + if (enum_name == "CELLULAR") + return rtc::ADAPTER_TYPE_CELLULAR; + + if (enum_name == "VPN") + return rtc::ADAPTER_TYPE_VPN; + + if (enum_name == "LOOPBACK") + return rtc::ADAPTER_TYPE_LOOPBACK; + + RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name; + return absl::nullopt; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h new file mode 100644 index 0000000000..4bdeea61c6 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h @@ -0,0 +1,89 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_ +#define SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_ + +#include <vector> + +#include "api/data_channel_interface.h" +#include "api/jsep.h" +#include "api/jsep_ice_candidate.h" +#include "api/peer_connection_interface.h" +#include "api/rtp_parameters.h" +#include "rtc_base/ssl_identity.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +cricket::Candidate JavaToNativeCandidate(JNIEnv* jni, + const JavaRef<jobject>& j_candidate); + +ScopedJavaLocalRef<jobject> NativeToJavaCandidate( + JNIEnv* env, + const cricket::Candidate& candidate); + +ScopedJavaLocalRef<jobject> NativeToJavaIceCandidate( + JNIEnv* env, + const IceCandidateInterface& candidate); + +ScopedJavaLocalRef<jobjectArray> NativeToJavaCandidateArray( + JNIEnv* jni, + const std::vector<cricket::Candidate>& candidates); + +/***************************************************** + * Below are all things that go into RTCConfiguration. 
+ *****************************************************/ +PeerConnectionInterface::IceTransportsType JavaToNativeIceTransportsType( + JNIEnv* jni, + const JavaRef<jobject>& j_ice_transports_type); + +PeerConnectionInterface::BundlePolicy JavaToNativeBundlePolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_bundle_policy); + +PeerConnectionInterface::RtcpMuxPolicy JavaToNativeRtcpMuxPolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_rtcp_mux_policy); + +PeerConnectionInterface::TcpCandidatePolicy JavaToNativeTcpCandidatePolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_tcp_candidate_policy); + +PeerConnectionInterface::CandidateNetworkPolicy +JavaToNativeCandidateNetworkPolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_candidate_network_policy); + +rtc::KeyType JavaToNativeKeyType(JNIEnv* jni, + const JavaRef<jobject>& j_key_type); + +PeerConnectionInterface::ContinualGatheringPolicy +JavaToNativeContinualGatheringPolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_gathering_policy); + +webrtc::PortPrunePolicy JavaToNativePortPrunePolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_port_prune_policy); + +PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy( + JNIEnv* jni, + const JavaRef<jobject>& j_ice_server_tls_cert_policy); + +absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference( + JNIEnv* jni, + const JavaRef<jobject>& j_network_preference); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc new file mode 100644 index 0000000000..7b35ca051c --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc @@ -0,0 +1,59 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include <memory> + +#include "rtc_base/logging.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +JNI_FUNCTION_DECLARATION(void, + Logging_nativeEnableLogToDebugOutput, + JNIEnv* jni, + jclass, + jint nativeSeverity) { + if (nativeSeverity >= rtc::LS_VERBOSE && nativeSeverity <= rtc::LS_NONE) { + rtc::LogMessage::LogToDebug( + static_cast<rtc::LoggingSeverity>(nativeSeverity)); + } +} + +JNI_FUNCTION_DECLARATION(void, + Logging_nativeEnableLogThreads, + JNIEnv* jni, + jclass) { + rtc::LogMessage::LogThreads(true); +} + +JNI_FUNCTION_DECLARATION(void, + Logging_nativeEnableLogTimeStamps, + JNIEnv* jni, + jclass) { + rtc::LogMessage::LogTimestamps(true); +} + +JNI_FUNCTION_DECLARATION(void, + Logging_nativeLog, + JNIEnv* jni, + jclass, + jint j_severity, + jstring j_tag, + jstring j_message) { + std::string message = JavaToStdString(jni, JavaParamRef<jstring>(j_message)); + std::string tag = JavaToStdString(jni, JavaParamRef<jstring>(j_tag)); + RTC_LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag.c_str()) + << message; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc new file mode 100644 index 0000000000..4e1a3ba406 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc @@ -0,0 +1,51 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/media_constraints.h" + +#include <memory> + +#include "sdk/android/generated_peerconnection_jni/MediaConstraints_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +namespace { + +// Helper for translating a List<Pair<String, String>> to a Constraints. +MediaConstraints::Constraints PopulateConstraintsFromJavaPairList( + JNIEnv* env, + const JavaRef<jobject>& j_list) { + MediaConstraints::Constraints constraints; + for (const JavaRef<jobject>& entry : Iterable(env, j_list)) { + constraints.emplace_back( + JavaToStdString(env, Java_KeyValuePair_getKey(env, entry)), + JavaToStdString(env, Java_KeyValuePair_getValue(env, entry))); + } + return constraints; +} + +} // namespace + +// Copies all needed data so Java object is no longer needed at return. 
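+// Both the mandatory and optional key/value pairs are copied into owned
+// std::strings, so the result stays valid after the Java constraints object
+// is garbage collected.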
+std::unique_ptr<MediaConstraints> JavaToNativeMediaConstraints( + JNIEnv* env, + const JavaRef<jobject>& j_constraints) { + return std::make_unique<MediaConstraints>( + PopulateConstraintsFromJavaPairList( + env, Java_MediaConstraints_getMandatory(env, j_constraints)), + PopulateConstraintsFromJavaPairList( + env, Java_MediaConstraints_getOptional(env, j_constraints))); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h new file mode 100644 index 0000000000..68cedc7f2d --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h @@ -0,0 +1,30 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_ +#define SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_ + +#include <jni.h> +#include <memory> + +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/media_constraints.h" + +namespace webrtc { +namespace jni { + +std::unique_ptr<MediaConstraints> JavaToNativeMediaConstraints( + JNIEnv* env, + const JavaRef<jobject>& j_constraints); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc new file mode 100644 index 0000000000..e20f28f310 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc @@ -0,0 +1,24 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/media_stream_interface.h" +#include "sdk/android/generated_peerconnection_jni/MediaSource_jni.h" + +namespace webrtc { +namespace jni { + +static ScopedJavaLocalRef<jobject> JNI_MediaSource_GetState(JNIEnv* jni, + jlong j_p) { + return Java_State_fromNativeIndex( + jni, reinterpret_cast<MediaSourceInterface*>(j_p)->state()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc new file mode 100644 index 0000000000..20d59a6f8f --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc @@ -0,0 +1,152 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/pc/media_stream.h" + +#include <memory> + +#include "sdk/android/generated_peerconnection_jni/MediaStream_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +JavaMediaStream::JavaMediaStream( + JNIEnv* env, + rtc::scoped_refptr<MediaStreamInterface> media_stream) + : j_media_stream_( + env, + Java_MediaStream_Constructor(env, + jlongFromPointer(media_stream.get()))) { + // Create an observer to update the Java stream when the native stream's set + // of tracks changes. + observer_.reset(new MediaStreamObserver( + media_stream.get(), + [this](AudioTrackInterface* audio_track, + MediaStreamInterface* media_stream) { + OnAudioTrackAddedToStream(audio_track, media_stream); + }, + [this](AudioTrackInterface* audio_track, + MediaStreamInterface* media_stream) { + OnAudioTrackRemovedFromStream(audio_track, media_stream); + }, + [this](VideoTrackInterface* video_track, + MediaStreamInterface* media_stream) { + OnVideoTrackAddedToStream(video_track, media_stream); + }, + [this](VideoTrackInterface* video_track, + MediaStreamInterface* media_stream) { + OnVideoTrackRemovedFromStream(video_track, media_stream); + })); + for (rtc::scoped_refptr<AudioTrackInterface> track : + media_stream->GetAudioTracks()) { + Java_MediaStream_addNativeAudioTrack(env, j_media_stream_, + jlongFromPointer(track.release())); + } + for (rtc::scoped_refptr<VideoTrackInterface> track : + media_stream->GetVideoTracks()) { + Java_MediaStream_addNativeVideoTrack(env, j_media_stream_, + jlongFromPointer(track.release())); + } + // `j_media_stream` holds one reference. Corresponding Release() is in + // MediaStream_free, triggered by MediaStream.dispose(). + media_stream.release(); +} + +JavaMediaStream::~JavaMediaStream() { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + // Remove the observer first, so it doesn't react to events during deletion. 
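+  // Destroying the MediaStreamObserver disconnects its track callbacks, so
+  // none of them can race with the Java_MediaStream_dispose() call below.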
+ observer_ = nullptr; + Java_MediaStream_dispose(env, j_media_stream_); +} + +void JavaMediaStream::OnAudioTrackAddedToStream(AudioTrackInterface* track, + MediaStreamInterface* stream) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedLocalRefFrame local_ref_frame(env); + track->AddRef(); + Java_MediaStream_addNativeAudioTrack(env, j_media_stream_, + jlongFromPointer(track)); +} + +void JavaMediaStream::OnVideoTrackAddedToStream(VideoTrackInterface* track, + MediaStreamInterface* stream) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedLocalRefFrame local_ref_frame(env); + track->AddRef(); + Java_MediaStream_addNativeVideoTrack(env, j_media_stream_, + jlongFromPointer(track)); +} + +void JavaMediaStream::OnAudioTrackRemovedFromStream( + AudioTrackInterface* track, + MediaStreamInterface* stream) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedLocalRefFrame local_ref_frame(env); + Java_MediaStream_removeAudioTrack(env, j_media_stream_, + jlongFromPointer(track)); +} + +void JavaMediaStream::OnVideoTrackRemovedFromStream( + VideoTrackInterface* track, + MediaStreamInterface* stream) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedLocalRefFrame local_ref_frame(env); + Java_MediaStream_removeVideoTrack(env, j_media_stream_, + jlongFromPointer(track)); +} + +jclass GetMediaStreamClass(JNIEnv* env) { + return org_webrtc_MediaStream_clazz(env); +} + +static jboolean JNI_MediaStream_AddAudioTrackToNativeStream( + JNIEnv* jni, + jlong pointer, + jlong j_audio_track_pointer) { + return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack( + rtc::scoped_refptr<AudioTrackInterface>( + reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer))); +} + +static jboolean JNI_MediaStream_AddVideoTrackToNativeStream( + JNIEnv* jni, + jlong pointer, + jlong j_video_track_pointer) { + return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack( + rtc::scoped_refptr<VideoTrackInterface>( + reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer))); +} + +static jboolean JNI_MediaStream_RemoveAudioTrack(JNIEnv* jni, + jlong pointer, + jlong j_audio_track_pointer) { + return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack( + rtc::scoped_refptr<AudioTrackInterface>( + reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer))); +} + +static jboolean JNI_MediaStream_RemoveVideoTrack(JNIEnv* jni, + jlong pointer, + jlong j_video_track_pointer) { + return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack( + rtc::scoped_refptr<VideoTrackInterface>( + reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer))); +} + +static ScopedJavaLocalRef<jstring> JNI_MediaStream_GetId(JNIEnv* jni, + jlong j_p) { + return NativeToJavaString(jni, + reinterpret_cast<MediaStreamInterface*>(j_p)->id()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h new file mode 100644 index 0000000000..efa177c43e --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h @@ -0,0 +1,54 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_ +#define SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_ + +#include <jni.h> +#include <memory> + +#include "api/media_stream_interface.h" +#include "pc/media_stream_observer.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +class JavaMediaStream { + public: + explicit JavaMediaStream( + JNIEnv* env, + rtc::scoped_refptr<MediaStreamInterface> media_stream); + ~JavaMediaStream(); + + const ScopedJavaGlobalRef<jobject>& j_media_stream() { + return j_media_stream_; + } + + private: + void OnAudioTrackAddedToStream(AudioTrackInterface* track, + MediaStreamInterface* stream); + void OnVideoTrackAddedToStream(VideoTrackInterface* track, + MediaStreamInterface* stream); + void OnAudioTrackRemovedFromStream(AudioTrackInterface* track, + MediaStreamInterface* stream); + void OnVideoTrackRemovedFromStream(VideoTrackInterface* track, + MediaStreamInterface* stream); + + ScopedJavaGlobalRef<jobject> j_media_stream_; + std::unique_ptr<MediaStreamObserver> observer_; +}; + +jclass GetMediaStreamClass(JNIEnv* env); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc new file mode 100644 index 0000000000..928f10c03a --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc @@ -0,0 +1,67 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/pc/media_stream_track.h" + +#include "api/media_stream_interface.h" +#include "sdk/android/generated_peerconnection_jni/MediaStreamTrack_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef<jobject> NativeToJavaMediaType( + JNIEnv* jni, + cricket::MediaType media_type) { + return Java_MediaType_fromNativeIndex(jni, media_type); +} + +cricket::MediaType JavaToNativeMediaType(JNIEnv* jni, + const JavaRef<jobject>& j_media_type) { + return static_cast<cricket::MediaType>( + Java_MediaType_getNative(jni, j_media_type)); +} + +static ScopedJavaLocalRef<jstring> JNI_MediaStreamTrack_GetId( + JNIEnv* jni, + jlong j_p) { + return NativeToJavaString( + jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id()); +} + +static ScopedJavaLocalRef<jstring> JNI_MediaStreamTrack_GetKind( + JNIEnv* jni, + jlong j_p) { + return NativeToJavaString( + jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind()); +} + +static jboolean JNI_MediaStreamTrack_GetEnabled(JNIEnv* jni, + jlong j_p) { + return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled(); +} + +static ScopedJavaLocalRef<jobject> JNI_MediaStreamTrack_GetState( + JNIEnv* jni, + jlong j_p) { + return Java_State_fromNativeIndex( + jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state()); +} + +static jboolean JNI_MediaStreamTrack_SetEnabled(JNIEnv* jni, + jlong j_p, + jboolean enabled) { + return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->set_enabled( + enabled); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h new file mode 100644 index 0000000000..8bfe302db7 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h @@ -0,0 +1,31 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_ +#define SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_ + +#include <jni.h> + +#include "api/media_types.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef<jobject> NativeToJavaMediaType( + JNIEnv* jni, + cricket::MediaType media_type); +cricket::MediaType JavaToNativeMediaType(JNIEnv* jni, + const JavaRef<jobject>& j_media_type); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc new file mode 100644 index 0000000000..d595c481f8 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc @@ -0,0 +1,31 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/owned_factory_and_threads.h"
+
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+OwnedFactoryAndThreads::OwnedFactoryAndThreads(
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread,
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface>& factory)
+ : socket_factory_(std::move(socket_factory)),
+ network_thread_(std::move(network_thread)),
+ worker_thread_(std::move(worker_thread)),
+ signaling_thread_(std::move(signaling_thread)),
+ factory_(factory) {}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h
new file mode 100644
index 0000000000..7dc9443ea5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_
+#define SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_
+
+#include <jni.h>
+#include <memory>
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace jni {
+
+// Helper class for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown). This class takes ownership of its ctor's arguments to present a
+// single thing for Java to hold and eventually free.
+class OwnedFactoryAndThreads {
+ public:
+ OwnedFactoryAndThreads(
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread,
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface>& factory);
+
+ ~OwnedFactoryAndThreads() = default;
+
+ PeerConnectionFactoryInterface* factory() { return factory_.get(); }
+ rtc::SocketFactory* socket_factory() { return socket_factory_.get(); }
+ rtc::Thread* network_thread() { return network_thread_.get(); }
+ rtc::Thread* signaling_thread() { return signaling_thread_.get(); }
+ rtc::Thread* worker_thread() { return worker_thread_.get(); }
+
+ private:
+ // Usually implemented by the SocketServer associated with the network thread,
+ // so needs to outlive the network thread.
+ const std::unique_ptr<rtc::SocketFactory> socket_factory_; + const std::unique_ptr<rtc::Thread> network_thread_; + const std::unique_ptr<rtc::Thread> worker_thread_; + const std::unique_ptr<rtc::Thread> signaling_thread_; + const rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc new file mode 100644 index 0000000000..502763a2d0 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc @@ -0,0 +1,917 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Lifecycle notes: objects are owned where they will be called; in other words +// FooObservers are owned by C++-land, and user-callable objects (e.g. +// PeerConnection and VideoTrack) are owned by Java-land. +// When this file (or other files in this directory) allocates C++ +// RefCountInterfaces it AddRef()s an artificial ref simulating the jlong held +// in Java-land, and then Release()s the ref in the respective free call. +// Sometimes this AddRef is implicit in the construction of a scoped_refptr<> +// which is then .release()d. Any persistent (non-local) references from C++ to +// Java must be global or weak (in which case they must be checked before use)! +// +// Exception notes: pretty much all JNI calls can throw Java exceptions, so each +// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck() +// call. In this file this is done in CHECK_EXCEPTION, making for much easier +// debugging in case of failure (the alternative is to wait for control to +// return to the Java frame that called code in this file, at which point it's +// impossible to tell which JNI call broke). 
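In miniature, the ownership convention described above looks like the following sketch. It is illustrative only and not part of this patch: `Foo` and the two `JNI_Foo_*` functions are hypothetical names, while `jlongFromPointer` and the cast-and-Release pattern are the ones actually used throughout this file.

// Illustrative sketch only. Assumes rtc_base/ref_count.h,
// rtc_base/ref_counted_object.h and sdk/android/src/jni/jni_helpers.h.
// Foo is a hypothetical refcounted type standing in for e.g.
// MediaStreamInterface.
class Foo : public rtc::RefCountInterface {};

// Creation: hand Java a jlong carrying one "artificial" ref, so the native
// object stays alive for as long as Java-land holds the pointer.
static jlong JNI_Foo_Create(JNIEnv* jni) {
  rtc::scoped_refptr<Foo> foo = rtc::make_ref_counted<Foo>();
  return jlongFromPointer(foo.release());  // The ref travels into Java-land.
}

// Free call, triggered by the Java object's dispose(): balances the
// artificial ref taken in JNI_Foo_Create().
static void JNI_Foo_Free(JNIEnv* jni, jlong j_p) {
  reinterpret_cast<Foo*>(j_p)->Release();
}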
+ +#include "sdk/android/src/jni/pc/peer_connection.h" + +#include <limits> +#include <memory> +#include <string> +#include <utility> + +#include "api/peer_connection_interface.h" +#include "api/rtc_event_log_output_file.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h" +#include "sdk/android/generated_peerconnection_jni/IceCandidateErrorEvent_jni.h" +#include "sdk/android/generated_peerconnection_jni/PeerConnection_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/add_ice_candidate_observer.h" +#include "sdk/android/src/jni/pc/crypto_options.h" +#include "sdk/android/src/jni/pc/data_channel.h" +#include "sdk/android/src/jni/pc/ice_candidate.h" +#include "sdk/android/src/jni/pc/media_constraints.h" +#include "sdk/android/src/jni/pc/media_stream_track.h" +#include "sdk/android/src/jni/pc/rtc_certificate.h" +#include "sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h" +#include "sdk/android/src/jni/pc/rtp_sender.h" +#include "sdk/android/src/jni/pc/sdp_observer.h" +#include "sdk/android/src/jni/pc/session_description.h" +#include "sdk/android/src/jni/pc/stats_observer.h" +#include "sdk/android/src/jni/pc/turn_customizer.h" + +namespace webrtc { +namespace jni { + +namespace { + +PeerConnectionInterface* ExtractNativePC(JNIEnv* jni, + const JavaRef<jobject>& j_pc) { + return reinterpret_cast<OwnedPeerConnection*>( + Java_PeerConnection_getNativeOwnedPeerConnection(jni, j_pc)) + ->pc(); +} + +PeerConnectionInterface::IceServers JavaToNativeIceServers( + JNIEnv* jni, + const JavaRef<jobject>& j_ice_servers) { + PeerConnectionInterface::IceServers ice_servers; + for (const JavaRef<jobject>& j_ice_server : Iterable(jni, j_ice_servers)) { + ScopedJavaLocalRef<jobject> j_ice_server_tls_cert_policy = + Java_IceServer_getTlsCertPolicy(jni, j_ice_server); + ScopedJavaLocalRef<jobject> urls = + Java_IceServer_getUrls(jni, j_ice_server); + ScopedJavaLocalRef<jstring> username = + Java_IceServer_getUsername(jni, j_ice_server); + ScopedJavaLocalRef<jstring> password = + Java_IceServer_getPassword(jni, j_ice_server); + PeerConnectionInterface::TlsCertPolicy tls_cert_policy = + JavaToNativeTlsCertPolicy(jni, j_ice_server_tls_cert_policy); + ScopedJavaLocalRef<jstring> hostname = + Java_IceServer_getHostname(jni, j_ice_server); + ScopedJavaLocalRef<jobject> tls_alpn_protocols = + Java_IceServer_getTlsAlpnProtocols(jni, j_ice_server); + ScopedJavaLocalRef<jobject> tls_elliptic_curves = + Java_IceServer_getTlsEllipticCurves(jni, j_ice_server); + PeerConnectionInterface::IceServer server; + server.urls = JavaListToNativeVector<std::string, jstring>( + jni, urls, &JavaToNativeString); + server.username = JavaToNativeString(jni, username); + server.password = JavaToNativeString(jni, password); + server.tls_cert_policy = tls_cert_policy; + server.hostname = JavaToNativeString(jni, hostname); + server.tls_alpn_protocols = JavaListToNativeVector<std::string, jstring>( + jni, tls_alpn_protocols, &JavaToNativeString); + server.tls_elliptic_curves = JavaListToNativeVector<std::string, jstring>( + jni, tls_elliptic_curves, &JavaToNativeString); + ice_servers.push_back(server); + } + return ice_servers; +} + +SdpSemantics JavaToNativeSdpSemantics(JNIEnv* 
jni, + const JavaRef<jobject>& j_sdp_semantics) { + std::string enum_name = GetJavaEnumName(jni, j_sdp_semantics); + + if (enum_name == "PLAN_B") + return SdpSemantics::kPlanB_DEPRECATED; + + if (enum_name == "UNIFIED_PLAN") + return SdpSemantics::kUnifiedPlan; + + RTC_DCHECK_NOTREACHED(); + return SdpSemantics::kUnifiedPlan; +} + +ScopedJavaLocalRef<jobject> NativeToJavaCandidatePairChange( + JNIEnv* env, + const cricket::CandidatePairChangeEvent& event) { + const auto& selected_pair = event.selected_candidate_pair; + return Java_CandidatePairChangeEvent_Constructor( + env, NativeToJavaCandidate(env, selected_pair.local_candidate()), + NativeToJavaCandidate(env, selected_pair.remote_candidate()), + static_cast<int>(event.last_data_received_ms), + NativeToJavaString(env, event.reason), + static_cast<int>(event.estimated_disconnected_time_ms)); +} + +} // namespace + +ScopedJavaLocalRef<jobject> NativeToJavaAdapterType(JNIEnv* env, + int adapterType) { + return Java_AdapterType_fromNativeIndex(env, adapterType); +} + +void JavaToNativeRTCConfiguration( + JNIEnv* jni, + const JavaRef<jobject>& j_rtc_config, + PeerConnectionInterface::RTCConfiguration* rtc_config) { + ScopedJavaLocalRef<jobject> j_ice_transports_type = + Java_RTCConfiguration_getIceTransportsType(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_bundle_policy = + Java_RTCConfiguration_getBundlePolicy(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_rtcp_mux_policy = + Java_RTCConfiguration_getRtcpMuxPolicy(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_rtc_certificate = + Java_RTCConfiguration_getCertificate(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_tcp_candidate_policy = + Java_RTCConfiguration_getTcpCandidatePolicy(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_candidate_network_policy = + Java_RTCConfiguration_getCandidateNetworkPolicy(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_ice_servers = + Java_RTCConfiguration_getIceServers(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_continual_gathering_policy = + Java_RTCConfiguration_getContinualGatheringPolicy(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_turn_port_prune_policy = + Java_RTCConfiguration_getTurnPortPrunePolicy(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_turn_customizer = + Java_RTCConfiguration_getTurnCustomizer(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_network_preference = + Java_RTCConfiguration_getNetworkPreference(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_sdp_semantics = + Java_RTCConfiguration_getSdpSemantics(jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_crypto_options = + Java_RTCConfiguration_getCryptoOptions(jni, j_rtc_config); + + rtc_config->type = JavaToNativeIceTransportsType(jni, j_ice_transports_type); + rtc_config->bundle_policy = JavaToNativeBundlePolicy(jni, j_bundle_policy); + rtc_config->rtcp_mux_policy = + JavaToNativeRtcpMuxPolicy(jni, j_rtcp_mux_policy); + if (!j_rtc_certificate.is_null()) { + rtc::scoped_refptr<rtc::RTCCertificate> certificate = + rtc::RTCCertificate::FromPEM( + JavaToNativeRTCCertificatePEM(jni, j_rtc_certificate)); + RTC_CHECK(certificate != nullptr) << "supplied certificate is malformed."; + rtc_config->certificates.push_back(certificate); + } + rtc_config->tcp_candidate_policy = + JavaToNativeTcpCandidatePolicy(jni, j_tcp_candidate_policy); + rtc_config->candidate_network_policy = + JavaToNativeCandidateNetworkPolicy(jni, j_candidate_network_policy); + rtc_config->servers = JavaToNativeIceServers(jni, j_ice_servers); + 
rtc_config->audio_jitter_buffer_max_packets = + Java_RTCConfiguration_getAudioJitterBufferMaxPackets(jni, j_rtc_config); + rtc_config->audio_jitter_buffer_fast_accelerate = + Java_RTCConfiguration_getAudioJitterBufferFastAccelerate(jni, + j_rtc_config); + rtc_config->ice_connection_receiving_timeout = + Java_RTCConfiguration_getIceConnectionReceivingTimeout(jni, j_rtc_config); + rtc_config->ice_backup_candidate_pair_ping_interval = + Java_RTCConfiguration_getIceBackupCandidatePairPingInterval(jni, + j_rtc_config); + rtc_config->continual_gathering_policy = + JavaToNativeContinualGatheringPolicy(jni, j_continual_gathering_policy); + rtc_config->ice_candidate_pool_size = + Java_RTCConfiguration_getIceCandidatePoolSize(jni, j_rtc_config); + rtc_config->prune_turn_ports = + Java_RTCConfiguration_getPruneTurnPorts(jni, j_rtc_config); + rtc_config->turn_port_prune_policy = + JavaToNativePortPrunePolicy(jni, j_turn_port_prune_policy); + rtc_config->presume_writable_when_fully_relayed = + Java_RTCConfiguration_getPresumeWritableWhenFullyRelayed(jni, + j_rtc_config); + rtc_config->surface_ice_candidates_on_ice_transport_type_changed = + Java_RTCConfiguration_getSurfaceIceCandidatesOnIceTransportTypeChanged( + jni, j_rtc_config); + ScopedJavaLocalRef<jobject> j_ice_check_interval_strong_connectivity = + Java_RTCConfiguration_getIceCheckIntervalStrongConnectivity(jni, + j_rtc_config); + rtc_config->ice_check_interval_strong_connectivity = + JavaToNativeOptionalInt(jni, j_ice_check_interval_strong_connectivity); + ScopedJavaLocalRef<jobject> j_ice_check_interval_weak_connectivity = + Java_RTCConfiguration_getIceCheckIntervalWeakConnectivity(jni, + j_rtc_config); + rtc_config->ice_check_interval_weak_connectivity = + JavaToNativeOptionalInt(jni, j_ice_check_interval_weak_connectivity); + ScopedJavaLocalRef<jobject> j_ice_check_min_interval = + Java_RTCConfiguration_getIceCheckMinInterval(jni, j_rtc_config); + rtc_config->ice_check_min_interval = + JavaToNativeOptionalInt(jni, j_ice_check_min_interval); + ScopedJavaLocalRef<jobject> j_ice_unwritable_timeout = + Java_RTCConfiguration_getIceUnwritableTimeout(jni, j_rtc_config); + rtc_config->ice_unwritable_timeout = + JavaToNativeOptionalInt(jni, j_ice_unwritable_timeout); + ScopedJavaLocalRef<jobject> j_ice_unwritable_min_checks = + Java_RTCConfiguration_getIceUnwritableMinChecks(jni, j_rtc_config); + rtc_config->ice_unwritable_min_checks = + JavaToNativeOptionalInt(jni, j_ice_unwritable_min_checks); + ScopedJavaLocalRef<jobject> j_stun_candidate_keepalive_interval = + Java_RTCConfiguration_getStunCandidateKeepaliveInterval(jni, + j_rtc_config); + rtc_config->stun_candidate_keepalive_interval = + JavaToNativeOptionalInt(jni, j_stun_candidate_keepalive_interval); + ScopedJavaLocalRef<jobject> j_stable_writable_connection_ping_interval_ms = + Java_RTCConfiguration_getStableWritableConnectionPingIntervalMs( + jni, j_rtc_config); + rtc_config->stable_writable_connection_ping_interval_ms = + JavaToNativeOptionalInt(jni, + j_stable_writable_connection_ping_interval_ms); + rtc_config->disable_ipv6_on_wifi = + Java_RTCConfiguration_getDisableIPv6OnWifi(jni, j_rtc_config); + rtc_config->max_ipv6_networks = + Java_RTCConfiguration_getMaxIPv6Networks(jni, j_rtc_config); + + rtc_config->turn_customizer = GetNativeTurnCustomizer(jni, j_turn_customizer); + + rtc_config->disable_ipv6 = + Java_RTCConfiguration_getDisableIpv6(jni, j_rtc_config); + rtc_config->media_config.enable_dscp = + Java_RTCConfiguration_getEnableDscp(jni, j_rtc_config); + 
rtc_config->media_config.video.enable_cpu_adaptation = + Java_RTCConfiguration_getEnableCpuOveruseDetection(jni, j_rtc_config); + rtc_config->media_config.video.suspend_below_min_bitrate = + Java_RTCConfiguration_getSuspendBelowMinBitrate(jni, j_rtc_config); + rtc_config->screencast_min_bitrate = JavaToNativeOptionalInt( + jni, Java_RTCConfiguration_getScreencastMinBitrate(jni, j_rtc_config)); + rtc_config->combined_audio_video_bwe = JavaToNativeOptionalBool( + jni, Java_RTCConfiguration_getCombinedAudioVideoBwe(jni, j_rtc_config)); + rtc_config->network_preference = + JavaToNativeNetworkPreference(jni, j_network_preference); + rtc_config->sdp_semantics = JavaToNativeSdpSemantics(jni, j_sdp_semantics); + rtc_config->active_reset_srtp_params = + Java_RTCConfiguration_getActiveResetSrtpParams(jni, j_rtc_config); + rtc_config->crypto_options = + JavaToNativeOptionalCryptoOptions(jni, j_crypto_options); + + rtc_config->allow_codec_switching = JavaToNativeOptionalBool( + jni, Java_RTCConfiguration_getAllowCodecSwitching(jni, j_rtc_config)); + + rtc_config->offer_extmap_allow_mixed = + Java_RTCConfiguration_getOfferExtmapAllowMixed(jni, j_rtc_config); + rtc_config->enable_implicit_rollback = + Java_RTCConfiguration_getEnableImplicitRollback(jni, j_rtc_config); + + ScopedJavaLocalRef<jstring> j_turn_logging_id = + Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config); + if (!IsNull(jni, j_turn_logging_id)) { + rtc_config->turn_logging_id = JavaToNativeString(jni, j_turn_logging_id); + } +} + +rtc::KeyType GetRtcConfigKeyType(JNIEnv* env, + const JavaRef<jobject>& j_rtc_config) { + return JavaToNativeKeyType( + env, Java_RTCConfiguration_getKeyType(env, j_rtc_config)); +} + +PeerConnectionObserverJni::PeerConnectionObserverJni( + JNIEnv* jni, + const JavaRef<jobject>& j_observer) + : j_observer_global_(jni, j_observer) {} + +PeerConnectionObserverJni::~PeerConnectionObserverJni() = default; + +void PeerConnectionObserverJni::OnIceCandidate( + const IceCandidateInterface* candidate) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onIceCandidate(env, j_observer_global_, + NativeToJavaIceCandidate(env, *candidate)); +} + +void PeerConnectionObserverJni::OnIceCandidateError( + const std::string& address, + int port, + const std::string& url, + int error_code, + const std::string& error_text) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> event = Java_IceCandidateErrorEvent_Constructor( + env, NativeToJavaString(env, address), port, NativeToJavaString(env, url), + error_code, NativeToJavaString(env, error_text)); + Java_Observer_onIceCandidateError(env, j_observer_global_, event); +} + +void PeerConnectionObserverJni::OnIceCandidatesRemoved( + const std::vector<cricket::Candidate>& candidates) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onIceCandidatesRemoved( + env, j_observer_global_, NativeToJavaCandidateArray(env, candidates)); +} + +void PeerConnectionObserverJni::OnSignalingChange( + PeerConnectionInterface::SignalingState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onSignalingChange( + env, j_observer_global_, + Java_SignalingState_fromNativeIndex(env, new_state)); +} + +void PeerConnectionObserverJni::OnIceConnectionChange( + PeerConnectionInterface::IceConnectionState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onIceConnectionChange( + env, j_observer_global_, + Java_IceConnectionState_fromNativeIndex(env, new_state)); +} + +void 
PeerConnectionObserverJni::OnStandardizedIceConnectionChange( + PeerConnectionInterface::IceConnectionState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onStandardizedIceConnectionChange( + env, j_observer_global_, + Java_IceConnectionState_fromNativeIndex(env, new_state)); +} + +void PeerConnectionObserverJni::OnConnectionChange( + PeerConnectionInterface::PeerConnectionState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onConnectionChange(env, j_observer_global_, + Java_PeerConnectionState_fromNativeIndex( + env, static_cast<int>(new_state))); +} + +void PeerConnectionObserverJni::OnIceConnectionReceivingChange(bool receiving) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onIceConnectionReceivingChange(env, j_observer_global_, + receiving); +} + +void PeerConnectionObserverJni::OnIceSelectedCandidatePairChanged( + const cricket::CandidatePairChangeEvent& event) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onSelectedCandidatePairChanged( + env, j_observer_global_, NativeToJavaCandidatePairChange(env, event)); +} + +void PeerConnectionObserverJni::OnIceGatheringChange( + PeerConnectionInterface::IceGatheringState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onIceGatheringChange( + env, j_observer_global_, + Java_IceGatheringState_fromNativeIndex(env, new_state)); +} + +void PeerConnectionObserverJni::OnAddStream( + rtc::scoped_refptr<MediaStreamInterface> stream) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onAddStream( + env, j_observer_global_, + GetOrCreateJavaStream(env, stream).j_media_stream()); +} + +void PeerConnectionObserverJni::OnRemoveStream( + rtc::scoped_refptr<MediaStreamInterface> stream) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get()); + RTC_CHECK(it != remote_streams_.end()) + << "unexpected stream: " << stream.get(); + Java_Observer_onRemoveStream(env, j_observer_global_, + it->second.j_media_stream()); + remote_streams_.erase(it); +} + +void PeerConnectionObserverJni::OnDataChannel( + rtc::scoped_refptr<DataChannelInterface> channel) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onDataChannel(env, j_observer_global_, + WrapNativeDataChannel(env, channel)); +} + +void PeerConnectionObserverJni::OnRenegotiationNeeded() { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onRenegotiationNeeded(env, j_observer_global_); +} + +void PeerConnectionObserverJni::OnAddTrack( + rtc::scoped_refptr<RtpReceiverInterface> receiver, + const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> j_rtp_receiver = + NativeToJavaRtpReceiver(env, receiver); + rtp_receivers_.emplace_back(env, j_rtp_receiver); + + Java_Observer_onAddTrack(env, j_observer_global_, j_rtp_receiver, + NativeToJavaMediaStreamArray(env, streams)); +} + +void PeerConnectionObserverJni::OnRemoveTrack( + rtc::scoped_refptr<RtpReceiverInterface> receiver) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> j_rtp_receiver = + NativeToJavaRtpReceiver(env, receiver); + rtp_receivers_.emplace_back(env, j_rtp_receiver); + + Java_Observer_onRemoveTrack(env, j_observer_global_, j_rtp_receiver); +} + +void PeerConnectionObserverJni::OnTrack( + rtc::scoped_refptr<RtpTransceiverInterface> transceiver) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + 
ScopedJavaLocalRef<jobject> j_rtp_transceiver = + NativeToJavaRtpTransceiver(env, transceiver); + rtp_transceivers_.emplace_back(env, j_rtp_transceiver); + + Java_Observer_onTrack(env, j_observer_global_, j_rtp_transceiver); +} + +// If the NativeToJavaStreamsMap contains the stream, return it. +// Otherwise, create a new Java MediaStream. +JavaMediaStream& PeerConnectionObserverJni::GetOrCreateJavaStream( + JNIEnv* env, + const rtc::scoped_refptr<MediaStreamInterface>& stream) { + NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get()); + if (it == remote_streams_.end()) { + it = remote_streams_ + .emplace(std::piecewise_construct, + std::forward_as_tuple(stream.get()), + std::forward_as_tuple(env, stream)) + .first; + } + return it->second; +} + +ScopedJavaLocalRef<jobjectArray> +PeerConnectionObserverJni::NativeToJavaMediaStreamArray( + JNIEnv* jni, + const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) { + return NativeToJavaObjectArray( + jni, streams, GetMediaStreamClass(jni), + [this](JNIEnv* env, rtc::scoped_refptr<MediaStreamInterface> stream) + -> const ScopedJavaGlobalRef<jobject>& { + return GetOrCreateJavaStream(env, stream).j_media_stream(); + }); +} + +OwnedPeerConnection::OwnedPeerConnection( + rtc::scoped_refptr<PeerConnectionInterface> peer_connection, + std::unique_ptr<PeerConnectionObserver> observer) + : OwnedPeerConnection(peer_connection, + std::move(observer), + nullptr /* constraints */) {} + +OwnedPeerConnection::OwnedPeerConnection( + rtc::scoped_refptr<PeerConnectionInterface> peer_connection, + std::unique_ptr<PeerConnectionObserver> observer, + std::unique_ptr<MediaConstraints> constraints) + : peer_connection_(peer_connection), + observer_(std::move(observer)), + constraints_(std::move(constraints)) {} + +OwnedPeerConnection::~OwnedPeerConnection() { + // Ensure that PeerConnection is destroyed before the observer. + peer_connection_ = nullptr; +} + +static jlong JNI_PeerConnection_CreatePeerConnectionObserver( + JNIEnv* jni, + const JavaParamRef<jobject>& j_observer) { + return jlongFromPointer(new PeerConnectionObserverJni(jni, j_observer)); +} + +static void JNI_PeerConnection_FreeOwnedPeerConnection(JNIEnv*, jlong j_p) { + delete reinterpret_cast<OwnedPeerConnection*>(j_p); +} + +static jlong JNI_PeerConnection_GetNativePeerConnection( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + return jlongFromPointer(ExtractNativePC(jni, j_pc)); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetLocalDescription( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + PeerConnectionInterface* pc = ExtractNativePC(jni, j_pc); + // It's only safe to operate on SessionDescriptionInterface on the + // signaling thread, but `jni` may only be used on the current thread, so we + // must do this odd dance. + std::string sdp; + std::string type; + pc->signaling_thread()->Invoke<void>(RTC_FROM_HERE, [pc, &sdp, &type] { + const SessionDescriptionInterface* desc = pc->local_description(); + if (desc) { + RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; + type = desc->type(); + } + }); + return sdp.empty() ? 
nullptr : NativeToJavaSessionDescription(jni, sdp, type); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetRemoteDescription( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + PeerConnectionInterface* pc = ExtractNativePC(jni, j_pc); + // It's only safe to operate on SessionDescriptionInterface on the + // signaling thread, but `jni` may only be used on the current thread, so we + // must do this odd dance. + std::string sdp; + std::string type; + pc->signaling_thread()->Invoke<void>(RTC_FROM_HERE, [pc, &sdp, &type] { + const SessionDescriptionInterface* desc = pc->remote_description(); + if (desc) { + RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; + type = desc->type(); + } + }); + return sdp.empty() ? nullptr : NativeToJavaSessionDescription(jni, sdp, type); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetCertificate( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + const PeerConnectionInterface::RTCConfiguration rtc_config = + ExtractNativePC(jni, j_pc)->GetConfiguration(); + rtc::scoped_refptr<rtc::RTCCertificate> certificate = + rtc_config.certificates[0]; + return NativeToJavaRTCCertificatePEM(jni, certificate->ToPEM()); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_CreateDataChannel( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jstring>& j_label, + const JavaParamRef<jobject>& j_init) { + DataChannelInit init = JavaToNativeDataChannelInit(jni, j_init); + auto result = ExtractNativePC(jni, j_pc)->CreateDataChannelOrError( + JavaToNativeString(jni, j_label), &init); + if (!result.ok()) { + return WrapNativeDataChannel(jni, nullptr); + } + return WrapNativeDataChannel(jni, result.MoveValue()); +} + +static void JNI_PeerConnection_CreateOffer( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jobject>& j_observer, + const JavaParamRef<jobject>& j_constraints) { + std::unique_ptr<MediaConstraints> constraints = + JavaToNativeMediaConstraints(jni, j_constraints); + auto observer = rtc::make_ref_counted<CreateSdpObserverJni>( + jni, j_observer, std::move(constraints)); + PeerConnectionInterface::RTCOfferAnswerOptions options; + CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options); + ExtractNativePC(jni, j_pc)->CreateOffer(observer.get(), options); +} + +static void JNI_PeerConnection_CreateAnswer( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jobject>& j_observer, + const JavaParamRef<jobject>& j_constraints) { + std::unique_ptr<MediaConstraints> constraints = + JavaToNativeMediaConstraints(jni, j_constraints); + auto observer = rtc::make_ref_counted<CreateSdpObserverJni>( + jni, j_observer, std::move(constraints)); + PeerConnectionInterface::RTCOfferAnswerOptions options; + CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options); + ExtractNativePC(jni, j_pc)->CreateAnswer(observer.get(), options); +} + +static void JNI_PeerConnection_SetLocalDescriptionAutomatically( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jobject>& j_observer) { + auto observer = + rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer); + ExtractNativePC(jni, j_pc)->SetLocalDescription(observer); +} + +static void JNI_PeerConnection_SetLocalDescription( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jobject>& j_observer, + const JavaParamRef<jobject>& j_sdp) { + auto observer = + rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer); + ExtractNativePC(jni, 
j_pc)->SetLocalDescription(
+ JavaToNativeSessionDescription(jni, j_sdp), observer);
+}
+
+static void JNI_PeerConnection_SetRemoteDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_sdp) {
+ auto observer =
+ rtc::make_ref_counted<SetRemoteSdpObserverJni>(jni, j_observer);
+ ExtractNativePC(jni, j_pc)->SetRemoteDescription(
+ JavaToNativeSessionDescription(jni, j_sdp), observer);
+}
+
+static void JNI_PeerConnection_RestartIce(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ ExtractNativePC(jni, j_pc)->RestartIce();
+}
+
+static void JNI_PeerConnection_SetAudioPlayout(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jboolean playout) {
+ ExtractNativePC(jni, j_pc)->SetAudioPlayout(playout);
+}
+
+static void JNI_PeerConnection_SetAudioRecording(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jboolean recording) {
+ ExtractNativePC(jni, j_pc)->SetAudioRecording(recording);
+}
+
+static jboolean JNI_PeerConnection_SetConfiguration(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_rtc_config) {
+ // The constraints stored in the OwnedPeerConnection object need to be
+ // merged into the RTCConfiguration again.
+ OwnedPeerConnection* owned_pc = reinterpret_cast<OwnedPeerConnection*>(
+ Java_PeerConnection_getNativeOwnedPeerConnection(jni, j_pc));
+ PeerConnectionInterface::RTCConfiguration rtc_config(
+ PeerConnectionInterface::RTCConfigurationType::kAggressive);
+ JavaToNativeRTCConfiguration(jni, j_rtc_config, &rtc_config);
+ if (owned_pc->constraints()) {
+ CopyConstraintsIntoRtcConfiguration(owned_pc->constraints(), &rtc_config);
+ }
+ return owned_pc->pc()->SetConfiguration(rtc_config).ok();
+}
+
+static jboolean JNI_PeerConnection_AddIceCandidate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_sdp_mid,
+ jint j_sdp_mline_index,
+ const JavaParamRef<jstring>& j_candidate_sdp) {
+ std::string sdp_mid = JavaToNativeString(jni, j_sdp_mid);
+ std::string sdp = JavaToNativeString(jni, j_candidate_sdp);
+ std::unique_ptr<IceCandidateInterface> candidate(
+ CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr));
+ return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
+}
+
+static void JNI_PeerConnection_AddIceCandidateWithObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_sdp_mid,
+ jint j_sdp_mline_index,
+ const JavaParamRef<jstring>& j_candidate_sdp,
+ const JavaParamRef<jobject>& j_observer) {
+ std::string sdp_mid = JavaToNativeString(jni, j_sdp_mid);
+ std::string sdp = JavaToNativeString(jni, j_candidate_sdp);
+ std::unique_ptr<IceCandidateInterface> candidate(
+ CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr));
+
+ rtc::scoped_refptr<AddIceCandidateObserverJni> observer(
+ new AddIceCandidateObserverJni(jni, j_observer));
+ ExtractNativePC(jni, j_pc)->AddIceCandidate(
+ std::move(candidate),
+ [observer](RTCError error) { observer->OnComplete(error); });
+}
+
+static jboolean JNI_PeerConnection_RemoveIceCandidates(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobjectArray>& j_candidates) {
+ std::vector<cricket::Candidate> candidates =
+ JavaToNativeVector<cricket::Candidate>(jni, j_candidates,
+ &JavaToNativeCandidate);
+ return ExtractNativePC(jni, j_pc)->RemoveIceCandidates(candidates);
+}
+
+static jboolean JNI_PeerConnection_AddLocalStream(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong
native_stream) { + return ExtractNativePC(jni, j_pc)->AddStream( + reinterpret_cast<MediaStreamInterface*>(native_stream)); +} + +static void JNI_PeerConnection_RemoveLocalStream( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + jlong native_stream) { + ExtractNativePC(jni, j_pc)->RemoveStream( + reinterpret_cast<MediaStreamInterface*>(native_stream)); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_CreateSender( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jstring>& j_kind, + const JavaParamRef<jstring>& j_stream_id) { + std::string kind = JavaToNativeString(jni, j_kind); + std::string stream_id = JavaToNativeString(jni, j_stream_id); + rtc::scoped_refptr<RtpSenderInterface> sender = + ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id); + return NativeToJavaRtpSender(jni, sender); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetSenders( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetSenders(), + &NativeToJavaRtpSender); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetReceivers( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetReceivers(), + &NativeToJavaRtpReceiver); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetTransceivers( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetTransceivers(), + &NativeToJavaRtpTransceiver); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTrack( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const jlong native_track, + const JavaParamRef<jobject>& j_stream_labels) { + RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> result = + ExtractNativePC(jni, j_pc)->AddTrack( + rtc::scoped_refptr<MediaStreamTrackInterface>( + reinterpret_cast<MediaStreamTrackInterface*>(native_track)), + JavaListToNativeVector<std::string, jstring>(jni, j_stream_labels, + &JavaToNativeString)); + if (!result.ok()) { + RTC_LOG(LS_ERROR) << "Failed to add track: " << result.error().message(); + return nullptr; + } else { + return NativeToJavaRtpSender(jni, result.MoveValue()); + } +} + +static jboolean JNI_PeerConnection_RemoveTrack( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + jlong native_sender) { + return ExtractNativePC(jni, j_pc) + ->RemoveTrackOrError(rtc::scoped_refptr<RtpSenderInterface>( + reinterpret_cast<RtpSenderInterface*>(native_sender))) + .ok(); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTransceiverWithTrack( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + jlong native_track, + const JavaParamRef<jobject>& j_init) { + RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result = + ExtractNativePC(jni, j_pc)->AddTransceiver( + rtc::scoped_refptr<MediaStreamTrackInterface>( + reinterpret_cast<MediaStreamTrackInterface*>(native_track)), + JavaToNativeRtpTransceiverInit(jni, j_init)); + if (!result.ok()) { + RTC_LOG(LS_ERROR) << "Failed to add transceiver: " + << result.error().message(); + return nullptr; + } else { + return NativeToJavaRtpTransceiver(jni, result.MoveValue()); + } +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTransceiverOfType( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jobject>& j_media_type, + const JavaParamRef<jobject>& j_init) { + RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result = + ExtractNativePC(jni, j_pc)->AddTransceiver( + 
JavaToNativeMediaType(jni, j_media_type), + JavaToNativeRtpTransceiverInit(jni, j_init)); + if (!result.ok()) { + RTC_LOG(LS_ERROR) << "Failed to add transceiver: " + << result.error().message(); + return nullptr; + } else { + return NativeToJavaRtpTransceiver(jni, result.MoveValue()); + } +} + +static jboolean JNI_PeerConnection_OldGetStats( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jobject>& j_observer, + jlong native_track) { + auto observer = rtc::make_ref_counted<StatsObserverJni>(jni, j_observer); + return ExtractNativePC(jni, j_pc)->GetStats( + observer.get(), + reinterpret_cast<MediaStreamTrackInterface*>(native_track), + PeerConnectionInterface::kStatsOutputLevelStandard); +} + +static void JNI_PeerConnection_NewGetStats( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jobject>& j_callback) { + auto callback = + rtc::make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback); + ExtractNativePC(jni, j_pc)->GetStats(callback.get()); +} + +static jboolean JNI_PeerConnection_SetBitrate( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + const JavaParamRef<jobject>& j_min, + const JavaParamRef<jobject>& j_current, + const JavaParamRef<jobject>& j_max) { + BitrateSettings params; + params.min_bitrate_bps = JavaToNativeOptionalInt(jni, j_min); + params.start_bitrate_bps = JavaToNativeOptionalInt(jni, j_current); + params.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max); + return ExtractNativePC(jni, j_pc)->SetBitrate(params).ok(); +} + +static jboolean JNI_PeerConnection_StartRtcEventLog( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc, + int file_descriptor, + int max_size_bytes) { + // TODO(eladalon): It would be better to not allow negative values into PC. + const size_t max_size = (max_size_bytes < 0) + ? 
RtcEventLog::kUnlimitedOutput + : rtc::saturated_cast<size_t>(max_size_bytes); + FILE* f = fdopen(file_descriptor, "wb"); + if (!f) { + close(file_descriptor); + return false; + } + return ExtractNativePC(jni, j_pc)->StartRtcEventLog( + std::make_unique<RtcEventLogOutputFile>(f, max_size)); +} + +static void JNI_PeerConnection_StopRtcEventLog( + JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + ExtractNativePC(jni, j_pc)->StopRtcEventLog(); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_SignalingState( + JNIEnv* env, + const JavaParamRef<jobject>& j_pc) { + return Java_SignalingState_fromNativeIndex( + env, ExtractNativePC(env, j_pc)->signaling_state()); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_IceConnectionState( + JNIEnv* env, + const JavaParamRef<jobject>& j_pc) { + return Java_IceConnectionState_fromNativeIndex( + env, ExtractNativePC(env, j_pc)->ice_connection_state()); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_ConnectionState( + JNIEnv* env, + const JavaParamRef<jobject>& j_pc) { + return Java_PeerConnectionState_fromNativeIndex( + env, + static_cast<int>(ExtractNativePC(env, j_pc)->peer_connection_state())); +} + +static ScopedJavaLocalRef<jobject> JNI_PeerConnection_IceGatheringState( + JNIEnv* env, + const JavaParamRef<jobject>& j_pc) { + return Java_IceGatheringState_fromNativeIndex( + env, ExtractNativePC(env, j_pc)->ice_gathering_state()); +} + +static void JNI_PeerConnection_Close(JNIEnv* jni, + const JavaParamRef<jobject>& j_pc) { + ExtractNativePC(jni, j_pc)->Close(); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h new file mode 100644 index 0000000000..9976e8e4f5 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h @@ -0,0 +1,141 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_ +#define SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_ + +#include <map> +#include <memory> +#include <vector> + +#include "api/peer_connection_interface.h" +#include "pc/media_stream_observer.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/media_constraints.h" +#include "sdk/android/src/jni/pc/media_stream.h" +#include "sdk/android/src/jni/pc/rtp_receiver.h" +#include "sdk/android/src/jni/pc/rtp_transceiver.h" + +namespace webrtc { +namespace jni { + +void JavaToNativeRTCConfiguration( + JNIEnv* jni, + const JavaRef<jobject>& j_rtc_config, + PeerConnectionInterface::RTCConfiguration* rtc_config); + +rtc::KeyType GetRtcConfigKeyType(JNIEnv* env, + const JavaRef<jobject>& j_rtc_config); + +ScopedJavaLocalRef<jobject> NativeToJavaAdapterType(JNIEnv* env, + int adapterType); + +// Adapter between the C++ PeerConnectionObserver interface and the Java +// PeerConnection.Observer interface. Wraps an instance of the Java interface +// and dispatches C++ callbacks to Java. 
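Before the class declarations that follow, a brief sketch of how the adapter declared below and the OwnedPeerConnection wrapper (also declared below) are typically combined when Java creates a peer connection. `CreateOwnedPeerConnectionForJava` is a hypothetical helper written for illustration only; the types and factory calls it uses are the real APIs, but the actual wiring lives in the peer connection factory glue code.

// Hypothetical wiring sketch, not part of this patch.
jlong CreateOwnedPeerConnectionForJava(
    JNIEnv* jni,
    webrtc::PeerConnectionFactoryInterface* factory,
    const webrtc::PeerConnectionInterface::RTCConfiguration& rtc_config,
    std::unique_ptr<webrtc::PeerConnectionObserver> observer) {
  // PeerConnection only borrows the observer, so the observer must outlive it.
  webrtc::PeerConnectionDependencies deps(observer.get());
  auto result =
      factory->CreatePeerConnectionOrError(rtc_config, std::move(deps));
  if (!result.ok())
    return 0;  // Null handle for Java-land.
  // OwnedPeerConnection ties the two lifetimes together; the returned jlong
  // is held by the Java PeerConnection and released via
  // JNI_PeerConnection_FreeOwnedPeerConnection().
  return webrtc::jni::jlongFromPointer(new webrtc::jni::OwnedPeerConnection(
      result.MoveValue(), std::move(observer)));
}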
+class PeerConnectionObserverJni : public PeerConnectionObserver {
+ public:
+ PeerConnectionObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer);
+ ~PeerConnectionObserverJni() override;
+
+ // Implementation of the PeerConnectionObserver interface, which propagates
+ // the callbacks to the Java observer.
+ void OnIceCandidate(const IceCandidateInterface* candidate) override;
+ void OnIceCandidateError(const std::string& address,
+ int port,
+ const std::string& url,
+ int error_code,
+ const std::string& error_text) override;
+
+ void OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) override;
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override;
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnConnectionChange(
+ PeerConnectionInterface::PeerConnectionState new_state) override;
+ void OnIceConnectionReceivingChange(bool receiving) override;
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override;
+ void OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent& event) override;
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+ void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+ void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> channel) override;
+ void OnRenegotiationNeeded() override;
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override;
+ void OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
+
+ private:
+ typedef std::map<MediaStreamInterface*, JavaMediaStream>
+ NativeToJavaStreamsMap;
+ typedef std::map<MediaStreamTrackInterface*, RtpReceiverInterface*>
+ NativeMediaStreamTrackToNativeRtpReceiver;
+
+ // If the NativeToJavaStreamsMap contains the stream, return it.
+ // Otherwise, create a new Java MediaStream. Returns a global jobject.
+ JavaMediaStream& GetOrCreateJavaStream(
+ JNIEnv* env,
+ const rtc::scoped_refptr<MediaStreamInterface>& stream);
+
+ // Converts an array of streams, creating or re-using Java streams as necessary.
+ ScopedJavaLocalRef<jobjectArray> NativeToJavaMediaStreamArray(
+ JNIEnv* jni,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
+
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+
+ // C++ -> Java remote streams.
+ NativeToJavaStreamsMap remote_streams_;
+ std::vector<JavaRtpReceiverGlobalOwner> rtp_receivers_;
+ // Holds a reference to the Java transceivers given to the OnTrack
+ // callback, so that the shared ownership by the Java object will be
+ // properly disposed.
+ std::vector<JavaRtpTransceiverGlobalOwner> rtp_transceivers_;
+};
+
+// PeerConnection doesn't take ownership of the observer. In the Java API, we
+// don't want the client to have to manually dispose of the observer, so this
+// wrapper class owns the observer instead.
+//
+// It also stores a reference to the deprecated PeerConnection constraints for
+// now.
+class OwnedPeerConnection {
+ public:
+ OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer);
+ // Deprecated. PC constraints are deprecated.
+ OwnedPeerConnection( + rtc::scoped_refptr<PeerConnectionInterface> peer_connection, + std::unique_ptr<PeerConnectionObserver> observer, + std::unique_ptr<MediaConstraints> constraints); + ~OwnedPeerConnection(); + + PeerConnectionInterface* pc() const { return peer_connection_.get(); } + const MediaConstraints* constraints() const { return constraints_.get(); } + + private: + rtc::scoped_refptr<PeerConnectionInterface> peer_connection_; + std::unique_ptr<PeerConnectionObserver> observer_; + std::unique_ptr<MediaConstraints> constraints_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc new file mode 100644 index 0000000000..fafcad3caf --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -0,0 +1,550 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/peer_connection_factory.h" + +#include <memory> +#include <utility> + +#include "absl/memory/memory.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "media/base/media_engine.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/utility/include/jvm_android.h" +// We don't depend on the audio processing module implementation. +// The user may pass in a nullptr. +#include "api/call/call_factory_interface.h" +#include "api/rtc_event_log/rtc_event_log_factory.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "media/engine/webrtc_media_engine.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/event_tracer.h" +#include "rtc_base/physical_socket_server.h" +#include "rtc_base/thread.h" +#include "sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/native_api/stacktrace/stacktrace.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/logging/log_sink.h" +#include "sdk/android/src/jni/pc/android_network_monitor.h" +#include "sdk/android/src/jni/pc/audio.h" +#include "sdk/android/src/jni/pc/ice_candidate.h" +#include "sdk/android/src/jni/pc/owned_factory_and_threads.h" +#include "sdk/android/src/jni/pc/peer_connection.h" +#include "sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h" +#include "sdk/android/src/jni/pc/video.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { +namespace jni { + +namespace { + +// Take ownership of the jlong reference and cast it into an rtc::scoped_refptr. 
+template <typename T>
+rtc::scoped_refptr<T> TakeOwnershipOfRefPtr(jlong j_pointer) {
+  T* ptr = reinterpret_cast<T*>(j_pointer);
+  rtc::scoped_refptr<T> refptr;
+  refptr.swap(&ptr);
+  return refptr;
+}
+
+// Take ownership of the jlong reference and cast it into a std::unique_ptr.
+template <typename T>
+std::unique_ptr<T> TakeOwnershipOfUniquePtr(jlong native_pointer) {
+  return std::unique_ptr<T>(reinterpret_cast<T*>(native_pointer));
+}
+
+typedef void (*JavaMethodPointer)(JNIEnv*, const JavaRef<jobject>&);
+
+// Post a message on the given thread that will call the Java method on the
+// given Java object.
+void PostJavaCallback(JNIEnv* env,
+                      rtc::Thread* queue,
+                      const rtc::Location& posted_from,
+                      const JavaRef<jobject>& j_object,
+                      JavaMethodPointer java_method_pointer) {
+  // One-off message handler that calls the Java method on the specified Java
+  // object before deleting itself.
+  class JavaAsyncCallback : public rtc::MessageHandler {
+   public:
+    JavaAsyncCallback(JNIEnv* env,
+                      const JavaRef<jobject>& j_object,
+                      JavaMethodPointer java_method_pointer)
+        : j_object_(env, j_object), java_method_pointer_(java_method_pointer) {}
+
+    void OnMessage(rtc::Message*) override {
+      java_method_pointer_(AttachCurrentThreadIfNeeded(), j_object_);
+      // The message has been delivered, clean up after ourselves.
+      delete this;
+    }
+
+   private:
+    ScopedJavaGlobalRef<jobject> j_object_;
+    JavaMethodPointer java_method_pointer_;
+  };
+
+  queue->Post(posted_from,
+              new JavaAsyncCallback(env, j_object, java_method_pointer));
+}
+
+absl::optional<PeerConnectionFactoryInterface::Options>
+JavaToNativePeerConnectionFactoryOptions(JNIEnv* jni,
+                                         const JavaRef<jobject>& j_options) {
+  if (j_options.is_null())
+    return absl::nullopt;
+
+  PeerConnectionFactoryInterface::Options native_options;
+
+  // This doesn't necessarily match the C++ version of this struct; feel free
+  // to add more parameters as necessary.
+  native_options.network_ignore_mask =
+      Java_Options_getNetworkIgnoreMask(jni, j_options);
+  native_options.disable_encryption =
+      Java_Options_getDisableEncryption(jni, j_options);
+  native_options.disable_network_monitor =
+      Java_Options_getDisableNetworkMonitor(jni, j_options);
+
+  return native_options;
+}
+
+// Place static objects into a container that gets leaked so we avoid a
+// non-trivial destructor at shutdown.
+struct StaticObjectContainer {
+  // Field trials initialization string.
+  std::unique_ptr<std::string> field_trials_init_string;
+  // Set in PeerConnectionFactory_InjectLoggable().
+ std::unique_ptr<JNILogSink> jni_log_sink; +}; + +StaticObjectContainer& GetStaticObjects() { + static StaticObjectContainer* static_objects = new StaticObjectContainer(); + return *static_objects; +} + +ScopedJavaLocalRef<jobject> NativeToScopedJavaPeerConnectionFactory( + JNIEnv* env, + rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf, + std::unique_ptr<rtc::SocketFactory> socket_factory, + std::unique_ptr<rtc::Thread> network_thread, + std::unique_ptr<rtc::Thread> worker_thread, + std::unique_ptr<rtc::Thread> signaling_thread) { + OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads( + std::move(socket_factory), std::move(network_thread), + std::move(worker_thread), std::move(signaling_thread), pcf); + + ScopedJavaLocalRef<jobject> j_pcf = Java_PeerConnectionFactory_Constructor( + env, NativeToJavaPointer(owned_factory)); + + PostJavaCallback(env, owned_factory->network_thread(), RTC_FROM_HERE, j_pcf, + &Java_PeerConnectionFactory_onNetworkThreadReady); + PostJavaCallback(env, owned_factory->worker_thread(), RTC_FROM_HERE, j_pcf, + &Java_PeerConnectionFactory_onWorkerThreadReady); + PostJavaCallback(env, owned_factory->signaling_thread(), RTC_FROM_HERE, j_pcf, + &Java_PeerConnectionFactory_onSignalingThreadReady); + + return j_pcf; +} + +PeerConnectionFactoryInterface* PeerConnectionFactoryFromJava(jlong j_p) { + return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory(); +} + +} // namespace + +// Note: Some of the video-specific PeerConnectionFactory methods are +// implemented in "video.cc". This is done so that if an application +// doesn't need video support, it can just link with "null_video.cc" +// instead of "video.cc", which doesn't bring in the video-specific +// dependencies. + +// Set in PeerConnectionFactory_initializeAndroidGlobals(). 
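One detail worth keeping in mind about the pair above: the jlong stored in the Java PeerConnectionFactory points at the OwnedFactoryAndThreads wrapper, not at the factory itself, which is why PeerConnectionFactoryFromJava() unpacks the wrapper first. A sketch of the two views of one handle (hypothetical caller, real helpers):

```cpp
void FactoryHandleSketch(jlong j_p) {
  // The handle always unpacks to the owning wrapper first...
  auto* owned = reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
  // ...and the factory is only borrowed from it; the wrapper stays the owner.
  PeerConnectionFactoryInterface* pcf = owned->factory();
  (void)pcf;
}
```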
+static bool factory_static_initialized = false;
+
+jobject NativeToJavaPeerConnectionFactory(
+    JNIEnv* jni,
+    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+    std::unique_ptr<rtc::SocketFactory> socket_factory,
+    std::unique_ptr<rtc::Thread> network_thread,
+    std::unique_ptr<rtc::Thread> worker_thread,
+    std::unique_ptr<rtc::Thread> signaling_thread) {
+  return NativeToScopedJavaPeerConnectionFactory(
+             jni, pcf, std::move(socket_factory), std::move(network_thread),
+             std::move(worker_thread), std::move(signaling_thread))
+      .Release();
+}
+
+static void JNI_PeerConnectionFactory_InitializeAndroidGlobals(JNIEnv* jni) {
+  if (!factory_static_initialized) {
+    JVM::Initialize(GetJVM());
+    factory_static_initialized = true;
+  }
+}
+
+static void JNI_PeerConnectionFactory_InitializeFieldTrials(
+    JNIEnv* jni,
+    const JavaParamRef<jstring>& j_trials_init_string) {
+  std::unique_ptr<std::string>& field_trials_init_string =
+      GetStaticObjects().field_trials_init_string;
+
+  if (j_trials_init_string.is_null()) {
+    field_trials_init_string = nullptr;
+    field_trial::InitFieldTrialsFromString(nullptr);
+    return;
+  }
+  field_trials_init_string = std::make_unique<std::string>(
+      JavaToNativeString(jni, j_trials_init_string));
+  RTC_LOG(LS_INFO) << "initializeFieldTrials: " << *field_trials_init_string;
+  field_trial::InitFieldTrialsFromString(field_trials_init_string->c_str());
+}
+
+static void JNI_PeerConnectionFactory_InitializeInternalTracer(JNIEnv* jni) {
+  rtc::tracing::SetupInternalTracer();
+}
+
+static ScopedJavaLocalRef<jstring>
+JNI_PeerConnectionFactory_FindFieldTrialsFullName(
+    JNIEnv* jni,
+    const JavaParamRef<jstring>& j_name) {
+  return NativeToJavaString(
+      jni, field_trial::FindFullName(JavaToStdString(jni, j_name)));
+}
+
+static jboolean JNI_PeerConnectionFactory_StartInternalTracingCapture(
+    JNIEnv* jni,
+    const JavaParamRef<jstring>& j_event_tracing_filename) {
+  if (j_event_tracing_filename.is_null())
+    return false;
+
+  const char* init_string =
+      jni->GetStringUTFChars(j_event_tracing_filename.obj(), nullptr);
+  RTC_LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+  bool ret = rtc::tracing::StartInternalCapture(init_string);
+  jni->ReleaseStringUTFChars(j_event_tracing_filename.obj(), init_string);
+  return ret;
+}
+
+static void JNI_PeerConnectionFactory_StopInternalTracingCapture(JNIEnv* jni) {
+  rtc::tracing::StopInternalCapture();
+}
+
+static void JNI_PeerConnectionFactory_ShutdownInternalTracer(JNIEnv* jni) {
+  rtc::tracing::ShutdownInternalTracer();
+}
+
+// The following parameters are optional:
+// `audio_device_module`, `jencoder_factory`, `jdecoder_factory`,
+// `audio_processor`, `fec_controller_factory`,
+// `network_state_predictor_factory`, `neteq_factory`.
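The optionality noted above is encoded as a zero handle: a 0 jlong reinterpret_casts to nullptr, so the TakeOwnershipOf* helpers yield empty smart pointers and the corresponding dependency is simply left unset. A short sketch (`OptionalHandleSketch` is a hypothetical caller):

```cpp
void OptionalHandleSketch(jlong native_neteq_factory /* 0 when Java passed none */) {
  std::unique_ptr<NetEqFactory> neteq =
      TakeOwnershipOfUniquePtr<NetEqFactory>(native_neteq_factory);
  // Holds whenever the Java side passed 0: the dependency stays unset and
  // WebRTC falls back to its defaults.
  RTC_DCHECK(neteq == nullptr);
}
```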
+ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
+    JNIEnv* jni,
+    const JavaParamRef<jobject>& jcontext,
+    const JavaParamRef<jobject>& joptions,
+    rtc::scoped_refptr<AudioDeviceModule> audio_device_module,
+    rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
+    rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
+    const JavaParamRef<jobject>& jencoder_factory,
+    const JavaParamRef<jobject>& jdecoder_factory,
+    rtc::scoped_refptr<AudioProcessing> audio_processor,
+    std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory,
+    std::unique_ptr<NetworkControllerFactoryInterface>
+        network_controller_factory,
+    std::unique_ptr<NetworkStatePredictorFactoryInterface>
+        network_state_predictor_factory,
+    std::unique_ptr<NetEqFactory> neteq_factory) {
+  // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+  // ThreadManager only WrapCurrentThread()s the thread where it is first
+  // created. Since the semantics around when auto-wrapping happens in
+  // webrtc/rtc_base/ are convoluted, we simply wrap here to avoid having to
+  // think about ramifications of auto-wrapping there.
+  rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+  auto socket_server = std::make_unique<rtc::PhysicalSocketServer>();
+  auto network_thread = std::make_unique<rtc::Thread>(socket_server.get());
+  network_thread->SetName("network_thread", nullptr);
+  RTC_CHECK(network_thread->Start()) << "Failed to start thread";
+
+  std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
+  worker_thread->SetName("worker_thread", nullptr);
+  RTC_CHECK(worker_thread->Start()) << "Failed to start thread";
+
+  std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
+  signaling_thread->SetName("signaling_thread", nullptr);
+  RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";
+
+  const absl::optional<PeerConnectionFactoryInterface::Options> options =
+      JavaToNativePeerConnectionFactoryOptions(jni, joptions);
+
+  PeerConnectionFactoryDependencies dependencies;
+  // TODO(bugs.webrtc.org/13145): Also add socket_server.get() to the
+  // dependencies.
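+  // Note that the socket server created above must outlive `network_thread`,
+  // which is why it is moved into OwnedFactoryAndThreads at the bottom of
+  // this function rather than destroyed when this scope exits.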
+ dependencies.network_thread = network_thread.get(); + dependencies.worker_thread = worker_thread.get(); + dependencies.signaling_thread = signaling_thread.get(); + dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); + dependencies.call_factory = CreateCallFactory(); + dependencies.event_log_factory = std::make_unique<RtcEventLogFactory>( + dependencies.task_queue_factory.get()); + dependencies.fec_controller_factory = std::move(fec_controller_factory); + dependencies.network_controller_factory = + std::move(network_controller_factory); + dependencies.network_state_predictor_factory = + std::move(network_state_predictor_factory); + dependencies.neteq_factory = std::move(neteq_factory); + if (!(options && options->disable_network_monitor)) { + dependencies.network_monitor_factory = + std::make_unique<AndroidNetworkMonitorFactory>(); + } + + cricket::MediaEngineDependencies media_dependencies; + media_dependencies.task_queue_factory = dependencies.task_queue_factory.get(); + media_dependencies.adm = std::move(audio_device_module); + media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory); + media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory); + media_dependencies.audio_processing = std::move(audio_processor); + media_dependencies.video_encoder_factory = + absl::WrapUnique(CreateVideoEncoderFactory(jni, jencoder_factory)); + media_dependencies.video_decoder_factory = + absl::WrapUnique(CreateVideoDecoderFactory(jni, jdecoder_factory)); + dependencies.media_engine = + cricket::CreateMediaEngine(std::move(media_dependencies)); + + rtc::scoped_refptr<PeerConnectionFactoryInterface> factory = + CreateModularPeerConnectionFactory(std::move(dependencies)); + + RTC_CHECK(factory) << "Failed to create the peer connection factory; " + "WebRTC/libjingle init likely failed on this device"; + // TODO(honghaiz): Maybe put the options as the argument of + // CreatePeerConnectionFactory. + if (options) + factory->SetOptions(*options); + + return NativeToScopedJavaPeerConnectionFactory( + jni, factory, std::move(socket_server), std::move(network_thread), + std::move(worker_thread), std::move(signaling_thread)); +} + +static ScopedJavaLocalRef<jobject> +JNI_PeerConnectionFactory_CreatePeerConnectionFactory( + JNIEnv* jni, + const JavaParamRef<jobject>& jcontext, + const JavaParamRef<jobject>& joptions, + jlong native_audio_device_module, + jlong native_audio_encoder_factory, + jlong native_audio_decoder_factory, + const JavaParamRef<jobject>& jencoder_factory, + const JavaParamRef<jobject>& jdecoder_factory, + jlong native_audio_processor, + jlong native_fec_controller_factory, + jlong native_network_controller_factory, + jlong native_network_state_predictor_factory, + jlong native_neteq_factory) { + rtc::scoped_refptr<AudioProcessing> audio_processor( + reinterpret_cast<AudioProcessing*>(native_audio_processor)); + return CreatePeerConnectionFactoryForJava( + jni, jcontext, joptions, + rtc::scoped_refptr<AudioDeviceModule>( + reinterpret_cast<AudioDeviceModule*>(native_audio_device_module)), + TakeOwnershipOfRefPtr<AudioEncoderFactory>(native_audio_encoder_factory), + TakeOwnershipOfRefPtr<AudioDecoderFactory>(native_audio_decoder_factory), + jencoder_factory, jdecoder_factory, + audio_processor ? 
audio_processor : CreateAudioProcessing(), + TakeOwnershipOfUniquePtr<FecControllerFactoryInterface>( + native_fec_controller_factory), + TakeOwnershipOfUniquePtr<NetworkControllerFactoryInterface>( + native_network_controller_factory), + TakeOwnershipOfUniquePtr<NetworkStatePredictorFactoryInterface>( + native_network_state_predictor_factory), + TakeOwnershipOfUniquePtr<NetEqFactory>(native_neteq_factory)); +} + +static void JNI_PeerConnectionFactory_FreeFactory(JNIEnv*, + jlong j_p) { + delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p); + field_trial::InitFieldTrialsFromString(nullptr); + GetStaticObjects().field_trials_init_string = nullptr; +} + +static jlong JNI_PeerConnectionFactory_CreateLocalMediaStream( + JNIEnv* jni, + jlong native_factory, + const JavaParamRef<jstring>& label) { + rtc::scoped_refptr<MediaStreamInterface> stream( + PeerConnectionFactoryFromJava(native_factory) + ->CreateLocalMediaStream(JavaToStdString(jni, label))); + return jlongFromPointer(stream.release()); +} + +static jlong JNI_PeerConnectionFactory_CreateAudioSource( + JNIEnv* jni, + jlong native_factory, + const JavaParamRef<jobject>& j_constraints) { + std::unique_ptr<MediaConstraints> constraints = + JavaToNativeMediaConstraints(jni, j_constraints); + cricket::AudioOptions options; + CopyConstraintsIntoAudioOptions(constraints.get(), &options); + rtc::scoped_refptr<AudioSourceInterface> source( + PeerConnectionFactoryFromJava(native_factory) + ->CreateAudioSource(options)); + return jlongFromPointer(source.release()); +} + +jlong JNI_PeerConnectionFactory_CreateAudioTrack( + JNIEnv* jni, + jlong native_factory, + const JavaParamRef<jstring>& id, + jlong native_source) { + rtc::scoped_refptr<AudioTrackInterface> track( + PeerConnectionFactoryFromJava(native_factory) + ->CreateAudioTrack( + JavaToStdString(jni, id), + reinterpret_cast<AudioSourceInterface*>(native_source))); + return jlongFromPointer(track.release()); +} + +static jboolean JNI_PeerConnectionFactory_StartAecDump( + JNIEnv* jni, + jlong native_factory, + jint file_descriptor, + jint filesize_limit_bytes) { + FILE* f = fdopen(file_descriptor, "wb"); + if (!f) { + close(file_descriptor); + return false; + } + + return PeerConnectionFactoryFromJava(native_factory) + ->StartAecDump(f, filesize_limit_bytes); +} + +static void JNI_PeerConnectionFactory_StopAecDump(JNIEnv* jni, + jlong native_factory) { + PeerConnectionFactoryFromJava(native_factory)->StopAecDump(); +} + +static jlong JNI_PeerConnectionFactory_CreatePeerConnection( + JNIEnv* jni, + jlong factory, + const JavaParamRef<jobject>& j_rtc_config, + const JavaParamRef<jobject>& j_constraints, + jlong observer_p, + const JavaParamRef<jobject>& j_sslCertificateVerifier) { + std::unique_ptr<PeerConnectionObserver> observer( + reinterpret_cast<PeerConnectionObserver*>(observer_p)); + + PeerConnectionInterface::RTCConfiguration rtc_config( + PeerConnectionInterface::RTCConfigurationType::kAggressive); + JavaToNativeRTCConfiguration(jni, j_rtc_config, &rtc_config); + + if (rtc_config.certificates.empty()) { + // Generate non-default certificate. + rtc::KeyType key_type = GetRtcConfigKeyType(jni, j_rtc_config); + if (key_type != rtc::KT_DEFAULT) { + rtc::scoped_refptr<rtc::RTCCertificate> certificate = + rtc::RTCCertificateGenerator::GenerateCertificate( + rtc::KeyParams(key_type), absl::nullopt); + if (!certificate) { + RTC_LOG(LS_ERROR) << "Failed to generate certificate. 
KeyType: " + << key_type; + return 0; + } + rtc_config.certificates.push_back(certificate); + } + } + + std::unique_ptr<MediaConstraints> constraints; + if (!j_constraints.is_null()) { + constraints = JavaToNativeMediaConstraints(jni, j_constraints); + CopyConstraintsIntoRtcConfiguration(constraints.get(), &rtc_config); + } + + PeerConnectionDependencies peer_connection_dependencies(observer.get()); + if (!j_sslCertificateVerifier.is_null()) { + peer_connection_dependencies.tls_cert_verifier = + std::make_unique<SSLCertificateVerifierWrapper>( + jni, j_sslCertificateVerifier); + } + + auto result = + PeerConnectionFactoryFromJava(factory)->CreatePeerConnectionOrError( + rtc_config, std::move(peer_connection_dependencies)); + if (!result.ok()) + return 0; + + return jlongFromPointer(new OwnedPeerConnection( + result.MoveValue(), std::move(observer), std::move(constraints))); +} + +static jlong JNI_PeerConnectionFactory_CreateVideoSource( + JNIEnv* jni, + jlong native_factory, + jboolean is_screencast, + jboolean align_timestamps) { + OwnedFactoryAndThreads* factory = + reinterpret_cast<OwnedFactoryAndThreads*>(native_factory); + return jlongFromPointer(CreateVideoSource(jni, factory->signaling_thread(), + factory->worker_thread(), + is_screencast, align_timestamps)); +} + +static jlong JNI_PeerConnectionFactory_CreateVideoTrack( + JNIEnv* jni, + jlong native_factory, + const JavaParamRef<jstring>& id, + jlong native_source) { + rtc::scoped_refptr<VideoTrackInterface> track = + PeerConnectionFactoryFromJava(native_factory) + ->CreateVideoTrack( + JavaToStdString(jni, id), + reinterpret_cast<VideoTrackSourceInterface*>(native_source)); + return jlongFromPointer(track.release()); +} + +static jlong JNI_PeerConnectionFactory_GetNativePeerConnectionFactory( + JNIEnv* jni, + jlong native_factory) { + return jlongFromPointer(PeerConnectionFactoryFromJava(native_factory)); +} + +static void JNI_PeerConnectionFactory_InjectLoggable( + JNIEnv* jni, + const JavaParamRef<jobject>& j_logging, + jint nativeSeverity) { + std::unique_ptr<JNILogSink>& jni_log_sink = GetStaticObjects().jni_log_sink; + + // If there is already a LogSink, remove it from LogMessage. + if (jni_log_sink) { + rtc::LogMessage::RemoveLogToStream(jni_log_sink.get()); + } + jni_log_sink = std::make_unique<JNILogSink>(jni, j_logging); + rtc::LogMessage::AddLogToStream( + jni_log_sink.get(), static_cast<rtc::LoggingSeverity>(nativeSeverity)); + rtc::LogMessage::LogToDebug(rtc::LS_NONE); +} + +static void JNI_PeerConnectionFactory_DeleteLoggable(JNIEnv* jni) { + std::unique_ptr<JNILogSink>& jni_log_sink = GetStaticObjects().jni_log_sink; + + if (jni_log_sink) { + rtc::LogMessage::RemoveLogToStream(jni_log_sink.get()); + jni_log_sink.reset(); + } +} + +static void JNI_PeerConnectionFactory_PrintStackTrace(JNIEnv* env, jint tid) { + RTC_LOG(LS_WARNING) << StackTraceToString(GetStackTrace(tid)); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h new file mode 100644 index 0000000000..b5d5e5dcb7 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h @@ -0,0 +1,33 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

+#ifndef SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
+#define SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
+
+#include <jni.h>
+#include "api/peer_connection_interface.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace jni {
+
+// Creates a Java PeerConnectionFactory wrapping the specified `pcf`.
+jobject NativeToJavaPeerConnectionFactory(
+    JNIEnv* jni,
+    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+    std::unique_ptr<rtc::SocketFactory> socket_factory,
+    std::unique_ptr<rtc::Thread> network_thread,
+    std::unique_ptr<rtc::Thread> worker_thread,
+    std::unique_ptr<rtc::Thread> signaling_thread);
+
+}  // namespace jni
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc
new file mode 100644
index 0000000000..f305324ac8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtc_certificate.h"
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+
+#include "rtc_base/ref_count.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "sdk/android/generated_peerconnection_jni/RtcCertificatePem_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::RTCCertificatePEM JavaToNativeRTCCertificatePEM(
+    JNIEnv* jni,
+    const JavaRef<jobject>& j_rtc_certificate) {
+  ScopedJavaLocalRef<jstring> privatekey_field =
+      Java_RtcCertificatePem_getPrivateKey(jni, j_rtc_certificate);
+  ScopedJavaLocalRef<jstring> certificate_field =
+      Java_RtcCertificatePem_getCertificate(jni, j_rtc_certificate);
+  return rtc::RTCCertificatePEM(JavaToNativeString(jni, privatekey_field),
+                                JavaToNativeString(jni, certificate_field));
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRTCCertificatePEM(
+    JNIEnv* jni,
+    const rtc::RTCCertificatePEM& certificate) {
+  return Java_RtcCertificatePem_Constructor(
+      jni, NativeToJavaString(jni, certificate.private_key()),
+      NativeToJavaString(jni, certificate.certificate()));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_RtcCertificatePem_GenerateCertificate(
+    JNIEnv* jni,
+    const JavaParamRef<jobject>& j_key_type,
+    jlong j_expires) {
+  rtc::KeyType key_type = JavaToNativeKeyType(jni, j_key_type);
+  uint64_t expires = static_cast<uint64_t>(j_expires);
+  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+      rtc::RTCCertificateGenerator::GenerateCertificate(
+          rtc::KeyParams(key_type), expires);
+  rtc::RTCCertificatePEM pem = certificate->ToPEM();
+  return Java_RtcCertificatePem_Constructor(
+      jni, NativeToJavaString(jni, pem.private_key()),
+      NativeToJavaString(jni, pem.certificate()));
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git
a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h new file mode 100644 index 0000000000..91a413cd37 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h @@ -0,0 +1,33 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_ +#define SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_ + +#include "rtc_base/ref_count.h" +#include "rtc_base/rtc_certificate.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +rtc::RTCCertificatePEM JavaToNativeRTCCertificatePEM( + JNIEnv* jni, + const JavaRef<jobject>& j_rtc_certificate); + +ScopedJavaLocalRef<jobject> NativeToJavaRTCCertificatePEM( + JNIEnv* env, + const rtc::RTCCertificatePEM& certificate); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc new file mode 100644 index 0000000000..b8eae739f9 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc @@ -0,0 +1,161 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h" + +#include <string> +#include <vector> + +#include "rtc_base/string_encode.h" +#include "sdk/android/generated_external_classes_jni/BigInteger_jni.h" +#include "sdk/android/generated_peerconnection_jni/RTCStatsCollectorCallback_jni.h" +#include "sdk/android/generated_peerconnection_jni/RTCStatsReport_jni.h" +#include "sdk/android/generated_peerconnection_jni/RTCStats_jni.h" +#include "sdk/android/native_api/jni/java_types.h" + +namespace webrtc { +namespace jni { + +namespace { + +ScopedJavaLocalRef<jobject> NativeToJavaBigInteger(JNIEnv* env, uint64_t u) { + return JNI_BigInteger::Java_BigInteger_ConstructorJMBI_JLS( + env, NativeToJavaString(env, rtc::ToString(u))); +} + +ScopedJavaLocalRef<jobjectArray> NativeToJavaBigIntegerArray( + JNIEnv* env, + const std::vector<uint64_t>& container) { + return NativeToJavaObjectArray( + env, container, java_math_BigInteger_clazz(env), &NativeToJavaBigInteger); +} + +ScopedJavaLocalRef<jobject> MemberToJava( + JNIEnv* env, + const RTCStatsMemberInterface& member) { + switch (member.type()) { + case RTCStatsMemberInterface::kBool: + return NativeToJavaBoolean(env, *member.cast_to<RTCStatsMember<bool>>()); + + case RTCStatsMemberInterface::kInt32: + return NativeToJavaInteger(env, + *member.cast_to<RTCStatsMember<int32_t>>()); + + case RTCStatsMemberInterface::kUint32: + return NativeToJavaLong(env, *member.cast_to<RTCStatsMember<uint32_t>>()); + + case RTCStatsMemberInterface::kInt64: + return NativeToJavaLong(env, *member.cast_to<RTCStatsMember<int64_t>>()); + + case RTCStatsMemberInterface::kUint64: + return NativeToJavaBigInteger( + env, *member.cast_to<RTCStatsMember<uint64_t>>()); + + case RTCStatsMemberInterface::kDouble: + return NativeToJavaDouble(env, *member.cast_to<RTCStatsMember<double>>()); + + case RTCStatsMemberInterface::kString: + return NativeToJavaString(env, + *member.cast_to<RTCStatsMember<std::string>>()); + + case RTCStatsMemberInterface::kSequenceBool: + return NativeToJavaBooleanArray( + env, *member.cast_to<RTCStatsMember<std::vector<bool>>>()); + + case RTCStatsMemberInterface::kSequenceInt32: + return NativeToJavaIntegerArray( + env, *member.cast_to<RTCStatsMember<std::vector<int32_t>>>()); + + case RTCStatsMemberInterface::kSequenceUint32: { + const std::vector<uint32_t>& v = + *member.cast_to<RTCStatsMember<std::vector<uint32_t>>>(); + return NativeToJavaLongArray(env, + std::vector<int64_t>(v.begin(), v.end())); + } + case RTCStatsMemberInterface::kSequenceInt64: + return NativeToJavaLongArray( + env, *member.cast_to<RTCStatsMember<std::vector<int64_t>>>()); + + case RTCStatsMemberInterface::kSequenceUint64: + return NativeToJavaBigIntegerArray( + env, *member.cast_to<RTCStatsMember<std::vector<uint64_t>>>()); + + case RTCStatsMemberInterface::kSequenceDouble: + return NativeToJavaDoubleArray( + env, *member.cast_to<RTCStatsMember<std::vector<double>>>()); + + case RTCStatsMemberInterface::kSequenceString: + return NativeToJavaStringArray( + env, *member.cast_to<RTCStatsMember<std::vector<std::string>>>()); + + case RTCStatsMemberInterface::kMapStringUint64: + return NativeToJavaMap( + env, + *member.cast_to<RTCStatsMember<std::map<std::string, uint64_t>>>(), + [](JNIEnv* env, const auto& entry) { + return std::make_pair(NativeToJavaString(env, entry.first), + NativeToJavaBigInteger(env, entry.second)); + }); + + case RTCStatsMemberInterface::kMapStringDouble: + return NativeToJavaMap( + env, 
*member.cast_to<RTCStatsMember<std::map<std::string, double>>>(),
+          [](JNIEnv* env, const auto& entry) {
+            return std::make_pair(NativeToJavaString(env, entry.first),
+                                  NativeToJavaDouble(env, entry.second));
+          });
+  }
+  RTC_DCHECK_NOTREACHED();
+  return nullptr;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtcStats(JNIEnv* env,
+                                                 const RTCStats& stats) {
+  JavaMapBuilder builder(env);
+  for (auto* const member : stats.Members()) {
+    if (!member->is_defined())
+      continue;
+    builder.put(NativeToJavaString(env, member->name()),
+                MemberToJava(env, *member));
+  }
+  return Java_RTCStats_create(
+      env, stats.timestamp_us(), NativeToJavaString(env, stats.type()),
+      NativeToJavaString(env, stats.id()), builder.GetJavaMap());
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtcStatsReport(
+    JNIEnv* env,
+    const rtc::scoped_refptr<const RTCStatsReport>& report) {
+  ScopedJavaLocalRef<jobject> j_stats_map =
+      NativeToJavaMap(env, *report, [](JNIEnv* env, const RTCStats& stats) {
+        return std::make_pair(NativeToJavaString(env, stats.id()),
+                              NativeToJavaRtcStats(env, stats));
+      });
+  return Java_RTCStatsReport_create(env, report->timestamp_us(), j_stats_map);
+}
+
+}  // namespace
+
+RTCStatsCollectorCallbackWrapper::RTCStatsCollectorCallbackWrapper(
+    JNIEnv* jni,
+    const JavaRef<jobject>& j_callback)
+    : j_callback_global_(jni, j_callback) {}
+
+RTCStatsCollectorCallbackWrapper::~RTCStatsCollectorCallbackWrapper() = default;
+
+void RTCStatsCollectorCallbackWrapper::OnStatsDelivered(
+    const rtc::scoped_refptr<const RTCStatsReport>& report) {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  Java_RTCStatsCollectorCallback_onStatsDelivered(
+      jni, j_callback_global_, NativeToJavaRtcStatsReport(jni, report));
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
new file mode 100644
index 0000000000..50fad1844d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_
+
+#include <jni.h>
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Adapter that presents a Java RTCStatsCollectorCallback as a C++
+// RTCStatsCollectorCallback, dispatching OnStatsDelivered() from C++ back to
+// Java.
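The class declared below follows the same adapter shape used by all the observer wrappers in this directory: hold a global reference to the Java callback, attach the calling thread to the JVM, then invoke the generated stub. A generic sketch of the pattern; `NativeListenerInterface` and `Java_Listener_onEvent` are illustrative stand-ins, not real APIs:

```cpp
class ListenerAdapter : public NativeListenerInterface {
 public:
  ListenerAdapter(JNIEnv* env, const JavaRef<jobject>& j_listener)
      : j_listener_(env, j_listener) {}  // Global ref pins the Java object.

  void OnEvent() override {
    // Native callbacks may fire on any thread, so attach to the JVM first.
    JNIEnv* env = AttachCurrentThreadIfNeeded();
    Java_Listener_onEvent(env, j_listener_);  // Generated stub (stand-in).
  }

 private:
  const ScopedJavaGlobalRef<jobject> j_listener_;
};
```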
+class RTCStatsCollectorCallbackWrapper : public RTCStatsCollectorCallback { + public: + RTCStatsCollectorCallbackWrapper(JNIEnv* jni, + const JavaRef<jobject>& j_callback); + ~RTCStatsCollectorCallbackWrapper() override; + + void OnStatsDelivered( + const rtc::scoped_refptr<const RTCStatsReport>& report) override; + + private: + const ScopedJavaGlobalRef<jobject> j_callback_global_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc new file mode 100644 index 0000000000..4bd9ee0e1d --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc @@ -0,0 +1,211 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/rtp_parameters.h" + +#include "sdk/android/generated_peerconnection_jni/RtpParameters_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/media_stream_track.h" + +namespace webrtc { +namespace jni { + +namespace { + +webrtc::DegradationPreference JavaToNativeDegradationPreference( + JNIEnv* jni, + const JavaRef<jobject>& j_degradation_preference) { + std::string enum_name = GetJavaEnumName(jni, j_degradation_preference); + + if (enum_name == "DISABLED") + return webrtc::DegradationPreference::DISABLED; + + if (enum_name == "MAINTAIN_FRAMERATE") + return webrtc::DegradationPreference::MAINTAIN_FRAMERATE; + + if (enum_name == "MAINTAIN_RESOLUTION") + return webrtc::DegradationPreference::MAINTAIN_RESOLUTION; + + if (enum_name == "BALANCED") + return webrtc::DegradationPreference::BALANCED; + + RTC_CHECK(false) << "Unexpected DegradationPreference enum_name " + << enum_name; + return webrtc::DegradationPreference::DISABLED; +} + +ScopedJavaLocalRef<jobject> NativeToJavaRtpEncodingParameter( + JNIEnv* env, + const RtpEncodingParameters& encoding) { + return Java_Encoding_Constructor( + env, NativeToJavaString(env, encoding.rid), encoding.active, + encoding.bitrate_priority, static_cast<int>(encoding.network_priority), + NativeToJavaInteger(env, encoding.max_bitrate_bps), + NativeToJavaInteger(env, encoding.min_bitrate_bps), + NativeToJavaInteger(env, encoding.max_framerate), + NativeToJavaInteger(env, encoding.num_temporal_layers), + NativeToJavaDouble(env, encoding.scale_resolution_down_by), + encoding.ssrc ? 
NativeToJavaLong(env, *encoding.ssrc) : nullptr, + encoding.adaptive_ptime); +} + +ScopedJavaLocalRef<jobject> NativeToJavaRtpCodecParameter( + JNIEnv* env, + const RtpCodecParameters& codec) { + return Java_Codec_Constructor(env, codec.payload_type, + NativeToJavaString(env, codec.name), + NativeToJavaMediaType(env, codec.kind), + NativeToJavaInteger(env, codec.clock_rate), + NativeToJavaInteger(env, codec.num_channels), + NativeToJavaStringMap(env, codec.parameters)); +} + +ScopedJavaLocalRef<jobject> NativeToJavaRtpRtcpParameters( + JNIEnv* env, + const RtcpParameters& rtcp) { + return Java_Rtcp_Constructor(env, NativeToJavaString(env, rtcp.cname), + rtcp.reduced_size); +} + +ScopedJavaLocalRef<jobject> NativeToJavaRtpHeaderExtensionParameter( + JNIEnv* env, + const RtpExtension& extension) { + return Java_HeaderExtension_Constructor( + env, NativeToJavaString(env, extension.uri), extension.id, + extension.encrypt); +} + +} // namespace + +RtpEncodingParameters JavaToNativeRtpEncodingParameters( + JNIEnv* jni, + const JavaRef<jobject>& j_encoding_parameters) { + RtpEncodingParameters encoding; + ScopedJavaLocalRef<jstring> j_rid = + Java_Encoding_getRid(jni, j_encoding_parameters); + if (!IsNull(jni, j_rid)) { + encoding.rid = JavaToNativeString(jni, j_rid); + } + encoding.active = Java_Encoding_getActive(jni, j_encoding_parameters); + ScopedJavaLocalRef<jobject> j_max_bitrate = + Java_Encoding_getMaxBitrateBps(jni, j_encoding_parameters); + encoding.bitrate_priority = + Java_Encoding_getBitratePriority(jni, j_encoding_parameters); + encoding.network_priority = static_cast<webrtc::Priority>( + Java_Encoding_getNetworkPriority(jni, j_encoding_parameters)); + encoding.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max_bitrate); + ScopedJavaLocalRef<jobject> j_min_bitrate = + Java_Encoding_getMinBitrateBps(jni, j_encoding_parameters); + encoding.min_bitrate_bps = JavaToNativeOptionalInt(jni, j_min_bitrate); + ScopedJavaLocalRef<jobject> j_max_framerate = + Java_Encoding_getMaxFramerate(jni, j_encoding_parameters); + encoding.max_framerate = JavaToNativeOptionalInt(jni, j_max_framerate); + ScopedJavaLocalRef<jobject> j_num_temporal_layers = + Java_Encoding_getNumTemporalLayers(jni, j_encoding_parameters); + encoding.num_temporal_layers = + JavaToNativeOptionalInt(jni, j_num_temporal_layers); + ScopedJavaLocalRef<jobject> j_scale_resolution_down_by = + Java_Encoding_getScaleResolutionDownBy(jni, j_encoding_parameters); + encoding.scale_resolution_down_by = + JavaToNativeOptionalDouble(jni, j_scale_resolution_down_by); + encoding.adaptive_ptime = + Java_Encoding_getAdaptivePTime(jni, j_encoding_parameters); + ScopedJavaLocalRef<jobject> j_ssrc = + Java_Encoding_getSsrc(jni, j_encoding_parameters); + if (!IsNull(jni, j_ssrc)) + encoding.ssrc = JavaToNativeLong(jni, j_ssrc); + return encoding; +} + +RtpParameters JavaToNativeRtpParameters(JNIEnv* jni, + const JavaRef<jobject>& j_parameters) { + RtpParameters parameters; + + ScopedJavaLocalRef<jstring> j_transaction_id = + Java_RtpParameters_getTransactionId(jni, j_parameters); + parameters.transaction_id = JavaToNativeString(jni, j_transaction_id); + + ScopedJavaLocalRef<jobject> j_degradation_preference = + Java_RtpParameters_getDegradationPreference(jni, j_parameters); + if (!IsNull(jni, j_degradation_preference)) { + parameters.degradation_preference = + JavaToNativeDegradationPreference(jni, j_degradation_preference); + } + + ScopedJavaLocalRef<jobject> j_rtcp = + Java_RtpParameters_getRtcp(jni, j_parameters); + 
ScopedJavaLocalRef<jstring> j_rtcp_cname = Java_Rtcp_getCname(jni, j_rtcp); + jboolean j_rtcp_reduced_size = Java_Rtcp_getReducedSize(jni, j_rtcp); + parameters.rtcp.cname = JavaToNativeString(jni, j_rtcp_cname); + parameters.rtcp.reduced_size = j_rtcp_reduced_size; + + ScopedJavaLocalRef<jobject> j_header_extensions = + Java_RtpParameters_getHeaderExtensions(jni, j_parameters); + for (const JavaRef<jobject>& j_header_extension : + Iterable(jni, j_header_extensions)) { + RtpExtension header_extension; + header_extension.uri = JavaToStdString( + jni, Java_HeaderExtension_getUri(jni, j_header_extension)); + header_extension.id = Java_HeaderExtension_getId(jni, j_header_extension); + header_extension.encrypt = + Java_HeaderExtension_getEncrypted(jni, j_header_extension); + parameters.header_extensions.push_back(header_extension); + } + + // Convert encodings. + ScopedJavaLocalRef<jobject> j_encodings = + Java_RtpParameters_getEncodings(jni, j_parameters); + for (const JavaRef<jobject>& j_encoding_parameters : + Iterable(jni, j_encodings)) { + RtpEncodingParameters encoding = + JavaToNativeRtpEncodingParameters(jni, j_encoding_parameters); + parameters.encodings.push_back(encoding); + } + + // Convert codecs. + ScopedJavaLocalRef<jobject> j_codecs = + Java_RtpParameters_getCodecs(jni, j_parameters); + for (const JavaRef<jobject>& j_codec : Iterable(jni, j_codecs)) { + RtpCodecParameters codec; + codec.payload_type = Java_Codec_getPayloadType(jni, j_codec); + codec.name = JavaToStdString(jni, Java_Codec_getName(jni, j_codec)); + codec.kind = JavaToNativeMediaType(jni, Java_Codec_getKind(jni, j_codec)); + codec.clock_rate = + JavaToNativeOptionalInt(jni, Java_Codec_getClockRate(jni, j_codec)); + codec.num_channels = + JavaToNativeOptionalInt(jni, Java_Codec_getNumChannels(jni, j_codec)); + auto parameters_map = + JavaToNativeStringMap(jni, Java_Codec_getParameters(jni, j_codec)); + codec.parameters.insert(parameters_map.begin(), parameters_map.end()); + parameters.codecs.push_back(codec); + } + return parameters; +} + +ScopedJavaLocalRef<jobject> NativeToJavaRtpParameters( + JNIEnv* env, + const RtpParameters& parameters) { + return Java_RtpParameters_Constructor( + env, NativeToJavaString(env, parameters.transaction_id), + parameters.degradation_preference.has_value() + ? Java_DegradationPreference_fromNativeIndex( + env, static_cast<int>(*parameters.degradation_preference)) + : nullptr, + NativeToJavaRtpRtcpParameters(env, parameters.rtcp), + NativeToJavaList(env, parameters.header_extensions, + &NativeToJavaRtpHeaderExtensionParameter), + NativeToJavaList(env, parameters.encodings, + &NativeToJavaRtpEncodingParameter), + NativeToJavaList(env, parameters.codecs, &NativeToJavaRtpCodecParameter)); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h new file mode 100644 index 0000000000..3bcd343fae --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h @@ -0,0 +1,35 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_ +#define SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_ + +#include <jni.h> + +#include "api/rtp_parameters.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +RtpEncodingParameters JavaToNativeRtpEncodingParameters( + JNIEnv* env, + const JavaRef<jobject>& j_encoding_parameters); + +RtpParameters JavaToNativeRtpParameters(JNIEnv* jni, + const JavaRef<jobject>& j_parameters); +ScopedJavaLocalRef<jobject> NativeToJavaRtpParameters( + JNIEnv* jni, + const RtpParameters& parameters); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc new file mode 100644 index 0000000000..7a3600b424 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc @@ -0,0 +1,127 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/rtp_receiver.h" + +#include "sdk/android/generated_peerconnection_jni/RtpReceiver_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/media_stream_track.h" +#include "sdk/android/src/jni/pc/rtp_parameters.h" + +namespace webrtc { +namespace jni { + +namespace { + +// Adapter between the C++ RtpReceiverObserverInterface and the Java +// RtpReceiver.Observer interface. Wraps an instance of the Java interface and +// dispatches C++ callbacks to Java. +class RtpReceiverObserverJni : public RtpReceiverObserverInterface { + public: + RtpReceiverObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer) + : j_observer_global_(env, j_observer) {} + + ~RtpReceiverObserverJni() override = default; + + void OnFirstPacketReceived(cricket::MediaType media_type) override { + JNIEnv* const env = AttachCurrentThreadIfNeeded(); + Java_Observer_onFirstPacketReceived(env, j_observer_global_, + NativeToJavaMediaType(env, media_type)); + } + + private: + const ScopedJavaGlobalRef<jobject> j_observer_global_; +}; + +} // namespace + +ScopedJavaLocalRef<jobject> NativeToJavaRtpReceiver( + JNIEnv* env, + rtc::scoped_refptr<RtpReceiverInterface> receiver) { + // Receiver is now owned by Java object, and will be freed from there. + return Java_RtpReceiver_Constructor(env, + jlongFromPointer(receiver.release())); +} + +JavaRtpReceiverGlobalOwner::JavaRtpReceiverGlobalOwner( + JNIEnv* env, + const JavaRef<jobject>& j_receiver) + : j_receiver_(env, j_receiver) {} + +JavaRtpReceiverGlobalOwner::JavaRtpReceiverGlobalOwner( + JavaRtpReceiverGlobalOwner&& other) = default; + +JavaRtpReceiverGlobalOwner::~JavaRtpReceiverGlobalOwner() { + if (j_receiver_.obj()) + Java_RtpReceiver_dispose(AttachCurrentThreadIfNeeded(), j_receiver_); +} + +static jlong JNI_RtpReceiver_GetTrack(JNIEnv* jni, + jlong j_rtp_receiver_pointer) { + // MediaStreamTrack will have shared ownership by the MediaStreamTrack Java + // object. 
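+  // (track() takes one reference internally and release() detaches it, so the
+  // Java MediaStreamTrack wrapper ends up holding exactly one reference,
+  // which its dispose() is expected to drop again.)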
+  return jlongFromPointer(
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+          ->track()
+          .release());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_RtpReceiver_GetParameters(
+    JNIEnv* jni,
+    jlong j_rtp_receiver_pointer) {
+  RtpParameters parameters =
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+          ->GetParameters();
+  return NativeToJavaRtpParameters(jni, parameters);
+}
+
+static ScopedJavaLocalRef<jstring> JNI_RtpReceiver_GetId(
+    JNIEnv* jni,
+    jlong j_rtp_receiver_pointer) {
+  return NativeToJavaString(
+      jni,
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->id());
+}
+
+static jlong JNI_RtpReceiver_SetObserver(
+    JNIEnv* jni,
+    jlong j_rtp_receiver_pointer,
+    const JavaParamRef<jobject>& j_observer) {
+  RtpReceiverObserverJni* rtp_receiver_observer =
+      new RtpReceiverObserverJni(jni, j_observer);
+  reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+      ->SetObserver(rtp_receiver_observer);
+  return jlongFromPointer(rtp_receiver_observer);
+}
+
+static void JNI_RtpReceiver_UnsetObserver(JNIEnv* jni,
+                                          jlong j_rtp_receiver_pointer,
+                                          jlong j_observer_pointer) {
+  reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+      ->SetObserver(nullptr);
+  RtpReceiverObserverJni* observer =
+      reinterpret_cast<RtpReceiverObserverJni*>(j_observer_pointer);
+  if (observer) {
+    delete observer;
+  }
+}
+
+static void JNI_RtpReceiver_SetFrameDecryptor(JNIEnv* jni,
+                                              jlong j_rtp_receiver_pointer,
+                                              jlong j_frame_decryptor_pointer) {
+  reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+      ->SetFrameDecryptor(rtc::scoped_refptr<FrameDecryptorInterface>(
+          reinterpret_cast<FrameDecryptorInterface*>(
+              j_frame_decryptor_pointer)));
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h
new file mode 100644
index 0000000000..ccef44b040
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_
+
+#include <jni.h>
+
+#include "api/rtp_receiver_interface.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpReceiver(
+    JNIEnv* env,
+    rtc::scoped_refptr<RtpReceiverInterface> receiver);
+
+// Takes ownership of the passed `j_receiver` and stores it as a global
+// reference. Will call dispose() in the dtor.
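A hedged sketch of how such an owner is used, mirroring the rtp_receivers_ vector kept by PeerConnectionObserverJni: holding the owner keeps the Java receiver alive, and destroying it disposes the Java side exactly once. `TrackAddedSketch` is an illustrative stand-in; the class itself follows.

```cpp
// Assumed context: a native OnAddTrack()-style callback that has just
// created a Java wrapper for `receiver`.
void TrackAddedSketch(std::vector<JavaRtpReceiverGlobalOwner>& receivers,
                      rtc::scoped_refptr<RtpReceiverInterface> receiver) {
  JNIEnv* env = AttachCurrentThreadIfNeeded();
  ScopedJavaLocalRef<jobject> j_receiver =
      NativeToJavaRtpReceiver(env, receiver);  // Java side now owns one ref.
  receivers.emplace_back(env, j_receiver);     // Keep alive; dispose in dtor.
}
```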
+class JavaRtpReceiverGlobalOwner { + public: + JavaRtpReceiverGlobalOwner(JNIEnv* env, const JavaRef<jobject>& j_receiver); + JavaRtpReceiverGlobalOwner(JavaRtpReceiverGlobalOwner&& other); + ~JavaRtpReceiverGlobalOwner(); + + private: + ScopedJavaGlobalRef<jobject> j_receiver_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc new file mode 100644 index 0000000000..233a353654 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc @@ -0,0 +1,114 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/rtp_sender.h" + +#include "sdk/android/generated_peerconnection_jni/RtpSender_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/rtp_parameters.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef<jobject> NativeToJavaRtpSender( + JNIEnv* env, + rtc::scoped_refptr<RtpSenderInterface> sender) { + if (!sender) + return nullptr; + // Sender is now owned by the Java object, and will be freed from + // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders(). + return Java_RtpSender_Constructor(env, jlongFromPointer(sender.release())); +} + +static jboolean JNI_RtpSender_SetTrack(JNIEnv* jni, + jlong j_rtp_sender_pointer, + jlong j_track_pointer) { + return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer) + ->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer)); +} + +jlong JNI_RtpSender_GetTrack(JNIEnv* jni, + jlong j_rtp_sender_pointer) { + // MediaStreamTrack will have shared ownership by the MediaStreamTrack Java + // object. 
+ return jlongFromPointer( + reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer) + ->track() + .release()); +} + +static void JNI_RtpSender_SetStreams( + JNIEnv* jni, + jlong j_rtp_sender_pointer, + const JavaParamRef<jobject>& j_stream_labels) { + reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer) + ->SetStreams(JavaListToNativeVector<std::string, jstring>( + jni, j_stream_labels, &JavaToNativeString)); +} + +ScopedJavaLocalRef<jobject> JNI_RtpSender_GetStreams( + JNIEnv* jni, + jlong j_rtp_sender_pointer) { + ScopedJavaLocalRef<jstring> (*convert_function)(JNIEnv*, const std::string&) = + &NativeToJavaString; + return NativeToJavaList( + jni, + reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->stream_ids(), + convert_function); +} + +jlong JNI_RtpSender_GetDtmfSender(JNIEnv* jni, + jlong j_rtp_sender_pointer) { + return jlongFromPointer( + reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer) + ->GetDtmfSender() + .release()); +} + +jboolean JNI_RtpSender_SetParameters( + JNIEnv* jni, + jlong j_rtp_sender_pointer, + const JavaParamRef<jobject>& j_parameters) { + if (IsNull(jni, j_parameters)) { + return false; + } + RtpParameters parameters = JavaToNativeRtpParameters(jni, j_parameters); + return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer) + ->SetParameters(parameters) + .ok(); +} + +ScopedJavaLocalRef<jobject> JNI_RtpSender_GetParameters( + JNIEnv* jni, + jlong j_rtp_sender_pointer) { + RtpParameters parameters = + reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer) + ->GetParameters(); + return NativeToJavaRtpParameters(jni, parameters); +} + +ScopedJavaLocalRef<jstring> JNI_RtpSender_GetId(JNIEnv* jni, + jlong j_rtp_sender_pointer) { + return NativeToJavaString( + jni, reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->id()); +} + +static void JNI_RtpSender_SetFrameEncryptor(JNIEnv* jni, + jlong j_rtp_sender_pointer, + jlong j_frame_encryptor_pointer) { + reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer) + ->SetFrameEncryptor(rtc::scoped_refptr<FrameEncryptorInterface>( + reinterpret_cast<FrameEncryptorInterface*>( + j_frame_encryptor_pointer))); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h new file mode 100644 index 0000000000..d782ca915f --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h @@ -0,0 +1,29 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_ +#define SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_ + +#include <jni.h> + +#include "api/rtp_sender_interface.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef<jobject> NativeToJavaRtpSender( + JNIEnv* env, + rtc::scoped_refptr<RtpSenderInterface> sender); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc new file mode 100644 index 0000000000..1d468461f1 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc @@ -0,0 +1,176 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/rtp_transceiver.h" + +#include <string> + +#include "sdk/android/generated_peerconnection_jni/RtpTransceiver_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/media_stream_track.h" +#include "sdk/android/src/jni/pc/rtp_parameters.h" +#include "sdk/android/src/jni/pc/rtp_receiver.h" +#include "sdk/android/src/jni/pc/rtp_sender.h" + +namespace webrtc { +namespace jni { + +namespace { + +ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiverDirection( + JNIEnv* jni, + RtpTransceiverDirection rtp_transceiver_direction) { + return Java_RtpTransceiverDirection_fromNativeIndex( + jni, static_cast<int>(rtp_transceiver_direction)); +} + +} // namespace + +RtpTransceiverInit JavaToNativeRtpTransceiverInit( + JNIEnv* jni, + const JavaRef<jobject>& j_init) { + RtpTransceiverInit init; + + // Convert the direction. + init.direction = static_cast<RtpTransceiverDirection>( + Java_RtpTransceiverInit_getDirectionNativeIndex(jni, j_init)); + + // Convert the stream ids. + ScopedJavaLocalRef<jobject> j_stream_ids = + Java_RtpTransceiverInit_getStreamIds(jni, j_init); + init.stream_ids = JavaListToNativeVector<std::string, jstring>( + jni, j_stream_ids, &JavaToNativeString); + + // Convert the send encodings. + ScopedJavaLocalRef<jobject> j_send_encodings = + Java_RtpTransceiverInit_getSendEncodings(jni, j_init); + init.send_encodings = JavaListToNativeVector<RtpEncodingParameters, jobject>( + jni, j_send_encodings, &JavaToNativeRtpEncodingParameters); + return init; +} + +ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiver( + JNIEnv* env, + rtc::scoped_refptr<RtpTransceiverInterface> transceiver) { + if (!transceiver) { + return nullptr; + } + // Transceiver will now have shared ownership by the Java object. 
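+  // (As with senders and receivers above, release() hands one reference to
+  // the Java RtpTransceiver; its dispose(), invoked for example from
+  // JavaRtpTransceiverGlobalOwner's dtor, gives that reference back.)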
+ return Java_RtpTransceiver_Constructor( + env, jlongFromPointer(transceiver.release())); +} + +JavaRtpTransceiverGlobalOwner::JavaRtpTransceiverGlobalOwner( + JNIEnv* env, + const JavaRef<jobject>& j_transceiver) + : j_transceiver_(env, j_transceiver) {} + +JavaRtpTransceiverGlobalOwner::JavaRtpTransceiverGlobalOwner( + JavaRtpTransceiverGlobalOwner&& other) = default; + +JavaRtpTransceiverGlobalOwner::~JavaRtpTransceiverGlobalOwner() { + if (j_transceiver_.obj()) { + Java_RtpTransceiver_dispose(AttachCurrentThreadIfNeeded(), j_transceiver_); + } +} + +ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetMediaType( + JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + return NativeToJavaMediaType( + jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) + ->media_type()); +} + +ScopedJavaLocalRef<jstring> JNI_RtpTransceiver_GetMid( + JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + absl::optional<std::string> mid = + reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) + ->mid(); + return NativeToJavaString(jni, mid); +} + +ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetSender( + JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + return NativeToJavaRtpSender( + jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) + ->sender()); +} + +ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetReceiver( + JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + return NativeToJavaRtpReceiver( + jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) + ->receiver()); +} + +jboolean JNI_RtpTransceiver_Stopped(JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + return reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) + ->stopped(); +} + +ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_Direction( + JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + return NativeToJavaRtpTransceiverDirection( + jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) + ->direction()); +} + +ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_CurrentDirection( + JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + absl::optional<RtpTransceiverDirection> direction = + reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer) + ->current_direction(); + return direction ? 
NativeToJavaRtpTransceiverDirection(jni, *direction)
+                   : nullptr;
+}
+
+void JNI_RtpTransceiver_StopInternal(JNIEnv* jni,
+                                     jlong j_rtp_transceiver_pointer) {
+  reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+      ->StopInternal();
+}
+
+void JNI_RtpTransceiver_StopStandard(JNIEnv* jni,
+                                     jlong j_rtp_transceiver_pointer) {
+  reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+      ->StopStandard();
+}
+
+jboolean JNI_RtpTransceiver_SetDirection(
+    JNIEnv* jni,
+    jlong j_rtp_transceiver_pointer,
+    const base::android::JavaParamRef<jobject>& j_rtp_transceiver_direction) {
+  if (IsNull(jni, j_rtp_transceiver_direction)) {
+    return false;
+  }
+  RtpTransceiverDirection direction = static_cast<RtpTransceiverDirection>(
+      Java_RtpTransceiverDirection_getNativeIndex(jni,
+                                                  j_rtp_transceiver_direction));
+  webrtc::RTCError error =
+      reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+          ->SetDirectionWithError(direction);
+  if (!error.ok()) {
+    RTC_LOG(LS_WARNING) << "SetDirection failed, code "
+                        << ToString(error.type()) << ", message "
+                        << error.message();
+  }
+  return error.ok();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h
new file mode 100644
index 0000000000..5b2d0121ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
+
+#include <jni.h>
+
+#include "api/rtp_transceiver_interface.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+RtpTransceiverInit JavaToNativeRtpTransceiverInit(
+    JNIEnv* jni,
+    const JavaRef<jobject>& j_init);
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiver(
+    JNIEnv* env,
+    rtc::scoped_refptr<RtpTransceiverInterface> transceiver);
+
+// This takes ownership of the `j_transceiver` and stores it as a global
+// reference. It calls the Java RtpTransceiver's dispose() method from the
+// dtor.
+class JavaRtpTransceiverGlobalOwner {
+ public:
+  JavaRtpTransceiverGlobalOwner(JNIEnv* env,
+                                const JavaRef<jobject>& j_transceiver);
+  JavaRtpTransceiverGlobalOwner(JavaRtpTransceiverGlobalOwner&& other);
+  ~JavaRtpTransceiverGlobalOwner();
+
+ private:
+  ScopedJavaGlobalRef<jobject> j_transceiver_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc
new file mode 100644
index 0000000000..c8b4345af4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/sdp_observer.h" + +#include <utility> + +#include "sdk/android/generated_peerconnection_jni/SdpObserver_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/media_constraints.h" + +namespace webrtc { +namespace jni { + +CreateSdpObserverJni::CreateSdpObserverJni( + JNIEnv* env, + const JavaRef<jobject>& j_observer, + std::unique_ptr<MediaConstraints> constraints) + : j_observer_global_(env, j_observer), + constraints_(std::move(constraints)) {} + +CreateSdpObserverJni::~CreateSdpObserverJni() = default; + +void CreateSdpObserverJni::OnSuccess(SessionDescriptionInterface* desc) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + std::string sdp; + RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; + Java_SdpObserver_onCreateSuccess( + env, j_observer_global_, + NativeToJavaSessionDescription(env, sdp, desc->type())); + // OnSuccess transfers ownership of the description (there's a TODO to make + // it use unique_ptr...). + delete desc; +} + +void CreateSdpObserverJni::OnFailure(webrtc::RTCError error) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_SdpObserver_onCreateFailure(env, j_observer_global_, + NativeToJavaString(env, error.message())); +} + +SetLocalSdpObserverJni::SetLocalSdpObserverJni( + JNIEnv* env, + const JavaRef<jobject>& j_observer) + : j_observer_global_(env, j_observer) {} + +void SetLocalSdpObserverJni::OnSetLocalDescriptionComplete(RTCError error) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + if (error.ok()) { + Java_SdpObserver_onSetSuccess(env, j_observer_global_); + } else { + Java_SdpObserver_onSetFailure(env, j_observer_global_, + NativeToJavaString(env, error.message())); + } +} + +SetRemoteSdpObserverJni::SetRemoteSdpObserverJni( + JNIEnv* env, + const JavaRef<jobject>& j_observer) + : j_observer_global_(env, j_observer) {} + +void SetRemoteSdpObserverJni::OnSetRemoteDescriptionComplete(RTCError error) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + if (error.ok()) { + Java_SdpObserver_onSetSuccess(env, j_observer_global_); + } else { + Java_SdpObserver_onSetFailure(env, j_observer_global_, + NativeToJavaString(env, error.message())); + } +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h new file mode 100644 index 0000000000..b33a3018c8 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h @@ -0,0 +1,69 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_ +#define SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_ + +#include <memory> +#include <string> + +#include "api/peer_connection_interface.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/session_description.h" +#include "sdk/media_constraints.h" + +namespace webrtc { +namespace jni { + +class CreateSdpObserverJni : public CreateSessionDescriptionObserver { + public: + CreateSdpObserverJni(JNIEnv* env, + const JavaRef<jobject>& j_observer, + std::unique_ptr<MediaConstraints> constraints); + ~CreateSdpObserverJni() override; + + MediaConstraints* constraints() { return constraints_.get(); } + + void OnSuccess(SessionDescriptionInterface* desc) override; + void OnFailure(RTCError error) override; + + private: + const ScopedJavaGlobalRef<jobject> j_observer_global_; + std::unique_ptr<MediaConstraints> constraints_; +}; + +class SetLocalSdpObserverJni : public SetLocalDescriptionObserverInterface { + public: + SetLocalSdpObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer); + + ~SetLocalSdpObserverJni() override = default; + + virtual void OnSetLocalDescriptionComplete(RTCError error) override; + + private: + const ScopedJavaGlobalRef<jobject> j_observer_global_; +}; + +class SetRemoteSdpObserverJni : public SetRemoteDescriptionObserverInterface { + public: + SetRemoteSdpObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer); + + ~SetRemoteSdpObserverJni() override = default; + + virtual void OnSetRemoteDescriptionComplete(RTCError error) override; + + private: + const ScopedJavaGlobalRef<jobject> j_observer_global_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc new file mode 100644 index 0000000000..bbac721e51 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc @@ -0,0 +1,48 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/pc/session_description.h" + +#include <string> + +#include "rtc_base/logging.h" +#include "sdk/android/generated_peerconnection_jni/SessionDescription_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +std::unique_ptr<SessionDescriptionInterface> JavaToNativeSessionDescription( + JNIEnv* jni, + const JavaRef<jobject>& j_sdp) { + std::string std_type = JavaToStdString( + jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp)); + std::string std_description = + JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp)); + absl::optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type); + if (!sdp_type_maybe) { + RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type; + return nullptr; + } + return CreateSessionDescription(*sdp_type_maybe, std_description); +} + +ScopedJavaLocalRef<jobject> NativeToJavaSessionDescription( + JNIEnv* jni, + const std::string& sdp, + const std::string& type) { + return Java_SessionDescription_Constructor( + jni, Java_Type_fromCanonicalForm(jni, NativeToJavaString(jni, type)), + NativeToJavaString(jni, sdp)); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h new file mode 100644 index 0000000000..f0f49cb2ee --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h @@ -0,0 +1,36 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_ +#define SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_ + +#include <jni.h> +#include <memory> +#include <string> + +#include "api/jsep.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +std::unique_ptr<SessionDescriptionInterface> JavaToNativeSessionDescription( + JNIEnv* jni, + const JavaRef<jobject>& j_sdp); + +ScopedJavaLocalRef<jobject> NativeToJavaSessionDescription( + JNIEnv* jni, + const std::string& sdp, + const std::string& type); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc new file mode 100644 index 0000000000..74ef3b8049 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc @@ -0,0 +1,44 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h"
+#include "sdk/android/generated_peerconnection_jni/SSLCertificateVerifier_jni.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace webrtc {
+namespace jni {
+
+SSLCertificateVerifierWrapper::SSLCertificateVerifierWrapper(
+    JNIEnv* jni,
+    const JavaRef<jobject>& ssl_certificate_verifier)
+    : ssl_certificate_verifier_(jni, ssl_certificate_verifier) {}
+
+SSLCertificateVerifierWrapper::~SSLCertificateVerifierWrapper() = default;
+
+bool SSLCertificateVerifierWrapper::Verify(
+    const rtc::SSLCertificate& certificate) {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+  // Serialize the DER encoding of the cert into a jbyteArray.
+  rtc::Buffer cert_der_buffer;
+  certificate.ToDER(&cert_der_buffer);
+  ScopedJavaLocalRef<jbyteArray> jni_buffer(
+      jni, jni->NewByteArray(cert_der_buffer.size()));
+  jni->SetByteArrayRegion(
+      jni_buffer.obj(), 0, cert_der_buffer.size(),
+      reinterpret_cast<const jbyte*>(cert_der_buffer.data()));
+
+  return Java_SSLCertificateVerifier_verify(jni, ssl_certificate_verifier_,
+                                            jni_buffer);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h
new file mode 100644
index 0000000000..8c883f445b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
+
+#include <jni.h>
+#include <vector>
+
+#include "rtc_base/ssl_certificate.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wrapper for the Java SSLCertificateVerifier class. Delegates Verify() calls
+// through JNI to the wrapped Java verifier.
+class SSLCertificateVerifierWrapper : public rtc::SSLCertificateVerifier {
+ public:
+  SSLCertificateVerifierWrapper(
+      JNIEnv* jni,
+      const JavaRef<jobject>& ssl_certificate_verifier);
+  ~SSLCertificateVerifierWrapper() override;
+
+  bool Verify(const rtc::SSLCertificate& certificate) override;
+
+ private:
+  const ScopedJavaGlobalRef<jobject> ssl_certificate_verifier_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc
new file mode 100644
index 0000000000..6d4a31df1c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS.
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/stats_observer.h" + +#include <vector> + +#include "sdk/android/generated_peerconnection_jni/StatsObserver_jni.h" +#include "sdk/android/generated_peerconnection_jni/StatsReport_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +namespace { + +ScopedJavaLocalRef<jobject> NativeToJavaStatsReportValue( + JNIEnv* env, + const rtc::scoped_refptr<StatsReport::Value>& value_ptr) { + // Should we use the '.name' enum value here instead of converting the + // name to a string? + return Java_Value_Constructor( + env, NativeToJavaString(env, value_ptr->display_name()), + NativeToJavaString(env, value_ptr->ToString())); +} + +ScopedJavaLocalRef<jobjectArray> NativeToJavaStatsReportValueArray( + JNIEnv* env, + const StatsReport::Values& value_map) { + // Ignore the keys and make an array out of the values. + std::vector<StatsReport::ValuePtr> values; + for (const auto& it : value_map) + values.push_back(it.second); + return NativeToJavaObjectArray(env, values, + org_webrtc_StatsReport_00024Value_clazz(env), + &NativeToJavaStatsReportValue); +} + +ScopedJavaLocalRef<jobject> NativeToJavaStatsReport(JNIEnv* env, + const StatsReport& report) { + return Java_StatsReport_Constructor( + env, NativeToJavaString(env, report.id()->ToString()), + NativeToJavaString(env, report.TypeToString()), report.timestamp(), + NativeToJavaStatsReportValueArray(env, report.values())); +} + +} // namespace + +StatsObserverJni::StatsObserverJni(JNIEnv* jni, + const JavaRef<jobject>& j_observer) + : j_observer_global_(jni, j_observer) {} + +StatsObserverJni::~StatsObserverJni() = default; + +void StatsObserverJni::OnComplete(const StatsReports& reports) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobjectArray> j_reports = + NativeToJavaObjectArray(env, reports, org_webrtc_StatsReport_clazz(env), + [](JNIEnv* env, const StatsReport* report) { + return NativeToJavaStatsReport(env, *report); + }); + Java_StatsObserver_onComplete(env, j_observer_global_, j_reports); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h new file mode 100644 index 0000000000..0cfd43384b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h @@ -0,0 +1,36 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_ +#define SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_ + +#include "api/peer_connection_interface.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +// Adapter for a Java StatsObserver presenting a C++ StatsObserver and +// dispatching the callback from C++ back to Java. 
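+//
+// Rough usage sketch (an assumption for illustration; this file does not
+// show the call site). The adapter is handed to the legacy
+// PeerConnectionInterface::GetStats() API in a ref-counted holder, where
+// `pc` and `j_observer` are hypothetical names:
+//
+//   auto observer = rtc::make_ref_counted<StatsObserverJni>(env, j_observer);
+//   pc->GetStats(observer.get(), /*track=*/nullptr,
+//                PeerConnectionInterface::kStatsOutputLevelStandard);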
+class StatsObserverJni : public StatsObserver { + public: + StatsObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer); + ~StatsObserverJni() override; + + void OnComplete(const StatsReports& reports) override; + + private: + const ScopedJavaGlobalRef<jobject> j_observer_global_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc new file mode 100644 index 0000000000..5c93fcd7c0 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc @@ -0,0 +1,35 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/turn_customizer.h" +#include "sdk/android/generated_peerconnection_jni/TurnCustomizer_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +TurnCustomizer* GetNativeTurnCustomizer( + JNIEnv* env, + const JavaRef<jobject>& j_turn_customizer) { + if (IsNull(env, j_turn_customizer)) + return nullptr; + return reinterpret_cast<webrtc::TurnCustomizer*>( + Java_TurnCustomizer_getNativeTurnCustomizer(env, j_turn_customizer)); +} + +static void JNI_TurnCustomizer_FreeTurnCustomizer( + JNIEnv* jni, + jlong j_turn_customizer_pointer) { + delete reinterpret_cast<TurnCustomizer*>(j_turn_customizer_pointer); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h new file mode 100644 index 0000000000..359234fc76 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h @@ -0,0 +1,27 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_ +#define SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_ + +#include "api/turn_customizer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +TurnCustomizer* GetNativeTurnCustomizer( + JNIEnv* env, + const JavaRef<jobject>& j_turn_customizer); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc new file mode 100644 index 0000000000..b955dbb1ef --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc @@ -0,0 +1,55 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/video.h" + +#include <jni.h> + +#include <memory> + +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "rtc_base/logging.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/android_video_track_source.h" +#include "sdk/android/src/jni/video_decoder_factory_wrapper.h" +#include "sdk/android/src/jni/video_encoder_factory_wrapper.h" + +namespace webrtc { +namespace jni { + +VideoEncoderFactory* CreateVideoEncoderFactory( + JNIEnv* jni, + const JavaRef<jobject>& j_encoder_factory) { + return IsNull(jni, j_encoder_factory) + ? nullptr + : new VideoEncoderFactoryWrapper(jni, j_encoder_factory); +} + +VideoDecoderFactory* CreateVideoDecoderFactory( + JNIEnv* jni, + const JavaRef<jobject>& j_decoder_factory) { + return IsNull(jni, j_decoder_factory) + ? nullptr + : new VideoDecoderFactoryWrapper(jni, j_decoder_factory); +} + +void* CreateVideoSource(JNIEnv* env, + rtc::Thread* signaling_thread, + rtc::Thread* worker_thread, + jboolean is_screencast, + jboolean align_timestamps) { + auto source = rtc::make_ref_counted<AndroidVideoTrackSource>( + signaling_thread, env, is_screencast, align_timestamps); + return source.release(); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/video.h b/third_party/libwebrtc/sdk/android/src/jni/pc/video.h new file mode 100644 index 0000000000..32bc6406a1 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/pc/video.h @@ -0,0 +1,45 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_VIDEO_H_ +#define SDK_ANDROID_SRC_JNI_PC_VIDEO_H_ + +#include <jni.h> + +#include "api/scoped_refptr.h" +#include "rtc_base/thread.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +class VideoEncoderFactory; +class VideoDecoderFactory; +} // namespace webrtc + +namespace webrtc { +namespace jni { + +VideoEncoderFactory* CreateVideoEncoderFactory( + JNIEnv* jni, + const JavaRef<jobject>& j_encoder_factory); + +VideoDecoderFactory* CreateVideoDecoderFactory( + JNIEnv* jni, + const JavaRef<jobject>& j_decoder_factory); + +void* CreateVideoSource(JNIEnv* env, + rtc::Thread* signaling_thread, + rtc::Thread* worker_thread, + jboolean is_screencast, + jboolean align_timestamps); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_VIDEO_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc new file mode 100644 index 0000000000..1df8c7ade5 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc @@ -0,0 +1,38 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/scoped_java_ref_counted.h"
+
+#include "sdk/android/generated_base_jni/RefCounted_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+// static
+ScopedJavaRefCounted ScopedJavaRefCounted::Retain(
+    JNIEnv* jni,
+    const JavaRef<jobject>& j_object) {
+  Java_RefCounted_retain(jni, j_object);
+  CHECK_EXCEPTION(jni)
+      << "Unexpected java exception from java RefCounted.retain()";
+  return Adopt(jni, j_object);
+}
+
+ScopedJavaRefCounted::~ScopedJavaRefCounted() {
+  if (!j_object_.is_null()) {
+    JNIEnv* jni = AttachCurrentThreadIfNeeded();
+    Java_RefCounted_release(jni, j_object_);
+    CHECK_EXCEPTION(jni)
+        << "Unexpected java exception from java RefCounted.release()";
+  }
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h
new file mode 100644
index 0000000000..3ea226259e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
+#define SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+// Holds a reference to a Java object implementing the RefCounted interface,
+// and calls its release() method from the destructor.
+class ScopedJavaRefCounted {
+ public:
+  // Takes over the caller's reference.
+  static ScopedJavaRefCounted Adopt(JNIEnv* jni,
+                                    const JavaRef<jobject>& j_object) {
+    return ScopedJavaRefCounted(jni, j_object);
+  }
+
+  // Retains the Java object for the lifetime of this object.
+  static ScopedJavaRefCounted Retain(JNIEnv* jni,
+                                     const JavaRef<jobject>& j_object);
+  ScopedJavaRefCounted(ScopedJavaRefCounted&& other) = default;
+
+  ScopedJavaRefCounted(const ScopedJavaRefCounted& other) = delete;
+  ScopedJavaRefCounted& operator=(const ScopedJavaRefCounted&) = delete;
+
+  ~ScopedJavaRefCounted();
+
+ private:
+  // Adopts the reference.
+  ScopedJavaRefCounted(JNIEnv* jni, const JavaRef<jobject>& j_object)
+      : j_object_(jni, j_object) {}
+
+  ScopedJavaGlobalRef<jobject> j_object_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc b/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc
new file mode 100644
index 0000000000..c0c5fd9d9f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS.
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> + +#include "rtc_base/time_utils.h" +#include "rtc_base/timestamp_aligner.h" +#include "sdk/android/generated_video_jni/TimestampAligner_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_TimestampAligner_RtcTimeNanos(JNIEnv* env) { + return rtc::TimeNanos(); +} + +static jlong JNI_TimestampAligner_CreateTimestampAligner(JNIEnv* env) { + return jlongFromPointer(new rtc::TimestampAligner()); +} + +static void JNI_TimestampAligner_ReleaseTimestampAligner( + JNIEnv* env, + jlong timestamp_aligner) { + delete reinterpret_cast<rtc::TimestampAligner*>(timestamp_aligner); +} + +static jlong JNI_TimestampAligner_TranslateTimestamp( + JNIEnv* env, + jlong timestamp_aligner, + jlong camera_time_ns) { + return reinterpret_cast<rtc::TimestampAligner*>(timestamp_aligner) + ->TranslateTimestamp(camera_time_ns / rtc::kNumNanosecsPerMicrosec, + rtc::TimeMicros()) * + rtc::kNumNanosecsPerMicrosec; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc new file mode 100644 index 0000000000..a218a1d23f --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc @@ -0,0 +1,37 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/video_codec_info.h" + +#include "sdk/android/generated_video_jni/VideoCodecInfo_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni, + const JavaRef<jobject>& j_info) { + return SdpVideoFormat( + JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)), + JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info))); +} + +ScopedJavaLocalRef<jobject> SdpVideoFormatToVideoCodecInfo( + JNIEnv* jni, + const SdpVideoFormat& format) { + ScopedJavaLocalRef<jobject> j_params = + NativeToJavaStringMap(jni, format.parameters); + return Java_VideoCodecInfo_Constructor( + jni, NativeToJavaString(jni, format.name), j_params); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h new file mode 100644 index 0000000000..07b073086a --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h @@ -0,0 +1,31 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_ +#define SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_ + +#include <jni.h> + +#include "api/video_codecs/sdp_video_format.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni, + const JavaRef<jobject>& info); +ScopedJavaLocalRef<jobject> SdpVideoFormatToVideoCodecInfo( + JNIEnv* jni, + const SdpVideoFormat& format); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc new file mode 100644 index 0000000000..e34d6d69e2 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc @@ -0,0 +1,25 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/video_codec_status.h" + +#include "sdk/android/generated_video_jni/VideoCodecStatus_jni.h" + +namespace webrtc { +namespace jni { + +int32_t JavaToNativeVideoCodecStatus( + JNIEnv* env, + const JavaRef<jobject>& j_video_codec_status) { + return Java_VideoCodecStatus_getNumber(env, j_video_codec_status); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h new file mode 100644 index 0000000000..607bd46340 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h @@ -0,0 +1,27 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_ +#define SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_ + +#include <jni.h> +#include <stdint.h> + +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { +int32_t JavaToNativeVideoCodecStatus( + JNIEnv* env, + const JavaRef<jobject>& j_video_codec_status); +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc new file mode 100644 index 0000000000..2d9240493a --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc @@ -0,0 +1,51 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/video_decoder_factory_wrapper.h" + +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" +#include "rtc_base/logging.h" +#include "sdk/android/generated_video_jni/VideoDecoderFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/video_codec_info.h" +#include "sdk/android/src/jni/video_decoder_wrapper.h" + +namespace webrtc { +namespace jni { + +VideoDecoderFactoryWrapper::VideoDecoderFactoryWrapper( + JNIEnv* jni, + const JavaRef<jobject>& decoder_factory) + : decoder_factory_(jni, decoder_factory) {} +VideoDecoderFactoryWrapper::~VideoDecoderFactoryWrapper() = default; + +std::unique_ptr<VideoDecoder> VideoDecoderFactoryWrapper::CreateVideoDecoder( + const SdpVideoFormat& format) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> j_codec_info = + SdpVideoFormatToVideoCodecInfo(jni, format); + ScopedJavaLocalRef<jobject> decoder = Java_VideoDecoderFactory_createDecoder( + jni, decoder_factory_, j_codec_info); + if (!decoder.obj()) + return nullptr; + return JavaToNativeVideoDecoder(jni, decoder); +} + +std::vector<SdpVideoFormat> VideoDecoderFactoryWrapper::GetSupportedFormats() + const { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + return JavaToNativeVector<SdpVideoFormat>( + env, Java_VideoDecoderFactory_getSupportedCodecs(env, decoder_factory_), + &VideoCodecInfoToSdpVideoFormat); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h new file mode 100644 index 0000000000..2122fdc008 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h @@ -0,0 +1,41 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_ +#define SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_ + +#include <jni.h> + +#include "api/video_codecs/video_decoder_factory.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +// Wrapper for Java VideoDecoderFactory class. Delegates method calls through +// JNI and wraps the decoder inside VideoDecoderWrapper. 
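+//
+// Usage sketch (illustrative only; assumes a valid `jni` pointer and a Java
+// factory reference `j_factory`):
+//
+//   VideoDecoderFactoryWrapper factory(jni, j_factory);
+//   std::unique_ptr<VideoDecoder> decoder =
+//       factory.CreateVideoDecoder(SdpVideoFormat("VP8"));
+//   // `decoder` is null if the Java factory had no decoder for the format.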
+class VideoDecoderFactoryWrapper : public VideoDecoderFactory { + public: + VideoDecoderFactoryWrapper(JNIEnv* jni, + const JavaRef<jobject>& decoder_factory); + ~VideoDecoderFactoryWrapper() override; + + std::vector<SdpVideoFormat> GetSupportedFormats() const override; + std::unique_ptr<VideoDecoder> CreateVideoDecoder( + const SdpVideoFormat& format) override; + + private: + const ScopedJavaGlobalRef<jobject> decoder_factory_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc new file mode 100644 index 0000000000..a678280f69 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> + +#include "api/video_codecs/video_decoder_software_fallback_wrapper.h" +#include "sdk/android/generated_video_jni/VideoDecoderFallback_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/video_decoder_wrapper.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_VideoDecoderFallback_CreateDecoder( + JNIEnv* jni, + const JavaParamRef<jobject>& j_fallback_decoder, + const JavaParamRef<jobject>& j_primary_decoder) { + std::unique_ptr<VideoDecoder> fallback_decoder = + JavaToNativeVideoDecoder(jni, j_fallback_decoder); + std::unique_ptr<VideoDecoder> primary_decoder = + JavaToNativeVideoDecoder(jni, j_primary_decoder); + + VideoDecoder* nativeWrapper = + CreateVideoDecoderSoftwareFallbackWrapper(std::move(fallback_decoder), + std::move(primary_decoder)) + .release(); + + return jlongFromPointer(nativeWrapper); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc new file mode 100644 index 0000000000..328f8d8d4b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc @@ -0,0 +1,273 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "sdk/android/src/jni/video_decoder_wrapper.h"
+
+#include "api/video/render_resolution.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/utility/vp8_header_parser.h"
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/VideoDecoderWrapper_jni.h"
+#include "sdk/android/generated_video_jni/VideoDecoder_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/encoded_image.h"
+#include "sdk/android/src/jni/video_codec_status.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+// RTP timestamps are 90 kHz.
+const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec;
+
+template <typename Dst, typename Src>
+inline absl::optional<Dst> cast_optional(const absl::optional<Src>& value) {
+  return value ? absl::optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
+               : absl::nullopt;
+}
+} // namespace
+
+VideoDecoderWrapper::VideoDecoderWrapper(JNIEnv* jni,
+                                         const JavaRef<jobject>& decoder)
+    : decoder_(jni, decoder),
+      implementation_name_(JavaToStdString(
+          jni,
+          Java_VideoDecoder_getImplementationName(jni, decoder))),
+      initialized_(false),
+      qp_parsing_enabled_(true)  // QP parsing starts enabled; it is disabled
+                                 // later if the decoder turns out to provide
+                                 // QP values itself.
+{
+  decoder_thread_checker_.Detach();
+}
+
+VideoDecoderWrapper::~VideoDecoderWrapper() = default;
+
+bool VideoDecoderWrapper::Configure(const Settings& settings) {
+  RTC_DCHECK_RUN_ON(&decoder_thread_checker_);
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  decoder_settings_ = settings;
+  return ConfigureInternal(jni);
+}
+
+bool VideoDecoderWrapper::ConfigureInternal(JNIEnv* jni) {
+  RenderResolution resolution = decoder_settings_.max_render_resolution();
+  ScopedJavaLocalRef<jobject> settings =
+      Java_Settings_Constructor(jni, decoder_settings_.number_of_cores(),
+                                resolution.Width(), resolution.Height());
+
+  ScopedJavaLocalRef<jobject> callback =
+      Java_VideoDecoderWrapper_createDecoderCallback(jni,
+                                                     jlongFromPointer(this));
+
+  int32_t status = JavaToNativeVideoCodecStatus(
+      jni, Java_VideoDecoder_initDecode(jni, decoder_, settings, callback));
+  RTC_LOG(LS_INFO) << "initDecode: " << status;
+  if (status == WEBRTC_VIDEO_CODEC_OK) {
+    initialized_ = true;
+  }
+
+  // The decoder was reinitialized, so re-enable QP parsing in case the
+  // decoder stops providing QP values.
+  qp_parsing_enabled_ = true;
+
+  return status == WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VideoDecoderWrapper::Decode(
+    const EncodedImage& image_param,
+    bool missing_frames,
+    int64_t render_time_ms) {
+  RTC_DCHECK_RUN_ON(&decoder_thread_checker_);
+  if (!initialized_) {
+    // Most likely initializing the codec failed.
+    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+  }
+
+  // Make a mutable copy so we can modify the timestamp.
+  EncodedImage input_image(image_param);
+  // We use the RTP timestamp for capture time because capture_time_ms_ is
+  // always 0.
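+  // Worked example: kNumRtpTicksPerMillisec is 90000 / 1000 = 90, so an RTP
+  // timestamp of 270000 ticks maps to 270000 / 90 = 3000 ms of capture time.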
+  input_image.capture_time_ms_ =
+      input_image.Timestamp() / kNumRtpTicksPerMillisec;
+
+  FrameExtraInfo frame_extra_info;
+  frame_extra_info.timestamp_ns =
+      input_image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec;
+  frame_extra_info.timestamp_rtp = input_image.Timestamp();
+  frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_;
+  frame_extra_info.qp =
+      qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
+  {
+    MutexLock lock(&frame_extra_infos_lock_);
+    frame_extra_infos_.push_back(frame_extra_info);
+  }
+
+  JNIEnv* env = AttachCurrentThreadIfNeeded();
+  ScopedJavaLocalRef<jobject> jinput_image =
+      NativeToJavaEncodedImage(env, input_image);
+  ScopedJavaLocalRef<jobject> decode_info;
+  ScopedJavaLocalRef<jobject> ret =
+      Java_VideoDecoder_decode(env, decoder_, jinput_image, decode_info);
+  return HandleReturnCode(env, ret, "decode");
+}
+
+int32_t VideoDecoderWrapper::RegisterDecodeCompleteCallback(
+    DecodedImageCallback* callback) {
+  RTC_DCHECK_RUNS_SERIALIZED(&callback_race_checker_);
+  callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VideoDecoderWrapper::Release() {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  int32_t status = JavaToNativeVideoCodecStatus(
+      jni, Java_VideoDecoder_release(jni, decoder_));
+  RTC_LOG(LS_INFO) << "release: " << status;
+  {
+    MutexLock lock(&frame_extra_infos_lock_);
+    frame_extra_infos_.clear();
+  }
+  initialized_ = false;
+  // It is allowed to reinitialize the codec on a different thread.
+  decoder_thread_checker_.Detach();
+  return status;
+}
+
+const char* VideoDecoderWrapper::ImplementationName() const {
+  return implementation_name_.c_str();
+}
+
+void VideoDecoderWrapper::OnDecodedFrame(
+    JNIEnv* env,
+    const JavaRef<jobject>& j_frame,
+    const JavaRef<jobject>& j_decode_time_ms,
+    const JavaRef<jobject>& j_qp) {
+  RTC_DCHECK_RUNS_SERIALIZED(&callback_race_checker_);
+  const int64_t timestamp_ns = GetJavaVideoFrameTimestampNs(env, j_frame);
+
+  FrameExtraInfo frame_extra_info;
+  {
+    MutexLock lock(&frame_extra_infos_lock_);
+
+    do {
+      if (frame_extra_infos_.empty()) {
+        RTC_LOG(LS_WARNING)
+            << "Java decoder produced an unexpected frame: " << timestamp_ns;
+        return;
+      }
+
+      frame_extra_info = frame_extra_infos_.front();
+      frame_extra_infos_.pop_front();
+      // The decoder might drop frames, so iterate through the queue until we
+      // find a matching timestamp.
+    } while (frame_extra_info.timestamp_ns != timestamp_ns);
+  }
+
+  VideoFrame frame =
+      JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp);
+  frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp);
+
+  absl::optional<int32_t> decoding_time_ms =
+      JavaToNativeOptionalInt(env, j_decode_time_ms);
+
+  absl::optional<uint8_t> decoder_qp =
+      cast_optional<uint8_t, int32_t>(JavaToNativeOptionalInt(env, j_qp));
+  // If the decoder provides QP values itself, there is no need to parse the
+  // bitstream; keep QP parsing enabled only while the decoder does not
+  // provide QP values.
+  qp_parsing_enabled_ = !decoder_qp.has_value();
+  callback_->Decoded(frame, decoding_time_ms,
+                     decoder_qp ?
decoder_qp : frame_extra_info.qp); +} + +VideoDecoderWrapper::FrameExtraInfo::FrameExtraInfo() = default; +VideoDecoderWrapper::FrameExtraInfo::FrameExtraInfo(const FrameExtraInfo&) = + default; +VideoDecoderWrapper::FrameExtraInfo::~FrameExtraInfo() = default; + +int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni, + const JavaRef<jobject>& j_value, + const char* method_name) { + int32_t value = JavaToNativeVideoCodecStatus(jni, j_value); + if (value >= 0) { // OK or NO_OUTPUT + return value; + } + + RTC_LOG(LS_WARNING) << method_name << ": " << value; + if (value == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE || + value == WEBRTC_VIDEO_CODEC_UNINITIALIZED) { // Critical error. + RTC_LOG(LS_WARNING) << "Java decoder requested software fallback."; + return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; + } + + // Try resetting the codec. + if (Release() == WEBRTC_VIDEO_CODEC_OK && ConfigureInternal(jni)) { + RTC_LOG(LS_WARNING) << "Reset Java decoder."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + RTC_LOG(LS_WARNING) << "Unable to reset Java decoder."; + return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; +} + +absl::optional<uint8_t> VideoDecoderWrapper::ParseQP( + const EncodedImage& input_image) { + if (input_image.qp_ != -1) { + return input_image.qp_; + } + + absl::optional<uint8_t> qp; + switch (decoder_settings_.codec_type()) { + case kVideoCodecVP8: { + int qp_int; + if (vp8::GetQp(input_image.data(), input_image.size(), &qp_int)) { + qp = qp_int; + } + break; + } + case kVideoCodecVP9: { + int qp_int; + if (vp9::GetQp(input_image.data(), input_image.size(), &qp_int)) { + qp = qp_int; + } + break; + } + case kVideoCodecH264: { + h264_bitstream_parser_.ParseBitstream(input_image); + qp = h264_bitstream_parser_.GetLastSliceQp(); + break; + } + default: + break; // Default is to not provide QP. + } + return qp; +} + +std::unique_ptr<VideoDecoder> JavaToNativeVideoDecoder( + JNIEnv* jni, + const JavaRef<jobject>& j_decoder) { + const jlong native_decoder = + Java_VideoDecoder_createNativeVideoDecoder(jni, j_decoder); + VideoDecoder* decoder; + if (native_decoder == 0) { + decoder = new VideoDecoderWrapper(jni, j_decoder); + } else { + decoder = reinterpret_cast<VideoDecoder*>(native_decoder); + } + return std::unique_ptr<VideoDecoder>(decoder); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h new file mode 100644 index 0000000000..49d0fbf048 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h @@ -0,0 +1,117 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
+
+#include <jni.h>
+
+#include <atomic>
+#include <deque>
+
+#include "api/sequence_checker.h"
+#include "api/video_codecs/video_decoder.h"
+#include "common_video/h264/h264_bitstream_parser.h"
+#include "rtc_base/race_checker.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wraps a Java decoder and delegates all calls to it.
+class VideoDecoderWrapper : public VideoDecoder {
+ public:
+  VideoDecoderWrapper(JNIEnv* jni, const JavaRef<jobject>& decoder);
+  ~VideoDecoderWrapper() override;
+
+  bool Configure(const Settings& settings) override;
+
+  int32_t Decode(const EncodedImage& input_image,
+                 bool missing_frames,
+                 int64_t render_time_ms) override;
+
+  int32_t RegisterDecodeCompleteCallback(
+      DecodedImageCallback* callback) override;
+
+  // TODO(sakal): This is not always called on the correct thread. It is
+  // called from the VCMGenericDecoder destructor, which is on a different
+  // thread, but it is still safe and synchronous.
+  int32_t Release() override RTC_NO_THREAD_SAFETY_ANALYSIS;
+
+  const char* ImplementationName() const override;
+
+  // Wraps the frame in an AndroidVideoBuffer and passes it to the callback.
+  void OnDecodedFrame(JNIEnv* env,
+                      const JavaRef<jobject>& j_frame,
+                      const JavaRef<jobject>& j_decode_time_ms,
+                      const JavaRef<jobject>& j_qp);
+
+ private:
+  struct FrameExtraInfo {
+    int64_t timestamp_ns;  // Used as an identifier of the frame.
+
+    uint32_t timestamp_rtp;
+    int64_t timestamp_ntp;
+    absl::optional<uint8_t> qp;
+
+    FrameExtraInfo();
+    FrameExtraInfo(const FrameExtraInfo&);
+    ~FrameExtraInfo();
+  };
+
+  bool ConfigureInternal(JNIEnv* jni) RTC_RUN_ON(decoder_thread_checker_);
+
+  // Takes a Java VideoCodecStatus, handles it and returns a
+  // WEBRTC_VIDEO_CODEC_* status code.
+  int32_t HandleReturnCode(JNIEnv* jni,
+                           const JavaRef<jobject>& j_value,
+                           const char* method_name)
+      RTC_RUN_ON(decoder_thread_checker_);
+
+  absl::optional<uint8_t> ParseQP(const EncodedImage& input_image)
+      RTC_RUN_ON(decoder_thread_checker_);
+
+  const ScopedJavaGlobalRef<jobject> decoder_;
+  const std::string implementation_name_;
+
+  SequenceChecker decoder_thread_checker_;
+  // Callbacks must be executed sequentially on an arbitrary thread. We do not
+  // own this thread, so a thread checker cannot be used.
+  rtc::RaceChecker callback_race_checker_;
+
+  // Initialized in Configure and immutable after that.
+  VideoDecoder::Settings decoder_settings_
+      RTC_GUARDED_BY(decoder_thread_checker_);
+
+  bool initialized_ RTC_GUARDED_BY(decoder_thread_checker_);
+  H264BitstreamParser h264_bitstream_parser_
+      RTC_GUARDED_BY(decoder_thread_checker_);
+
+  DecodedImageCallback* callback_ RTC_GUARDED_BY(callback_race_checker_);
+
+  // Accessed both on the decoder thread and the callback thread.
+  std::atomic<bool> qp_parsing_enabled_;
+  Mutex frame_extra_infos_lock_;
+  std::deque<FrameExtraInfo> frame_extra_infos_
+      RTC_GUARDED_BY(frame_extra_infos_lock_);
+};
+
+/* If `j_decoder` is a wrapped native decoder, unwrap it. If it is not,
+ * wrap it in a VideoDecoderWrapper.
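+ *
+ * A "wrapped native decoder" is a Java object whose
+ * createNativeVideoDecoder() returns a non-zero pointer to an existing C++
+ * VideoDecoder; in that case the pointer is adopted directly instead of
+ * constructing a JNI-delegating VideoDecoderWrapper.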
+ */ +std::unique_ptr<VideoDecoder> JavaToNativeVideoDecoder( + JNIEnv* jni, + const JavaRef<jobject>& j_decoder); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc new file mode 100644 index 0000000000..7df129b360 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc @@ -0,0 +1,130 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/video_encoder_factory_wrapper.h" + +#include "api/video/render_resolution.h" +#include "api/video_codecs/video_encoder.h" +#include "rtc_base/logging.h" +#include "sdk/android/generated_video_jni/VideoEncoderFactory_jni.h" +#include "sdk/android/native_api/jni/class_loader.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/video_codec_info.h" +#include "sdk/android/src/jni/video_encoder_wrapper.h" + +namespace webrtc { +namespace jni { +namespace { +class VideoEncoderSelectorWrapper + : public VideoEncoderFactory::EncoderSelectorInterface { + public: + VideoEncoderSelectorWrapper(JNIEnv* jni, + const JavaRef<jobject>& encoder_selector) + : encoder_selector_(jni, encoder_selector) {} + + void OnCurrentEncoder(const SdpVideoFormat& format) override { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> j_codec_info = + SdpVideoFormatToVideoCodecInfo(jni, format); + Java_VideoEncoderSelector_onCurrentEncoder(jni, encoder_selector_, + j_codec_info); + } + + absl::optional<SdpVideoFormat> OnAvailableBitrate( + const DataRate& rate) override { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> codec_info = + Java_VideoEncoderSelector_onAvailableBitrate(jni, encoder_selector_, + rate.kbps<int>()); + if (codec_info.is_null()) { + return absl::nullopt; + } + return VideoCodecInfoToSdpVideoFormat(jni, codec_info); + } + + absl::optional<SdpVideoFormat> OnResolutionChange( + const RenderResolution& resolution) override { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> codec_info = + Java_VideoEncoderSelector_onResolutionChange( + jni, encoder_selector_, resolution.Width(), resolution.Height()); + if (codec_info.is_null()) { + return absl::nullopt; + } + return VideoCodecInfoToSdpVideoFormat(jni, codec_info); + } + + absl::optional<SdpVideoFormat> OnEncoderBroken() override { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> codec_info = + Java_VideoEncoderSelector_onEncoderBroken(jni, encoder_selector_); + if (codec_info.is_null()) { + return absl::nullopt; + } + return VideoCodecInfoToSdpVideoFormat(jni, codec_info); + } + + private: + const ScopedJavaGlobalRef<jobject> encoder_selector_; +}; + +} // namespace + +VideoEncoderFactoryWrapper::VideoEncoderFactoryWrapper( + JNIEnv* jni, + const JavaRef<jobject>& encoder_factory) + : encoder_factory_(jni, encoder_factory) { + const ScopedJavaLocalRef<jobjectArray> j_supported_codecs = + Java_VideoEncoderFactory_getSupportedCodecs(jni, 
encoder_factory); + supported_formats_ = JavaToNativeVector<SdpVideoFormat>( + jni, j_supported_codecs, &VideoCodecInfoToSdpVideoFormat); + const ScopedJavaLocalRef<jobjectArray> j_implementations = + Java_VideoEncoderFactory_getImplementations(jni, encoder_factory); + implementations_ = JavaToNativeVector<SdpVideoFormat>( + jni, j_implementations, &VideoCodecInfoToSdpVideoFormat); +} +VideoEncoderFactoryWrapper::~VideoEncoderFactoryWrapper() = default; + +std::unique_ptr<VideoEncoder> VideoEncoderFactoryWrapper::CreateVideoEncoder( + const SdpVideoFormat& format) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> j_codec_info = + SdpVideoFormatToVideoCodecInfo(jni, format); + ScopedJavaLocalRef<jobject> encoder = Java_VideoEncoderFactory_createEncoder( + jni, encoder_factory_, j_codec_info); + if (!encoder.obj()) + return nullptr; + return JavaToNativeVideoEncoder(jni, encoder); +} + +std::vector<SdpVideoFormat> VideoEncoderFactoryWrapper::GetSupportedFormats() + const { + return supported_formats_; +} + +std::vector<SdpVideoFormat> VideoEncoderFactoryWrapper::GetImplementations() + const { + return implementations_; +} + +std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface> +VideoEncoderFactoryWrapper::GetEncoderSelector() const { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> selector = + Java_VideoEncoderFactory_getEncoderSelector(jni, encoder_factory_); + if (selector.is_null()) { + return nullptr; + } + + return std::make_unique<VideoEncoderSelectorWrapper>(jni, selector); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h new file mode 100644 index 0000000000..2be6b1b33f --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h @@ -0,0 +1,51 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_ +#define SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_ + +#include <jni.h> +#include <vector> + +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +// Wrapper for Java VideoEncoderFactory class. Delegates method calls through +// JNI and wraps the encoder inside VideoEncoderWrapper. +class VideoEncoderFactoryWrapper : public VideoEncoderFactory { + public: + VideoEncoderFactoryWrapper(JNIEnv* jni, + const JavaRef<jobject>& encoder_factory); + ~VideoEncoderFactoryWrapper() override; + + std::unique_ptr<VideoEncoder> CreateVideoEncoder( + const SdpVideoFormat& format) override; + + // Returns a list of supported codecs in order of preference. 
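+ // A hypothetical usage sketch (`jni` and `j_factory` stand for a valid
+ // JNIEnv* and a local reference to a Java VideoEncoderFactory; neither
+ // name is part of this header):
+ //
+ //   VideoEncoderFactoryWrapper factory(jni, j_factory);
+ //   for (const SdpVideoFormat& format : factory.GetSupportedFormats()) {
+ //     std::unique_ptr<VideoEncoder> encoder =
+ //         factory.CreateVideoEncoder(format);  // May return nullptr.
+ //   }
+ //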
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override; + + std::vector<SdpVideoFormat> GetImplementations() const override; + + std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override; + + private: + const ScopedJavaGlobalRef<jobject> encoder_factory_; + std::vector<SdpVideoFormat> supported_formats_; + std::vector<SdpVideoFormat> implementations_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc new file mode 100644 index 0000000000..d581572abf --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> + +#include "api/video_codecs/video_encoder_software_fallback_wrapper.h" +#include "sdk/android/generated_video_jni/VideoEncoderFallback_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/video_encoder_wrapper.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_VideoEncoderFallback_CreateEncoder( + JNIEnv* jni, + const JavaParamRef<jobject>& j_fallback_encoder, + const JavaParamRef<jobject>& j_primary_encoder) { + std::unique_ptr<VideoEncoder> fallback_encoder = + JavaToNativeVideoEncoder(jni, j_fallback_encoder); + std::unique_ptr<VideoEncoder> primary_encoder = + JavaToNativeVideoEncoder(jni, j_primary_encoder); + + VideoEncoder* nativeWrapper = + CreateVideoEncoderSoftwareFallbackWrapper(std::move(fallback_encoder), + std::move(primary_encoder)) + .release(); + + return jlongFromPointer(nativeWrapper); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc new file mode 100644 index 0000000000..c23ab1e485 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc @@ -0,0 +1,490 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/video_encoder_wrapper.h" + +#include <utility> + +#include "common_video/h264/h264_common.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" +#include "modules/video_coding/utility/vp8_header_parser.h" +#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include "rtc_base/logging.h" +#include "rtc_base/time_utils.h" +#include "sdk/android/generated_video_jni/VideoEncoderWrapper_jni.h" +#include "sdk/android/generated_video_jni/VideoEncoder_jni.h" +#include "sdk/android/native_api/jni/class_loader.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/encoded_image.h" +#include "sdk/android/src/jni/video_codec_status.h" +#include "sdk/android/src/jni/video_frame.h" + +namespace webrtc { +namespace jni { + +VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni, + const JavaRef<jobject>& j_encoder) + : encoder_(jni, j_encoder), int_array_class_(GetClass(jni, "[I")) { + initialized_ = false; + num_resets_ = 0; + + // Fetch and update encoder info. + UpdateEncoderInfo(jni); +} +VideoEncoderWrapper::~VideoEncoderWrapper() = default; + +int VideoEncoderWrapper::InitEncode(const VideoCodec* codec_settings, + const Settings& settings) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + + codec_settings_ = *codec_settings; + capabilities_ = settings.capabilities; + number_of_cores_ = settings.number_of_cores; + num_resets_ = 0; + + return InitEncodeInternal(jni); +} + +int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) { + bool automatic_resize_on; + switch (codec_settings_.codecType) { + case kVideoCodecVP8: + automatic_resize_on = codec_settings_.VP8()->automaticResizeOn; + break; + case kVideoCodecVP9: + automatic_resize_on = codec_settings_.VP9()->automaticResizeOn; + gof_.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1); + gof_idx_ = 0; + break; + default: + automatic_resize_on = true; + } + + RTC_DCHECK(capabilities_); + ScopedJavaLocalRef<jobject> capabilities = + Java_Capabilities_Constructor(jni, capabilities_->loss_notification); + + ScopedJavaLocalRef<jobject> settings = Java_Settings_Constructor( + jni, number_of_cores_, codec_settings_.width, codec_settings_.height, + static_cast<int>(codec_settings_.startBitrate), + static_cast<int>(codec_settings_.maxFramerate), + static_cast<int>(codec_settings_.numberOfSimulcastStreams), + automatic_resize_on, capabilities); + + ScopedJavaLocalRef<jobject> callback = + Java_VideoEncoderWrapper_createEncoderCallback(jni, + jlongFromPointer(this)); + + int32_t status = JavaToNativeVideoCodecStatus( + jni, Java_VideoEncoder_initEncode(jni, encoder_, settings, callback)); + RTC_LOG(LS_INFO) << "initEncode: " << status; + + // Some of the encoder's properties depend on the settings and may change + // after initialization.
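+ // (For instance, the implementation name and scaling settings reported by
+ // the Java encoder may only be final once initEncode has run, which is why
+ // the cached EncoderInfo is refreshed below.)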
+ UpdateEncoderInfo(jni); + + if (status == WEBRTC_VIDEO_CODEC_OK) { + initialized_ = true; + } + return status; +} + +void VideoEncoderWrapper::UpdateEncoderInfo(JNIEnv* jni) { + encoder_info_.supports_native_handle = true; + + encoder_info_.implementation_name = JavaToStdString( + jni, Java_VideoEncoder_getImplementationName(jni, encoder_)); + + encoder_info_.is_hardware_accelerated = + Java_VideoEncoder_isHardwareEncoder(jni, encoder_); + + encoder_info_.scaling_settings = GetScalingSettingsInternal(jni); + + encoder_info_.resolution_bitrate_limits = JavaToNativeResolutionBitrateLimits( + jni, Java_VideoEncoder_getResolutionBitrateLimits(jni, encoder_)); + + EncoderInfo info = GetEncoderInfoInternal(jni); + encoder_info_.requested_resolution_alignment = + info.requested_resolution_alignment; + encoder_info_.apply_alignment_to_all_simulcast_layers = + info.apply_alignment_to_all_simulcast_layers; +} + +int32_t VideoEncoderWrapper::RegisterEncodeCompleteCallback( + EncodedImageCallback* callback) { + callback_ = callback; + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t VideoEncoderWrapper::Release() { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + + int32_t status = JavaToNativeVideoCodecStatus( + jni, Java_VideoEncoder_release(jni, encoder_)); + RTC_LOG(LS_INFO) << "release: " << status; + { + MutexLock lock(&frame_extra_infos_lock_); + frame_extra_infos_.clear(); + } + initialized_ = false; + + return status; +} + +int32_t VideoEncoderWrapper::Encode( + const VideoFrame& frame, + const std::vector<VideoFrameType>* frame_types) { + if (!initialized_) { + // Most likely initializing the codec failed. + return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; + } + + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + + // Construct encode info. + ScopedJavaLocalRef<jobjectArray> j_frame_types = + NativeToJavaFrameTypeArray(jni, *frame_types); + ScopedJavaLocalRef<jobject> encode_info = + Java_EncodeInfo_Constructor(jni, j_frame_types); + + FrameExtraInfo info; + info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec; + info.timestamp_rtp = frame.timestamp(); + { + MutexLock lock(&frame_extra_infos_lock_); + frame_extra_infos_.push_back(info); + } + + ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame); + ScopedJavaLocalRef<jobject> ret = + Java_VideoEncoder_encode(jni, encoder_, j_frame, encode_info); + ReleaseJavaVideoFrame(jni, j_frame); + return HandleReturnCode(jni, ret, "encode"); +} + +void VideoEncoderWrapper::SetRates(const RateControlParameters& rc_parameters) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + + ScopedJavaLocalRef<jobject> j_rc_parameters = + ToJavaRateControlParameters(jni, rc_parameters); + ScopedJavaLocalRef<jobject> ret = + Java_VideoEncoder_setRates(jni, encoder_, j_rc_parameters); + HandleReturnCode(jni, ret, "setRates"); +} + +VideoEncoder::EncoderInfo VideoEncoderWrapper::GetEncoderInfo() const { + return encoder_info_; +} + +VideoEncoderWrapper::ScalingSettings +VideoEncoderWrapper::GetScalingSettingsInternal(JNIEnv* jni) const { + ScopedJavaLocalRef<jobject> j_scaling_settings = + Java_VideoEncoder_getScalingSettings(jni, encoder_); + bool isOn = + Java_VideoEncoderWrapper_getScalingSettingsOn(jni, j_scaling_settings); + + if (!isOn) + return ScalingSettings::kOff; + + absl::optional<int> low = JavaToNativeOptionalInt( + jni, + Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings)); + absl::optional<int> high = JavaToNativeOptionalInt( + jni, + Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, 
j_scaling_settings)); + + if (low && high) + return ScalingSettings(*low, *high); + + switch (codec_settings_.codecType) { + case kVideoCodecVP8: { + // Same as in vp8_impl.cc. + static const int kLowVp8QpThreshold = 29; + static const int kHighVp8QpThreshold = 95; + return ScalingSettings(low.value_or(kLowVp8QpThreshold), + high.value_or(kHighVp8QpThreshold)); + } + case kVideoCodecVP9: { + // QP is obtained from the VP9 bitstream, so the QP corresponds to the + // bitstream range of [0, 255] and not the user-level range of [0, 63]. + static const int kLowVp9QpThreshold = 96; + static const int kHighVp9QpThreshold = 185; + + return VideoEncoder::ScalingSettings(kLowVp9QpThreshold, + kHighVp9QpThreshold); + } + case kVideoCodecH264: { + // Same as in h264_encoder_impl.cc. + static const int kLowH264QpThreshold = 24; + static const int kHighH264QpThreshold = 37; + return ScalingSettings(low.value_or(kLowH264QpThreshold), + high.value_or(kHighH264QpThreshold)); + } + default: + return ScalingSettings::kOff; + } +} + +VideoEncoder::EncoderInfo VideoEncoderWrapper::GetEncoderInfoInternal( + JNIEnv* jni) const { + ScopedJavaLocalRef<jobject> j_encoder_info = + Java_VideoEncoder_getEncoderInfo(jni, encoder_); + + jint requested_resolution_alignment = + Java_EncoderInfo_getRequestedResolutionAlignment(jni, j_encoder_info); + + jboolean apply_alignment_to_all_simulcast_layers = + Java_EncoderInfo_getApplyAlignmentToAllSimulcastLayers(jni, + j_encoder_info); + + VideoEncoder::EncoderInfo info; + info.requested_resolution_alignment = requested_resolution_alignment; + info.apply_alignment_to_all_simulcast_layers = + apply_alignment_to_all_simulcast_layers; + + return info; +} + +void VideoEncoderWrapper::OnEncodedFrame( + JNIEnv* jni, + const JavaRef<jobject>& j_encoded_image) { + EncodedImage frame = JavaToNativeEncodedImage(jni, j_encoded_image); + int64_t capture_time_ns = + GetJavaEncodedImageCaptureTimeNs(jni, j_encoded_image); + + // Encoded frames are delivered in the order received, but some of them + // may be dropped, so remove records of frames older than the current + // one. + // + // NOTE: if the current frame is associated with Encoder A, in the time + // since this frame was received, Encoder A could have been + // Release()'ed, Encoder B InitEncode()'ed (due to reuse of Encoder A), + // and frames received by Encoder B. Thus there may be frame_extra_infos + // entries that don't belong to us, and we need to be careful not to + // remove them. Removing only those entries older than the current frame + // provides this guarantee. + FrameExtraInfo frame_extra_info; + { + MutexLock lock(&frame_extra_infos_lock_); + while (!frame_extra_infos_.empty() && + frame_extra_infos_.front().capture_time_ns < capture_time_ns) { + frame_extra_infos_.pop_front(); + } + if (frame_extra_infos_.empty() || + frame_extra_infos_.front().capture_time_ns != capture_time_ns) { + RTC_LOG(LS_WARNING) + << "Java encoder produced an unexpected frame with timestamp: " + << capture_time_ns; + return; + } + frame_extra_info = frame_extra_infos_.front(); + frame_extra_infos_.pop_front(); + } + + // This is a bit subtle. `frame` is treated as read-only here, which + // implies that (i) we need to make a copy to be able to write to the + // metadata, and (ii) we should avoid using the .data() method (including + // implicit conversion to ArrayView) on the non-const copy, since that + // would trigger a copy operation on the underlying CopyOnWriteBuffer.
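+ // A concrete illustration of that pitfall (informal, not part of this
+ // file):
+ //
+ //   uint8_t* p = frame_copy.data();    // non-const overload: may deep-copy
+ //                                      // the shared payload
+ //   const EncodedImage& c = frame_copy;
+ //   const uint8_t* q = c.data();       // const overload: never copies
+ //
+ // Hence the code below only writes metadata fields on `frame_copy` and
+ // parses QP from the original `frame`.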
+ EncodedImage frame_copy = frame; + + frame_copy.SetTimestamp(frame_extra_info.timestamp_rtp); + frame_copy.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec; + + if (frame_copy.qp_ < 0) + frame_copy.qp_ = ParseQp(frame); + + CodecSpecificInfo info(ParseCodecSpecificInfo(frame)); + + callback_->OnEncodedImage(frame_copy, &info); +} + +int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni, + const JavaRef<jobject>& j_value, + const char* method_name) { + int32_t value = JavaToNativeVideoCodecStatus(jni, j_value); + if (value >= 0) { // OK or NO_OUTPUT + return value; + } + + RTC_LOG(LS_WARNING) << method_name << ": " << value; + if (value == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE || + value == WEBRTC_VIDEO_CODEC_UNINITIALIZED) { // Critical error. + RTC_LOG(LS_WARNING) << "Java encoder requested software fallback."; + return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; + } + + // Try resetting the codec. + if (Release() == WEBRTC_VIDEO_CODEC_OK && + InitEncodeInternal(jni) == WEBRTC_VIDEO_CODEC_OK) { + RTC_LOG(LS_WARNING) << "Reset Java encoder."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + RTC_LOG(LS_WARNING) << "Unable to reset Java encoder."; + return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; +} + +int VideoEncoderWrapper::ParseQp(rtc::ArrayView<const uint8_t> buffer) { + int qp; + bool success; + switch (codec_settings_.codecType) { + case kVideoCodecVP8: + success = vp8::GetQp(buffer.data(), buffer.size(), &qp); + break; + case kVideoCodecVP9: + success = vp9::GetQp(buffer.data(), buffer.size(), &qp); + break; + case kVideoCodecH264: + h264_bitstream_parser_.ParseBitstream(buffer); + qp = h264_bitstream_parser_.GetLastSliceQp().value_or(-1); + success = (qp >= 0); + break; + default: // Default is to not provide QP. + success = false; + break; + } + return success ? qp : -1; // -1 means unknown QP. +} + +CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo( + const EncodedImage& frame) { + const bool key_frame = frame._frameType == VideoFrameType::kVideoFrameKey; + + CodecSpecificInfo info; + // For stream with scalability, NextFrameConfig should be called before + // encoding and used to configure encoder, then passed here e.g. via + // FrameExtraInfo structure. But while this encoder wrapper uses only trivial + // scalability, NextFrameConfig can be called here. + auto layer_frames = svc_controller_.NextFrameConfig(/*reset=*/key_frame); + RTC_DCHECK_EQ(layer_frames.size(), 1); + info.generic_frame_info = svc_controller_.OnEncodeDone(layer_frames[0]); + if (key_frame) { + info.template_structure = svc_controller_.DependencyStructure(); + info.template_structure->resolutions = { + RenderResolution(frame._encodedWidth, frame._encodedHeight)}; + } + + info.codecType = codec_settings_.codecType; + + switch (codec_settings_.codecType) { + case kVideoCodecVP8: + info.codecSpecific.VP8.nonReference = false; + info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx; + info.codecSpecific.VP8.layerSync = false; + info.codecSpecific.VP8.keyIdx = kNoKeyIdx; + break; + case kVideoCodecVP9: + if (key_frame) { + gof_idx_ = 0; + } + info.codecSpecific.VP9.inter_pic_predicted = key_frame ? false : true; + info.codecSpecific.VP9.flexible_mode = false; + info.codecSpecific.VP9.ss_data_available = key_frame ? 
true : false; + info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx; + info.codecSpecific.VP9.temporal_up_switch = true; + info.codecSpecific.VP9.inter_layer_predicted = false; + info.codecSpecific.VP9.gof_idx = + static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof); + info.codecSpecific.VP9.num_spatial_layers = 1; + info.codecSpecific.VP9.first_frame_in_picture = true; + info.codecSpecific.VP9.spatial_layer_resolution_present = false; + if (info.codecSpecific.VP9.ss_data_available) { + info.codecSpecific.VP9.spatial_layer_resolution_present = true; + info.codecSpecific.VP9.width[0] = frame._encodedWidth; + info.codecSpecific.VP9.height[0] = frame._encodedHeight; + info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_); + } + break; + default: + break; + } + + return info; +} + +ScopedJavaLocalRef<jobject> VideoEncoderWrapper::ToJavaBitrateAllocation( + JNIEnv* jni, + const VideoBitrateAllocation& allocation) { + ScopedJavaLocalRef<jobjectArray> j_allocation_array( + jni, jni->NewObjectArray(kMaxSpatialLayers, int_array_class_.obj(), + nullptr /* initial */)); + for (int spatial_i = 0; spatial_i < kMaxSpatialLayers; ++spatial_i) { + std::array<int32_t, kMaxTemporalStreams> spatial_layer; + for (int temporal_i = 0; temporal_i < kMaxTemporalStreams; ++temporal_i) { + spatial_layer[temporal_i] = allocation.GetBitrate(spatial_i, temporal_i); + } + + ScopedJavaLocalRef<jintArray> j_array_spatial_layer = + NativeToJavaIntArray(jni, spatial_layer); + jni->SetObjectArrayElement(j_allocation_array.obj(), spatial_i, + j_array_spatial_layer.obj()); + } + return Java_BitrateAllocation_Constructor(jni, j_allocation_array); +} + +ScopedJavaLocalRef<jobject> VideoEncoderWrapper::ToJavaRateControlParameters( + JNIEnv* jni, + const VideoEncoder::RateControlParameters& rc_parameters) { + ScopedJavaLocalRef<jobject> j_bitrate_allocation = + ToJavaBitrateAllocation(jni, rc_parameters.bitrate); + + return Java_RateControlParameters_Constructor(jni, j_bitrate_allocation, + rc_parameters.framerate_fps); +} + +std::unique_ptr<VideoEncoder> JavaToNativeVideoEncoder( + JNIEnv* jni, + const JavaRef<jobject>& j_encoder) { + const jlong native_encoder = + Java_VideoEncoder_createNativeVideoEncoder(jni, j_encoder); + VideoEncoder* encoder; + if (native_encoder == 0) { + encoder = new VideoEncoderWrapper(jni, j_encoder); + } else { + encoder = reinterpret_cast<VideoEncoder*>(native_encoder); + } + return std::unique_ptr<VideoEncoder>(encoder); +} + +std::vector<VideoEncoder::ResolutionBitrateLimits> +JavaToNativeResolutionBitrateLimits( + JNIEnv* jni, + const JavaRef<jobjectArray>& j_bitrate_limits_array) { + std::vector<VideoEncoder::ResolutionBitrateLimits> resolution_bitrate_limits; + + const jsize array_length = jni->GetArrayLength(j_bitrate_limits_array.obj()); + for (int i = 0; i < array_length; ++i) { + ScopedJavaLocalRef<jobject> j_bitrate_limits = ScopedJavaLocalRef<jobject>( + jni, jni->GetObjectArrayElement(j_bitrate_limits_array.obj(), i)); + + jint frame_size_pixels = + Java_ResolutionBitrateLimits_getFrameSizePixels(jni, j_bitrate_limits); + jint min_start_bitrate_bps = + Java_ResolutionBitrateLimits_getMinStartBitrateBps(jni, + j_bitrate_limits); + jint min_bitrate_bps = + Java_ResolutionBitrateLimits_getMinBitrateBps(jni, j_bitrate_limits); + jint max_bitrate_bps = + Java_ResolutionBitrateLimits_getMaxBitrateBps(jni, j_bitrate_limits); + + resolution_bitrate_limits.push_back(VideoEncoder::ResolutionBitrateLimits( + frame_size_pixels, min_start_bitrate_bps, min_bitrate_bps, + max_bitrate_bps)); + } + + 
return resolution_bitrate_limits; +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h new file mode 100644 index 0000000000..5c5aab7588 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h @@ -0,0 +1,133 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_ +#define SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_ + +#include <jni.h> + +#include <deque> +#include <memory> +#include <string> +#include <vector> + +#include "absl/types/optional.h" +#include "api/video_codecs/video_encoder.h" +#include "common_video/h264/h264_bitstream_parser.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" +#include "rtc_base/synchronization/mutex.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +// Wraps a Java encoder and delegates all calls to it. +class VideoEncoderWrapper : public VideoEncoder { + public: + VideoEncoderWrapper(JNIEnv* jni, const JavaRef<jobject>& j_encoder); + ~VideoEncoderWrapper() override; + + int32_t InitEncode(const VideoCodec* codec_settings, + const Settings& settings) override; + + int32_t RegisterEncodeCompleteCallback( + EncodedImageCallback* callback) override; + + int32_t Release() override; + + int32_t Encode(const VideoFrame& frame, + const std::vector<VideoFrameType>* frame_types) override; + + void SetRates(const RateControlParameters& rc_parameters) override; + + EncoderInfo GetEncoderInfo() const override; + + // Should only be called by JNI. + void OnEncodedFrame(JNIEnv* jni, + const JavaRef<jobject>& j_encoded_image); + + private: + struct FrameExtraInfo { + int64_t capture_time_ns; // Used as an identifier of the frame. + + uint32_t timestamp_rtp; + }; + + int32_t InitEncodeInternal(JNIEnv* jni); + + // Takes Java VideoCodecStatus, handles it and returns WEBRTC_VIDEO_CODEC_* + // status code. + int32_t HandleReturnCode(JNIEnv* jni, + const JavaRef<jobject>& j_value, + const char* method_name); + + int ParseQp(rtc::ArrayView<const uint8_t> buffer); + + CodecSpecificInfo ParseCodecSpecificInfo(const EncodedImage& frame); + + ScopedJavaLocalRef<jobject> ToJavaBitrateAllocation( + JNIEnv* jni, + const VideoBitrateAllocation& allocation); + + ScopedJavaLocalRef<jobject> ToJavaRateControlParameters( + JNIEnv* jni, + const VideoEncoder::RateControlParameters& rc_parameters); + + void UpdateEncoderInfo(JNIEnv* jni); + + ScalingSettings GetScalingSettingsInternal(JNIEnv* jni) const; + std::vector<ResolutionBitrateLimits> GetResolutionBitrateLimits( + JNIEnv* jni) const; + + VideoEncoder::EncoderInfo GetEncoderInfoInternal(JNIEnv* jni) const; + + const ScopedJavaGlobalRef<jobject> encoder_; + const ScopedJavaGlobalRef<jclass> int_array_class_; + + // Modified both on the encoder thread and the callback thread. 
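+ // Bookkeeping protocol, as implemented in the .cc file: Encode() pushes a
+ // FrameExtraInfo keyed by capture_time_ns; OnEncodedFrame() first drops
+ // entries older than the delivered frame (frames the encoder dropped) and
+ // then matches the front entry by equal capture_time_ns to recover the
+ // RTP timestamp.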
+ Mutex frame_extra_infos_lock_; + std::deque<FrameExtraInfo> frame_extra_infos_ + RTC_GUARDED_BY(frame_extra_infos_lock_); + EncodedImageCallback* callback_; + bool initialized_; + int num_resets_; + absl::optional<VideoEncoder::Capabilities> capabilities_; + int number_of_cores_; + VideoCodec codec_settings_; + EncoderInfo encoder_info_; + H264BitstreamParser h264_bitstream_parser_; + + // Fills frame dependencies in codec-agnostic format. + ScalableVideoControllerNoLayering svc_controller_; + // VP9 variables to populate codec specific structure. + GofInfoVP9 gof_; // Contains each frame's temporal information for + // non-flexible VP9 mode. + size_t gof_idx_; +}; + +/* If the j_encoder is a wrapped native encoder, unwrap it. If it is not, + * wrap it in a VideoEncoderWrapper. + */ +std::unique_ptr<VideoEncoder> JavaToNativeVideoEncoder( + JNIEnv* jni, + const JavaRef<jobject>& j_encoder); + +bool IsHardwareVideoEncoder(JNIEnv* jni, const JavaRef<jobject>& j_encoder); + +std::vector<VideoEncoder::ResolutionBitrateLimits> +JavaToNativeResolutionBitrateLimits( + JNIEnv* jni, + const JavaRef<jobjectArray>& j_bitrate_limits_array); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc b/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc new file mode 100644 index 0000000000..121b34fa94 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc @@ -0,0 +1,319 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/video_frame.h" + +#include "api/scoped_refptr.h" +#include "common_video/include/video_frame_buffer.h" +#include "rtc_base/time_utils.h" +#include "sdk/android/generated_video_jni/VideoFrame_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/wrapped_native_i420_buffer.h" + +namespace webrtc { +namespace jni { + +namespace { + +class AndroidVideoBuffer : public VideoFrameBuffer { + public: + // Creates a native VideoFrameBuffer from a Java VideoFrame.Buffer. + static rtc::scoped_refptr<AndroidVideoBuffer> Create( + JNIEnv* jni, + const JavaRef<jobject>& j_video_frame_buffer); + + // Similar to the Create() above, but adopts and takes ownership of the Java + // VideoFrame.Buffer. I.e. retain() will not be called, but release() will be + // called when the returned AndroidVideoBuffer is destroyed. + static rtc::scoped_refptr<AndroidVideoBuffer> Adopt( + JNIEnv* jni, + const JavaRef<jobject>& j_video_frame_buffer); + + ~AndroidVideoBuffer() override; + + const ScopedJavaGlobalRef<jobject>& video_frame_buffer() const; + + // Crops a region defined by `crop_x`, `crop_y`, `crop_width` and + // `crop_height`. Scales it to size `scale_width` x `scale_height`. + rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int crop_x, + int crop_y, + int crop_width, + int crop_height, + int scale_width, + int scale_height) override; + + protected: + // Should not be called directly. Adopts the Java VideoFrame.Buffer. Use + // Create() or Adopt() instead for clarity. 
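+ // In reference-counting terms (per the Create()/Adopt() contracts above):
+ // Create() retains up front and releases in the destructor, leaving the
+ // caller's reference untouched, while Adopt() skips the retain and instead
+ // consumes one reference that the caller already holds.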
+ AndroidVideoBuffer(JNIEnv* jni, const JavaRef<jobject>& j_video_frame_buffer); + + private: + Type type() const override; + int width() const override; + int height() const override; + + rtc::scoped_refptr<I420BufferInterface> ToI420() override; + + const int width_; + const int height_; + // Holds a VideoFrame.Buffer. + const ScopedJavaGlobalRef<jobject> j_video_frame_buffer_; +}; + +class AndroidVideoI420Buffer : public I420BufferInterface { + public: + // Creates a native VideoFrameBuffer from a Java VideoFrame.I420Buffer. + static rtc::scoped_refptr<AndroidVideoI420Buffer> Create( + JNIEnv* jni, + int width, + int height, + const JavaRef<jobject>& j_video_frame_buffer); + + // Adopts and takes ownership of the Java VideoFrame.Buffer. I.e. retain() + // will not be called, but release() will be called when the returned + // AndroidVideoBuffer is destroyed. + static rtc::scoped_refptr<AndroidVideoI420Buffer> Adopt( + JNIEnv* jni, + int width, + int height, + const JavaRef<jobject>& j_video_frame_buffer); + + protected: + // Should not be called directly. Adopts the buffer. Use Adopt() instead for + // clarity. + AndroidVideoI420Buffer(JNIEnv* jni, + int width, + int height, + const JavaRef<jobject>& j_video_frame_buffer); + ~AndroidVideoI420Buffer() override; + + private: + const uint8_t* DataY() const override { return data_y_; } + const uint8_t* DataU() const override { return data_u_; } + const uint8_t* DataV() const override { return data_v_; } + + int StrideY() const override { return stride_y_; } + int StrideU() const override { return stride_u_; } + int StrideV() const override { return stride_v_; } + + int width() const override { return width_; } + int height() const override { return height_; } + + const int width_; + const int height_; + // Holds a VideoFrame.I420Buffer. 
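+ // Note: the plane pointers and strides below are read once in the
+ // constructor from the buffer's direct ByteBuffers, and are assumed to
+ // remain valid for as long as the retained Java buffer stays alive.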
+ const ScopedJavaGlobalRef<jobject> j_video_frame_buffer_; + + const uint8_t* data_y_; + const uint8_t* data_u_; + const uint8_t* data_v_; + int stride_y_; + int stride_u_; + int stride_v_; +}; + +rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Create( + JNIEnv* jni, + int width, + int height, + const JavaRef<jobject>& j_video_frame_buffer) { + Java_Buffer_retain(jni, j_video_frame_buffer); + return AndroidVideoI420Buffer::Adopt(jni, width, height, + j_video_frame_buffer); +} + +rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Adopt( + JNIEnv* jni, + int width, + int height, + const JavaRef<jobject>& j_video_frame_buffer) { + RTC_DCHECK_EQ( + static_cast<Type>(Java_Buffer_getBufferType(jni, j_video_frame_buffer)), + Type::kI420); + return rtc::make_ref_counted<AndroidVideoI420Buffer>(jni, width, height, + j_video_frame_buffer); +} + +AndroidVideoI420Buffer::AndroidVideoI420Buffer( + JNIEnv* jni, + int width, + int height, + const JavaRef<jobject>& j_video_frame_buffer) + : width_(width), + height_(height), + j_video_frame_buffer_(jni, j_video_frame_buffer) { + ScopedJavaLocalRef<jobject> j_data_y = + Java_I420Buffer_getDataY(jni, j_video_frame_buffer); + ScopedJavaLocalRef<jobject> j_data_u = + Java_I420Buffer_getDataU(jni, j_video_frame_buffer); + ScopedJavaLocalRef<jobject> j_data_v = + Java_I420Buffer_getDataV(jni, j_video_frame_buffer); + + data_y_ = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_y.obj())); + data_u_ = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_u.obj())); + data_v_ = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_v.obj())); + + stride_y_ = Java_I420Buffer_getStrideY(jni, j_video_frame_buffer); + stride_u_ = Java_I420Buffer_getStrideU(jni, j_video_frame_buffer); + stride_v_ = Java_I420Buffer_getStrideV(jni, j_video_frame_buffer); +} + +AndroidVideoI420Buffer::~AndroidVideoI420Buffer() { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + Java_Buffer_release(jni, j_video_frame_buffer_); +} + +} // namespace + +int64_t GetJavaVideoFrameTimestampNs(JNIEnv* jni, + const JavaRef<jobject>& j_video_frame) { + return Java_VideoFrame_getTimestampNs(jni, j_video_frame); +} + +rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt( + JNIEnv* jni, + const JavaRef<jobject>& j_video_frame_buffer) { + RTC_DCHECK_EQ( + static_cast<Type>(Java_Buffer_getBufferType(jni, j_video_frame_buffer)), + Type::kNative); + return rtc::make_ref_counted<AndroidVideoBuffer>(jni, j_video_frame_buffer); +} + +rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Create( + JNIEnv* jni, + const JavaRef<jobject>& j_video_frame_buffer) { + Java_Buffer_retain(jni, j_video_frame_buffer); + return Adopt(jni, j_video_frame_buffer); +} + +AndroidVideoBuffer::AndroidVideoBuffer( + JNIEnv* jni, + const JavaRef<jobject>& j_video_frame_buffer) + : width_(Java_Buffer_getWidth(jni, j_video_frame_buffer)), + height_(Java_Buffer_getHeight(jni, j_video_frame_buffer)), + j_video_frame_buffer_(jni, j_video_frame_buffer) {} + +AndroidVideoBuffer::~AndroidVideoBuffer() { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + Java_Buffer_release(jni, j_video_frame_buffer_); +} + +const ScopedJavaGlobalRef<jobject>& AndroidVideoBuffer::video_frame_buffer() + const { + return j_video_frame_buffer_; +} + +rtc::scoped_refptr<VideoFrameBuffer> AndroidVideoBuffer::CropAndScale( + int crop_x, + int crop_y, + int crop_width, + int crop_height, + int scale_width, + int scale_height) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + 
return Adopt(jni, Java_Buffer_cropAndScale(jni, j_video_frame_buffer_, crop_x, + crop_y, crop_width, crop_height, + scale_width, scale_height)); +} + +VideoFrameBuffer::Type AndroidVideoBuffer::type() const { + return Type::kNative; +} + +int AndroidVideoBuffer::width() const { + return width_; +} + +int AndroidVideoBuffer::height() const { + return height_; +} + +rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> j_i420_buffer = + Java_Buffer_toI420(jni, j_video_frame_buffer_); + // In case I420 conversion fails, we propagate the nullptr. + if (j_i420_buffer.is_null()) { + return nullptr; + } + + // We don't need to retain the buffer because toI420 returns a new object that + // we are assumed to take the ownership of. + return AndroidVideoI420Buffer::Adopt(jni, width_, height_, j_i420_buffer); +} + +rtc::scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer( + JNIEnv* jni, + const JavaRef<jobject>& j_video_frame_buffer) { + VideoFrameBuffer::Type type = static_cast<VideoFrameBuffer::Type>( + Java_Buffer_getBufferType(jni, j_video_frame_buffer)); + switch (type) { + case VideoFrameBuffer::Type::kI420: { + const int width = Java_Buffer_getWidth(jni, j_video_frame_buffer); + const int height = Java_Buffer_getHeight(jni, j_video_frame_buffer); + return AndroidVideoI420Buffer::Create(jni, width, height, + j_video_frame_buffer); + } + case VideoFrameBuffer::Type::kNative: + return AndroidVideoBuffer::Create(jni, j_video_frame_buffer); + default: + RTC_CHECK_NOTREACHED(); + } +} + +VideoFrame JavaToNativeFrame(JNIEnv* jni, + const JavaRef<jobject>& j_video_frame, + uint32_t timestamp_rtp) { + ScopedJavaLocalRef<jobject> j_video_frame_buffer = + Java_VideoFrame_getBuffer(jni, j_video_frame); + int rotation = Java_VideoFrame_getRotation(jni, j_video_frame); + int64_t timestamp_ns = Java_VideoFrame_getTimestampNs(jni, j_video_frame); + rtc::scoped_refptr<VideoFrameBuffer> buffer = + JavaToNativeFrameBuffer(jni, j_video_frame_buffer); + return VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp) + .set_timestamp_ms(timestamp_ns / rtc::kNumNanosecsPerMillisec) + .set_rotation(static_cast<VideoRotation>(rotation)) + .build(); +} + +ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni, + const VideoFrame& frame) { + rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer(); + + if (buffer->type() == VideoFrameBuffer::Type::kNative) { + AndroidVideoBuffer* android_buffer = + static_cast<AndroidVideoBuffer*>(buffer.get()); + ScopedJavaLocalRef<jobject> j_video_frame_buffer( + jni, android_buffer->video_frame_buffer()); + Java_Buffer_retain(jni, j_video_frame_buffer); + return Java_VideoFrame_Constructor( + jni, j_video_frame_buffer, static_cast<jint>(frame.rotation()), + static_cast<jlong>(frame.timestamp_us() * + rtc::kNumNanosecsPerMicrosec)); + } else { + return Java_VideoFrame_Constructor( + jni, WrapI420Buffer(jni, buffer->ToI420()), + static_cast<jint>(frame.rotation()), + static_cast<jlong>(frame.timestamp_us() * + rtc::kNumNanosecsPerMicrosec)); + } +} + +void ReleaseJavaVideoFrame(JNIEnv* jni, const JavaRef<jobject>& j_video_frame) { + Java_VideoFrame_release(jni, j_video_frame); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_frame.h b/third_party/libwebrtc/sdk/android/src/jni/video_frame.h new file mode 100644 index 0000000000..9b916de40b --- /dev/null +++ 
b/third_party/libwebrtc/sdk/android/src/jni/video_frame.h @@ -0,0 +1,43 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_ +#define SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_ + +#include <jni.h> + +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +rtc::scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer( + JNIEnv* jni, + const JavaRef<jobject>& j_video_frame_buffer); + +VideoFrame JavaToNativeFrame(JNIEnv* jni, + const JavaRef<jobject>& j_video_frame, + uint32_t timestamp_rtp); + +// NOTE: Returns a new video frame that has to be released by calling +// ReleaseJavaVideoFrame. +ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni, + const VideoFrame& frame); +void ReleaseJavaVideoFrame(JNIEnv* jni, const JavaRef<jobject>& j_video_frame); + +int64_t GetJavaVideoFrameTimestampNs(JNIEnv* jni, + const JavaRef<jobject>& j_video_frame); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc new file mode 100644 index 0000000000..14321084d0 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc @@ -0,0 +1,32 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/video_sink.h" + +#include "sdk/android/generated_video_jni/VideoSink_jni.h" +#include "sdk/android/src/jni/video_frame.h" + +namespace webrtc { +namespace jni { + +VideoSinkWrapper::VideoSinkWrapper(JNIEnv* jni, const JavaRef<jobject>& j_sink) + : j_sink_(jni, j_sink) {} + +VideoSinkWrapper::~VideoSinkWrapper() {} + +void VideoSinkWrapper::OnFrame(const VideoFrame& frame) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame); + Java_VideoSink_onFrame(jni, j_sink_, j_frame); + ReleaseJavaVideoFrame(jni, j_frame); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_sink.h b/third_party/libwebrtc/sdk/android/src/jni/video_sink.h new file mode 100644 index 0000000000..f16545434b --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_sink.h @@ -0,0 +1,36 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_ +#define SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_ + +#include <jni.h> + +#include "api/media_stream_interface.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +class VideoSinkWrapper : public rtc::VideoSinkInterface<VideoFrame> { + public: + VideoSinkWrapper(JNIEnv* jni, const JavaRef<jobject>& j_sink); + ~VideoSinkWrapper() override; + + private: + void OnFrame(const VideoFrame& frame) override; + + const ScopedJavaGlobalRef<jobject> j_sink_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_track.cc b/third_party/libwebrtc/sdk/android/src/jni/video_track.cc new file mode 100644 index 0000000000..70bedc12cf --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/video_track.cc @@ -0,0 +1,49 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> + +#include "api/media_stream_interface.h" +#include "sdk/android/generated_video_jni/VideoTrack_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/video_sink.h" + +namespace webrtc { +namespace jni { + +static void JNI_VideoTrack_AddSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast<VideoTrackInterface*>(j_native_track) + ->AddOrUpdateSink( + reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(j_native_sink), + rtc::VideoSinkWants()); +} + +static void JNI_VideoTrack_RemoveSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast<VideoTrackInterface*>(j_native_track) + ->RemoveSink(reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>( + j_native_sink)); +} + +static jlong JNI_VideoTrack_WrapSink(JNIEnv* jni, + const JavaParamRef<jobject>& sink) { + return jlongFromPointer(new VideoSinkWrapper(jni, sink)); +} + +static void JNI_VideoTrack_FreeSink(JNIEnv* jni, + jlong j_native_sink) { + delete reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(j_native_sink); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc new file mode 100644 index 0000000000..8b34495dc2 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc @@ -0,0 +1,30 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include <jni.h> + +#include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "sdk/android/generated_libvpx_vp8_jni/LibvpxVp8Decoder_jni.h" +#include "sdk/android/generated_libvpx_vp8_jni/LibvpxVp8Encoder_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_LibvpxVp8Encoder_CreateEncoder(JNIEnv* jni) { + return jlongFromPointer(VP8Encoder::Create().release()); +} + +static jlong JNI_LibvpxVp8Decoder_CreateDecoder(JNIEnv* jni) { + return jlongFromPointer(VP8Decoder::Create().release()); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc new file mode 100644 index 0000000000..ad9ca793ce --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc @@ -0,0 +1,38 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include <jni.h> + +#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Decoder_jni.h" +#include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Encoder_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_LibvpxVp9Encoder_CreateEncoder(JNIEnv* jni) { + return jlongFromPointer(VP9Encoder::Create().release()); +} + +static jboolean JNI_LibvpxVp9Encoder_IsSupported(JNIEnv* jni) { + return !SupportedVP9Codecs().empty(); +} + +static jlong JNI_LibvpxVp9Decoder_CreateDecoder(JNIEnv* jni) { + return jlongFromPointer(VP9Decoder::Create().release()); +} + +static jboolean JNI_LibvpxVp9Decoder_IsSupported(JNIEnv* jni) { + return !SupportedVP9Codecs().empty(); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc new file mode 100644 index 0000000000..f2c543e8c2 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/wrapped_native_i420_buffer.h" + +#include "sdk/android/generated_video_jni/WrappedNativeI420Buffer_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +// TODO(magjed): Write a test for this function. 
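+ // Hypothetically, such a test could look roughly like this (the assertions
+ // are sketched, not existing test utilities):
+ //
+ //   rtc::scoped_refptr<I420Buffer> buf = I420Buffer::Create(16, 16);
+ //   ScopedJavaLocalRef<jobject> j_buf = WrapI420Buffer(jni, buf);
+ //   // ...then verify via the Java-side getters that width, height,
+ //   // strides and plane addresses round-trip unchanged.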
+ScopedJavaLocalRef<jobject> WrapI420Buffer( + JNIEnv* jni, + const rtc::scoped_refptr<I420BufferInterface>& i420_buffer) { + ScopedJavaLocalRef<jobject> y_buffer = + NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataY()), + i420_buffer->StrideY() * i420_buffer->height()); + ScopedJavaLocalRef<jobject> u_buffer = + NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataU()), + i420_buffer->StrideU() * i420_buffer->ChromaHeight()); + ScopedJavaLocalRef<jobject> v_buffer = + NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataV()), + i420_buffer->StrideV() * i420_buffer->ChromaHeight()); + + return Java_WrappedNativeI420Buffer_Constructor( + jni, i420_buffer->width(), i420_buffer->height(), y_buffer, + i420_buffer->StrideY(), u_buffer, i420_buffer->StrideU(), v_buffer, + i420_buffer->StrideV(), jlongFromPointer(i420_buffer.get())); +} + +} // namespace jni +} // namespace webrtc diff --git a/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h new file mode 100644 index 0000000000..70ad062cc6 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_ +#define SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_ + +#include <jni.h> + +#include "api/video/video_frame_buffer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +// This function wraps the C++ I420 buffer and returns a Java +// VideoFrame.I420Buffer as a jobject. +ScopedJavaLocalRef<jobject> WrapI420Buffer( + JNIEnv* jni, + const rtc::scoped_refptr<I420BufferInterface>& i420_buffer); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_ diff --git a/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc b/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc new file mode 100644 index 0000000000..e812bc9527 --- /dev/null +++ b/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc @@ -0,0 +1,158 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include <jni.h> + +#include "sdk/android/generated_video_jni/YuvHelper_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/planar_functions.h" + +namespace webrtc { +namespace jni { + +void JNI_YuvHelper_CopyPlane(JNIEnv* jni, + const JavaParamRef<jobject>& j_src, + jint src_stride, + const JavaParamRef<jobject>& j_dst, + jint dst_stride, + jint width, + jint height) { + const uint8_t* src = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src.obj())); + uint8_t* dst = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst.obj())); + + libyuv::CopyPlane(src, src_stride, dst, dst_stride, width, height); +} + +void JNI_YuvHelper_I420Copy(JNIEnv* jni, + const JavaParamRef<jobject>& j_src_y, + jint src_stride_y, + const JavaParamRef<jobject>& j_src_u, + jint src_stride_u, + const JavaParamRef<jobject>& j_src_v, + jint src_stride_v, + const JavaParamRef<jobject>& j_dst_y, + jint dst_stride_y, + const JavaParamRef<jobject>& j_dst_u, + jint dst_stride_u, + const JavaParamRef<jobject>& j_dst_v, + jint dst_stride_v, + jint width, + jint height) { + const uint8_t* src_y = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj())); + const uint8_t* src_u = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj())); + const uint8_t* src_v = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj())); + uint8_t* dst_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj())); + uint8_t* dst_u = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj())); + uint8_t* dst_v = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj())); + + libyuv::I420Copy(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u, + dst_v, dst_stride_v, width, height); +} + +static void JNI_YuvHelper_I420ToNV12(JNIEnv* jni, + const JavaParamRef<jobject>& j_src_y, + jint src_stride_y, + const JavaParamRef<jobject>& j_src_u, + jint src_stride_u, + const JavaParamRef<jobject>& j_src_v, + jint src_stride_v, + const JavaParamRef<jobject>& j_dst_y, + jint dst_stride_y, + const JavaParamRef<jobject>& j_dst_uv, + jint dst_stride_uv, + jint width, + jint height) { + const uint8_t* src_y = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj())); + const uint8_t* src_u = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj())); + const uint8_t* src_v = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj())); + uint8_t* dst_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj())); + uint8_t* dst_uv = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_uv.obj())); + + libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv, + width, height); +} + +void JNI_YuvHelper_I420Rotate(JNIEnv* jni, + const JavaParamRef<jobject>& j_src_y, + jint src_stride_y, + const JavaParamRef<jobject>& j_src_u, + jint src_stride_u, + const JavaParamRef<jobject>& j_src_v, + jint src_stride_v, + const JavaParamRef<jobject>& j_dst_y, + jint dst_stride_y, + const JavaParamRef<jobject>& j_dst_u, + jint dst_stride_u, + const JavaParamRef<jobject>& j_dst_v, + jint dst_stride_v, + jint src_width, + jint src_height, + jint rotation_mode) { + const uint8_t* src_y = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj())); + const uint8_t* src_u = + static_cast<const 
uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj())); + const uint8_t* src_v = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj())); + uint8_t* dst_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj())); + uint8_t* dst_u = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj())); + uint8_t* dst_v = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj())); + + libyuv::I420Rotate(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u, + dst_v, dst_stride_v, src_width, src_height, + static_cast<libyuv::RotationMode>(rotation_mode)); +} + +void JNI_YuvHelper_ABGRToI420(JNIEnv* jni, + const JavaParamRef<jobject>& j_src, + jint src_stride, + const JavaParamRef<jobject>& j_dst_y, + jint dst_stride_y, + const JavaParamRef<jobject>& j_dst_u, + jint dst_stride_u, + const JavaParamRef<jobject>& j_dst_v, + jint dst_stride_v, + jint src_width, + jint src_height) { + const uint8_t* src = + static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src.obj())); + uint8_t* dst_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj())); + uint8_t* dst_u = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj())); + uint8_t* dst_v = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj())); + + libyuv::ABGRToI420(src, src_stride, dst_y, dst_stride_y, dst_u, dst_stride_u, + dst_v, dst_stride_v, src_width, src_height); +} + +} // namespace jni +} // namespace webrtc
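For context, the YuvHelper entry points above are thin shims over libyuv: they only turn direct ByteBuffers into raw pointers and forward the strides. The same conversion can be performed natively without any JNI plumbing. The sketch below is illustrative only: it assumes a tightly packed I420 source, derives plane offsets and strides from width/height much like the Java-side helpers do, and the name I420ToNV12Packed is hypothetical rather than part of this patch.

#include <cstdint>
#include <vector>

#include "third_party/libyuv/include/libyuv/convert_from.h"

// Converts a tightly packed I420 image to NV12, mirroring what
// JNI_YuvHelper_I420ToNV12 does once the ByteBuffer plumbing is stripped.
void I420ToNV12Packed(const uint8_t* src_i420, int width, int height,
                      std::vector<uint8_t>* dst_nv12) {
  const int y_size = width * height;
  const int chroma_width = (width + 1) / 2;
  const int chroma_height = (height + 1) / 2;
  const uint8_t* src_y = src_i420;
  const uint8_t* src_u = src_y + y_size;
  const uint8_t* src_v = src_u + chroma_width * chroma_height;

  // NV12 keeps the Y plane and follows it with one interleaved U/V plane.
  dst_nv12->resize(y_size + 2 * chroma_width * chroma_height);
  uint8_t* dst_y = dst_nv12->data();
  uint8_t* dst_uv = dst_y + y_size;

  libyuv::I420ToNV12(src_y, /*src_stride_y=*/width,
                     src_u, /*src_stride_u=*/chroma_width,
                     src_v, /*src_stride_v=*/chroma_width,
                     dst_y, /*dst_stride_y=*/width,
                     dst_uv, /*dst_stride_uv=*/2 * chroma_width,
                     width, height);
}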