From 6bf0a5cb5034a7e684dcc3500e841785237ce2dd Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Sun, 7 Apr 2024 19:32:43 +0200 Subject: Adding upstream version 1:115.7.0. Signed-off-by: Daniel Baumann --- .../videoengine/CaptureCapabilityAndroid.java | 25 +++ .../webrtc/videoengine/VideoCaptureAndroid.java | 216 +++++++++++++++++++++ .../videoengine/VideoCaptureDeviceInfoAndroid.java | 121 ++++++++++++ 3 files changed, 362 insertions(+) create mode 100644 dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java create mode 100644 dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java create mode 100644 dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java (limited to 'dom/media/systemservices/android_video_capture/java') diff --git a/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java new file mode 100644 index 0000000000..305fc74804 --- /dev/null +++ b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.videoengine; + +import org.mozilla.gecko.annotation.WebRTCJNITarget; + +@WebRTCJNITarget +public class CaptureCapabilityAndroid { + public String name; + public int width[]; + public int height[]; + public int minMilliFPS; + public int maxMilliFPS; + public boolean frontFacing; + public boolean infrared; + public int orientation; +} diff --git a/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java new file mode 100644 index 0000000000..cc54009a7b --- /dev/null +++ b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java @@ -0,0 +1,216 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
 */

package org.webrtc.videoengine;

import java.io.IOException;
import java.util.List;

import android.content.Context;
import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;

import java.util.concurrent.CountDownLatch;

import org.mozilla.gecko.annotation.WebRTCJNITarget;

import org.webrtc.CameraEnumerator;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.CapturerObserver;
import org.webrtc.EglBase;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoFrame;
import org.webrtc.VideoFrame.I420Buffer;

// Bridges a webrtc CameraVideoCapturer to the native (C++)
// VideoCaptureAndroid object. One instance handles one start/stop cycle:
// it is constructed from native code (@WebRTCJNITarget), started via
// startCapture(), and delivers frames back through the native
// ProvideCameraFrame() callback. It listens to its own capturer both as
// CameraEventsHandler (camera lifecycle) and CapturerObserver (start/stop
// results and frames).
public class VideoCaptureAndroid implements CameraVideoCapturer.CameraEventsHandler, CapturerObserver {
  private final static String TAG = "WEBRTC-JC";

  // Camera name with the "Facing front:/back:" prefix and any
  // " (infrared)" suffix stripped off in the constructor.
  private final String deviceName;
  // |VideoCaptureAndroid*| in C++. Non-zero only while capture is running;
  // onFrameCaptured() uses it as the gate for forwarding frames, and
  // stopCapture() clears it before shutting the camera down.
  private volatile long native_capturer;
  private Context context;
  // Null if the constructor failed to create a capturer; startCapture()
  // and stopCapture() check for that and report failure.
  private CameraVideoCapturer cameraVideoCapturer;
  private EglBase eglBase;
  private SurfaceTextureHelper surfaceTextureHelper;

  // This class is recreated every time we start/stop capture, so we
  // can safely create the one-shot CountDownLatches here. They pair
  // startCapture()/stopCapture() (which block) with the
  // onCapturerStarted()/onCapturerStopped() callbacks (which count down).
  private final CountDownLatch capturerStarted = new CountDownLatch(1);
  private boolean capturerStartedSucceeded = false;
  private final CountDownLatch capturerStopped = new CountDownLatch(1);

  // Constructs a capturer for the camera encoded in |deviceName|.
  // |deviceName| is expected in the form produced by
  // VideoCaptureDeviceInfoAndroid: "Facing (front|back):<name>[ (infrared)]".
  @WebRTCJNITarget
  public VideoCaptureAndroid(String deviceName) {
    // Remove the camera facing information from the name.
    String[] parts = deviceName.split("Facing (front|back):");
    if (parts.length == 2) {
      this.deviceName = parts[1].replace(" (infrared)", "");
    } else {
      Log.e(TAG, "VideoCaptureAndroid: Expected facing mode as part of name: " + deviceName);
      this.deviceName = deviceName;
    }
    this.context = GetContext();

    // Prefer the Camera2 API when the device supports it.
    CameraEnumerator enumerator;
    if (Camera2Enumerator.isSupported(context)) {
      enumerator = new Camera2Enumerator(context);
    } else {
      enumerator = new Camera1Enumerator();
    }
    try {
      cameraVideoCapturer = enumerator.createCapturer(this.deviceName, this);
      eglBase = EglBase.create();
      surfaceTextureHelper = SurfaceTextureHelper.create("VideoCaptureAndroidSurfaceTextureHelper", eglBase.getEglBaseContext());
      cameraVideoCapturer.initialize(surfaceTextureHelper, context, this);
    } catch (java.lang.IllegalArgumentException e) {
      // cameraVideoCapturer stays null; startCapture()/stopCapture()
      // return false in that case.
      Log.e(TAG, "VideoCaptureAndroid: Exception while creating capturer: " + e);
    }
  }

  // Return the global application context.
  @WebRTCJNITarget
  private static native Context GetContext();

  // Called by native code. Returns true if capturer is started.
  //
  // Note that this actually opens the camera, and Camera callbacks run on the
  // thread that calls open(), so this is done on the CameraThread. Since ViE
  // API needs a synchronous success return value we wait for the result.
  //
  // NOTE(review): |max_mfps| is named as milli-fps but is passed straight
  // to CameraVideoCapturer.startCapture() — confirm the framerate unit
  // expected by that API. |min_mfps| is only logged.
  @WebRTCJNITarget
  private synchronized boolean startCapture(
      final int width, final int height,
      final int min_mfps, final int max_mfps,
      long native_capturer) {
    Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
        min_mfps + ":" + max_mfps);

    if (cameraVideoCapturer == null) {
      return false;
    }

    cameraVideoCapturer.startCapture(width, height, max_mfps);
    try {
      // Released by onCapturerStarted().
      capturerStarted.await();
    } catch (InterruptedException e) {
      // NOTE(review): the interrupt status is swallowed here (no
      // Thread.currentThread().interrupt()); callers just see a failed start.
      return false;
    }
    // Only publish the native pointer (enabling frame delivery in
    // onFrameCaptured()) once the capturer reported a successful start.
    if (capturerStartedSucceeded) {
      this.native_capturer = native_capturer;
    }
    return capturerStartedSucceeded;
  }

  // Called by native code.
Returns true when camera is known to be stopped. + @WebRTCJNITarget + private synchronized boolean stopCapture() { + Log.d(TAG, "stopCapture"); + if (cameraVideoCapturer == null) { + return false; + } + + native_capturer = 0; + try { + cameraVideoCapturer.stopCapture(); + capturerStopped.await(); + } catch (InterruptedException e) { + return false; + } + Log.d(TAG, "stopCapture done"); + return true; + } + + @WebRTCJNITarget + private int getDeviceOrientation() { + int orientation = 0; + if (context != null) { + WindowManager wm = (WindowManager) context.getSystemService( + Context.WINDOW_SERVICE); + switch(wm.getDefaultDisplay().getRotation()) { + case Surface.ROTATION_90: + orientation = 90; + break; + case Surface.ROTATION_180: + orientation = 180; + break; + case Surface.ROTATION_270: + orientation = 270; + break; + case Surface.ROTATION_0: + default: + orientation = 0; + break; + } + } + return orientation; + } + + @WebRTCJNITarget + private native void ProvideCameraFrame( + int width, int height, + java.nio.ByteBuffer dataY, int strideY, + java.nio.ByteBuffer dataU, int strideU, + java.nio.ByteBuffer dataV, int strideV, + int rotation, long timeStamp, long captureObject); + + // + // CameraVideoCapturer.CameraEventsHandler interface + // + + // Camera error handler - invoked when camera can not be opened + // or any camera exception happens on camera thread. + public void onCameraError(String errorDescription) {} + + // Called when camera is disconnected. + public void onCameraDisconnected() {} + + // Invoked when camera stops receiving frames. + public void onCameraFreezed(String errorDescription) {} + + // Callback invoked when camera is opening. + public void onCameraOpening(String cameraName) {} + + // Callback invoked when first camera frame is available after camera is started. + public void onFirstFrameAvailable() {} + + // Callback invoked when camera is closed. 
+ public void onCameraClosed() {} + + // + // CapturerObserver interface + // + + // Notify if the capturer have been started successfully or not. + public void onCapturerStarted(boolean success) { + capturerStartedSucceeded = success; + capturerStarted.countDown(); + } + + // Notify that the capturer has been stopped. + public void onCapturerStopped() { + capturerStopped.countDown(); + } + + // Delivers a captured frame. + public void onFrameCaptured(VideoFrame frame) { + if (native_capturer != 0) { + I420Buffer i420Buffer = frame.getBuffer().toI420(); + ProvideCameraFrame(i420Buffer.getWidth(), i420Buffer.getHeight(), + i420Buffer.getDataY(), i420Buffer.getStrideY(), + i420Buffer.getDataU(), i420Buffer.getStrideU(), + i420Buffer.getDataV(), i420Buffer.getStrideV(), + frame.getRotation(), + frame.getTimestampNs() / 1000000, native_capturer); + + i420Buffer.release(); + } + } +} diff --git a/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java new file mode 100644 index 0000000000..8ad8453955 --- /dev/null +++ b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.videoengine; + +import java.util.ArrayList; +import java.util.List; + +import android.Manifest; +import android.app.Activity; +import android.content.Context; +import android.util.Log; + +import org.mozilla.gecko.GeckoAppShell; +import org.mozilla.gecko.annotation.WebRTCJNITarget; + +import org.webrtc.CameraEnumerator; +import org.webrtc.CameraEnumerationAndroid.CaptureFormat; +import org.webrtc.Camera1Enumerator; +import org.webrtc.Camera2Enumerator; + +public class VideoCaptureDeviceInfoAndroid { + private final static String TAG = "WEBRTC-JC"; + + // Returns information about all cameras on the device. + // Since this reflects static information about the hardware present, there is + // no need to call this function more than once in a single process. It is + // marked "private" as it is only called by native code. + @WebRTCJNITarget + private static CaptureCapabilityAndroid[] getDeviceInfo() { + final Context context = GeckoAppShell.getApplicationContext(); + + if (Camera2Enumerator.isSupported(context)) { + return createDeviceList(new Camera2Enumerator(context)); + } else { + return createDeviceList(new Camera1Enumerator()); + } + } + + private static CaptureCapabilityAndroid[] createDeviceList(CameraEnumerator enumerator) { + + ArrayList allDevices = new ArrayList(); + ArrayList IRDevices = new ArrayList(); + + for (String camera: enumerator.getDeviceNames()) { + List formats = enumerator.getSupportedFormats(camera); + int numFormats = formats.size(); + if (numFormats <= 0) { + continue; + } + + CaptureCapabilityAndroid device = new CaptureCapabilityAndroid(); + + // The only way to plumb through whether the device is front facing + // or not is by the name, but the name we receive depends upon the + // camera API in use. For the Camera1 API, this information is + // already present, but that is not the case when using Camera2. + // Later on, we look up the camera by name, so we have to use a + // format this is easy to undo. 
Ideally, libwebrtc would expose + // camera facing in VideoCaptureCapability and none of this would be + // necessary. + device.name = "Facing " + (enumerator.isFrontFacing(camera) ? "front" : "back") + ":" + camera; + + + boolean ir = enumerator.isInfrared(camera); + device.infrared = ir; + if (ir) { + device.name += " (infrared)"; + } + + // This isn't part of the new API, but we don't call + // GetDeviceOrientation() anywhere, so this value is unused. + device.orientation = 0; + + device.width = new int[numFormats]; + device.height = new int[numFormats]; + device.minMilliFPS = formats.get(0).framerate.min; + device.maxMilliFPS = formats.get(0).framerate.max; + int i = 0; + for (CaptureFormat format: formats) { + device.width[i] = format.width; + device.height[i] = format.height; + if (format.framerate.min < device.minMilliFPS) { + device.minMilliFPS = format.framerate.min; + } + if (format.framerate.max > device.maxMilliFPS) { + device.maxMilliFPS = format.framerate.max; + } + i++; + } + device.frontFacing = enumerator.isFrontFacing(camera); + // Infrared devices always last (but front facing ones before + // non-front-facing ones), front-facing non IR first, other in + // the middle. + if (!device.infrared) { + if (device.frontFacing) { + allDevices.add(0, device); + } else { + allDevices.add(device); + } + } else { + if (device.frontFacing) { + IRDevices.add(0, device); + } else { + IRDevices.add(device); + } + } + } + + allDevices.addAll(IRDevices); + + return allDevices.toArray(new CaptureCapabilityAndroid[0]); + } +} -- cgit v1.2.3