Diffstat (limited to 'dom/media/systemservices/android_video_capture')
-rw-r--r--  dom/media/systemservices/android_video_capture/device_info_android.cc                                             | 316
-rw-r--r--  dom/media/systemservices/android_video_capture/device_info_android.h                                              |  73
-rw-r--r--  dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java      |  25
-rw-r--r--  dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java           | 216
-rw-r--r--  dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java | 121
-rw-r--r--  dom/media/systemservices/android_video_capture/video_capture_android.cc                                           | 270
-rw-r--r--  dom/media/systemservices/android_video_capture/video_capture_android.h                                            |  47
7 files changed, 1068 insertions, 0 deletions
diff --git a/dom/media/systemservices/android_video_capture/device_info_android.cc b/dom/media/systemservices/android_video_capture/device_info_android.cc
new file mode 100644
index 0000000000..581040eb94
--- /dev/null
+++ b/dom/media/systemservices/android_video_capture/device_info_android.cc
@@ -0,0 +1,316 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_android.h"
+
+#include <algorithm>
+#include <string>
+#include <sstream>
+#include <vector>
+
+#include "rtc_base/logging.h"
+#include "modules/utility/include/helpers_android.h"
+
+#include "mozilla/jni/Utils.h"
+
+namespace webrtc {
+
+namespace videocapturemodule {
+
+// Helper for storing lists of pairs of ints. Used e.g. for resolutions & FPS
+// ranges.
+typedef std::pair<int, int> IntPair;
+typedef std::vector<IntPair> IntPairs;
+
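+// e.g. IntPairsToString({{640, 480}, {1280, 720}}, 'x') returns
+// "(640x480), (1280x720)".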
+static std::string IntPairsToString(const IntPairs& pairs, char separator) {
+ std::stringstream stream;
+ for (size_t i = 0; i < pairs.size(); ++i) {
+ if (i > 0) {
+ stream << ", ";
+ }
+ stream << "(" << pairs[i].first << separator << pairs[i].second << ")";
+ }
+ return stream.str();
+}
+
+struct AndroidCameraInfo {
+ std::string name;
+ bool front_facing;
+ int orientation;
+ IntPairs resolutions; // Pairs are: (width,height).
+ // Pairs are (min,max) in units of FPS*1000 ("milli-frame-per-second").
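+  // A constant 30 fps mode, for example, is stored as (30000, 30000).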
+ IntPairs mfpsRanges;
+
+ std::string ToString() {
+ std::stringstream stream;
+ stream << "Name: [" << name << "], MFPS ranges: ["
+ << IntPairsToString(mfpsRanges, ':')
+ << "], front_facing: " << front_facing
+ << ", orientation: " << orientation << ", resolutions: ["
+ << IntPairsToString(resolutions, 'x') << "]";
+ return stream.str();
+ }
+};
+
+// Camera info; populated during DeviceInfoAndroid::Refresh()
+static std::vector<AndroidCameraInfo>* g_camera_info = NULL;
+
+static JavaVM* g_jvm_dev_info = NULL;
+
+// Set |*index| to the index of |name| in g_camera_info, or return false if
+// no match is found.
+static bool FindCameraIndexByName(const std::string& name, size_t* index) {
+ for (size_t i = 0; i < g_camera_info->size(); ++i) {
+ if (g_camera_info->at(i).name == name) {
+ *index = i;
+ return true;
+ }
+ }
+ return false;
+}
+
+// Returns a pointer to the named member of g_camera_info, or NULL if no match
+// is found.
+static AndroidCameraInfo* FindCameraInfoByName(const std::string& name) {
+ size_t index = 0;
+ if (FindCameraIndexByName(name, &index)) {
+ return &g_camera_info->at(index);
+ }
+ return NULL;
+}
+
+// static
+void DeviceInfoAndroid::Initialize(JavaVM* javaVM) {
+ // TODO(henrike): this "if" would make a lot more sense as an assert, but
+ // Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine() and
+ // Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate() conspire to
+  // prevent this. Once that code is made to call
+  // VideoEngine::SetAndroidObjects() only once per process, this can turn
+  // into an assert.
+ if (g_camera_info) {
+ return;
+ }
+
+ g_jvm_dev_info = javaVM;
+ BuildDeviceList();
+}
+
+void DeviceInfoAndroid::BuildDeviceList() {
+ if (!g_jvm_dev_info) {
+ return;
+ }
+
+ AttachThreadScoped ats(g_jvm_dev_info);
+ JNIEnv* jni = ats.env();
+
+  // Refresh() may call us again with an (empty) list already allocated;
+  // don't leak it.
+  if (!g_camera_info) {
+    g_camera_info = new std::vector<AndroidCameraInfo>();
+  }
+ jclass j_info_class = mozilla::jni::GetClassRef(
+ jni, "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
+ jclass j_cap_class = mozilla::jni::GetClassRef(
+ jni, "org/webrtc/videoengine/CaptureCapabilityAndroid");
+  assert(j_info_class && j_cap_class);
+ jmethodID j_get_device_info = jni->GetStaticMethodID(
+ j_info_class, "getDeviceInfo",
+ "()[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
+ jarray j_camera_caps = static_cast<jarray>(
+ jni->CallStaticObjectMethod(j_info_class, j_get_device_info));
+ if (jni->ExceptionCheck()) {
+ jni->ExceptionClear();
+ RTC_LOG(LS_INFO) << __FUNCTION__ << ": Failed to get camera capabilities.";
+ return;
+ }
+ if (j_camera_caps == nullptr) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << ": Failed to get camera capabilities.";
+ return;
+ }
+
+ const jsize capLength = jni->GetArrayLength(j_camera_caps);
+
+ jfieldID widthField = jni->GetFieldID(j_cap_class, "width", "[I");
+ jfieldID heightField = jni->GetFieldID(j_cap_class, "height", "[I");
+ jfieldID maxFpsField = jni->GetFieldID(j_cap_class, "maxMilliFPS", "I");
+ jfieldID minFpsField = jni->GetFieldID(j_cap_class, "minMilliFPS", "I");
+ jfieldID orientationField = jni->GetFieldID(j_cap_class, "orientation", "I");
+ jfieldID frontFacingField = jni->GetFieldID(j_cap_class, "frontFacing", "Z");
+ jfieldID nameField =
+ jni->GetFieldID(j_cap_class, "name", "Ljava/lang/String;");
+ if (widthField == NULL || heightField == NULL || maxFpsField == NULL ||
+ minFpsField == NULL || orientationField == NULL ||
+ frontFacingField == NULL || nameField == NULL) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << ": Failed to get field Id.";
+ return;
+ }
+
+ for (jsize i = 0; i < capLength; i++) {
+ jobject capabilityElement =
+ jni->GetObjectArrayElement((jobjectArray)j_camera_caps, i);
+
+ AndroidCameraInfo info;
+ jstring camName =
+ static_cast<jstring>(jni->GetObjectField(capabilityElement, nameField));
+ const char* camChars = jni->GetStringUTFChars(camName, nullptr);
+ info.name = std::string(camChars);
+ jni->ReleaseStringUTFChars(camName, camChars);
+
+ info.orientation = jni->GetIntField(capabilityElement, orientationField);
+ info.front_facing =
+ jni->GetBooleanField(capabilityElement, frontFacingField);
+ jint min_mfps = jni->GetIntField(capabilityElement, minFpsField);
+ jint max_mfps = jni->GetIntField(capabilityElement, maxFpsField);
+
+ jintArray widthResArray = static_cast<jintArray>(
+ jni->GetObjectField(capabilityElement, widthField));
+ jintArray heightResArray = static_cast<jintArray>(
+ jni->GetObjectField(capabilityElement, heightField));
+
+ const jsize numRes = jni->GetArrayLength(widthResArray);
+
+ jint* widths = jni->GetIntArrayElements(widthResArray, nullptr);
+ jint* heights = jni->GetIntArrayElements(heightResArray, nullptr);
+
+ for (jsize j = 0; j < numRes; ++j) {
+ info.resolutions.push_back(std::make_pair(widths[j], heights[j]));
+ }
+
+ info.mfpsRanges.push_back(std::make_pair(min_mfps, max_mfps));
+ g_camera_info->push_back(info);
+
+ jni->ReleaseIntArrayElements(widthResArray, widths, JNI_ABORT);
+ jni->ReleaseIntArrayElements(heightResArray, heights, JNI_ABORT);
+ }
+
+ jni->DeleteLocalRef(j_info_class);
+ jni->DeleteLocalRef(j_cap_class);
+}
+
+void DeviceInfoAndroid::DeInitialize() {
+ if (g_camera_info) {
+ delete g_camera_info;
+ g_camera_info = NULL;
+ }
+}
+
+int32_t DeviceInfoAndroid::Refresh() {
+ if (!g_camera_info || g_camera_info->size() == 0) {
+    DeviceInfoAndroid::BuildDeviceList();
+    if (!g_camera_info) {
+      // BuildDeviceList() bails out without allocating when no JavaVM has
+      // been set.
+      return -1;
+    }
+#ifdef DEBUG
+ int frontFacingIndex = -1;
+ for (uint32_t i = 0; i < g_camera_info->size(); i++) {
+ if (g_camera_info->at(i).front_facing) {
+ frontFacingIndex = i;
+ }
+ }
+ // Either there is a front-facing camera, and it's first in the list, or
+ // there is no front-facing camera.
+ MOZ_ASSERT(frontFacingIndex == 0 || frontFacingIndex == -1);
+#endif
+ }
+ return 0;
+}
+
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
+ return new videocapturemodule::DeviceInfoAndroid();
+}
+
+DeviceInfoAndroid::DeviceInfoAndroid() : DeviceInfoImpl() {}
+
+DeviceInfoAndroid::~DeviceInfoAndroid() {}
+
+bool DeviceInfoAndroid::FindCameraIndex(const char* deviceUniqueIdUTF8,
+ size_t* index) {
+ return FindCameraIndexByName(deviceUniqueIdUTF8, index);
+}
+
+int32_t DeviceInfoAndroid::Init() { return 0; }
+
+uint32_t DeviceInfoAndroid::NumberOfDevices() {
+ Refresh();
+  return g_camera_info ? g_camera_info->size() : 0;
+}
+
+int32_t DeviceInfoAndroid::GetDeviceName(
+ uint32_t deviceNumber, char* deviceNameUTF8, uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8, uint32_t deviceUniqueIdUTF8Length,
+ char* /*productUniqueIdUTF8*/, uint32_t /*productUniqueIdUTF8Length*/,
+ pid_t* /*pid*/) {
+  if (!g_camera_info || deviceNumber >= g_camera_info->size()) {
+ return -1;
+ }
+ const AndroidCameraInfo& info = g_camera_info->at(deviceNumber);
+ if (info.name.length() + 1 > deviceNameLength ||
+ info.name.length() + 1 > deviceUniqueIdUTF8Length) {
+ return -1;
+ }
+ memcpy(deviceNameUTF8, info.name.c_str(), info.name.length() + 1);
+ memcpy(deviceUniqueIdUTF8, info.name.c_str(), info.name.length() + 1);
+ return 0;
+}
+
+int32_t DeviceInfoAndroid::CreateCapabilityMap(const char* deviceUniqueIdUTF8) {
+ _captureCapabilities.clear();
+ const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
+ if (info == NULL) {
+ return -1;
+ }
+
+ for (size_t i = 0; i < info->resolutions.size(); ++i) {
+ for (size_t j = 0; j < info->mfpsRanges.size(); ++j) {
+ const IntPair& size = info->resolutions[i];
+ const IntPair& mfpsRange = info->mfpsRanges[j];
+ VideoCaptureCapability cap;
+ cap.width = size.first;
+ cap.height = size.second;
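+      // Milli-fps back to fps: e.g. an mfps range of (15000, 30000) yields
+      // maxFPS = 30.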
+ cap.maxFPS = mfpsRange.second / 1000;
+ cap.videoType = VideoType::kNV21;
+ _captureCapabilities.push_back(cap);
+ }
+ }
+ return _captureCapabilities.size();
+}
+
+int32_t DeviceInfoAndroid::GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation) {
+ const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
+ if (info == NULL || VideoCaptureImpl::RotationFromDegrees(
+ info->orientation, &orientation) != 0) {
+ return -1;
+ }
+ return 0;
+}
+
+void DeviceInfoAndroid::GetMFpsRange(const char* deviceUniqueIdUTF8,
+ int max_fps_to_match, int* min_mfps,
+ int* max_mfps) {
+ const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
+ if (info == NULL) {
+ return;
+ }
+ int desired_mfps = max_fps_to_match * 1000;
+ int best_diff_mfps = 0;
+ RTC_LOG(LS_INFO) << "Search for best target mfps " << desired_mfps;
+ // Search for best fps range with preference shifted to constant fps modes.
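+  // For example, with a 30 fps target (desired_mfps == 30000), a variable
+  // (15000, 30000) range scores 15000 + 0 + 7500 = 22500, while a constant
+  // (30000, 30000) range scores 0, so the constant mode wins.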
+ for (size_t i = 0; i < info->mfpsRanges.size(); ++i) {
+ int diff_mfps =
+ abs(info->mfpsRanges[i].first - desired_mfps) +
+ abs(info->mfpsRanges[i].second - desired_mfps) +
+ (info->mfpsRanges[i].second - info->mfpsRanges[i].first) / 2;
+ RTC_LOG(LS_INFO) << "Fps range " << info->mfpsRanges[i].first << ":"
+ << info->mfpsRanges[i].second
+ << ". Distance: " << diff_mfps;
+ if (i == 0 || diff_mfps < best_diff_mfps) {
+ best_diff_mfps = diff_mfps;
+ *min_mfps = info->mfpsRanges[i].first;
+ *max_mfps = info->mfpsRanges[i].second;
+ }
+ }
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/dom/media/systemservices/android_video_capture/device_info_android.h b/dom/media/systemservices/android_video_capture/device_info_android.h
new file mode 100644
index 0000000000..ac88b2b8ba
--- /dev/null
+++ b/dom/media/systemservices/android_video_capture/device_info_android.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
+
+#include <jni.h>
+
+#include "modules/video_capture/device_info_impl.h"
+#include "modules/video_capture/video_capture_impl.h"
+
+#define AndroidJavaCaptureDeviceInfoClass \
+ "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"
+#define AndroidJavaCaptureCapabilityClass \
+ "org/webrtc/videoengine/CaptureCapabilityAndroid"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+class DeviceInfoAndroid : public DeviceInfoImpl {
+ public:
+ static void Initialize(JavaVM* javaVM);
+ static void DeInitialize();
+
+ DeviceInfoAndroid();
+ virtual ~DeviceInfoAndroid();
+
+ // Set |*index| to the index of the camera matching |deviceUniqueIdUTF8|, or
+ // return false if no match.
+ bool FindCameraIndex(const char* deviceUniqueIdUTF8, size_t* index);
+
+ virtual int32_t Init();
+ virtual uint32_t NumberOfDevices();
+ virtual int32_t Refresh();
+ virtual int32_t GetDeviceName(uint32_t deviceNumber, char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8 = 0,
+ uint32_t productUniqueIdUTF8Length = 0,
+ pid_t* pid = 0);
+ virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
+
+ virtual int32_t DisplayCaptureSettingsDialogBox(
+ const char* /*deviceUniqueIdUTF8*/, const char* /*dialogTitleUTF8*/,
+ void* /*parentWindow*/, uint32_t /*positionX*/, uint32_t /*positionY*/) {
+ return -1;
+ }
+ virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation);
+
+  // Populate |min_mfps| and |max_mfps| with the supported fps range of the
+  // device that is closest to |max_fps_to_match|.
+ void GetMFpsRange(const char* deviceUniqueIdUTF8, int max_fps_to_match,
+ int* min_mfps, int* max_mfps);
+
+ private:
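+  // Expected capture delay; presumably in milliseconds. Not referenced
+  // elsewhere in this patch.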
+ enum { kExpectedCaptureDelay = 190 };
+ static void BuildDeviceList();
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
diff --git a/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java
new file mode 100644
index 0000000000..305fc74804
--- /dev/null
+++ b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import org.mozilla.gecko.annotation.WebRTCJNITarget;
+
+@WebRTCJNITarget
+public class CaptureCapabilityAndroid {
+ public String name;
+ public int width[];
+ public int height[];
+ public int minMilliFPS;
+ public int maxMilliFPS;
+ public boolean frontFacing;
+ public boolean infrared;
+ public int orientation;
+}
diff --git a/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
new file mode 100644
index 0000000000..cc54009a7b
--- /dev/null
+++ b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.io.IOException;
+import java.util.List;
+
+import android.content.Context;
+import android.util.Log;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import java.util.concurrent.CountDownLatch;
+
+import org.mozilla.gecko.annotation.WebRTCJNITarget;
+
+import org.webrtc.CameraEnumerator;
+import org.webrtc.Camera1Enumerator;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraVideoCapturer;
+import org.webrtc.CapturerObserver;
+import org.webrtc.EglBase;
+import org.webrtc.SurfaceTextureHelper;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoFrame.I420Buffer;
+
+public class VideoCaptureAndroid implements CameraVideoCapturer.CameraEventsHandler, CapturerObserver {
+ private final static String TAG = "WEBRTC-JC";
+
+ private final String deviceName;
+ private volatile long native_capturer; // |VideoCaptureAndroid*| in C++.
+ private Context context;
+ private CameraVideoCapturer cameraVideoCapturer;
+ private EglBase eglBase;
+ private SurfaceTextureHelper surfaceTextureHelper;
+
+  // This class is recreated every time we start/stop capture, so we
+  // can safely create the CountDownLatches here.
+ private final CountDownLatch capturerStarted = new CountDownLatch(1);
+ private boolean capturerStartedSucceeded = false;
+ private final CountDownLatch capturerStopped = new CountDownLatch(1);
+
+ @WebRTCJNITarget
+ public VideoCaptureAndroid(String deviceName) {
+ // Remove the camera facing information from the name.
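+    // e.g. a deviceName of "Facing front:1" (the format produced by
+    // VideoCaptureDeviceInfoAndroid.createDeviceList) yields "1".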
+ String[] parts = deviceName.split("Facing (front|back):");
+ if (parts.length == 2) {
+ this.deviceName = parts[1].replace(" (infrared)", "");
+ } else {
+ Log.e(TAG, "VideoCaptureAndroid: Expected facing mode as part of name: " + deviceName);
+ this.deviceName = deviceName;
+ }
+ this.context = GetContext();
+
+ CameraEnumerator enumerator;
+ if (Camera2Enumerator.isSupported(context)) {
+ enumerator = new Camera2Enumerator(context);
+ } else {
+ enumerator = new Camera1Enumerator();
+ }
+ try {
+ cameraVideoCapturer = enumerator.createCapturer(this.deviceName, this);
+ eglBase = EglBase.create();
+ surfaceTextureHelper = SurfaceTextureHelper.create("VideoCaptureAndroidSurfaceTextureHelper", eglBase.getEglBaseContext());
+ cameraVideoCapturer.initialize(surfaceTextureHelper, context, this);
+ } catch (java.lang.IllegalArgumentException e) {
+ Log.e(TAG, "VideoCaptureAndroid: Exception while creating capturer: " + e);
+ }
+ }
+
+ // Return the global application context.
+ @WebRTCJNITarget
+ private static native Context GetContext();
+
+ // Called by native code. Returns true if capturer is started.
+ //
+ // Note that this actually opens the camera, and Camera callbacks run on the
+  // thread that calls open(), so this is done on the CameraThread. Since the
+  // ViE API needs a synchronous success return value, we wait for the result.
+ @WebRTCJNITarget
+ private synchronized boolean startCapture(
+ final int width, final int height,
+ final int min_mfps, final int max_mfps,
+ long native_capturer) {
+ Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
+ min_mfps + ":" + max_mfps);
+
+ if (cameraVideoCapturer == null) {
+ return false;
+ }
+
+ cameraVideoCapturer.startCapture(width, height, max_mfps);
+ try {
+ capturerStarted.await();
+ } catch (InterruptedException e) {
+ return false;
+ }
+ if (capturerStartedSucceeded) {
+ this.native_capturer = native_capturer;
+ }
+ return capturerStartedSucceeded;
+ }
+
+ // Called by native code. Returns true when camera is known to be stopped.
+ @WebRTCJNITarget
+ private synchronized boolean stopCapture() {
+ Log.d(TAG, "stopCapture");
+ if (cameraVideoCapturer == null) {
+ return false;
+ }
+
+ native_capturer = 0;
+ try {
+ cameraVideoCapturer.stopCapture();
+ capturerStopped.await();
+ } catch (InterruptedException e) {
+ return false;
+ }
+ Log.d(TAG, "stopCapture done");
+ return true;
+ }
+
+ @WebRTCJNITarget
+ private int getDeviceOrientation() {
+ int orientation = 0;
+ if (context != null) {
+ WindowManager wm = (WindowManager) context.getSystemService(
+ Context.WINDOW_SERVICE);
+ switch(wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ orientation = 90;
+ break;
+ case Surface.ROTATION_180:
+ orientation = 180;
+ break;
+ case Surface.ROTATION_270:
+ orientation = 270;
+ break;
+ case Surface.ROTATION_0:
+ default:
+ orientation = 0;
+ break;
+ }
+ }
+ return orientation;
+ }
+
+ @WebRTCJNITarget
+ private native void ProvideCameraFrame(
+ int width, int height,
+ java.nio.ByteBuffer dataY, int strideY,
+ java.nio.ByteBuffer dataU, int strideU,
+ java.nio.ByteBuffer dataV, int strideV,
+ int rotation, long timeStamp, long captureObject);
+
+ //
+ // CameraVideoCapturer.CameraEventsHandler interface
+ //
+
+  // Camera error handler - invoked when the camera cannot be opened
+  // or any camera exception occurs on the camera thread.
+ public void onCameraError(String errorDescription) {}
+
+ // Called when camera is disconnected.
+ public void onCameraDisconnected() {}
+
+ // Invoked when camera stops receiving frames.
+ public void onCameraFreezed(String errorDescription) {}
+
+ // Callback invoked when camera is opening.
+ public void onCameraOpening(String cameraName) {}
+
+ // Callback invoked when first camera frame is available after camera is started.
+ public void onFirstFrameAvailable() {}
+
+ // Callback invoked when camera is closed.
+ public void onCameraClosed() {}
+
+ //
+ // CapturerObserver interface
+ //
+
+  // Notifies whether the capturer was started successfully.
+ public void onCapturerStarted(boolean success) {
+ capturerStartedSucceeded = success;
+ capturerStarted.countDown();
+ }
+
+  // Notifies that the capturer has been stopped.
+ public void onCapturerStopped() {
+ capturerStopped.countDown();
+ }
+
+ // Delivers a captured frame.
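+  // Note the native side takes a millisecond timestamp, hence the
+  // getTimestampNs() / 1000000 conversion below.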
+ public void onFrameCaptured(VideoFrame frame) {
+ if (native_capturer != 0) {
+ I420Buffer i420Buffer = frame.getBuffer().toI420();
+ ProvideCameraFrame(i420Buffer.getWidth(), i420Buffer.getHeight(),
+ i420Buffer.getDataY(), i420Buffer.getStrideY(),
+ i420Buffer.getDataU(), i420Buffer.getStrideU(),
+ i420Buffer.getDataV(), i420Buffer.getStrideV(),
+ frame.getRotation(),
+ frame.getTimestampNs() / 1000000, native_capturer);
+
+ i420Buffer.release();
+ }
+ }
+}
diff --git a/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
new file mode 100644
index 0000000000..8ad8453955
--- /dev/null
+++ b/dom/media/systemservices/android_video_capture/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import android.Manifest;
+import android.app.Activity;
+import android.content.Context;
+import android.util.Log;
+
+import org.mozilla.gecko.GeckoAppShell;
+import org.mozilla.gecko.annotation.WebRTCJNITarget;
+
+import org.webrtc.CameraEnumerator;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Camera1Enumerator;
+import org.webrtc.Camera2Enumerator;
+
+public class VideoCaptureDeviceInfoAndroid {
+ private final static String TAG = "WEBRTC-JC";
+
+ // Returns information about all cameras on the device.
+ // Since this reflects static information about the hardware present, there is
+ // no need to call this function more than once in a single process. It is
+ // marked "private" as it is only called by native code.
+ @WebRTCJNITarget
+ private static CaptureCapabilityAndroid[] getDeviceInfo() {
+ final Context context = GeckoAppShell.getApplicationContext();
+
+ if (Camera2Enumerator.isSupported(context)) {
+ return createDeviceList(new Camera2Enumerator(context));
+ } else {
+ return createDeviceList(new Camera1Enumerator());
+ }
+ }
+
+ private static CaptureCapabilityAndroid[] createDeviceList(CameraEnumerator enumerator) {
+
+ ArrayList<CaptureCapabilityAndroid> allDevices = new ArrayList<CaptureCapabilityAndroid>();
+ ArrayList<CaptureCapabilityAndroid> IRDevices = new ArrayList<CaptureCapabilityAndroid>();
+
+ for (String camera: enumerator.getDeviceNames()) {
+ List<CaptureFormat> formats = enumerator.getSupportedFormats(camera);
+ int numFormats = formats.size();
+ if (numFormats <= 0) {
+ continue;
+ }
+
+ CaptureCapabilityAndroid device = new CaptureCapabilityAndroid();
+
+      // The only way to plumb through whether the device is front-facing
+      // or not is by the name, but the name we receive depends upon the
+      // camera API in use. For the Camera1 API, this information is
+      // already present, but that is not the case when using Camera2.
+      // Later on, we look up the camera by name, so we have to use a
+      // format that is easy to undo. Ideally, libwebrtc would expose
+      // camera facing in VideoCaptureCapability and none of this would be
+      // necessary.
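+      // e.g. a front-facing camera with id "1" becomes "Facing front:1".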
+ device.name = "Facing " + (enumerator.isFrontFacing(camera) ? "front" : "back") + ":" + camera;
+
+ boolean ir = enumerator.isInfrared(camera);
+ device.infrared = ir;
+ if (ir) {
+ device.name += " (infrared)";
+ }
+
+ // This isn't part of the new API, but we don't call
+ // GetDeviceOrientation() anywhere, so this value is unused.
+ device.orientation = 0;
+
+ device.width = new int[numFormats];
+ device.height = new int[numFormats];
+ device.minMilliFPS = formats.get(0).framerate.min;
+ device.maxMilliFPS = formats.get(0).framerate.max;
+ int i = 0;
+ for (CaptureFormat format: formats) {
+ device.width[i] = format.width;
+ device.height[i] = format.height;
+ if (format.framerate.min < device.minMilliFPS) {
+ device.minMilliFPS = format.framerate.min;
+ }
+ if (format.framerate.max > device.maxMilliFPS) {
+ device.maxMilliFPS = format.framerate.max;
+ }
+ i++;
+ }
+ device.frontFacing = enumerator.isFrontFacing(camera);
+      // Order the list: front-facing non-IR cameras first, other non-IR
+      // cameras in the middle, and infrared cameras last (front-facing IR
+      // ahead of other IR).
+ if (!device.infrared) {
+ if (device.frontFacing) {
+ allDevices.add(0, device);
+ } else {
+ allDevices.add(device);
+ }
+ } else {
+ if (device.frontFacing) {
+ IRDevices.add(0, device);
+ } else {
+ IRDevices.add(device);
+ }
+ }
+ }
+
+ allDevices.addAll(IRDevices);
+
+ return allDevices.toArray(new CaptureCapabilityAndroid[0]);
+ }
+}
diff --git a/dom/media/systemservices/android_video_capture/video_capture_android.cc b/dom/media/systemservices/android_video_capture/video_capture_android.cc
new file mode 100644
index 0000000000..7c9cd72ccb
--- /dev/null
+++ b/dom/media/systemservices/android_video_capture/video_capture_android.cc
@@ -0,0 +1,270 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_android.h"
+
+#include "device_info_android.h"
+#include "modules/utility/include/helpers_android.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/time_utils.h"
+
+#include "AndroidBridge.h"
+
+static JavaVM* g_jvm_capture = NULL;
+static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
+static jobject g_context = NULL; // Owned android.content.Context.
+
+namespace webrtc {
+
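+// These two functions back org.webrtc.JniCommon's nativeAllocateByteBuffer()
+// and nativeFreeByteBuffer(); they are registered in SetCaptureAndroidVM()
+// below.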
+jobject JniCommon_allocateNativeByteBuffer(JNIEnv* env, jclass, jint size) {
+ void* new_data = ::operator new(size);
+ jobject byte_buffer = env->NewDirectByteBuffer(new_data, size);
+ return byte_buffer;
+}
+
+void JniCommon_freeNativeByteBuffer(JNIEnv* env, jclass, jobject byte_buffer) {
+ void* data = env->GetDirectBufferAddress(byte_buffer);
+ ::operator delete(data);
+}
+
+// Called by Java to get the global application context.
+jobject JNICALL GetContext(JNIEnv* env, jclass) {
+ assert(g_context);
+ return g_context;
+}
+
+// Called by Java when the camera has a new frame to deliver.
+void JNICALL ProvideCameraFrame(JNIEnv* env, jobject, jint width, jint height,
+ jobject javaDataY, jint strideY,
+ jobject javaDataU, jint strideU,
+ jobject javaDataV, jint strideV, jint rotation,
+ jlong timeStamp, jlong context) {
+ if (!context) {
+ return;
+ }
+
+ webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
+ reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
+ context);
+ uint8_t* dataY =
+ reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataY));
+ uint8_t* dataU =
+ reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataU));
+ uint8_t* dataV =
+ reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataV));
+
+ rtc::scoped_refptr<I420Buffer> i420Buffer = I420Buffer::Copy(
+ width, height, dataY, strideY, dataU, strideU, dataV, strideV);
+
+ captureModule->OnIncomingFrame(i420Buffer, rotation, timeStamp);
+}
+
+int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
+ if (g_java_capturer_class) {
+ return 0;
+ }
+
+ if (javaVM) {
+ assert(!g_jvm_capture);
+ g_jvm_capture = javaVM;
+ AttachThreadScoped ats(g_jvm_capture);
+
+ g_context = mozilla::AndroidBridge::Bridge()->GetGlobalContextRef();
+
+ videocapturemodule::DeviceInfoAndroid::Initialize(g_jvm_capture);
+
+ {
+ jclass clsRef = mozilla::jni::GetClassRef(
+ ats.env(), "org/webrtc/videoengine/VideoCaptureAndroid");
+ g_java_capturer_class =
+ static_cast<jclass>(ats.env()->NewGlobalRef(clsRef));
+ ats.env()->DeleteLocalRef(clsRef);
+ assert(g_java_capturer_class);
+
+ JNINativeMethod native_methods[] = {
+ {"GetContext", "()Landroid/content/Context;",
+ reinterpret_cast<void*>(&GetContext)},
+ {"ProvideCameraFrame",
+ "(IILjava/nio/ByteBuffer;ILjava/nio/ByteBuffer;ILjava/nio/"
+ "ByteBuffer;IIJJ)V",
+ reinterpret_cast<void*>(&ProvideCameraFrame)}};
+ if (ats.env()->RegisterNatives(g_java_capturer_class, native_methods,
+ 2) != 0)
+ assert(false);
+ }
+
+ {
+ jclass clsRef =
+ mozilla::jni::GetClassRef(ats.env(), "org/webrtc/JniCommon");
+
+ JNINativeMethod native_methods[] = {
+ {"nativeAllocateByteBuffer", "(I)Ljava/nio/ByteBuffer;",
+ reinterpret_cast<void*>(&JniCommon_allocateNativeByteBuffer)},
+ {"nativeFreeByteBuffer", "(Ljava/nio/ByteBuffer;)V",
+ reinterpret_cast<void*>(&JniCommon_freeNativeByteBuffer)}};
+ if (ats.env()->RegisterNatives(clsRef, native_methods, 2) != 0)
+ assert(false);
+ }
+ } else {
+ if (g_jvm_capture) {
+ AttachThreadScoped ats(g_jvm_capture);
+ ats.env()->UnregisterNatives(g_java_capturer_class);
+ ats.env()->DeleteGlobalRef(g_java_capturer_class);
+ g_java_capturer_class = NULL;
+ g_context = NULL;
+ videocapturemodule::DeviceInfoAndroid::DeInitialize();
+ g_jvm_capture = NULL;
+ }
+ }
+
+ return 0;
+}
+
+namespace videocapturemodule {
+
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
+ const char* deviceUniqueIdUTF8) {
+ rtc::scoped_refptr<VideoCaptureAndroid> implementation(
+ new rtc::RefCountedObject<VideoCaptureAndroid>());
+ if (implementation->Init(deviceUniqueIdUTF8) != 0) {
+ implementation = nullptr;
+ }
+ return implementation;
+}
+
+void VideoCaptureAndroid::OnIncomingFrame(rtc::scoped_refptr<I420Buffer> buffer,
+ int32_t degrees,
+ int64_t captureTime) {
+ MutexLock lock(&api_lock_);
+
+ VideoRotation rotation =
+ (degrees <= 45 || degrees > 315) ? kVideoRotation_0
+ : (degrees > 45 && degrees <= 135) ? kVideoRotation_90
+ : (degrees > 135 && degrees <= 225) ? kVideoRotation_180
+ : (degrees > 225 && degrees <= 315) ? kVideoRotation_270
+ : kVideoRotation_0; // Impossible.
+
+ // Historically, we have ignored captureTime. Why?
+ VideoFrame captureFrame(I420Buffer::Rotate(*buffer, rotation), 0,
+ rtc::TimeMillis(), rotation);
+
+ DeliverCapturedFrame(captureFrame);
+}
+
+VideoCaptureAndroid::VideoCaptureAndroid()
+ : VideoCaptureImpl(),
+ _deviceInfo(),
+ _jCapturer(NULL),
+ _captureStarted(false) {}
+
+int32_t VideoCaptureAndroid::Init(const char* deviceUniqueIdUTF8) {
+ const int nameLength = strlen(deviceUniqueIdUTF8);
+ if (nameLength >= kVideoCaptureUniqueNameLength) return -1;
+
+ // Store the device name
+ RTC_LOG(LS_INFO) << "VideoCaptureAndroid::Init: " << deviceUniqueIdUTF8;
+ _deviceUniqueId = new char[nameLength + 1];
+ memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+ AttachThreadScoped ats(g_jvm_capture);
+ JNIEnv* env = ats.env();
+ jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>",
+ "(Ljava/lang/String;)V");
+ assert(ctor);
+ jstring j_deviceName = env->NewStringUTF(_deviceUniqueId);
+ _jCapturer = env->NewGlobalRef(
+ env->NewObject(g_java_capturer_class, ctor, j_deviceName));
+ assert(_jCapturer);
+ return 0;
+}
+
+VideoCaptureAndroid::~VideoCaptureAndroid() {
+ // Ensure Java camera is released even if our caller didn't explicitly Stop.
+ if (_captureStarted) StopCapture();
+ AttachThreadScoped ats(g_jvm_capture);
+ JNIEnv* env = ats.env();
+ env->DeleteGlobalRef(_jCapturer);
+}
+
+int32_t VideoCaptureAndroid::StartCapture(
+ const VideoCaptureCapability& capability) {
+ AttachThreadScoped ats(g_jvm_capture);
+ JNIEnv* env = ats.env();
+ int width = 0;
+ int height = 0;
+ int min_mfps = 0;
+ int max_mfps = 0;
+ {
+ MutexLock lock(&api_lock_);
+
+ if (_deviceInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
+ _captureCapability) < 0) {
+      RTC_LOG(LS_ERROR) << __FUNCTION__
+                        << ": GetBestMatchedCapability failed: "
+                        << capability.width << "x" << capability.height;
+ return -1;
+ }
+
+ width = _captureCapability.width;
+ height = _captureCapability.height;
+ _deviceInfo.GetMFpsRange(_deviceUniqueId, _captureCapability.maxFPS,
+ &min_mfps, &max_mfps);
+
+ // Exit critical section to avoid blocking camera thread inside
+ // onIncomingFrame() call.
+ }
+
+ jmethodID j_start =
+ env->GetMethodID(g_java_capturer_class, "startCapture", "(IIIIJ)Z");
+ assert(j_start);
+ jlong j_this = reinterpret_cast<intptr_t>(this);
+ bool started = env->CallBooleanMethod(_jCapturer, j_start, width, height,
+ min_mfps, max_mfps, j_this);
+ if (started) {
+ MutexLock lock(&api_lock_);
+ _requestedCapability = capability;
+ _captureStarted = true;
+ }
+ return started ? 0 : -1;
+}
+
+int32_t VideoCaptureAndroid::StopCapture() {
+ AttachThreadScoped ats(g_jvm_capture);
+ JNIEnv* env = ats.env();
+ {
+ MutexLock lock(&api_lock_);
+
+ memset(&_requestedCapability, 0, sizeof(_requestedCapability));
+ memset(&_captureCapability, 0, sizeof(_captureCapability));
+ _captureStarted = false;
+ // Exit critical section to avoid blocking camera thread inside
+ // onIncomingFrame() call.
+ }
+
+  // Try to stop the capturer.
+ jmethodID j_stop =
+ env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z");
+ return env->CallBooleanMethod(_jCapturer, j_stop) ? 0 : -1;
+}
+
+bool VideoCaptureAndroid::CaptureStarted() {
+ MutexLock lock(&api_lock_);
+ return _captureStarted;
+}
+
+int32_t VideoCaptureAndroid::CaptureSettings(VideoCaptureCapability& settings) {
+ MutexLock lock(&api_lock_);
+ settings = _requestedCapability;
+ return 0;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/dom/media/systemservices/android_video_capture/video_capture_android.h b/dom/media/systemservices/android_video_capture/video_capture_android.h
new file mode 100644
index 0000000000..720c28e70b
--- /dev/null
+++ b/dom/media/systemservices/android_video_capture/video_capture_android.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
+
+#include <jni.h>
+
+#include "device_info_android.h"
+#include "api/video/i420_buffer.h"
+#include "modules/video_capture/video_capture_impl.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+class VideoCaptureAndroid : public VideoCaptureImpl {
+ public:
+ VideoCaptureAndroid();
+ virtual int32_t Init(const char* deviceUniqueIdUTF8);
+
+ virtual int32_t StartCapture(const VideoCaptureCapability& capability);
+ virtual int32_t StopCapture();
+ virtual bool CaptureStarted();
+ virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
+
+ void OnIncomingFrame(rtc::scoped_refptr<I420Buffer> buffer, int32_t degrees,
+ int64_t captureTime = 0);
+
+ protected:
+ virtual ~VideoCaptureAndroid();
+
+ DeviceInfoAndroid _deviceInfo;
+ jobject _jCapturer; // Global ref to Java VideoCaptureAndroid object.
+ VideoCaptureCapability _captureCapability;
+ bool _captureStarted;
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_