Diffstat (limited to 'dom/media/systemservices/objc_video_capture')
-rw-r--r--  dom/media/systemservices/objc_video_capture/device_info.h                   56
-rw-r--r--  dom/media/systemservices/objc_video_capture/device_info.mm                 170
-rw-r--r--  dom/media/systemservices/objc_video_capture/device_info_avfoundation.h      71
-rw-r--r--  dom/media/systemservices/objc_video_capture/device_info_avfoundation.mm    213
-rw-r--r--  dom/media/systemservices/objc_video_capture/device_info_objc.h              38
-rw-r--r--  dom/media/systemservices/objc_video_capture/device_info_objc.mm            166
-rw-r--r--  dom/media/systemservices/objc_video_capture/rtc_video_capture_objc.h        39
-rw-r--r--  dom/media/systemservices/objc_video_capture/rtc_video_capture_objc.mm      355
-rw-r--r--  dom/media/systemservices/objc_video_capture/video_capture.h                 41
-rw-r--r--  dom/media/systemservices/objc_video_capture/video_capture.mm               102
-rw-r--r--  dom/media/systemservices/objc_video_capture/video_capture_avfoundation.h    79
-rw-r--r--  dom/media/systemservices/objc_video_capture/video_capture_avfoundation.mm  306
12 files changed, 1636 insertions, 0 deletions
diff --git a/dom/media/systemservices/objc_video_capture/device_info.h b/dom/media/systemservices/objc_video_capture/device_info.h
new file mode 100644
index 0000000000..d146cfcfda
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/device_info.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_OBJC_DEVICE_INFO_H_
+#define MODULES_VIDEO_CAPTURE_OBJC_DEVICE_INFO_H_
+
+#include "modules/video_capture/device_info_impl.h"
+
+#include <map>
+#include <string>
+
+@class DeviceInfoIosObjC;
+
+namespace webrtc::videocapturemodule {
+class DeviceInfoIos : public DeviceInfoImpl {
+ public:
+ DeviceInfoIos();
+ virtual ~DeviceInfoIos();
+
+ // Implementation of DeviceInfoImpl.
+ int32_t Init() override;
+ uint32_t NumberOfDevices() override;
+ int32_t GetDeviceName(uint32_t deviceNumber, char* deviceNameUTF8, uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8, uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8 = 0, uint32_t productUniqueIdUTF8Length = 0,
+ pid_t* pid = 0) override;
+
+ int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8) override;
+
+ int32_t GetCapability(const char* deviceUniqueIdUTF8, const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability) override;
+
+ int32_t DisplayCaptureSettingsDialogBox(const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8, void* parentWindow,
+ uint32_t positionX, uint32_t positionY) override;
+
+ int32_t GetOrientation(const char* deviceUniqueIdUTF8, VideoRotation& orientation) override;
+
+ int32_t CreateCapabilityMap(const char* device_unique_id_utf8) override
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
+
+ private:
+ std::map<std::string, VideoCaptureCapabilities> _capabilitiesMap;
+ DeviceInfoIosObjC* _captureInfo;
+};
+
+} // namespace webrtc::videocapturemodule
+
+#endif // MODULES_VIDEO_CAPTURE_OBJC_DEVICE_INFO_H_
diff --git a/dom/media/systemservices/objc_video_capture/device_info.mm b/dom/media/systemservices/objc_video_capture/device_info.mm
new file mode 100644
index 0000000000..d0299a9ec9
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/device_info.mm
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+# error "This file requires ARC support."
+#endif
+
+#include <AVFoundation/AVFoundation.h>
+
+#include <string>
+
+#include "device_info.h"
+#include "device_info_objc.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "objc_video_capture/device_info_avfoundation.h"
+#include "rtc_base/logging.h"
+
+using namespace mozilla;
+using namespace webrtc;
+using namespace videocapturemodule;
+
+static NSArray* camera_presets = @[
+ AVCaptureSessionPreset352x288, AVCaptureSessionPreset640x480, AVCaptureSessionPreset1280x720
+];
+
+#define IOS_UNSUPPORTED() \
+ RTC_LOG(LS_ERROR) << __FUNCTION__ << " is not supported on the iOS platform."; \
+ return -1;
+
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
+ if (StaticPrefs::media_getusermedia_camera_macavf_enabled_AtStartup()) {
+ return new DeviceInfoAvFoundation();
+ }
+ return new DeviceInfoIos();
+}
+
+DeviceInfoIos::DeviceInfoIos() { this->Init(); }
+
+DeviceInfoIos::~DeviceInfoIos() { [_captureInfo registerOwner:nil]; }
+
+int32_t DeviceInfoIos::Init() {
+ _captureInfo = [[DeviceInfoIosObjC alloc] init];
+ [_captureInfo registerOwner:this];
+
+ // Fill in all device capabilities.
+ int deviceCount = [DeviceInfoIosObjC captureDeviceCount];
+
+ for (int i = 0; i < deviceCount; i++) {
+ AVCaptureDevice* avDevice = [DeviceInfoIosObjC captureDeviceForIndex:i];
+ VideoCaptureCapabilities capabilityVector;
+
+ for (NSString* preset in camera_presets) {
+ BOOL support = [avDevice supportsAVCaptureSessionPreset:preset];
+ if (support) {
+ VideoCaptureCapability capability = [DeviceInfoIosObjC capabilityForPreset:preset];
+ capabilityVector.push_back(capability);
+ }
+ }
+
+ char deviceNameUTF8[256];
+ char deviceId[256];
+ int error = this->GetDeviceName(i, deviceNameUTF8, 256, deviceId, 256);
+ if (error) {
+ return error;
+ }
+ std::string deviceIdCopy(deviceId);
+ std::pair<std::string, VideoCaptureCapabilities> mapPair =
+ std::pair<std::string, VideoCaptureCapabilities>(deviceIdCopy, capabilityVector);
+ _capabilitiesMap.insert(mapPair);
+ }
+
+ return 0;
+}
+
+uint32_t DeviceInfoIos::NumberOfDevices() { return [DeviceInfoIosObjC captureDeviceCount]; }
+
+int32_t DeviceInfoIos::GetDeviceName(uint32_t deviceNumber, char* deviceNameUTF8,
+ uint32_t deviceNameUTF8Length, char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length, char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length, pid_t* pid) {
+ if (deviceNumber >= NumberOfDevices()) {
+ return -1;
+ }
+
+ NSString* deviceName = [DeviceInfoIosObjC deviceNameForIndex:deviceNumber];
+
+ NSString* deviceUniqueId = [DeviceInfoIosObjC deviceUniqueIdForIndex:deviceNumber];
+
+ strncpy(deviceNameUTF8, [deviceName UTF8String], deviceNameUTF8Length);
+ deviceNameUTF8[deviceNameUTF8Length - 1] = '\0';
+
+ strncpy(deviceUniqueIdUTF8, deviceUniqueId.UTF8String, deviceUniqueIdUTF8Length);
+ deviceUniqueIdUTF8[deviceUniqueIdUTF8Length - 1] = '\0';
+
+ if (productUniqueIdUTF8) {
+ productUniqueIdUTF8[0] = '\0';
+ }
+
+ return 0;
+}
+
+int32_t DeviceInfoIos::NumberOfCapabilities(const char* deviceUniqueIdUTF8) {
+ int32_t numberOfCapabilities = 0;
+ std::string deviceUniqueId(deviceUniqueIdUTF8);
+ std::map<std::string, VideoCaptureCapabilities>::iterator it =
+ _capabilitiesMap.find(deviceUniqueId);
+
+ if (it != _capabilitiesMap.end()) {
+ numberOfCapabilities = it->second.size();
+ }
+ return numberOfCapabilities;
+}
+
+int32_t DeviceInfoIos::GetCapability(const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability) {
+ std::string deviceUniqueId(deviceUniqueIdUTF8);
+ std::map<std::string, VideoCaptureCapabilities>::iterator it =
+ _capabilitiesMap.find(deviceUniqueId);
+
+ if (it != _capabilitiesMap.end()) {
+ VideoCaptureCapabilities deviceCapabilities = it->second;
+
+ if (deviceCapabilityNumber < deviceCapabilities.size()) {
+ VideoCaptureCapability cap;
+ cap = deviceCapabilities[deviceCapabilityNumber];
+ capability = cap;
+ return 0;
+ }
+ }
+
+ return -1;
+}
+
+int32_t DeviceInfoIos::DisplayCaptureSettingsDialogBox(const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow, uint32_t positionX,
+ uint32_t positionY) {
+ IOS_UNSUPPORTED();
+}
+
+int32_t DeviceInfoIos::GetOrientation(const char* deviceUniqueIdUTF8, VideoRotation& orientation) {
+ if (strcmp(deviceUniqueIdUTF8, "Front Camera") == 0) {
+ orientation = kVideoRotation_0;
+ } else {
+ orientation = kVideoRotation_90;
+ }
+ return orientation;
+}
+
+int32_t DeviceInfoIos::CreateCapabilityMap(const char* deviceUniqueIdUTF8) {
+ std::string deviceName(deviceUniqueIdUTF8);
+ std::map<std::string, std::vector<VideoCaptureCapability>>::iterator it =
+ _capabilitiesMap.find(deviceName);
+ VideoCaptureCapabilities deviceCapabilities;
+ if (it != _capabilitiesMap.end()) {
+ _captureCapabilities = it->second;
+ return 0;
+ }
+
+ return -1;
+}
diff --git a/dom/media/systemservices/objc_video_capture/device_info_avfoundation.h b/dom/media/systemservices/objc_video_capture/device_info_avfoundation.h
new file mode 100644
index 0000000000..9a698480fa
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/device_info_avfoundation.h
@@ -0,0 +1,71 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_SYSTEMSERVICES_OBJC_VIDEO_CAPTURE_DEVICE_INFO_AVFOUNDATION_H_
+#define DOM_MEDIA_SYSTEMSERVICES_OBJC_VIDEO_CAPTURE_DEVICE_INFO_AVFOUNDATION_H_
+
+#include <map>
+#include <string>
+
+#include "api/sequence_checker.h"
+#include "device_info_objc.h"
+#include "modules/video_capture/device_info_impl.h"
+
+namespace webrtc::videocapturemodule {
+
+/**
+ * DeviceInfo implementation for the libwebrtc ios/mac sdk camera backend.
+ * Single threaded except for DeviceChange() that happens on a platform callback
+ * thread.
+ */
+class DeviceInfoAvFoundation : public DeviceInfoImpl {
+ public:
+ static int32_t ConvertAVFrameRateToCapabilityFPS(Float64 aRate);
+ static webrtc::VideoType ConvertFourCCToVideoType(FourCharCode aCode);
+
+ DeviceInfoAvFoundation();
+ virtual ~DeviceInfoAvFoundation();
+
+ // Implementation of DeviceInfoImpl.
+ int32_t Init() override { return 0; }
+ void DeviceChange() override;
+ uint32_t NumberOfDevices() override;
+ int32_t GetDeviceName(uint32_t aDeviceNumber, char* aDeviceNameUTF8,
+ uint32_t aDeviceNameLength, char* aDeviceUniqueIdUTF8,
+ uint32_t aDeviceUniqueIdUTF8Length,
+ char* aProductUniqueIdUTF8 = nullptr,
+ uint32_t aProductUniqueIdUTF8Length = 0,
+ pid_t* aPid = nullptr) override;
+ int32_t NumberOfCapabilities(const char* aDeviceUniqueIdUTF8) override;
+ int32_t GetCapability(const char* aDeviceUniqueIdUTF8,
+ const uint32_t aDeviceCapabilityNumber,
+ VideoCaptureCapability& aCapability) override;
+ int32_t DisplayCaptureSettingsDialogBox(const char* aDeviceUniqueIdUTF8,
+ const char* aDialogTitleUTF8,
+ void* aParentWindow,
+ uint32_t aPositionX,
+ uint32_t aPositionY) override {
+ return -1;
+ }
+ int32_t CreateCapabilityMap(const char* aDeviceUniqueIdUTF8) override
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
+
+ private:
+ const std::tuple<std::string, std::string, VideoCaptureCapabilities>*
+ FindDeviceAndCapabilities(const std::string& aDeviceUniqueId) const;
+ void EnsureCapabilitiesMap();
+
+ SequenceChecker mChecker;
+ std::atomic<bool> mInvalidateCapabilities;
+ // [{uniqueId, name, capabilities}]
+ std::vector<std::tuple<std::string, std::string, VideoCaptureCapabilities>>
+ mDevicesAndCapabilities RTC_GUARDED_BY(mChecker);
+ const DeviceInfoIosObjC* mDeviceChangeCaptureInfo RTC_GUARDED_BY(mChecker);
+};
+
+} // namespace webrtc::videocapturemodule
+
+#endif
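
(Illustration, not part of the patch: the class comment above describes a DeviceInfo
backend that callers reach through the generic webrtc::VideoCaptureModule::DeviceInfo
interface. A minimal consumer-side sketch follows; it assumes the instance is obtained
via webrtc::VideoCaptureFactory::CreateDeviceInfo(), which in this tree lands in the
pref-gated VideoCaptureImpl::CreateDeviceInfo() from device_info.mm above, and the
helper name ListCaptureDevices is illustrative only.)

    #include <memory>

    #include "modules/video_capture/video_capture_factory.h"

    static void ListCaptureDevices() {
      std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
          webrtc::VideoCaptureFactory::CreateDeviceInfo());
      if (!info) {
        return;
      }
      for (uint32_t i = 0; i < info->NumberOfDevices(); ++i) {
        char name[webrtc::kVideoCaptureDeviceNameLength] = {};
        char uniqueId[webrtc::kVideoCaptureUniqueNameLength] = {};
        if (info->GetDeviceName(i, name, sizeof(name), uniqueId, sizeof(uniqueId)) != 0) {
          continue;
        }
        // One VideoCaptureCapability per supported format / frame-rate combination.
        const int32_t capabilityCount = info->NumberOfCapabilities(uniqueId);
        for (int32_t c = 0; c < capabilityCount; ++c) {
          webrtc::VideoCaptureCapability cap;
          if (info->GetCapability(uniqueId, c, cap) == 0) {
            // cap.width x cap.height at cap.maxFPS, pixel format in cap.videoType.
          }
        }
      }
    }
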
diff --git a/dom/media/systemservices/objc_video_capture/device_info_avfoundation.mm b/dom/media/systemservices/objc_video_capture/device_info_avfoundation.mm
new file mode 100644
index 0000000000..fae65ff343
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/device_info_avfoundation.mm
@@ -0,0 +1,213 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "device_info_avfoundation.h"
+#include <CoreVideo/CVPixelBuffer.h>
+
+#include <string>
+
+#include "components/capturer/RTCCameraVideoCapturer.h"
+#import "helpers/NSString+StdString.h"
+#include "media/base/video_common.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc::videocapturemodule {
+/* static */
+int32_t DeviceInfoAvFoundation::ConvertAVFrameRateToCapabilityFPS(Float64 aRate) {
+ return static_cast<int32_t>(aRate);
+}
+
+/* static */
+webrtc::VideoType DeviceInfoAvFoundation::ConvertFourCCToVideoType(FourCharCode aCode) {
+ switch (aCode) {
+ case kCVPixelFormatType_420YpCbCr8Planar:
+ case kCVPixelFormatType_420YpCbCr8PlanarFullRange:
+ return webrtc::VideoType::kI420;
+ case kCVPixelFormatType_24BGR:
+ return webrtc::VideoType::kRGB24;
+ case kCVPixelFormatType_32ABGR:
+ return webrtc::VideoType::kABGR;
+ case kCMPixelFormat_32ARGB:
+ return webrtc::VideoType::kBGRA;
+ case kCMPixelFormat_32BGRA:
+ return webrtc::VideoType::kARGB;
+ case kCMPixelFormat_16LE565:
+ return webrtc::VideoType::kRGB565;
+ case kCMPixelFormat_16LE555:
+ case kCMPixelFormat_16LE5551:
+ return webrtc::VideoType::kARGB1555;
+ case kCMPixelFormat_422YpCbCr8_yuvs:
+ return webrtc::VideoType::kYUY2;
+ case kCMPixelFormat_422YpCbCr8:
+ return webrtc::VideoType::kUYVY;
+ case kCMVideoCodecType_JPEG:
+ case kCMVideoCodecType_JPEG_OpenDML:
+ return webrtc::VideoType::kMJPEG;
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
+ case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
+ return webrtc::VideoType::kNV12;
+ default:
+      RTC_LOG(LS_WARNING) << "Unhandled FourCharCode " << aCode;
+ return webrtc::VideoType::kUnknown;
+ }
+}
+
+DeviceInfoAvFoundation::DeviceInfoAvFoundation()
+ : mInvalidateCapabilities(false), mDeviceChangeCaptureInfo([[DeviceInfoIosObjC alloc] init]) {
+ [mDeviceChangeCaptureInfo registerOwner:this];
+}
+
+DeviceInfoAvFoundation::~DeviceInfoAvFoundation() { [mDeviceChangeCaptureInfo registerOwner:nil]; }
+
+void DeviceInfoAvFoundation::DeviceChange() {
+ mInvalidateCapabilities = true;
+ DeviceInfo::DeviceChange();
+}
+
+uint32_t DeviceInfoAvFoundation::NumberOfDevices() {
+ RTC_DCHECK_RUN_ON(&mChecker);
+ EnsureCapabilitiesMap();
+ return mDevicesAndCapabilities.size();
+}
+
+int32_t DeviceInfoAvFoundation::GetDeviceName(uint32_t aDeviceNumber, char* aDeviceNameUTF8,
+ uint32_t aDeviceNameLength, char* aDeviceUniqueIdUTF8,
+ uint32_t aDeviceUniqueIdUTF8Length,
+ char* /* aProductUniqueIdUTF8 */,
+ uint32_t /* aProductUniqueIdUTF8Length */,
+ pid_t* /* aPid */) {
+ RTC_DCHECK_RUN_ON(&mChecker);
+ // Don't EnsureCapabilitiesMap() here, since:
+ // 1) That might invalidate the capabilities map
+ // 2) This function depends on the device index
+
+ if (aDeviceNumber >= mDevicesAndCapabilities.size()) {
+ return -1;
+ }
+
+ const auto& [uniqueId, name, _] = mDevicesAndCapabilities[aDeviceNumber];
+
+ strncpy(aDeviceUniqueIdUTF8, uniqueId.c_str(), aDeviceUniqueIdUTF8Length);
+ aDeviceUniqueIdUTF8[aDeviceUniqueIdUTF8Length - 1] = '\0';
+
+ strncpy(aDeviceNameUTF8, name.c_str(), aDeviceNameLength);
+ aDeviceNameUTF8[aDeviceNameLength - 1] = '\0';
+
+ return 0;
+}
+
+int32_t DeviceInfoAvFoundation::NumberOfCapabilities(const char* aDeviceUniqueIdUTF8) {
+ RTC_DCHECK_RUN_ON(&mChecker);
+
+ std::string deviceUniqueId(aDeviceUniqueIdUTF8);
+ const auto* tup = FindDeviceAndCapabilities(deviceUniqueId);
+ if (!tup) {
+ return 0;
+ }
+
+ const auto& [_, __, capabilities] = *tup;
+ return static_cast<int32_t>(capabilities.size());
+}
+
+int32_t DeviceInfoAvFoundation::GetCapability(const char* aDeviceUniqueIdUTF8,
+ const uint32_t aDeviceCapabilityNumber,
+ VideoCaptureCapability& aCapability) {
+ RTC_DCHECK_RUN_ON(&mChecker);
+
+ std::string deviceUniqueId(aDeviceUniqueIdUTF8);
+ const auto* tup = FindDeviceAndCapabilities(deviceUniqueId);
+ if (!tup) {
+ return -1;
+ }
+
+ const auto& [_, __, capabilities] = *tup;
+ if (aDeviceCapabilityNumber >= capabilities.size()) {
+ return -1;
+ }
+
+ aCapability = capabilities[aDeviceCapabilityNumber];
+ return 0;
+}
+
+int32_t DeviceInfoAvFoundation::CreateCapabilityMap(const char* aDeviceUniqueIdUTF8) {
+ RTC_DCHECK_RUN_ON(&mChecker);
+
+ const size_t deviceUniqueIdUTF8Length = strlen(aDeviceUniqueIdUTF8);
+ if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength) {
+ RTC_LOG(LS_INFO) << "Device name too long";
+ return -1;
+ }
+ RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device " << aDeviceUniqueIdUTF8;
+ std::string deviceUniqueId(aDeviceUniqueIdUTF8);
+ const auto* tup = FindDeviceAndCapabilities(deviceUniqueId);
+ if (!tup) {
+ RTC_LOG(LS_INFO) << "no matching device found";
+ return -1;
+ }
+
+  // Remember the most recently used device name.
+ _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+ _lastUsedDeviceName =
+ static_cast<char*>(realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1));
+ memcpy(_lastUsedDeviceName, aDeviceUniqueIdUTF8, _lastUsedDeviceNameLength + 1);
+
+ const auto& [_, __, capabilities] = *tup;
+ _captureCapabilities = capabilities;
+ return static_cast<int32_t>(_captureCapabilities.size());
+}
+
+auto DeviceInfoAvFoundation::FindDeviceAndCapabilities(const std::string& aDeviceUniqueId) const
+ -> const std::tuple<std::string, std::string, VideoCaptureCapabilities>* {
+ RTC_DCHECK_RUN_ON(&mChecker);
+ for (const auto& tup : mDevicesAndCapabilities) {
+ if (std::get<0>(tup) == aDeviceUniqueId) {
+ return &tup;
+ }
+ }
+ return nullptr;
+}
+
+void DeviceInfoAvFoundation::EnsureCapabilitiesMap() {
+ RTC_DCHECK_RUN_ON(&mChecker);
+
+ if (mInvalidateCapabilities.exchange(false)) {
+ mDevicesAndCapabilities.clear();
+ }
+
+ if (!mDevicesAndCapabilities.empty()) {
+ return;
+ }
+
+ for (AVCaptureDevice* device in [RTCCameraVideoCapturer captureDevices]) {
+ std::string uniqueId = [NSString stdStringForString:device.uniqueID];
+ std::string name = [NSString stdStringForString:device.localizedName];
+ auto& [_, __, capabilities] =
+ mDevicesAndCapabilities.emplace_back(uniqueId, name, VideoCaptureCapabilities());
+
+ for (AVCaptureDeviceFormat* format in
+ [RTCCameraVideoCapturer supportedFormatsForDevice:device]) {
+ VideoCaptureCapability cap;
+ FourCharCode fourcc = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+ cap.videoType = ConvertFourCCToVideoType(fourcc);
+ CMVideoDimensions dimensions =
+ CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ cap.width = dimensions.width;
+ cap.height = dimensions.height;
+
+ for (AVFrameRateRange* range in format.videoSupportedFrameRateRanges) {
+ cap.maxFPS = ConvertAVFrameRateToCapabilityFPS(range.maxFrameRate);
+ capabilities.push_back(cap);
+ }
+
+ if (capabilities.empty()) {
+ cap.maxFPS = 30;
+ capabilities.push_back(cap);
+ }
+ }
+ }
+}
+} // namespace webrtc::videocapturemodule
diff --git a/dom/media/systemservices/objc_video_capture/device_info_objc.h b/dom/media/systemservices/objc_video_capture/device_info_objc.h
new file mode 100644
index 0000000000..1ddedb471e
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/device_info_objc.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_OBJC_DEVICE_INFO_OBJC_H_
+#define MODULES_VIDEO_CAPTURE_OBJC_DEVICE_INFO_OBJC_H_
+
+#import <AVFoundation/AVFoundation.h>
+
+#include "modules/video_capture/video_capture_defines.h"
+#include "device_info.h"
+
+@interface DeviceInfoIosObjC : NSObject {
+ NSArray* _observers;
+ NSLock* _lock;
+ webrtc::VideoCaptureModule::DeviceInfo* _owner;
+}
+
++ (int)captureDeviceCount;
++ (AVCaptureDevice*)captureDeviceForIndex:(int)index;
++ (AVCaptureDevice*)captureDeviceForUniqueId:(NSString*)uniqueId;
++ (NSString*)deviceNameForIndex:(int)index;
++ (NSString*)deviceUniqueIdForIndex:(int)index;
++ (NSString*)deviceNameForUniqueId:(NSString*)uniqueId;
++ (webrtc::VideoCaptureCapability)capabilityForPreset:(NSString*)preset;
+
+- (void)registerOwner:(webrtc::VideoCaptureModule::DeviceInfo*)owner;
+- (void)configureObservers;
+
+@end
+
+#endif // MODULES_VIDEO_CAPTURE_OBJC_DEVICE_INFO_OBJC_H_
diff --git a/dom/media/systemservices/objc_video_capture/device_info_objc.mm b/dom/media/systemservices/objc_video_capture/device_info_objc.mm
new file mode 100644
index 0000000000..6e9435daff
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/device_info_objc.mm
@@ -0,0 +1,166 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+# error "This file requires ARC support."
+#endif
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "device_info_objc.h"
+
+@implementation DeviceInfoIosObjC
+
+- (id)init {
+ self = [super init];
+ if (nil != self) {
+ _lock = [[NSLock alloc] init];
+ }
+ return self;
+}
+
+- (void)dealloc {
+}
+
+- (void)registerOwner:(webrtc::VideoCaptureModule::DeviceInfo*)owner {
+ [_lock lock];
+ if (!_owner && owner) {
+ [self configureObservers];
+ } else if (_owner && !owner) {
+ NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
+ for (id observer in _observers) {
+ [notificationCenter removeObserver:observer];
+ }
+ _observers = nil;
+ }
+ _owner = owner;
+ [_lock unlock];
+}
+
++ (int)captureDeviceCount {
+ int cnt = 0;
+ @try {
+ for (AVCaptureDevice* device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+ if ([device isSuspended]) {
+ continue;
+ }
+ cnt++;
+ }
+ } @catch (NSException* exception) {
+ cnt = 0;
+ }
+ return cnt;
+}
+
++ (AVCaptureDevice*)captureDeviceForIndex:(int)index {
+ int cnt = 0;
+ @try {
+ for (AVCaptureDevice* device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+ if ([device isSuspended]) {
+ continue;
+ }
+ if (cnt == index) {
+ return device;
+ }
+ cnt++;
+ }
+ } @catch (NSException* exception) {
+ cnt = 0;
+ }
+
+ return nil;
+}
+
++ (AVCaptureDevice*)captureDeviceForUniqueId:(NSString*)uniqueId {
+ for (AVCaptureDevice* device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+ if ([device isSuspended]) {
+ continue;
+ }
+ if ([uniqueId isEqual:device.uniqueID]) {
+ return device;
+ }
+ }
+
+ return nil;
+}
+
++ (NSString*)deviceNameForIndex:(int)index {
+ return [DeviceInfoIosObjC captureDeviceForIndex:index].localizedName;
+}
+
++ (NSString*)deviceUniqueIdForIndex:(int)index {
+ return [DeviceInfoIosObjC captureDeviceForIndex:index].uniqueID;
+}
+
++ (NSString*)deviceNameForUniqueId:(NSString*)uniqueId {
+ return [[AVCaptureDevice deviceWithUniqueID:uniqueId] localizedName];
+}
+
++ (webrtc::VideoCaptureCapability)capabilityForPreset:(NSString*)preset {
+ webrtc::VideoCaptureCapability capability;
+
+ // TODO(tkchin): Maybe query AVCaptureDevice for supported formats, and
+ // then get the dimensions / frame rate from each supported format
+ if ([preset isEqualToString:AVCaptureSessionPreset352x288]) {
+ capability.width = 352;
+ capability.height = 288;
+ capability.maxFPS = 30;
+ capability.videoType = webrtc::VideoType::kNV12;
+ capability.interlaced = false;
+ } else if ([preset isEqualToString:AVCaptureSessionPreset640x480]) {
+ capability.width = 640;
+ capability.height = 480;
+ capability.maxFPS = 30;
+ capability.videoType = webrtc::VideoType::kNV12;
+ capability.interlaced = false;
+ } else if ([preset isEqualToString:AVCaptureSessionPreset1280x720]) {
+ capability.width = 1280;
+ capability.height = 720;
+ capability.maxFPS = 30;
+ capability.videoType = webrtc::VideoType::kNV12;
+ capability.interlaced = false;
+ }
+
+ return capability;
+}
+
+- (void)configureObservers {
+ // register device connected / disconnected event
+ NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
+
+ id deviceWasConnectedObserver =
+ [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification
+ object:nil
+ queue:[NSOperationQueue mainQueue]
+ usingBlock:^(NSNotification* note) {
+ [_lock lock];
+ AVCaptureDevice* device = [note object];
+ BOOL isVideoDevice = [device hasMediaType:AVMediaTypeVideo];
+ if (isVideoDevice && _owner) _owner->DeviceChange();
+ [_lock unlock];
+ }];
+
+ id deviceWasDisconnectedObserver =
+ [notificationCenter addObserverForName:AVCaptureDeviceWasDisconnectedNotification
+ object:nil
+ queue:[NSOperationQueue mainQueue]
+ usingBlock:^(NSNotification* note) {
+ [_lock lock];
+ AVCaptureDevice* device = [note object];
+ BOOL isVideoDevice = [device hasMediaType:AVMediaTypeVideo];
+ if (isVideoDevice && _owner) _owner->DeviceChange();
+ [_lock unlock];
+ }];
+
+ _observers = [[NSArray alloc]
+ initWithObjects:deviceWasConnectedObserver, deviceWasDisconnectedObserver, nil];
+}
+
+@end
diff --git a/dom/media/systemservices/objc_video_capture/rtc_video_capture_objc.h b/dom/media/systemservices/objc_video_capture/rtc_video_capture_objc.h
new file mode 100644
index 0000000000..9c6604ffe5
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/rtc_video_capture_objc.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_OBJC_RTC_VIDEO_CAPTURE_OBJC_H_
+#define MODULES_VIDEO_CAPTURE_OBJC_RTC_VIDEO_CAPTURE_OBJC_H_
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+#ifdef WEBRTC_IOS
+# import <UIKit/UIKit.h>
+#endif
+
+#include "video_capture.h"
+
+// On iOS the following class observes UIDeviceOrientationDidChangeNotification
+// (device orientation notifications are enabled in the initializer) so that the
+// orientation of the captured video can be adjusted to match the current device
+// orientation.
+@interface RTCVideoCaptureIosObjC : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
+
+@property webrtc::VideoRotation frameRotation;
+
+// Custom initializer. An owning VideoCaptureIos instance is needed so that
+// captured frames can be delivered back to it via IncomingFrame().
+// Use this initializer rather than the plain -init.
+- (id)initWithOwner:(webrtc::videocapturemodule::VideoCaptureIos*)owner;
+- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId;
+- (BOOL)startCaptureWithCapability:(const webrtc::VideoCaptureCapability&)capability;
+- (BOOL)stopCapture;
+
+@end
+#endif // MODULES_VIDEO_CAPTURE_OBJC_RTC_VIDEO_CAPTURE_OBJC_H_
diff --git a/dom/media/systemservices/objc_video_capture/rtc_video_capture_objc.mm b/dom/media/systemservices/objc_video_capture/rtc_video_capture_objc.mm
new file mode 100644
index 0000000000..0a36768fa8
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/rtc_video_capture_objc.mm
@@ -0,0 +1,355 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+# error "This file requires ARC support."
+#endif
+
+#import <AVFoundation/AVFoundation.h>
+#ifdef WEBRTC_IOS
+# import <UIKit/UIKit.h>
+#endif
+
+#import "device_info_objc.h"
+#import "rtc_video_capture_objc.h"
+
+#include "rtc_base/logging.h"
+
+using namespace webrtc;
+using namespace webrtc::videocapturemodule;
+
+@interface RTCVideoCaptureIosObjC (hidden)
+- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId;
+@end
+
+@implementation RTCVideoCaptureIosObjC {
+ webrtc::videocapturemodule::VideoCaptureIos* _owner;
+ webrtc::VideoCaptureCapability _capability;
+ AVCaptureSession* _captureSession;
+ BOOL _orientationHasChanged;
+ AVCaptureConnection* _connection;
+ BOOL _captureChanging; // Guarded by _captureChangingCondition.
+ NSCondition* _captureChangingCondition;
+ dispatch_queue_t _frameQueue;
+}
+
+@synthesize frameRotation = _frameRotation;
+
+- (id)initWithOwner:(VideoCaptureIos*)owner {
+  if ((self = [super init])) {
+ _owner = owner;
+ _captureSession = [[AVCaptureSession alloc] init];
+#if defined(WEBRTC_IOS)
+ _captureSession.usesApplicationAudioSession = NO;
+#endif
+ _captureChanging = NO;
+ _captureChangingCondition = [[NSCondition alloc] init];
+
+ if (!_captureSession || !_captureChangingCondition) {
+ return nil;
+ }
+
+ // create and configure a new output (using callbacks)
+ AVCaptureVideoDataOutput* captureOutput = [[AVCaptureVideoDataOutput alloc] init];
+ NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
+
+ NSNumber* val = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8];
+ NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:val forKey:key];
+ captureOutput.videoSettings = videoSettings;
+
+ // add new output
+ if ([_captureSession canAddOutput:captureOutput]) {
+ [_captureSession addOutput:captureOutput];
+ } else {
+ RTC_LOG(LS_ERROR) << __FUNCTION__ << ": Could not add output to AVCaptureSession";
+ }
+
+#ifdef WEBRTC_IOS
+ [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+
+ NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
+ [notify addObserver:self
+ selector:@selector(onVideoError:)
+ name:AVCaptureSessionRuntimeErrorNotification
+ object:_captureSession];
+ [notify addObserver:self
+ selector:@selector(deviceOrientationDidChange:)
+ name:UIDeviceOrientationDidChangeNotification
+ object:nil];
+#endif
+ }
+
+  // Create a serial queue on which video capture will run. By setting the target
+  // to the default-priority global queue, blocks run on existing GCD worker
+  // threads rather than on a dedicated new thread.
+ _frameQueue = dispatch_queue_create("org.webrtc.videocapture", DISPATCH_QUEUE_SERIAL);
+ dispatch_set_target_queue(_frameQueue,
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0));
+
+ return self;
+}
+
+- (void)directOutputToSelf {
+ [[self currentOutput] setSampleBufferDelegate:self queue:_frameQueue];
+}
+
+- (void)directOutputToNil {
+ [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
+}
+
+- (void)deviceOrientationDidChange:(NSNotification*)notification {
+ _orientationHasChanged = YES;
+ [self setRelativeVideoOrientation];
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
+- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
+ [self waitForCaptureChangeToFinish];
+ // check to see if the camera is already set
+ if (_captureSession) {
+ NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
+ if ([currentInputs count] > 0) {
+ AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
+      if ([uniqueId isEqualToString:currentInput.device.uniqueID]) {
+ return YES;
+ }
+ }
+ }
+
+ return [self changeCaptureInputByUniqueId:uniqueId];
+}
+
+- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
+ [self waitForCaptureChangeToFinish];
+ if (!_captureSession) {
+ return NO;
+ }
+
+ // check limits of the resolution
+ if (capability.maxFPS < 0 || capability.maxFPS > 60) {
+ return NO;
+ }
+
+ if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
+ if (capability.width > 1280 || capability.height > 720) {
+ return NO;
+ }
+ } else if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
+ if (capability.width > 640 || capability.height > 480) {
+ return NO;
+ }
+ } else if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
+ if (capability.width > 352 || capability.height > 288) {
+ return NO;
+ }
+ } else if (capability.width < 0 || capability.height < 0) {
+ return NO;
+ }
+
+ _capability = capability;
+
+ AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
+ if (!currentOutput) return NO;
+
+ [self directOutputToSelf];
+
+ _orientationHasChanged = NO;
+ _captureChanging = YES;
+ dispatch_async(_frameQueue, ^{
+ [self startCaptureInBackgroundWithOutput:currentOutput];
+ });
+ return YES;
+}
+
+- (AVCaptureVideoDataOutput*)currentOutput {
+ return [[_captureSession outputs] firstObject];
+}
+
+- (void)startCaptureInBackgroundWithOutput:(AVCaptureVideoDataOutput*)currentOutput {
+ NSString* captureQuality = [NSString stringWithString:AVCaptureSessionPresetLow];
+ if (_capability.width >= 1280 || _capability.height >= 720) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset1280x720];
+ } else if (_capability.width >= 640 || _capability.height >= 480) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset640x480];
+ } else if (_capability.width >= 352 || _capability.height >= 288) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset352x288];
+ }
+
+ // begin configuration for the AVCaptureSession
+ [_captureSession beginConfiguration];
+
+ // picture resolution
+ [_captureSession setSessionPreset:captureQuality];
+
+ _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
+ [self setRelativeVideoOrientation];
+
+ // finished configuring, commit settings to AVCaptureSession.
+ [_captureSession commitConfiguration];
+
+ [_captureSession startRunning];
+ [self signalCaptureChangeEnd];
+}
+
+- (void)setRelativeVideoOrientation {
+ if (!_connection.supportsVideoOrientation) {
+ return;
+ }
+#ifndef WEBRTC_IOS
+ _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
+ return;
+#else
+ switch ([UIDevice currentDevice].orientation) {
+ case UIDeviceOrientationPortrait:
+ _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
+ break;
+ case UIDeviceOrientationPortraitUpsideDown:
+ _connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
+ break;
+ case UIDeviceOrientationLandscapeLeft:
+ _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
+ break;
+ case UIDeviceOrientationLandscapeRight:
+ _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
+ break;
+ case UIDeviceOrientationFaceUp:
+ case UIDeviceOrientationFaceDown:
+ case UIDeviceOrientationUnknown:
+ if (!_orientationHasChanged) {
+ _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
+ }
+ break;
+ }
+#endif
+}
+
+- (void)onVideoError:(NSNotification*)notification {
+ NSLog(@"onVideoError: %@", notification);
+ // TODO(sjlee): make the specific error handling with this notification.
+ RTC_LOG(LS_ERROR) << __FUNCTION__ << ": [AVCaptureSession startRunning] error.";
+}
+
+- (BOOL)stopCapture {
+#ifdef WEBRTC_IOS
+ [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+#endif
+ _orientationHasChanged = NO;
+ [self waitForCaptureChangeToFinish];
+ [self directOutputToNil];
+
+ if (!_captureSession) {
+ return NO;
+ }
+
+ _captureChanging = YES;
+ [_captureSession stopRunning];
+
+ dispatch_sync(_frameQueue, ^{
+ [self signalCaptureChangeEnd];
+ });
+ return YES;
+}
+
+- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
+ [self waitForCaptureChangeToFinish];
+ NSArray* currentInputs = [_captureSession inputs];
+ // remove current input
+ if ([currentInputs count] > 0) {
+ AVCaptureInput* currentInput = (AVCaptureInput*)[currentInputs objectAtIndex:0];
+
+ [_captureSession removeInput:currentInput];
+ }
+
+ // Look for input device with the name requested (as our input param)
+ // get list of available capture devices
+ int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
+ if (captureDeviceCount <= 0) {
+ return NO;
+ }
+
+ AVCaptureDevice* captureDevice = [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];
+
+ if (!captureDevice) {
+ return NO;
+ }
+
+ // now create capture session input out of AVCaptureDevice
+ NSError* deviceError = nil;
+ AVCaptureDeviceInput* newCaptureInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
+ error:&deviceError];
+
+ if (!newCaptureInput) {
+ const char* errorMessage = [[deviceError localizedDescription] UTF8String];
+
+ RTC_LOG(LS_ERROR) << __FUNCTION__ << ": deviceInputWithDevice error:" << errorMessage;
+
+ return NO;
+ }
+
+ // try to add our new capture device to the capture session
+ [_captureSession beginConfiguration];
+
+ BOOL addedCaptureInput = NO;
+ if ([_captureSession canAddInput:newCaptureInput]) {
+ [_captureSession addInput:newCaptureInput];
+ addedCaptureInput = YES;
+ } else {
+ addedCaptureInput = NO;
+ }
+
+ [_captureSession commitConfiguration];
+
+ return addedCaptureInput;
+}
+
+- (void)captureOutput:(AVCaptureOutput*)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection*)connection {
+ const int kFlags = 0;
+ CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
+
+ if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
+ return;
+ }
+
+ uint8_t* baseAddress = (uint8_t*)CVPixelBufferGetBaseAddress(videoFrame);
+ const size_t width = CVPixelBufferGetWidth(videoFrame);
+ const size_t height = CVPixelBufferGetHeight(videoFrame);
+ const size_t frameSize = width * height * 2;
+
+ VideoCaptureCapability tempCaptureCapability;
+ tempCaptureCapability.width = width;
+ tempCaptureCapability.height = height;
+ tempCaptureCapability.maxFPS = _capability.maxFPS;
+ tempCaptureCapability.videoType = VideoType::kUYVY;
+
+ _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);
+
+ CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
+}
+
+- (void)signalCaptureChangeEnd {
+ [_captureChangingCondition lock];
+ _captureChanging = NO;
+ [_captureChangingCondition signal];
+ [_captureChangingCondition unlock];
+}
+
+- (void)waitForCaptureChangeToFinish {
+ [_captureChangingCondition lock];
+ while (_captureChanging) {
+ [_captureChangingCondition wait];
+ }
+ [_captureChangingCondition unlock];
+}
+@end
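
(Illustration, not part of the patch: the capture-queue setup in initWithOwner: above
relies on GCD queue targeting. Reduced to its essentials, with an arbitrary queue
label and the illustrative helper name MakeSerialCaptureQueue, the pattern is:)

    #import <dispatch/dispatch.h>

    static dispatch_queue_t MakeSerialCaptureQueue(void) {
      // A serial queue whose blocks execute on the default-priority global
      // queue's worker threads instead of on a dedicated thread of their own.
      dispatch_queue_t queue =
          dispatch_queue_create("org.example.capture", DISPATCH_QUEUE_SERIAL);
      dispatch_set_target_queue(
          queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0));
      return queue;
    }

Blocks dispatched to the returned queue still run one at a time and in order; only
the thread they run on is borrowed from the global pool.
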
diff --git a/dom/media/systemservices/objc_video_capture/video_capture.h b/dom/media/systemservices/objc_video_capture/video_capture.h
new file mode 100644
index 0000000000..b9f228f679
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/video_capture.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_OBJC_VIDEO_CAPTURE_H_
+#define MODULES_VIDEO_CAPTURE_OBJC_VIDEO_CAPTURE_H_
+
+#include "modules/video_capture/video_capture_impl.h"
+#include "api/scoped_refptr.h"
+
+@class RTCVideoCaptureIosObjC;
+
+namespace webrtc::videocapturemodule {
+class VideoCaptureIos : public VideoCaptureImpl {
+ public:
+ VideoCaptureIos();
+ virtual ~VideoCaptureIos();
+
+ static rtc::scoped_refptr<VideoCaptureModule> Create(const char* device_unique_id_utf8);
+
+ // Implementation of VideoCaptureImpl.
+ int32_t StartCapture(const VideoCaptureCapability& capability) override;
+ int32_t StopCapture() override;
+ bool CaptureStarted() override;
+ int32_t CaptureSettings(VideoCaptureCapability& settings) override;
+
+ private:
+ RTCVideoCaptureIosObjC* capture_device_;
+ bool is_capturing_;
+ VideoCaptureCapability capability_;
+};
+
+} // namespace webrtc::videocapturemodule
+
+#endif // MODULES_VIDEO_CAPTURE_OBJC_VIDEO_CAPTURE_H_
diff --git a/dom/media/systemservices/objc_video_capture/video_capture.mm b/dom/media/systemservices/objc_video_capture/video_capture.mm
new file mode 100644
index 0000000000..63aef3204c
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/video_capture.mm
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+# error "This file requires ARC support."
+#endif
+
+#include "device_info_objc.h"
+#include "rtc_video_capture_objc.h"
+#include "rtc_base/ref_counted_object.h"
+#include "api/scoped_refptr.h"
+#include "video_capture_avfoundation.h"
+#include "mozilla/StaticPrefs_media.h"
+
+using namespace mozilla;
+using namespace webrtc;
+using namespace videocapturemodule;
+
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(const char* deviceUniqueIdUTF8) {
+ if (StaticPrefs::media_getusermedia_camera_macavf_enabled_AtStartup()) {
+ return VideoCaptureAvFoundation::Create(deviceUniqueIdUTF8);
+ }
+ return VideoCaptureIos::Create(deviceUniqueIdUTF8);
+}
+
+VideoCaptureIos::VideoCaptureIos() : is_capturing_(false) {
+ capability_.width = kDefaultWidth;
+ capability_.height = kDefaultHeight;
+ capability_.maxFPS = kDefaultFrameRate;
+ capture_device_ = nil;
+}
+
+VideoCaptureIos::~VideoCaptureIos() {
+ if (is_capturing_) {
+ [capture_device_ stopCapture];
+ capture_device_ = nil;
+ }
+}
+
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureIos::Create(const char* deviceUniqueIdUTF8) {
+ if (!deviceUniqueIdUTF8[0]) {
+ return NULL;
+ }
+
+ rtc::scoped_refptr<VideoCaptureIos> capture_module(new rtc::RefCountedObject<VideoCaptureIos>());
+
+ const int32_t name_length = strlen(deviceUniqueIdUTF8);
+ if (name_length >= kVideoCaptureUniqueNameLength) return nullptr;
+
+ capture_module->_deviceUniqueId = new char[name_length + 1];
+ strncpy(capture_module->_deviceUniqueId, deviceUniqueIdUTF8, name_length + 1);
+ capture_module->_deviceUniqueId[name_length] = '\0';
+
+ capture_module->capture_device_ =
+ [[RTCVideoCaptureIosObjC alloc] initWithOwner:capture_module.get()];
+ if (!capture_module->capture_device_) {
+ return nullptr;
+ }
+
+ if (![capture_module->capture_device_
+ setCaptureDeviceByUniqueId:[[NSString alloc] initWithCString:deviceUniqueIdUTF8
+ encoding:NSUTF8StringEncoding]]) {
+ return nullptr;
+ }
+ return capture_module;
+}
+
+int32_t VideoCaptureIos::StartCapture(const VideoCaptureCapability& capability) {
+ capability_ = capability;
+
+ if (![capture_device_ startCaptureWithCapability:capability]) {
+ return -1;
+ }
+
+ is_capturing_ = true;
+
+ return 0;
+}
+
+int32_t VideoCaptureIos::StopCapture() {
+ if (![capture_device_ stopCapture]) {
+ return -1;
+ }
+
+ is_capturing_ = false;
+ return 0;
+}
+
+bool VideoCaptureIos::CaptureStarted() { return is_capturing_; }
+
+int32_t VideoCaptureIos::CaptureSettings(VideoCaptureCapability& settings) {
+ settings = capability_;
+ settings.videoType = VideoType::kNV12;
+ return 0;
+}
diff --git a/dom/media/systemservices/objc_video_capture/video_capture_avfoundation.h b/dom/media/systemservices/objc_video_capture/video_capture_avfoundation.h
new file mode 100644
index 0000000000..f5a45b4531
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/video_capture_avfoundation.h
@@ -0,0 +1,79 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_SYSTEMSERVICES_OBJC_VIDEO_CAPTURE_VIDEO_CAPTURE2_H_
+#define DOM_MEDIA_SYSTEMSERVICES_OBJC_VIDEO_CAPTURE_VIDEO_CAPTURE2_H_
+
+#import "components/capturer/RTCCameraVideoCapturer.h"
+
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "mozilla/Maybe.h"
+#include "PerformanceRecorder.h"
+
+@class VideoCaptureAdapter;
+
+namespace webrtc::videocapturemodule {
+
+/**
+ * VideoCaptureImpl implementation of the libwebrtc ios/mac sdk camera backend.
+ * Single threaded except for OnFrame() that happens on a platform callback thread.
+ */
+class VideoCaptureAvFoundation : public VideoCaptureImpl {
+ public:
+ VideoCaptureAvFoundation(AVCaptureDevice* _Nonnull aDevice);
+ virtual ~VideoCaptureAvFoundation();
+
+ static rtc::scoped_refptr<VideoCaptureModule> Create(const char* _Nullable aDeviceUniqueIdUTF8);
+
+ // Implementation of VideoCaptureImpl. Single threaded.
+
+ // Starts capturing synchronously. Idempotent. If an existing capture is live and another
+ // capability is requested we'll restart the underlying backend with the new capability.
+ int32_t StartCapture(const VideoCaptureCapability& aCapability) MOZ_EXCLUDES(api_lock_) override;
+ // Stops capturing synchronously. Idempotent.
+ int32_t StopCapture() MOZ_EXCLUDES(api_lock_) override;
+ bool CaptureStarted() MOZ_EXCLUDES(api_lock_) override;
+ int32_t CaptureSettings(VideoCaptureCapability& aSettings) override;
+
+ // Callback. This can be called on any thread.
+ int32_t OnFrame(__strong RTCVideoFrame* _Nonnull aFrame) MOZ_EXCLUDES(api_lock_);
+
+ void SetTrackingId(uint32_t aTrackingIdProcId) MOZ_EXCLUDES(api_lock_) override;
+
+ // Registers the current thread with the profiler if not already registered.
+ void MaybeRegisterCallbackThread();
+
+ private:
+ // Control thread checker.
+ SequenceChecker mChecker;
+ AVCaptureDevice* _Nonnull const mDevice RTC_GUARDED_BY(mChecker);
+ VideoCaptureAdapter* _Nonnull const mAdapter RTC_GUARDED_BY(mChecker);
+ RTCCameraVideoCapturer* _Nonnull const mCapturer RTC_GUARDED_BY(mChecker);
+ // If capture has started, this is the capability it was started for. Written on the mChecker
+ // thread only.
+ mozilla::Maybe<VideoCaptureCapability> mCapability MOZ_GUARDED_BY(api_lock_);
+ // The image type that mCapability maps to. Set in lockstep with mCapability.
+ mozilla::Maybe<mozilla::CaptureStage::ImageType> mImageType MOZ_GUARDED_BY(api_lock_);
+ // Id string uniquely identifying this capture source. Written on the mChecker thread only.
+ mozilla::Maybe<mozilla::TrackingId> mTrackingId MOZ_GUARDED_BY(api_lock_);
+ // Adds frame specific markers to the profiler while mTrackingId is set. Callback thread only.
+ mozilla::PerformanceRecorderMulti<mozilla::CaptureStage> mCaptureRecorder;
+ mozilla::PerformanceRecorderMulti<mozilla::CopyVideoStage> mConversionRecorder;
+ std::atomic<ProfilerThreadId> mCallbackThreadId;
+};
+
+} // namespace webrtc::videocapturemodule
+
+@interface VideoCaptureAdapter : NSObject <RTCVideoCapturerDelegate> {
+ webrtc::Mutex _mutex;
+ webrtc::videocapturemodule::VideoCaptureAvFoundation* _Nullable _capturer RTC_GUARDED_BY(_mutex);
+}
+- (void)setCapturer:(webrtc::videocapturemodule::VideoCaptureAvFoundation* _Nullable)capturer;
+@end
+
+#endif
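
(Illustration, not part of the patch: the comments above describe how this backend
behaves behind the generic VideoCaptureModule interface. A minimal consumer-side
sketch follows; it assumes the module is created through
webrtc::VideoCaptureFactory::Create(), which in this tree routes to the pref-gated
VideoCaptureImpl::Create() in video_capture.mm, and it picks the first capability
reported by DeviceInfo so the backend can find a matching AVCaptureDeviceFormat.
ExampleSink and CaptureFromFirstCamera are illustrative names.)

    #include <memory>

    #include "api/scoped_refptr.h"
    #include "api/video/video_frame.h"
    #include "api/video/video_sink_interface.h"
    #include "modules/video_capture/video_capture_factory.h"

    class ExampleSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
     public:
      void OnFrame(const webrtc::VideoFrame& aFrame) override {
        // Called on the capture callback thread; see OnFrame() in the class above.
      }
    };

    static bool CaptureFromFirstCamera(ExampleSink* aSink) {
      std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
          webrtc::VideoCaptureFactory::CreateDeviceInfo());
      if (!info || info->NumberOfDevices() == 0) {
        return false;
      }
      char name[webrtc::kVideoCaptureDeviceNameLength] = {};
      char uniqueId[webrtc::kVideoCaptureUniqueNameLength] = {};
      if (info->GetDeviceName(0, name, sizeof(name), uniqueId, sizeof(uniqueId)) != 0) {
        return false;
      }
      webrtc::VideoCaptureCapability capability;
      if (info->GetCapability(uniqueId, 0, capability) != 0) {
        return false;
      }

      rtc::scoped_refptr<webrtc::VideoCaptureModule> module =
          webrtc::VideoCaptureFactory::Create(uniqueId);
      if (!module) {
        return false;
      }
      module->RegisterCaptureDataCallback(aSink);

      // StartCapture is synchronous for this backend; calling it again with a
      // different capability restarts the underlying capturer.
      if (module->StartCapture(capability) != 0) {
        module->DeRegisterCaptureDataCallback();
        return false;
      }
      // ... frames now arrive via ExampleSink::OnFrame ...
      module->StopCapture();
      module->DeRegisterCaptureDataCallback();
      return true;
    }
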
diff --git a/dom/media/systemservices/objc_video_capture/video_capture_avfoundation.mm b/dom/media/systemservices/objc_video_capture/video_capture_avfoundation.mm
new file mode 100644
index 0000000000..e5ca826fa4
--- /dev/null
+++ b/dom/media/systemservices/objc_video_capture/video_capture_avfoundation.mm
@@ -0,0 +1,306 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "video_capture_avfoundation.h"
+
+#import "api/video_frame_buffer/RTCNativeI420Buffer+Private.h"
+#import "base/RTCI420Buffer.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/capturer/RTCCameraVideoCapturer.h"
+#import "helpers/NSString+StdString.h"
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_rotation.h"
+#include "CallbackThreadRegistry.h"
+#include "device_info_avfoundation.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "mozilla/Assertions.h"
+#include "mozilla/Monitor.h"
+#include "mozilla/UniquePtr.h"
+#include "rtc_base/time_utils.h"
+
+using namespace mozilla;
+using namespace webrtc::videocapturemodule;
+
+namespace {
+webrtc::VideoRotation ToNativeRotation(RTCVideoRotation aRotation) {
+ switch (aRotation) {
+ case RTCVideoRotation_0:
+ return webrtc::kVideoRotation_0;
+ case RTCVideoRotation_90:
+ return webrtc::kVideoRotation_90;
+ case RTCVideoRotation_180:
+ return webrtc::kVideoRotation_180;
+ case RTCVideoRotation_270:
+ return webrtc::kVideoRotation_270;
+ default:
+ MOZ_CRASH_UNSAFE_PRINTF("Unexpected rotation %d", static_cast<int>(aRotation));
+ return webrtc::kVideoRotation_0;
+ }
+}
+
+AVCaptureDeviceFormat* _Nullable FindFormat(AVCaptureDevice* _Nonnull aDevice,
+ webrtc::VideoCaptureCapability aCapability) {
+ for (AVCaptureDeviceFormat* format in [aDevice formats]) {
+ CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ if (dimensions.width != aCapability.width) {
+ continue;
+ }
+ if (dimensions.height != aCapability.height) {
+ continue;
+ }
+ FourCharCode fourcc = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+ if (aCapability.videoType != DeviceInfoAvFoundation::ConvertFourCCToVideoType(fourcc)) {
+ continue;
+ }
+ if ([format.videoSupportedFrameRateRanges
+ indexOfObjectPassingTest:^BOOL(AVFrameRateRange* _Nonnull obj, NSUInteger idx,
+ BOOL* _Nonnull stop) {
+ return static_cast<BOOL>(DeviceInfoAvFoundation::ConvertAVFrameRateToCapabilityFPS(
+ obj.maxFrameRate) == aCapability.maxFPS);
+ }] == NSNotFound) {
+ continue;
+ }
+
+ return format;
+ }
+ return nullptr;
+}
+} // namespace
+
+@implementation VideoCaptureAdapter
+- (void)setCapturer:(webrtc::videocapturemodule::VideoCaptureAvFoundation* _Nullable)capturer {
+ webrtc::MutexLock lock(&_mutex);
+ _capturer = capturer;
+}
+
+- (void)capturer:(RTCVideoCapturer* _Nonnull)capturer
+ didCaptureVideoFrame:(RTCVideoFrame* _Nonnull)frame {
+ rtc::scoped_refptr<webrtc::videocapturemodule::VideoCaptureAvFoundation> cap;
+ {
+ webrtc::MutexLock lock(&_mutex);
+ cap = rtc::scoped_refptr(_capturer);
+ }
+ if (!cap) return;
+ cap->OnFrame(frame);
+}
+@end
+
+namespace webrtc::videocapturemodule {
+VideoCaptureAvFoundation::VideoCaptureAvFoundation(AVCaptureDevice* _Nonnull aDevice)
+ : mDevice(aDevice),
+ mAdapter([[VideoCaptureAdapter alloc] init]),
+ mCapturer([[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:mAdapter]),
+ mCallbackThreadId() {
+ const char* uniqueId = [[aDevice uniqueID] UTF8String];
+ size_t len = strlen(uniqueId);
+ _deviceUniqueId = new (std::nothrow) char[len + 1];
+ if (_deviceUniqueId) {
+ memcpy(_deviceUniqueId, uniqueId, len + 1);
+ }
+}
+
+VideoCaptureAvFoundation::~VideoCaptureAvFoundation() {
+ // Must block until capture has fully stopped, including async operations.
+ StopCapture();
+}
+
+/* static */
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureAvFoundation::Create(
+ const char* _Nullable aDeviceUniqueIdUTF8) {
+ std::string uniqueId(aDeviceUniqueIdUTF8);
+ for (AVCaptureDevice* device in [RTCCameraVideoCapturer captureDevices]) {
+ if ([NSString stdStringForString:device.uniqueID] == uniqueId) {
+ rtc::scoped_refptr<VideoCaptureModule> module(
+ new rtc::RefCountedObject<VideoCaptureAvFoundation>(device));
+ return module;
+ }
+ }
+ return nullptr;
+}
+
+int32_t VideoCaptureAvFoundation::StartCapture(const VideoCaptureCapability& aCapability) {
+ RTC_DCHECK_RUN_ON(&mChecker);
+ AVCaptureDeviceFormat* format = FindFormat(mDevice, aCapability);
+ if (!format) {
+ return -1;
+ }
+
+ {
+ MutexLock lock(&api_lock_);
+ if (mCapability) {
+ if (mCapability->width == aCapability.width && mCapability->height == aCapability.height &&
+ mCapability->maxFPS == aCapability.maxFPS &&
+ mCapability->videoType == aCapability.videoType) {
+ return 0;
+ }
+
+ api_lock_.Unlock();
+ int32_t rv = StopCapture();
+ api_lock_.Lock();
+
+ if (rv != 0) {
+ return rv;
+ }
+ }
+ }
+
+ [mAdapter setCapturer:this];
+
+ {
+ Monitor monitor("VideoCaptureAVFoundation::StartCapture");
+ Monitor* copyableMonitor = &monitor;
+ MonitorAutoLock lock(monitor);
+ __block Maybe<int32_t> rv;
+
+ [mCapturer startCaptureWithDevice:mDevice
+ format:format
+ fps:aCapability.maxFPS
+ completionHandler:^(NSError* error) {
+ MonitorAutoLock lock2(*copyableMonitor);
+ MOZ_RELEASE_ASSERT(!rv);
+ rv = Some(error ? -1 : 0);
+ copyableMonitor->Notify();
+ }];
+
+ while (!rv) {
+ monitor.Wait();
+ }
+
+ if (*rv != 0) {
+ return *rv;
+ }
+ }
+
+ MutexLock lock(&api_lock_);
+ mCapability = Some(aCapability);
+ mImageType = Some([type = aCapability.videoType] {
+ switch (type) {
+ case webrtc::VideoType::kI420:
+ return CaptureStage::ImageType::I420;
+ case webrtc::VideoType::kYUY2:
+ return CaptureStage::ImageType::YUY2;
+ case webrtc::VideoType::kYV12:
+ case webrtc::VideoType::kIYUV:
+ return CaptureStage::ImageType::YV12;
+ case webrtc::VideoType::kUYVY:
+ return CaptureStage::ImageType::UYVY;
+ case webrtc::VideoType::kNV12:
+ return CaptureStage::ImageType::NV12;
+ case webrtc::VideoType::kNV21:
+ return CaptureStage::ImageType::NV21;
+ case webrtc::VideoType::kMJPEG:
+ return CaptureStage::ImageType::MJPEG;
+ case webrtc::VideoType::kRGB24:
+ case webrtc::VideoType::kBGR24:
+ case webrtc::VideoType::kABGR:
+ case webrtc::VideoType::kARGB:
+ case webrtc::VideoType::kARGB4444:
+ case webrtc::VideoType::kRGB565:
+ case webrtc::VideoType::kARGB1555:
+ case webrtc::VideoType::kBGRA:
+ case webrtc::VideoType::kUnknown:
+ // Unlikely, and not represented by CaptureStage::ImageType.
+ return CaptureStage::ImageType::Unknown;
+ }
+ return CaptureStage::ImageType::Unknown;
+ }());
+
+ return 0;
+}
+
+int32_t VideoCaptureAvFoundation::StopCapture() {
+ RTC_DCHECK_RUN_ON(&mChecker);
+ {
+ MutexLock lock(&api_lock_);
+ if (!mCapability) {
+ return 0;
+ }
+ mCapability = Nothing();
+ }
+
+ Monitor monitor("VideoCaptureAVFoundation::StopCapture");
+ Monitor* copyableMonitor = &monitor;
+ MonitorAutoLock lock(monitor);
+ __block bool done = false;
+
+ [mCapturer stopCaptureWithCompletionHandler:^(void) {
+ MonitorAutoLock lock2(*copyableMonitor);
+ MOZ_RELEASE_ASSERT(!done);
+ done = true;
+ copyableMonitor->Notify();
+ }];
+
+ while (!done) {
+ monitor.Wait();
+ }
+
+ [mAdapter setCapturer:nil];
+
+ return 0;
+}
+
+bool VideoCaptureAvFoundation::CaptureStarted() {
+ RTC_DCHECK_RUN_ON(&mChecker);
+ MutexLock lock(&api_lock_);
+ return mCapability.isSome();
+}
+
+int32_t VideoCaptureAvFoundation::CaptureSettings(VideoCaptureCapability& aSettings) {
+ MOZ_CRASH("Unexpected call");
+ return -1;
+}
+
+int32_t VideoCaptureAvFoundation::OnFrame(__strong RTCVideoFrame* _Nonnull aFrame) {
+ MaybeRegisterCallbackThread();
+ if (MutexLock lock(&api_lock_); MOZ_LIKELY(mTrackingId)) {
+ mCaptureRecorder.Start(0, "VideoCaptureAVFoundation"_ns, *mTrackingId, aFrame.width,
+ aFrame.height, mImageType.valueOr(CaptureStage::ImageType::Unknown));
+ if (mCapability && mCapability->videoType != webrtc::VideoType::kI420) {
+ mConversionRecorder.Start(0, "VideoCaptureAVFoundation"_ns, *mTrackingId, aFrame.width,
+ aFrame.height);
+ }
+ }
+
+ const int64_t timestamp_us = aFrame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
+ RTCI420Buffer* buffer = [aFrame.buffer toI420];
+ mConversionRecorder.Record(0);
+ // Accessing the (intended-to-be-private) native buffer directly is hacky but lets us skip two
+ // copies
+ rtc::scoped_refptr<webrtc::I420BufferInterface> nativeBuffer = buffer.nativeI420Buffer;
+ auto frame = webrtc::VideoFrame::Builder()
+ .set_video_frame_buffer(nativeBuffer)
+ .set_rotation(ToNativeRotation(aFrame.rotation))
+ .set_timestamp_us(timestamp_us)
+ .build();
+
+ MutexLock lock(&api_lock_);
+ int32_t rv = DeliverCapturedFrame(frame);
+ mCaptureRecorder.Record(0);
+ return rv;
+}
+
+void VideoCaptureAvFoundation::SetTrackingId(uint32_t aTrackingIdProcId) {
+ RTC_DCHECK_RUN_ON(&mChecker);
+ MutexLock lock(&api_lock_);
+ if (NS_WARN_IF(mTrackingId.isSome())) {
+    // If we get here, this capture instance is being shared across multiple
+    // camera requests. For now, ignore requests other than the first.
+ return;
+ }
+ mTrackingId.emplace(TrackingId::Source::Camera, aTrackingIdProcId);
+}
+
+void VideoCaptureAvFoundation::MaybeRegisterCallbackThread() {
+ ProfilerThreadId id = profiler_current_thread_id();
+ if (MOZ_LIKELY(id == mCallbackThreadId)) {
+ return;
+ }
+ mCallbackThreadId = id;
+ CallbackThreadRegistry::Get()->Register(mCallbackThreadId, "VideoCaptureAVFoundationCallback");
+}
+} // namespace webrtc::videocapturemodule
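
(Illustration, not part of the patch: StartCapture() and StopCapture() above turn the
SDK's asynchronous completion handlers into blocking calls with a mozilla::Monitor.
Reduced to its essentials, with a generic block-based operation standing in for the
real startCaptureWithDevice:format:fps:completionHandler: and
stopCaptureWithCompletionHandler: calls, and the illustrative helper name
RunSynchronously, the pattern is:)

    #include "mozilla/Monitor.h"

    typedef void (^CompletionHandler)(void);
    typedef void (^AsyncOperation)(CompletionHandler);

    // Runs aOperation, which is expected to invoke its completion handler on some
    // other thread, and blocks the calling thread until that handler has run.
    static void RunSynchronously(AsyncOperation aOperation) {
      mozilla::Monitor monitor("RunSynchronously");
      mozilla::Monitor* monitorPtr = &monitor;  // Blocks capture a plain pointer.
      mozilla::MonitorAutoLock lock(monitor);
      __block bool done = false;

      aOperation(^{
        mozilla::MonitorAutoLock lock2(*monitorPtr);
        done = true;
        monitorPtr->Notify();
      });

      while (!done) {
        monitor.Wait();
      }
    }

In StartCapture() above the handler additionally records the NSError outcome before
notifying, so the blocked caller can return the corresponding error code.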