summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/modules/video_capture
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 01:47:29 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 01:47:29 +0000
commit0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d (patch)
treea31f07c9bcca9d56ce61e9a1ffd30ef350d513aa /third_party/libwebrtc/modules/video_capture
parentInitial commit. (diff)
downloadfirefox-esr-0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d.tar.xz
firefox-esr-0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d.zip
Adding upstream version 115.8.0esr.upstream/115.8.0esr
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/modules/video_capture')
-rw-r--r--third_party/libwebrtc/modules/video_capture/BUILD.gn152
-rw-r--r--third_party/libwebrtc/modules/video_capture/DEPS6
-rw-r--r--third_party/libwebrtc/modules/video_capture/OWNERS4
-rw-r--r--third_party/libwebrtc/modules/video_capture/device_info_impl.cc224
-rw-r--r--third_party/libwebrtc/modules/video_capture/device_info_impl.h63
-rw-r--r--third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc42
-rw-r--r--third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc517
-rw-r--r--third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h71
-rw-r--r--third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc51
-rw-r--r--third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc489
-rw-r--r--third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h65
-rw-r--r--third_party/libwebrtc/modules/video_capture/raw_video_sink_interface.h34
-rw-r--r--third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc376
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture.h169
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_config.h33
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_defines.h59
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_factory.cc26
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_factory.h40
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_factory_null.cc27
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_impl.cc334
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_impl.h119
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build254
-rw-r--r--third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build237
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc713
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/device_info_ds.h107
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc158
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.h118
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc959
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.h162
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc322
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.h74
-rw-r--r--third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc38
32 files changed, 6043 insertions, 0 deletions
diff --git a/third_party/libwebrtc/modules/video_capture/BUILD.gn b/third_party/libwebrtc/modules/video_capture/BUILD.gn
new file mode 100644
index 0000000000..4a5bf62433
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/BUILD.gn
@@ -0,0 +1,152 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+# Note this target is missing an implementation for the video capture.
+# Targets must link with either 'video_capture' or
+# 'video_capture_internal_impl' depending on whether they want to
+# use the internal capturer.
+rtc_library("video_capture_module") {
+ visibility = [ "*" ]
+ sources = [
+ "device_info_impl.cc",
+ "device_info_impl.h",
+ "raw_video_sink_interface.h",
+ "video_capture.h",
+ "video_capture_config.h",
+ "video_capture_defines.h",
+ "video_capture_factory.cc",
+ "video_capture_factory.h",
+ "video_capture_impl.cc",
+ "video_capture_impl.h",
+ ]
+
+ deps = [
+ "../../api:scoped_refptr",
+ "../../api/video:video_frame",
+ "../../api/video:video_rtp_headers",
+ "../../common_video",
+ "../../media:rtc_media_base",
+ "../../rtc_base:event_tracer",
+ "../../rtc_base:logging",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:refcount",
+ "../../rtc_base:stringutils",
+ "../../rtc_base:timeutils",
+ "../../rtc_base/synchronization:mutex",
+ "../../system_wrappers",
+ "//third_party/libyuv",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+if (!build_with_chromium) {
+ rtc_source_set("video_capture_internal_impl") {
+ visibility = [ "*" ]
+ deps = [
+ ":video_capture_module",
+ "../../api:scoped_refptr",
+ "../../api:sequence_checker",
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:platform_thread",
+ "../../rtc_base:refcount",
+ "../../rtc_base:stringutils",
+ "../../rtc_base/synchronization:mutex",
+ "../../system_wrappers",
+ ]
+
+ if (is_linux || is_bsd || is_chromeos) {
+ sources = [
+ "linux/device_info_linux.cc",
+ "linux/device_info_v4l2.cc",
+ "linux/device_info_v4l2.h",
+ "linux/video_capture_linux.cc",
+ "linux/video_capture_v4l2.cc",
+ "linux/video_capture_v4l2.h",
+ ]
+ deps += [ "../../media:rtc_media_base" ]
+ }
+ if (is_win) {
+ sources = [
+ "windows/device_info_ds.cc",
+ "windows/device_info_ds.h",
+ "windows/help_functions_ds.cc",
+ "windows/help_functions_ds.h",
+ "windows/sink_filter_ds.cc",
+ "windows/sink_filter_ds.h",
+ "windows/video_capture_ds.cc",
+ "windows/video_capture_ds.h",
+ "windows/video_capture_factory_windows.cc",
+ ]
+
+ libs = [
+ "ole32.lib",
+ "oleaut32.lib",
+ "strmiids.lib",
+ "user32.lib",
+ ]
+ }
+ if (is_fuchsia) {
+ sources = [ "video_capture_factory_null.cc" ]
+ }
+
+ if (!build_with_mozilla && is_android) {
+ include_dirs = [
+ "/config/external/nspr",
+ "/nsprpub/lib/ds",
+ "/nsprpub/pr/include",
+ ]
+
+ sources = [
+ "android/device_info_android.cc",
+ "android/video_capture_android.cc",
+ ]
+ }
+ }
+
+ if (!is_android && rtc_include_tests) {
+ rtc_test("video_capture_tests") {
+ sources = [ "test/video_capture_unittest.cc" ]
+ ldflags = []
+ if (is_linux || is_chromeos || is_mac) {
+ ldflags += [
+ "-lpthread",
+ "-lm",
+ ]
+ }
+ if (is_linux || is_chromeos) {
+ ldflags += [
+ "-lrt",
+ "-lXext",
+ "-lX11",
+ ]
+ }
+
+ deps = [
+ ":video_capture_internal_impl",
+ ":video_capture_module",
+ "../../api:scoped_refptr",
+ "../../api/video:video_frame",
+ "../../api/video:video_rtp_headers",
+ "../../common_video",
+ "../../rtc_base:timeutils",
+ "../../rtc_base/synchronization:mutex",
+ "../../system_wrappers",
+ "../../test:frame_utils",
+ "../../test:test_main",
+ "../../test:test_support",
+ "../../test:video_test_common",
+ "//testing/gtest",
+ "//third_party/abseil-cpp/absl/memory",
+ ]
+ }
+ }
+}
diff --git a/third_party/libwebrtc/modules/video_capture/DEPS b/third_party/libwebrtc/modules/video_capture/DEPS
new file mode 100644
index 0000000000..9ad1d576bc
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/DEPS
@@ -0,0 +1,6 @@
+include_rules = [
+ "+common_video",
+ "+media/base",
+ "+system_wrappers",
+ "+third_party/libyuv",
+]
diff --git a/third_party/libwebrtc/modules/video_capture/OWNERS b/third_party/libwebrtc/modules/video_capture/OWNERS
new file mode 100644
index 0000000000..364d66d36f
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/OWNERS
@@ -0,0 +1,4 @@
+ilnik@webrtc.org
+mflodman@webrtc.org
+perkj@webrtc.org
+tkchin@webrtc.org
diff --git a/third_party/libwebrtc/modules/video_capture/device_info_impl.cc b/third_party/libwebrtc/modules/video_capture/device_info_impl.cc
new file mode 100644
index 0000000000..2a6afb3147
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/device_info_impl.cc
@@ -0,0 +1,224 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/device_info_impl.h"
+
+#include <stdlib.h>
+
+#include "absl/strings/match.h"
+#include "absl/strings/string_view.h"
+#include "rtc_base/logging.h"
+
+#ifndef abs
+#define abs(a) (a >= 0 ? a : -a)
+#endif
+
+namespace webrtc {
+namespace videocapturemodule {
+
+DeviceInfoImpl::DeviceInfoImpl()
+ : _lastUsedDeviceName(NULL), _lastUsedDeviceNameLength(0) {}
+
+DeviceInfoImpl::~DeviceInfoImpl(void) {
+ MutexLock lock(&_apiLock);
+ free(_lastUsedDeviceName);
+}
+
+int32_t DeviceInfoImpl::NumberOfCapabilities(const char* deviceUniqueIdUTF8) {
+ if (!deviceUniqueIdUTF8)
+ return -1;
+
+ MutexLock lock(&_apiLock);
+
+ // Is it the same device that is asked for again.
+ if (absl::EqualsIgnoreCase(
+ deviceUniqueIdUTF8,
+ absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) {
+ return static_cast<int32_t>(_captureCapabilities.size());
+ }
+
+ int32_t ret = CreateCapabilityMap(deviceUniqueIdUTF8);
+ return ret;
+}
+
+int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability) {
+ RTC_DCHECK(deviceUniqueIdUTF8);
+
+ MutexLock lock(&_apiLock);
+
+ if (!absl::EqualsIgnoreCase(
+ deviceUniqueIdUTF8,
+ absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) {
+ if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8)) {
+ return -1;
+ }
+ }
+
+ // Make sure the number is valid
+ if (deviceCapabilityNumber >= (unsigned int)_captureCapabilities.size()) {
+ RTC_LOG(LS_ERROR) << deviceUniqueIdUTF8 << " Invalid deviceCapabilityNumber "
+ << deviceCapabilityNumber << ">= number of capabilities ("
+ << _captureCapabilities.size() << ").";
+ return -1;
+ }
+
+ capability = _captureCapabilities[deviceCapabilityNumber];
+ return 0;
+}
+
+int32_t DeviceInfoImpl::GetBestMatchedCapability(
+ const char* deviceUniqueIdUTF8,
+ const VideoCaptureCapability& requested,
+ VideoCaptureCapability& resulting) {
+ if (!deviceUniqueIdUTF8)
+ return -1;
+
+ MutexLock lock(&_apiLock);
+ if (!absl::EqualsIgnoreCase(
+ deviceUniqueIdUTF8,
+ absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) {
+ if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8)) {
+ return -1;
+ }
+ }
+
+ int32_t bestformatIndex = -1;
+ int32_t bestWidth = 0;
+ int32_t bestHeight = 0;
+ int32_t bestFrameRate = 0;
+ VideoType bestVideoType = VideoType::kUnknown;
+
+ const int32_t numberOfCapabilies =
+ static_cast<int32_t>(_captureCapabilities.size());
+
+ bool hasNonRGB24Capability = false;
+ for (int32_t tmp = 0; tmp < numberOfCapabilies;
+ ++tmp) // Loop through all capabilities
+ {
+ VideoCaptureCapability& capability = _captureCapabilities[tmp];
+ if (capability.videoType != VideoType::kRGB24) {
+ hasNonRGB24Capability = true;
+ }
+ }
+
+ for (int32_t tmp = 0; tmp < numberOfCapabilies;
+ ++tmp) // Loop through all capabilities
+ {
+ VideoCaptureCapability& capability = _captureCapabilities[tmp];
+ if (hasNonRGB24Capability && capability.videoType == VideoType::kRGB24) {
+ continue;
+ }
+
+ const int32_t diffWidth = capability.width - requested.width;
+ const int32_t diffHeight = capability.height - requested.height;
+ const int32_t diffFrameRate = capability.maxFPS - requested.maxFPS;
+
+ const int32_t currentbestDiffWith = bestWidth - requested.width;
+ const int32_t currentbestDiffHeight = bestHeight - requested.height;
+ const int32_t currentbestDiffFrameRate = bestFrameRate - requested.maxFPS;
+
+ if ((diffHeight >= 0 &&
+ diffHeight <= abs(currentbestDiffHeight)) // Height better or equalt
+ // that previouse.
+ || (currentbestDiffHeight < 0 && diffHeight >= currentbestDiffHeight)) {
+ if (diffHeight ==
+ currentbestDiffHeight) // Found best height. Care about the width)
+ {
+ if ((diffWidth >= 0 &&
+ diffWidth <= abs(currentbestDiffWith)) // Width better or equal
+ || (currentbestDiffWith < 0 && diffWidth >= currentbestDiffWith)) {
+ if (diffWidth == currentbestDiffWith &&
+ diffHeight == currentbestDiffHeight) // Same size as previously
+ {
+ // Also check the best frame rate if the diff is the same as
+ // previouse
+ if (((diffFrameRate >= 0 &&
+ diffFrameRate <=
+ currentbestDiffFrameRate) // Frame rate to high but
+ // better match than previouse
+ // and we have not selected IUV
+ || (currentbestDiffFrameRate < 0 &&
+ diffFrameRate >=
+ currentbestDiffFrameRate)) // Current frame rate is
+ // lower than requested.
+ // This is better.
+ ) {
+ if ((currentbestDiffFrameRate ==
+ diffFrameRate) // Same frame rate as previous or frame rate
+ // allready good enough
+ || (currentbestDiffFrameRate >= 0)) {
+ if (bestVideoType != requested.videoType &&
+ requested.videoType != VideoType::kUnknown &&
+ (capability.videoType == requested.videoType ||
+ capability.videoType == VideoType::kI420 ||
+ capability.videoType == VideoType::kYUY2 ||
+ capability.videoType == VideoType::kYV12 ||
+ capability.videoType == VideoType::kNV12)) {
+ bestVideoType = capability.videoType;
+ bestformatIndex = tmp;
+ }
+ // If width height and frame rate is full filled we can use the
+ // camera for encoding if it is supported.
+ if (capability.height == requested.height &&
+ capability.width == requested.width &&
+ capability.maxFPS >= requested.maxFPS) {
+ bestformatIndex = tmp;
+ }
+ } else // Better frame rate
+ {
+ bestWidth = capability.width;
+ bestHeight = capability.height;
+ bestFrameRate = capability.maxFPS;
+ bestVideoType = capability.videoType;
+ bestformatIndex = tmp;
+ }
+ }
+ } else // Better width than previously
+ {
+ bestWidth = capability.width;
+ bestHeight = capability.height;
+ bestFrameRate = capability.maxFPS;
+ bestVideoType = capability.videoType;
+ bestformatIndex = tmp;
+ }
+ } // else width no good
+ } else // Better height
+ {
+ bestWidth = capability.width;
+ bestHeight = capability.height;
+ bestFrameRate = capability.maxFPS;
+ bestVideoType = capability.videoType;
+ bestformatIndex = tmp;
+ }
+ } // else height not good
+ } // end for
+
+ RTC_LOG(LS_VERBOSE) << "Best camera format: " << bestWidth << "x"
+ << bestHeight << "@" << bestFrameRate
+ << "fps, color format: "
+ << static_cast<int>(bestVideoType);
+
+ // Copy the capability
+ if (bestformatIndex < 0)
+ return -1;
+ resulting = _captureCapabilities[bestformatIndex];
+ return bestformatIndex;
+}
+
+// Default implementation. This should be overridden by Mobile implementations.
+int32_t DeviceInfoImpl::GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation) {
+ orientation = kVideoRotation_0;
+ return -1;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/device_info_impl.h b/third_party/libwebrtc/modules/video_capture/device_info_impl.h
new file mode 100644
index 0000000000..8acbef6d69
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/device_info_impl.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+
+#include <stdint.h>
+
+#include <vector>
+
+#include "api/video/video_rotation.h"
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+class DeviceInfoImpl : public VideoCaptureModule::DeviceInfo {
+ public:
+ DeviceInfoImpl();
+ ~DeviceInfoImpl(void) override;
+ int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8) override;
+ int32_t GetCapability(const char* deviceUniqueIdUTF8,
+ uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability) override;
+
+ int32_t GetBestMatchedCapability(const char* deviceUniqueIdUTF8,
+ const VideoCaptureCapability& requested,
+ VideoCaptureCapability& resulting) override;
+ int32_t GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation) override;
+
+ protected:
+ /* Initialize this object*/
+
+ virtual int32_t Init() = 0;
+ int32_t Refresh() override { return 0; }
+ /*
+ * Fills the member variable _captureCapabilities with capabilities for the
+ * given device name.
+ */
+ virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock) = 0;
+
+ protected:
+ // Data members
+ typedef std::vector<VideoCaptureCapability> VideoCaptureCapabilities;
+ VideoCaptureCapabilities _captureCapabilities RTC_GUARDED_BY(_apiLock);
+ Mutex _apiLock;
+ char* _lastUsedDeviceName RTC_GUARDED_BY(_apiLock);
+ uint32_t _lastUsedDeviceNameLength RTC_GUARDED_BY(_apiLock);
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc
new file mode 100644
index 0000000000..4821cbccd5
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+// v4l includes
+#if defined(__NetBSD__) || defined(__OpenBSD__) // WEBRTC_BSD
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
+#include <vector>
+
+#include "modules/video_capture/linux/device_info_v4l2.h"
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
+ return new videocapturemodule::DeviceInfoV4l2();
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc
new file mode 100644
index 0000000000..ccd4b2bd2a
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc
@@ -0,0 +1,517 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/linux/device_info_v4l2.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+// v4l includes
+#if defined(__NetBSD__) || defined(__OpenBSD__) // WEBRTC_BSD
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
+#include <vector>
+
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "rtc_base/logging.h"
+
+#ifdef WEBRTC_LINUX
+#define EVENT_SIZE ( sizeof (struct inotify_event) )
+#define BUF_LEN ( 1024 * ( EVENT_SIZE + 16 ) )
+#endif
+
+// These defines are here to support building on kernel 3.16 which some
+// downstream projects, e.g. Firefox, use.
+// TODO(apehrson): Remove them and their undefs when no longer needed.
+#ifndef V4L2_PIX_FMT_ABGR32
+#define ABGR32_OVERRIDE 1
+#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4')
+#endif
+
+#ifndef V4L2_PIX_FMT_ARGB32
+#define ARGB32_OVERRIDE 1
+#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4')
+#endif
+
+#ifndef V4L2_PIX_FMT_RGBA32
+#define RGBA32_OVERRIDE 1
+#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4')
+#endif
+
+namespace webrtc {
+namespace videocapturemodule {
+#ifdef WEBRTC_LINUX
+void DeviceInfoV4l2::HandleEvent(inotify_event* event, int fd)
+{
+ if (event->mask & IN_CREATE) {
+ if (fd == _fd_v4l) {
+ DeviceChange();
+ } else if ((event->mask & IN_ISDIR) && (fd == _fd_dev)) {
+ if (_wd_v4l < 0) {
+ // Sometimes inotify_add_watch failed if we call it immediately after receiving this event
+ // Adding 5ms delay to let file system settle down
+ usleep(5*1000);
+ _wd_v4l = inotify_add_watch(_fd_v4l, "/dev/v4l/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF);
+ if (_wd_v4l >= 0) {
+ DeviceChange();
+ }
+ }
+ }
+ } else if (event->mask & IN_DELETE) {
+ if (fd == _fd_v4l) {
+ DeviceChange();
+ }
+ } else if (event->mask & IN_DELETE_SELF) {
+ if (fd == _fd_v4l) {
+ inotify_rm_watch(_fd_v4l, _wd_v4l);
+ _wd_v4l = -1;
+ } else {
+ assert(false);
+ }
+ }
+}
+
+int DeviceInfoV4l2::EventCheck(int fd)
+{
+ struct pollfd fds = {
+ .fd = fd,
+ .events = POLLIN,
+ .revents = 0
+ };
+
+ return poll(&fds, 1, 100);
+}
+
+int DeviceInfoV4l2::HandleEvents(int fd)
+{
+ char buffer[BUF_LEN];
+
+ ssize_t r = read(fd, buffer, BUF_LEN);
+
+ if (r <= 0) {
+ return r;
+ }
+
+ ssize_t buffer_i = 0;
+ inotify_event* pevent;
+ size_t eventSize;
+ int count = 0;
+
+ while (buffer_i < r)
+ {
+ pevent = (inotify_event *) (&buffer[buffer_i]);
+ eventSize = sizeof(inotify_event) + pevent->len;
+ char event[sizeof(inotify_event) + FILENAME_MAX + 1] // null-terminated
+ __attribute__ ((aligned(__alignof__(struct inotify_event))));
+
+ memcpy(event, pevent, eventSize);
+
+ HandleEvent((inotify_event*)(event), fd);
+
+ buffer_i += eventSize;
+ count++;
+ }
+
+ return count;
+}
+
+int DeviceInfoV4l2::ProcessInotifyEvents()
+{
+ while (!_isShutdown) {
+ if (EventCheck(_fd_dev) > 0) {
+ if (HandleEvents(_fd_dev) < 0) {
+ break;
+ }
+ }
+ if (EventCheck(_fd_v4l) > 0) {
+ if (HandleEvents(_fd_v4l) < 0) {
+ break;
+ }
+ }
+ }
+ return 0;
+}
+
+void DeviceInfoV4l2::InotifyProcess()
+{
+ _fd_v4l = inotify_init();
+ _fd_dev = inotify_init();
+ if (_fd_v4l >= 0 && _fd_dev >= 0) {
+ _wd_v4l = inotify_add_watch(_fd_v4l, "/dev/v4l/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF);
+ _wd_dev = inotify_add_watch(_fd_dev, "/dev/", IN_CREATE);
+ ProcessInotifyEvents();
+
+ if (_wd_v4l >= 0) {
+ inotify_rm_watch(_fd_v4l, _wd_v4l);
+ }
+
+ if (_wd_dev >= 0) {
+ inotify_rm_watch(_fd_dev, _wd_dev);
+ }
+
+ close(_fd_v4l);
+ close(_fd_dev);
+ }
+}
+#endif
+
+DeviceInfoV4l2::DeviceInfoV4l2() : DeviceInfoImpl()
+#ifdef WEBRTC_LINUX
+ , _isShutdown(false)
+#endif
+{
+#ifdef WEBRTC_LINUX
+ _inotifyEventThread = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ InotifyProcess();
+ }, "InotifyEventThread");
+#endif
+}
+
+int32_t DeviceInfoV4l2::Init() {
+ return 0;
+}
+
+DeviceInfoV4l2::~DeviceInfoV4l2() {
+#ifdef WEBRTC_LINUX
+ _isShutdown = true;
+
+ if (!_inotifyEventThread.empty()) {
+ _inotifyEventThread.Finalize();
+ }
+#endif
+}
+
+uint32_t DeviceInfoV4l2::NumberOfDevices() {
+ uint32_t count = 0;
+ char device[20];
+ int fd = -1;
+ struct v4l2_capability cap;
+
+ /* detect /dev/video [0-63]VideoCaptureModule entries */
+ for (int n = 0; n < 64; n++) {
+ snprintf(device, sizeof(device), "/dev/video%d", n);
+ if ((fd = open(device, O_RDONLY)) != -1) {
+ // query device capabilities and make sure this is a video capture device
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !IsVideoCaptureDevice(&cap)) {
+ close(fd);
+ continue;
+ }
+
+ close(fd);
+ count++;
+ }
+ }
+
+ return count;
+}
+
+int32_t DeviceInfoV4l2::GetDeviceName(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* /*productUniqueIdUTF8*/,
+ uint32_t /*productUniqueIdUTF8Length*/,
+ pid_t* /*pid*/) {
+ // Travel through /dev/video [0-63]
+ uint32_t count = 0;
+ char device[20];
+ int fd = -1;
+ bool found = false;
+ struct v4l2_capability cap;
+ int device_index;
+ for (device_index = 0; device_index < 64; device_index++) {
+ sprintf(device, "/dev/video%d", device_index);
+ if ((fd = open(device, O_RDONLY)) != -1) {
+ // query device capabilities and make sure this is a video capture device
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !IsVideoCaptureDevice(&cap)) {
+ close(fd);
+ continue;
+ }
+ if (count == deviceNumber) {
+ // Found the device
+ found = true;
+ break;
+ } else {
+ close(fd);
+ count++;
+ }
+ }
+ }
+
+ if (!found)
+ return -1;
+
+ // query device capabilities
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
+ RTC_LOG(LS_INFO) << "error in querying the device capability for device "
+ << device << ". errno = " << errno;
+ close(fd);
+ return -1;
+ }
+
+ close(fd);
+
+ char cameraName[64];
+ memset(deviceNameUTF8, 0, deviceNameLength);
+ memcpy(cameraName, cap.card, sizeof(cap.card));
+
+ if (deviceNameLength > strlen(cameraName)) {
+ memcpy(deviceNameUTF8, cameraName, strlen(cameraName));
+ } else {
+ RTC_LOG(LS_INFO) << "buffer passed is too small";
+ return -1;
+ }
+
+ if (cap.bus_info[0] != 0) { // may not available in all drivers
+ // copy device id
+ size_t len = strlen(reinterpret_cast<const char*>(cap.bus_info));
+ if (deviceUniqueIdUTF8Length > len) {
+ memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
+ memcpy(deviceUniqueIdUTF8, cap.bus_info, len);
+ } else {
+ RTC_LOG(LS_INFO) << "buffer passed is too small";
+ return -1;
+ }
+ } else {
+ // if there's no bus info to use for uniqueId, invent one - and it has to be repeatable
+ if (snprintf(deviceUniqueIdUTF8,
+ deviceUniqueIdUTF8Length, "fake_%u", device_index) >=
+ (int) deviceUniqueIdUTF8Length)
+ {
+ return -1;
+ }
+ }
+ return 0;
+}
+
+int32_t DeviceInfoV4l2::CreateCapabilityMap(const char* deviceUniqueIdUTF8) {
+ int fd;
+ char device[32];
+ bool found = false;
+
+ const int32_t deviceUniqueIdUTF8Length = strlen(deviceUniqueIdUTF8);
+ if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) {
+ RTC_LOG(LS_INFO) << "Device name too long";
+ return -1;
+ }
+ RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device "
+ << deviceUniqueIdUTF8;
+
+ /* detect /dev/video [0-63] entries */
+ for (int n = 0; n < 64; ++n) {
+ snprintf(device, sizeof(device), "/dev/video%d", n);
+ fd = open(device, O_RDONLY);
+ if (fd == -1)
+ continue;
+
+ // query device capabilities
+ struct v4l2_capability cap;
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) {
+ // skip devices without video capture capability
+ if (!IsVideoCaptureDevice(&cap)) {
+ close(fd);
+ continue;
+ }
+
+ if (cap.bus_info[0] != 0) {
+ if (strncmp(reinterpret_cast<const char*>(cap.bus_info),
+ deviceUniqueIdUTF8,
+ strlen(deviceUniqueIdUTF8)) == 0) { // match with device id
+ found = true;
+ break; // fd matches with device unique id supplied
+ }
+ } else { // match for device name
+ if (IsDeviceNameMatches(reinterpret_cast<const char*>(cap.card),
+ deviceUniqueIdUTF8)) {
+ found = true;
+ break;
+ }
+ }
+ }
+ close(fd); // close since this is not the matching device
+ }
+
+ if (!found) {
+ RTC_LOG(LS_INFO) << "no matching device found";
+ return -1;
+ }
+
+ // now fd will point to the matching device
+ // reset old capability list.
+ _captureCapabilities.clear();
+
+ int size = FillCapabilities(fd);
+ close(fd);
+
+ // Store the new used device name
+ _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+ _lastUsedDeviceName = reinterpret_cast<char*>(
+ realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1));
+ memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength + 1);
+
+ RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size();
+
+ return size;
+}
+
+int32_t DeviceInfoV4l2::DisplayCaptureSettingsDialogBox(
+ const char* /*deviceUniqueIdUTF8*/,
+ const char* /*dialogTitleUTF8*/,
+ void* /*parentWindow*/,
+ uint32_t /*positionX*/,
+ uint32_t /*positionY*/) {
+ return -1;
+}
+
+bool DeviceInfoV4l2::IsDeviceNameMatches(const char* name,
+ const char* deviceUniqueIdUTF8) {
+ if (strncmp(deviceUniqueIdUTF8, name, strlen(name)) == 0)
+ return true;
+ return false;
+}
+
+bool DeviceInfoV4l2::IsVideoCaptureDevice(struct v4l2_capability* cap)
+{
+ if (cap->capabilities & V4L2_CAP_DEVICE_CAPS) {
+ return cap->device_caps & V4L2_CAP_VIDEO_CAPTURE;
+ } else {
+ return cap->capabilities & V4L2_CAP_VIDEO_CAPTURE;
+ }
+}
+
+int32_t DeviceInfoV4l2::FillCapabilities(int fd) {
+ // set image format
+ struct v4l2_format video_fmt;
+ memset(&video_fmt, 0, sizeof(struct v4l2_format));
+
+ video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ video_fmt.fmt.pix.sizeimage = 0;
+
+ unsigned int videoFormats[] = {
+ V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_JPEG, V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YVU420, V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY,
+ V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_BGR24, V4L2_PIX_FMT_RGB24,
+ V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_ABGR32, V4L2_PIX_FMT_ARGB32,
+ V4L2_PIX_FMT_RGBA32, V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_RGB32,
+ };
+ constexpr int totalFmts = sizeof(videoFormats) / sizeof(unsigned int);
+
+ int sizes = 13;
+ unsigned int size[][2] = {{128, 96}, {160, 120}, {176, 144}, {320, 240},
+ {352, 288}, {640, 480}, {704, 576}, {800, 600},
+ {960, 720}, {1280, 720}, {1024, 768}, {1440, 1080},
+ {1920, 1080}};
+
+ for (int fmts = 0; fmts < totalFmts; fmts++) {
+ for (int i = 0; i < sizes; i++) {
+ video_fmt.fmt.pix.pixelformat = videoFormats[fmts];
+ video_fmt.fmt.pix.width = size[i][0];
+ video_fmt.fmt.pix.height = size[i][1];
+
+ if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) >= 0) {
+ if ((video_fmt.fmt.pix.width == size[i][0]) &&
+ (video_fmt.fmt.pix.height == size[i][1])) {
+ VideoCaptureCapability cap;
+ cap.width = video_fmt.fmt.pix.width;
+ cap.height = video_fmt.fmt.pix.height;
+ if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV) {
+ cap.videoType = VideoType::kYUY2;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420) {
+ cap.videoType = VideoType::kI420;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_YVU420) {
+ cap.videoType = VideoType::kYV12;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG ||
+ videoFormats[fmts] == V4L2_PIX_FMT_JPEG) {
+ cap.videoType = VideoType::kMJPEG;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY) {
+ cap.videoType = VideoType::kUYVY;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_NV12) {
+ cap.videoType = VideoType::kNV12;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_BGR24) {
+ // NB that for RGB formats, `VideoType` follows naming conventions
+ // of libyuv[1], where e.g. the format for FOURCC "ARGB" stores
+ // pixels in BGRA order in memory. V4L2[2] on the other hand names
+ // its formats based on the order of the RGB components as stored in
+ // memory. Applies to all RGB formats below.
+ // [1]https://chromium.googlesource.com/libyuv/libyuv/+/refs/heads/main/docs/formats.md#the-argb-fourcc
+ // [2]https://www.kernel.org/doc/html/v6.2/userspace-api/media/v4l/pixfmt-rgb.html#bits-per-component
+ cap.videoType = VideoType::kRGB24;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB24) {
+ cap.videoType = VideoType::kBGR24;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB565) {
+ cap.videoType = VideoType::kRGB565;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_ABGR32) {
+ cap.videoType = VideoType::kARGB;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_ARGB32) {
+ cap.videoType = VideoType::kBGRA;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_BGR32) {
+ cap.videoType = VideoType::kARGB;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB32) {
+ cap.videoType = VideoType::kBGRA;
+ } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGBA32) {
+ cap.videoType = VideoType::kABGR;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ }
+
+ // get fps of current camera mode
+ // V4l2 does not have a stable method of knowing so we just guess.
+ if (cap.width >= 800 && cap.videoType != VideoType::kMJPEG) {
+ cap.maxFPS = 15;
+ } else {
+ cap.maxFPS = 30;
+ }
+
+ _captureCapabilities.push_back(cap);
+ RTC_LOG(LS_VERBOSE) << "Camera capability, width:" << cap.width
+ << " height:" << cap.height
+ << " type:" << static_cast<int32_t>(cap.videoType)
+ << " fps:" << cap.maxFPS;
+ }
+ }
+ }
+ }
+
+ RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size();
+ return _captureCapabilities.size();
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#ifdef ABGR32_OVERRIDE
+#undef ABGR32_OVERRIDE
+#undef V4L2_PIX_FMT_ABGR32
+#endif
+
+#ifdef ARGB32_OVERRIDE
+#undef ARGB32_OVERRIDE
+#undef V4L2_PIX_FMT_ARGB32
+#endif
+
+#ifdef RGBA32_OVERRIDE
+#undef RGBA32_OVERRIDE
+#undef V4L2_PIX_FMT_RGBA32
+#endif
diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h
new file mode 100644
index 0000000000..0bec3eb765
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_
+#define MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_
+
+#include <stdint.h>
+
+#include "modules/video_capture/device_info_impl.h"
+
+#include "rtc_base/platform_thread.h"
+#ifdef WEBRTC_LINUX
+#include <sys/inotify.h>
+#endif
+
+struct v4l2_capability;
+
+namespace webrtc {
+namespace videocapturemodule {
+// DeviceInfo implementation for V4L2 devices. Enumerates /dev/video* nodes,
+// maps them to stable unique ids, and (on Linux) watches for hot-plug events
+// via inotify on a dedicated thread.
+class DeviceInfoV4l2 : public DeviceInfoImpl {
+ public:
+  DeviceInfoV4l2();
+  ~DeviceInfoV4l2() override;
+  uint32_t NumberOfDevices() override;
+  int32_t GetDeviceName(uint32_t deviceNumber,
+                        char* deviceNameUTF8,
+                        uint32_t deviceNameLength,
+                        char* deviceUniqueIdUTF8,
+                        uint32_t deviceUniqueIdUTF8Length,
+                        char* productUniqueIdUTF8 = 0,
+                        uint32_t productUniqueIdUTF8Length = 0,
+                        pid_t* pid=0) override;
+  /*
+   * Fills the membervariable _captureCapabilities with capabilites for the
+   * given device name.
+   */
+  int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
+  int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/,
+                                          const char* /*dialogTitleUTF8*/,
+                                          void* /*parentWindow*/,
+                                          uint32_t /*positionX*/,
+                                          uint32_t /*positionY*/) override;
+  // Queries capabilities of the already-opened device `fd` into
+  // _captureCapabilities.
+  int32_t FillCapabilities(int fd) RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
+  int32_t Init() override;
+
+ private:
+  bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
+  bool IsVideoCaptureDevice(struct v4l2_capability* cap);
+
+#ifdef WEBRTC_LINUX
+  // inotify plumbing: watches /dev and /dev/v4l for device add/remove and
+  // triggers DeviceChange() callbacks.
+  void HandleEvent(inotify_event* event, int fd);
+  int EventCheck(int fd);
+  int HandleEvents(int fd);
+  int ProcessInotifyEvents();
+  rtc::PlatformThread _inotifyEventThread;
+  void InotifyProcess();
+  int _fd_v4l, _fd_dev, _wd_v4l, _wd_dev; /* accessed on InotifyEventThread thread */
+  // Set to request the inotify thread to exit.
+  std::atomic<bool> _isShutdown;
+#endif
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_
diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc
new file mode 100644
index 0000000000..f3324a8e68
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+#include <time.h>
+#include <unistd.h>
+// v4l includes
+#if defined(__NetBSD__) || defined(__OpenBSD__) // WEBRTC_BSD
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
+#include <new>
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "media/base/video_common.h"
+#include "modules/video_capture/linux/video_capture_v4l2.h"
+#include "modules/video_capture/video_capture.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+// Factory for the Linux (V4L2) capture module. Returns nullptr when the
+// device cannot be resolved/initialized.
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
+    const char* deviceUniqueId) {
+  auto module = rtc::make_ref_counted<VideoCaptureModuleV4L2>();
+  if (module->Init(deviceUniqueId) != 0) {
+    return nullptr;
+  }
+  return module;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc
new file mode 100644
index 0000000000..2935cd027d
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc
@@ -0,0 +1,489 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/linux/video_capture_v4l2.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+#include <time.h>
+#include <unistd.h>
+// v4l includes
+#if defined(__NetBSD__) || defined(__OpenBSD__) // WEBRTC_BSD
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
+#include <new>
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "media/base/video_common.h"
+#include "modules/video_capture/video_capture.h"
+#include "rtc_base/logging.h"
+
+// These defines are here to support building on kernel 3.16 which some
+// downstream projects, e.g. Firefox, use.
+// TODO(apehrson): Remove them and their undefs when no longer needed.
+#ifndef V4L2_PIX_FMT_ABGR32
+#define ABGR32_OVERRIDE 1
+#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4')
+#endif
+
+#ifndef V4L2_PIX_FMT_ARGB32
+#define ARGB32_OVERRIDE 1
+#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4')
+#endif
+
+#ifndef V4L2_PIX_FMT_RGBA32
+#define RGBA32_OVERRIDE 1
+#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4')
+#endif
+
+namespace webrtc {
+namespace videocapturemodule {
+// Members start as "not open"/"unknown" sentinels (-1, NULL); real values are
+// established by Init() and StartCapture().
+VideoCaptureModuleV4L2::VideoCaptureModuleV4L2()
+    : VideoCaptureImpl(),
+      _deviceId(-1),
+      _deviceFd(-1),
+      _buffersAllocatedByDevice(-1),
+      _currentWidth(-1),
+      _currentHeight(-1),
+      _currentFrameRate(-1),
+      _captureStarted(false),
+      _captureVideoType(VideoType::kI420),
+      _pool(NULL) {}
+
+// Resolves `deviceUniqueIdUTF8` (a V4L2 bus id, or a test id of the form
+// "fake_<n>") to a /dev/video<n> index stored in _deviceId.
+// Returns 0 on success, -1 if no matching device exists or on OOM.
+int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) {
+  // Keep a private copy of the unique id; the base class exposes it via
+  // CurrentDeviceName().
+  const size_t len = strlen(deviceUniqueIdUTF8);
+  _deviceUniqueId = new (std::nothrow) char[len + 1];
+  if (_deviceUniqueId == nullptr) {
+    // Fix: the original silently continued with a null id on allocation
+    // failure; the module would then misreport its device name.
+    return -1;
+  }
+  memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
+
+  // Fake devices (used by tests) encode their index directly in the id.
+  int device_index;
+  if (sscanf(deviceUniqueIdUTF8, "fake_%d", &device_index) == 1) {
+    _deviceId = device_index;
+    return 0;
+  }
+
+  int fd;
+  char device[32];
+  bool found = false;
+
+  /* detect /dev/video [0-63] entries */
+  int n;
+  for (n = 0; n < 64; n++) {
+    snprintf(device, sizeof(device), "/dev/video%d", n);
+    if ((fd = open(device, O_RDONLY)) != -1) {
+      // Query device capabilities; the bus info is what DeviceInfoV4l2
+      // reports as the unique id.
+      struct v4l2_capability cap;
+      if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) {
+        if (cap.bus_info[0] != 0 &&
+            strncmp(reinterpret_cast<const char*>(cap.bus_info),
+                    deviceUniqueIdUTF8, len) == 0) {
+          // fd matches the device unique id supplied.
+          close(fd);
+          found = true;
+          break;
+        }
+      }
+      close(fd);  // close since this is not the matching device
+    }
+  }
+  if (!found) {
+    RTC_LOG(LS_INFO) << "no matching device found";
+    return -1;
+  }
+  _deviceId = n;  // store the device id
+  return 0;
+}
+
+VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() {
+  // Stop the capture thread and release buffers first; StopCapture() only
+  // closes the fd when capture was running, so close any remaining fd here.
+  StopCapture();
+  if (_deviceFd != -1)
+    close(_deviceFd);
+}
+
+// Opens the device, negotiates the best-supported pixel format for the
+// requested capability, allocates mmap buffers, spawns the capture thread and
+// turns streaming on. Returns 0 on success, -1 on any failure.
+int32_t VideoCaptureModuleV4L2::StartCapture(
+    const VideoCaptureCapability& capability) {
+  // Already capturing with matching geometry/format: nothing to do.
+  if (_captureStarted) {
+    if (capability.width == _currentWidth &&
+        capability.height == _currentHeight &&
+        _captureVideoType == capability.videoType) {
+      return 0;
+    } else {
+      StopCapture();
+    }
+  }
+
+  MutexLock lock(&capture_lock_);
+  // first open /dev/video device
+  char device[20];
+  snprintf(device, sizeof(device), "/dev/video%d", _deviceId);
+
+  if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) {
+    RTC_LOG(LS_INFO) << "error in opening " << device << " errno = " << errno;
+    return -1;
+  }
+
+  // Cleanup helper for the error paths below. Fix: the original returned with
+  // _deviceFd still open, so a retried StartCapture() overwrote the member
+  // and leaked the descriptor.
+  auto fail = [this] {
+    close(_deviceFd);
+    _deviceFd = -1;
+    return -1;
+  };
+
+  // Supported video formats in preferred order.
+  // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
+  // I420 otherwise.
+  unsigned int hdFmts[] = {
+      V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YVU420,
+      V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY, V4L2_PIX_FMT_NV12,
+      V4L2_PIX_FMT_ABGR32, V4L2_PIX_FMT_ARGB32, V4L2_PIX_FMT_RGBA32,
+      V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_BGR24,
+      V4L2_PIX_FMT_RGB24, V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_JPEG,
+  };
+  unsigned int sdFmts[] = {
+      V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YVU420, V4L2_PIX_FMT_YUYV,
+      V4L2_PIX_FMT_UYVY, V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_ABGR32,
+      V4L2_PIX_FMT_ARGB32, V4L2_PIX_FMT_RGBA32, V4L2_PIX_FMT_BGR32,
+      V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_BGR24, V4L2_PIX_FMT_RGB24,
+      V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_JPEG,
+  };
+  const bool isHd = capability.width > 640 || capability.height > 480;
+  unsigned int* fmts = isHd ? hdFmts : sdFmts;
+  static_assert(sizeof(hdFmts) == sizeof(sdFmts));
+  constexpr int nFormats = sizeof(hdFmts) / sizeof(unsigned int);
+
+  // Enumerate image formats and remember the most-preferred supported one.
+  struct v4l2_fmtdesc fmt;
+  int fmtsIdx = nFormats;
+  memset(&fmt, 0, sizeof(fmt));
+  fmt.index = 0;
+  fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+  // Fix: log message typo ("enumerats" -> "enumerates").
+  RTC_LOG(LS_INFO) << "Video Capture enumerates supported image formats:";
+  while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
+    RTC_LOG(LS_INFO) << " { pixelformat = "
+                     << cricket::GetFourccName(fmt.pixelformat)
+                     << ", description = '" << fmt.description << "' }";
+    // Match the preferred order.
+    for (int i = 0; i < nFormats; i++) {
+      if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
+        fmtsIdx = i;
+    }
+    // Keep enumerating.
+    fmt.index++;
+  }
+
+  if (fmtsIdx == nFormats) {
+    RTC_LOG(LS_INFO) << "no supporting video formats found";
+    return fail();
+  } else {
+    RTC_LOG(LS_INFO) << "We prefer format "
+                     << cricket::GetFourccName(fmts[fmtsIdx]);
+  }
+
+  struct v4l2_format video_fmt;
+  memset(&video_fmt, 0, sizeof(struct v4l2_format));
+  video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+  video_fmt.fmt.pix.sizeimage = 0;
+  video_fmt.fmt.pix.width = capability.width;
+  video_fmt.fmt.pix.height = capability.height;
+  video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
+
+  // Map the chosen V4L2 fourcc onto the internal VideoType. Note RGB/BGR
+  // names are intentionally swapped between the two conventions.
+  if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
+    _captureVideoType = VideoType::kYUY2;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
+    _captureVideoType = VideoType::kI420;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420)
+    _captureVideoType = VideoType::kYV12;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
+    _captureVideoType = VideoType::kUYVY;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_NV12)
+    _captureVideoType = VideoType::kNV12;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_BGR24)
+    _captureVideoType = VideoType::kRGB24;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24)
+    _captureVideoType = VideoType::kBGR24;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB565)
+    _captureVideoType = VideoType::kRGB565;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_ABGR32 ||
+           video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_BGR32)
+    _captureVideoType = VideoType::kARGB;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_ARGB32 ||
+           video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB32)
+    _captureVideoType = VideoType::kBGRA;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGBA32)
+    _captureVideoType = VideoType::kABGR;
+  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
+           video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
+    _captureVideoType = VideoType::kMJPEG;
+  else
+    RTC_DCHECK_NOTREACHED();
+
+  // set format and frame size now
+  if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) {
+    RTC_LOG(LS_INFO) << "error in VIDIOC_S_FMT, errno = " << errno;
+    return fail();
+  }
+
+  // initialize current width and height (driver may have adjusted them)
+  _currentWidth = video_fmt.fmt.pix.width;
+  _currentHeight = video_fmt.fmt.pix.height;
+
+  // Trying to set frame rate, before check driver capability.
+  bool driver_framerate_support = true;
+  struct v4l2_streamparm streamparms;
+  memset(&streamparms, 0, sizeof(streamparms));
+  streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+  if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
+    RTC_LOG(LS_INFO) << "error in VIDIOC_G_PARM errno = " << errno;
+    driver_framerate_support = false;
+    // continue
+  } else {
+    // check the capability flag is set to V4L2_CAP_TIMEPERFRAME.
+    if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
+      // driver supports the feature. Set required framerate.
+      memset(&streamparms, 0, sizeof(streamparms));
+      streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+      streamparms.parm.capture.timeperframe.numerator = 1;
+      streamparms.parm.capture.timeperframe.denominator = capability.maxFPS;
+      if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
+        RTC_LOG(LS_INFO) << "Failed to set the framerate. errno=" << errno;
+        driver_framerate_support = false;
+      } else {
+        _currentFrameRate = capability.maxFPS;
+      }
+    }
+  }
+  // If driver doesn't support framerate control, need to hardcode.
+  // Hardcoding the value based on the frame size.
+  if (!driver_framerate_support) {
+    if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) {
+      _currentFrameRate = 15;
+    } else {
+      _currentFrameRate = 30;
+    }
+  }
+
+  if (!AllocateVideoBuffers()) {
+    RTC_LOG(LS_INFO) << "failed to allocate video capture buffers";
+    return fail();
+  }
+
+  // start capture thread;
+  if (_captureThread.empty()) {
+    quit_ = false;
+    _captureThread = rtc::PlatformThread::SpawnJoinable(
+        [self = rtc::scoped_refptr(this)] {
+          while (self->CaptureProcess()) {
+          }
+        },
+        "CaptureThread",
+        rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh));
+  }
+
+  // Needed to start UVC camera - from the uvcview application
+  enum v4l2_buf_type type;
+  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+  if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) {
+    RTC_LOG(LS_INFO) << "Failed to turn on stream";
+    // NOTE: the capture thread is already polling _deviceFd, so the fd is
+    // deliberately left open here; StopCapture()/the destructor releases it.
+    return -1;
+  }
+
+  _captureStarted = true;
+  return 0;
+}
+
+// Stops the capture thread (if running), then releases buffers and the device
+// fd. Safe to call when capture never started; always returns 0.
+int32_t VideoCaptureModuleV4L2::StopCapture() {
+  if (!_captureThread.empty()) {
+    {
+      // Signal the thread to exit; quit_ is read under the same mutex in
+      // CaptureProcess(). The lock must be dropped before joining, or the
+      // thread could block forever trying to acquire it.
+      MutexLock lock(&capture_lock_);
+      quit_ = true;
+    }
+    // Make sure the capture thread stops using the mutex.
+    _captureThread.Finalize();
+  }
+
+  MutexLock lock(&capture_lock_);
+  if (_captureStarted) {
+    _captureStarted = false;
+
+    DeAllocateVideoBuffers();
+    close(_deviceFd);
+    _deviceFd = -1;
+  }
+
+  return 0;
+}
+
+// critical section protected by the caller
+
+// Requests up to kNoOfV4L2Bufffers MMAP buffers from the driver, maps them
+// into _pool and queues them for capture. Returns false on failure, in which
+// case all partial state is rolled back (the original leaked _pool and any
+// already-mapped buffers on ioctl failure, since DeAllocateVideoBuffers() is
+// only ever called after a successful allocation).
+bool VideoCaptureModuleV4L2::AllocateVideoBuffers() {
+  struct v4l2_requestbuffers rbuffer;
+  memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
+
+  rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+  rbuffer.memory = V4L2_MEMORY_MMAP;
+  rbuffer.count = kNoOfV4L2Bufffers;
+
+  if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) {
+    RTC_LOG(LS_INFO) << "Could not get buffers from device. errno = " << errno;
+    return false;
+  }
+
+  // The driver may grant more buffers than requested; cap at our pool size.
+  if (rbuffer.count > kNoOfV4L2Bufffers)
+    rbuffer.count = kNoOfV4L2Bufffers;
+
+  _buffersAllocatedByDevice = rbuffer.count;
+
+  // Map the buffers.
+  _pool = new Buffer[rbuffer.count];
+
+  // Unmaps the first `mapped` buffers and frees the pool on error.
+  auto unwind = [this](unsigned int mapped) {
+    for (unsigned int j = 0; j < mapped; j++)
+      munmap(_pool[j].start, _pool[j].length);
+    delete[] _pool;
+    _pool = nullptr;
+  };
+
+  for (unsigned int i = 0; i < rbuffer.count; i++) {
+    struct v4l2_buffer buffer;
+    memset(&buffer, 0, sizeof(v4l2_buffer));
+    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    buffer.memory = V4L2_MEMORY_MMAP;
+    buffer.index = i;
+
+    if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) {
+      unwind(i);
+      return false;
+    }
+
+    _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
+                          MAP_SHARED, _deviceFd, buffer.m.offset);
+
+    if (MAP_FAILED == _pool[i].start) {
+      unwind(i);
+      return false;
+    }
+
+    _pool[i].length = buffer.length;
+
+    if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) {
+      // Buffer i is already mapped, so unwind it too.
+      unwind(i + 1);
+      return false;
+    }
+  }
+  return true;
+}
+
+// Unmaps every buffer that AllocateVideoBuffers() mapped, frees the pool and
+// turns streaming off. Always returns true; STREAMOFF failure is only logged.
+bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() {
+  // Release the kernel mappings, then the bookkeeping array.
+  for (int32_t index = 0; index < _buffersAllocatedByDevice; ++index) {
+    munmap(_pool[index].start, _pool[index].length);
+  }
+  delete[] _pool;
+
+  // Tell the driver to stop streaming.
+  enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+  if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) {
+    RTC_LOG(LS_INFO) << "VIDIOC_STREAMOFF error. errno: " << errno;
+  }
+
+  return true;
+}
+
+bool VideoCaptureModuleV4L2::CaptureStarted() {
+  // NOTE(review): read without capture_lock_; assumes callers tolerate a
+  // momentarily stale value — confirm against VideoCaptureImpl's contract.
+  return _captureStarted;
+}
+
+// One iteration of the capture loop: waits up to 1 s for a frame, dequeues
+// it, hands it to IncomingFrame() and re-queues the buffer.
+// Returns false to make the thread exit (quit_ set, or fatal poll error).
+bool VideoCaptureModuleV4L2::CaptureProcess() {
+  int retVal = 0;
+  struct pollfd rSet;
+
+  rSet.fd = _deviceFd;
+  rSet.events = POLLIN;
+  rSet.revents = 0;
+
+  // Poll outside the lock so StopCapture() can take it to set quit_.
+  retVal = poll(&rSet, 1, 1000);
+
+  {
+    MutexLock lock(&capture_lock_);
+
+    if (quit_) {
+      return false;
+    }
+
+    if (retVal < 0 && errno != EINTR) {  // continue if interrupted
+      // poll failed
+      return false;
+    } else if (retVal == 0) {
+      // poll timed out
+      return true;
+    } else if (!(rSet.revents & POLLIN)) {
+      // not event on camera handle
+      return true;
+    }
+
+    if (_captureStarted) {
+      struct v4l2_buffer buf;
+      memset(&buf, 0, sizeof(struct v4l2_buffer));
+      buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+      buf.memory = V4L2_MEMORY_MMAP;
+      // dequeue a buffer - repeat until dequeued properly!
+      while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) {
+        if (errno != EINTR) {
+          RTC_LOG(LS_INFO) << "could not sync on a buffer on device "
+                           << strerror(errno);
+          return true;
+        }
+      }
+      VideoCaptureCapability frameInfo;
+      frameInfo.width = _currentWidth;
+      frameInfo.height = _currentHeight;
+      frameInfo.videoType = _captureVideoType;
+
+      // convert to to I420 if needed
+      IncomingFrame(reinterpret_cast<uint8_t*>(_pool[buf.index].start),
+                    buf.bytesused, frameInfo);
+      // enqueue the buffer again
+      if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) {
+        RTC_LOG(LS_INFO) << "Failed to enqueue capture buffer";
+      }
+    }
+  }
+  // Yield briefly so StopCapture() has a chance to grab the lock.
+  usleep(0);
+  return true;
+}
+
+// Reports the settings negotiated by the last successful StartCapture().
+int32_t VideoCaptureModuleV4L2::CaptureSettings(
+    VideoCaptureCapability& settings) {
+  settings.videoType = _captureVideoType;
+  settings.maxFPS = _currentFrameRate;
+  settings.height = _currentHeight;
+  settings.width = _currentWidth;
+  return 0;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#ifdef ABGR32_OVERRIDE
+#undef ABGR32_OVERRIDE
+#undef V4L2_PIX_FMT_ABGR32
+#endif
+
+#ifdef ARGB32_OVERRIDE
+#undef ARGB32_OVERRIDE
+#undef V4L2_PIX_FMT_ARGB32
+#endif
+
+#ifdef RGBA32_OVERRIDE
+#undef RGBA32_OVERRIDE
+#undef V4L2_PIX_FMT_RGBA32
+#endif
diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h
new file mode 100644
index 0000000000..65e89e2daa
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_
+#define MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+
+#include "modules/video_capture/video_capture_defines.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+// V4L2-backed VideoCaptureModule. Owns the device fd, the mmap'ed kernel
+// buffers and the capture thread that pumps frames into VideoCaptureImpl.
+class VideoCaptureModuleV4L2 : public VideoCaptureImpl {
+ public:
+  VideoCaptureModuleV4L2();
+  ~VideoCaptureModuleV4L2() override;
+  // Resolves `deviceUniqueId` (a bus id, or "fake_<n>") to a /dev/video node.
+  int32_t Init(const char* deviceUniqueId);
+  int32_t StartCapture(const VideoCaptureCapability& capability) override;
+  int32_t StopCapture() override;
+  bool CaptureStarted() override;
+  int32_t CaptureSettings(VideoCaptureCapability& settings) override;
+
+ private:
+  enum { kNoOfV4L2Bufffers = 4 };
+
+  // Fix: removed `static void CaptureThread(void*);` — it was declared but
+  // never defined or referenced (the .cc spawns the thread with a lambda).
+  // One loop iteration of the capture thread; false means "exit".
+  bool CaptureProcess();
+  bool AllocateVideoBuffers();
+  bool DeAllocateVideoBuffers();
+
+  rtc::PlatformThread _captureThread;
+  Mutex capture_lock_;
+  bool quit_ RTC_GUARDED_BY(capture_lock_);
+  int32_t _deviceId;
+  int32_t _deviceFd;
+
+  int32_t _buffersAllocatedByDevice;
+  int32_t _currentWidth;
+  int32_t _currentHeight;
+  int32_t _currentFrameRate;
+  bool _captureStarted;
+  VideoType _captureVideoType;
+  // One mmap'ed kernel capture buffer.
+  struct Buffer {
+    void* start;
+    size_t length;
+  };
+  Buffer* _pool;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_
diff --git a/third_party/libwebrtc/modules/video_capture/raw_video_sink_interface.h b/third_party/libwebrtc/modules/video_capture/raw_video_sink_interface.h
new file mode 100644
index 0000000000..094e9e20bd
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/raw_video_sink_interface.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains interfaces used for creating the VideoCaptureModule
+// and DeviceInfo.
+
+#ifndef MODULES_VIDEO_CAPTURE_RAW_VIDEO_SINK_INTERFACE_H_
+#define MODULES_VIDEO_CAPTURE_RAW_VIDEO_SINK_INTERFACE_H_
+
+#include "modules/video_capture/video_capture_defines.h"
+
+namespace webrtc {
+
+// Sink that receives raw (not-yet-converted) capture frames together with
+// their format description, rotation and capture timestamp.
+class RawVideoSinkInterface {
+ public:
+  virtual ~RawVideoSinkInterface() = default;
+
+  // `videoFrame` points at `videoFrameLength` bytes laid out per `frameInfo`;
+  // the buffer is only valid for the duration of the call.
+  virtual int32_t OnRawFrame(uint8_t* videoFrame,
+                             size_t videoFrameLength,
+                             const webrtc::VideoCaptureCapability& frameInfo,
+                             VideoRotation rotation,
+                             int64_t captureTime) = 0;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CAPTURE_RAW_VIDEO_SINK_INTERFACE_H_
diff --git a/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc b/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc
new file mode 100644
index 0000000000..c8af222b57
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -0,0 +1,376 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/video_capture.h"
+
+#include <stdio.h>
+
+#include <map>
+#include <memory>
+#include <sstream>
+
+#include "absl/memory/memory.h"
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "modules/video_capture/video_capture_factory.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/time_utils.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/frame_utils.h"
+#include "test/gtest.h"
+
+using webrtc::SleepMs;
+using webrtc::VideoCaptureCapability;
+using webrtc::VideoCaptureFactory;
+using webrtc::VideoCaptureModule;
+
+// Re-evaluates `ex` every 5 ms until it is true or `timeout` ms elapse;
+// the final value lands in `res`.
+#define WAIT_(ex, timeout, res)                     \
+  do {                                              \
+    res = (ex);                                     \
+    int64_t start = rtc::TimeMillis();              \
+    while (!res && rtc::TimeMillis() < start + timeout) { \
+      SleepMs(5);                                   \
+      res = (ex);                                   \
+    }                                               \
+  } while (0)
+
+// Waits for `ex` to become true; on timeout, re-asserts it so gtest reports
+// the failing expression.
+#define EXPECT_TRUE_WAIT(ex, timeout) \
+  do {                                \
+    bool res;                         \
+    WAIT_(ex, timeout, res);          \
+    if (!res)                         \
+      EXPECT_TRUE(ex);                \
+  } while (0)
+
+// Per-wait timeout for frame arrival, in milliseconds.
+static const int kTimeOut = 5000;
+#ifdef WEBRTC_MAC
+// Fixed capability used on Mac, where GetCapability() is unavailable here.
+static const int kTestHeight = 288;
+static const int kTestWidth = 352;
+static const int kTestFramerate = 30;
+#endif
+
+// Frame sink used by the tests: checks each delivered frame against the
+// expected capability, counts frames and inter-frame timing anomalies.
+class TestVideoCaptureCallback
+    : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+  TestVideoCaptureCallback()
+      : last_render_time_ms_(0),
+        incoming_frames_(0),
+        timing_warnings_(0),
+        rotate_frame_(webrtc::kVideoRotation_0) {}
+
+  ~TestVideoCaptureCallback() override {
+    if (timing_warnings_ > 0)
+      printf("No of timing warnings %d\n", timing_warnings_);
+  }
+
+  void OnFrame(const webrtc::VideoFrame& videoFrame) override {
+    webrtc::MutexLock lock(&capture_lock_);
+    int height = videoFrame.height();
+    int width = videoFrame.width();
+#if defined(WEBRTC_ANDROID) && WEBRTC_ANDROID
+    // Android camera frames may be rotated depending on test device
+    // orientation.
+    EXPECT_TRUE(height == capability_.height || height == capability_.width);
+    EXPECT_TRUE(width == capability_.width || width == capability_.height);
+#else
+    EXPECT_EQ(height, capability_.height);
+    EXPECT_EQ(width, capability_.width);
+    EXPECT_EQ(rotate_frame_, videoFrame.rotation());
+#endif
+    // RenderTimstamp should be the time now.
+    EXPECT_TRUE(videoFrame.render_time_ms() >= rtc::TimeMillis() - 30 &&
+                videoFrame.render_time_ms() <= rtc::TimeMillis());
+
+    // Count a warning when the inter-frame interval deviates more than 10%
+    // from the expected 1000/maxFPS ms.
+    if ((videoFrame.render_time_ms() >
+             last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
+         last_render_time_ms_ > 0) ||
+        (videoFrame.render_time_ms() <
+             last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS &&
+         last_render_time_ms_ > 0)) {
+      timing_warnings_++;
+    }
+
+    incoming_frames_++;
+    last_render_time_ms_ = videoFrame.render_time_ms();
+    last_frame_ = videoFrame.video_frame_buffer();
+  }
+
+  // Sets the capability frames are checked against and resets the counters.
+  void SetExpectedCapability(VideoCaptureCapability capability) {
+    webrtc::MutexLock lock(&capture_lock_);
+    capability_ = capability;
+    incoming_frames_ = 0;
+    last_render_time_ms_ = 0;
+  }
+  int incoming_frames() {
+    webrtc::MutexLock lock(&capture_lock_);
+    return incoming_frames_;
+  }
+
+  int timing_warnings() {
+    webrtc::MutexLock lock(&capture_lock_);
+    return timing_warnings_;
+  }
+  VideoCaptureCapability capability() {
+    webrtc::MutexLock lock(&capture_lock_);
+    return capability_;
+  }
+
+  bool CompareLastFrame(const webrtc::VideoFrame& frame) {
+    webrtc::MutexLock lock(&capture_lock_);
+    return webrtc::test::FrameBufsEqual(last_frame_,
+                                        frame.video_frame_buffer());
+  }
+
+  void SetExpectedCaptureRotation(webrtc::VideoRotation rotation) {
+    webrtc::MutexLock lock(&capture_lock_);
+    rotate_frame_ = rotation;
+  }
+
+ private:
+  webrtc::Mutex capture_lock_;
+  VideoCaptureCapability capability_;
+  int64_t last_render_time_ms_;
+  int incoming_frames_;
+  int timing_warnings_;
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> last_frame_;
+  webrtc::VideoRotation rotate_frame_;
+};
+
+// Fixture: enumerates capture devices once and provides helpers to open a
+// module for a device index and to start capture with sanity checks.
+class VideoCaptureTest : public ::testing::Test {
+ public:
+  VideoCaptureTest() : number_of_devices_(0) {}
+
+  void SetUp() override {
+    device_info_.reset(VideoCaptureFactory::CreateDeviceInfo());
+    RTC_DCHECK(device_info_.get());
+    number_of_devices_ = device_info_->NumberOfDevices();
+    // The suite requires at least one physical/virtual camera.
+    ASSERT_GT(number_of_devices_, 0u);
+  }
+
+  // Opens device `device` and registers `callback` as its frame sink.
+  // Returns nullptr when the module cannot be created.
+  rtc::scoped_refptr<VideoCaptureModule> OpenVideoCaptureDevice(
+      unsigned int device,
+      rtc::VideoSinkInterface<webrtc::VideoFrame>* callback) {
+    char device_name[256];
+    char unique_name[256];
+
+    EXPECT_EQ(0, device_info_->GetDeviceName(device, device_name, 256,
+                                             unique_name, 256));
+
+    rtc::scoped_refptr<VideoCaptureModule> module(
+        VideoCaptureFactory::Create(unique_name));
+    if (module.get() == NULL)
+      return nullptr;
+
+    EXPECT_FALSE(module->CaptureStarted());
+
+    module->RegisterCaptureDataCallback(callback);
+    return module;
+  }
+
+  // Starts capture and verifies the negotiated width/height match the request.
+  void StartCapture(VideoCaptureModule* capture_module,
+                    VideoCaptureCapability capability) {
+    ASSERT_EQ(0, capture_module->StartCapture(capability));
+    EXPECT_TRUE(capture_module->CaptureStarted());
+
+    VideoCaptureCapability resulting_capability;
+    EXPECT_EQ(0, capture_module->CaptureSettings(resulting_capability));
+    EXPECT_EQ(capability.width, resulting_capability.width);
+    EXPECT_EQ(capability.height, resulting_capability.height);
+  }
+
+  std::unique_ptr<VideoCaptureModule::DeviceInfo> device_info_;
+  unsigned int number_of_devices_;
+};
+
+#ifdef WEBRTC_MAC
+// Currently fails on Mac 64-bit, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406
+#define MAYBE_CreateDelete DISABLED_CreateDelete
+#else
+#define MAYBE_CreateDelete CreateDelete
+#endif
+// Repeatedly creates, starts, stops and destroys a capture module, checking
+// startup/shutdown latency bounds and that frames actually arrive.
+TEST_F(VideoCaptureTest, MAYBE_CreateDelete) {
+  for (int i = 0; i < 5; ++i) {
+    int64_t start_time = rtc::TimeMillis();
+    TestVideoCaptureCallback capture_observer;
+    rtc::scoped_refptr<VideoCaptureModule> module(
+        OpenVideoCaptureDevice(0, &capture_observer));
+    ASSERT_TRUE(module.get() != NULL);
+
+    VideoCaptureCapability capability;
+#ifndef WEBRTC_MAC
+    device_info_->GetCapability(module->CurrentDeviceName(), 0, capability);
+#else
+    capability.width = kTestWidth;
+    capability.height = kTestHeight;
+    capability.maxFPS = kTestFramerate;
+    capability.videoType = webrtc::VideoType::kUnknown;
+#endif
+    capture_observer.SetExpectedCapability(capability);
+    ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
+
+    // Less than 4s to start the camera.
+    EXPECT_LE(rtc::TimeMillis() - start_time, 4000);
+
+    // Make sure 5 frames are captured.
+    EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);
+
+    int64_t stop_time = rtc::TimeMillis();
+    EXPECT_EQ(0, module->StopCapture());
+    EXPECT_FALSE(module->CaptureStarted());
+
+    // Less than 3s to stop the camera.
+    EXPECT_LE(rtc::TimeMillis() - stop_time, 3000);
+  }
+}
+
+#ifdef WEBRTC_MAC
+// Currently fails on Mac 64-bit, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406
+#define MAYBE_Capabilities DISABLED_Capabilities
+#else
+#define MAYBE_Capabilities Capabilities
+#endif
+// Walks every advertised capability of device 0, starting capture for (at
+// most two frame-rates per resolution) and verifying a frame arrives.
+TEST_F(VideoCaptureTest, MAYBE_Capabilities) {
+  TestVideoCaptureCallback capture_observer;
+
+  rtc::scoped_refptr<VideoCaptureModule> module(
+      OpenVideoCaptureDevice(0, &capture_observer));
+  ASSERT_TRUE(module.get() != NULL);
+
+  int number_of_capabilities =
+      device_info_->NumberOfCapabilities(module->CurrentDeviceName());
+  EXPECT_GT(number_of_capabilities, 0);
+  // Key is <width>x<height>, value is vector of maxFPS values at that
+  // resolution.
+  typedef std::map<std::string, std::vector<int> > FrameRatesByResolution;
+  FrameRatesByResolution frame_rates_by_resolution;
+  for (int i = 0; i < number_of_capabilities; ++i) {
+    VideoCaptureCapability capability;
+    EXPECT_EQ(0, device_info_->GetCapability(module->CurrentDeviceName(), i,
+                                             capability));
+    std::ostringstream resolutionStream;
+    resolutionStream << capability.width << "x" << capability.height;
+    resolutionStream.flush();
+    std::string resolution = resolutionStream.str();
+    frame_rates_by_resolution[resolution].push_back(capability.maxFPS);
+
+    // Since Android presents so many resolution/FPS combinations and the test
+    // runner imposes a timeout, we only actually start the capture and test
+    // that a frame was captured for 2 frame-rates at each resolution.
+    if (frame_rates_by_resolution[resolution].size() > 2)
+      continue;
+
+    capture_observer.SetExpectedCapability(capability);
+    ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
+    // Make sure at least one frame is captured.
+    EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 1, kTimeOut);
+
+    EXPECT_EQ(0, module->StopCapture());
+  }
+
+#if defined(WEBRTC_ANDROID) && WEBRTC_ANDROID
+  // There's no reason for this to _necessarily_ be true, but in practice all
+  // Android devices this test runs on in fact do support multiple capture
+  // resolutions and multiple frame-rates per captured resolution, so we assert
+  // this fact here as a regression-test against the time that we only noticed a
+  // single frame-rate per resolution (bug 2974). If this test starts being run
+  // on devices for which this is untrue (e.g. Nexus4) then the following should
+  // probably be wrapped in a base::android::BuildInfo::model()/device() check.
+  EXPECT_GT(frame_rates_by_resolution.size(), 1U);
+  for (FrameRatesByResolution::const_iterator it =
+           frame_rates_by_resolution.begin();
+       it != frame_rates_by_resolution.end(); ++it) {
+    EXPECT_GT(it->second.size(), 1U) << it->first;
+  }
+#endif  // WEBRTC_ANDROID
+}
+
+// NOTE: flaky, crashes sometimes.
+// http://code.google.com/p/webrtc/issues/detail?id=777
+// NOTE: flaky, crashes sometimes.
+// http://code.google.com/p/webrtc/issues/detail?id=777
+// Captures from two physical cameras simultaneously and verifies both
+// deliver frames.
+TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) {
+  if (number_of_devices_ < 2) {
+    printf("There are not two cameras available. Aborting test. \n");
+    return;
+  }
+
+  TestVideoCaptureCallback capture_observer1;
+  rtc::scoped_refptr<VideoCaptureModule> module1(
+      OpenVideoCaptureDevice(0, &capture_observer1));
+  ASSERT_TRUE(module1.get() != NULL);
+  VideoCaptureCapability capability1;
+#ifndef WEBRTC_MAC
+  device_info_->GetCapability(module1->CurrentDeviceName(), 0, capability1);
+#else
+  capability1.width = kTestWidth;
+  capability1.height = kTestHeight;
+  capability1.maxFPS = kTestFramerate;
+  capability1.videoType = webrtc::VideoType::kUnknown;
+#endif
+  capture_observer1.SetExpectedCapability(capability1);
+
+  TestVideoCaptureCallback capture_observer2;
+  rtc::scoped_refptr<VideoCaptureModule> module2(
+      OpenVideoCaptureDevice(1, &capture_observer2));
+  // Fix: was `module1.get()` — a copy-paste bug that left the second module
+  // unchecked (the next line would then null-deref on a failed open).
+  ASSERT_TRUE(module2.get() != NULL);
+
+  VideoCaptureCapability capability2;
+#ifndef WEBRTC_MAC
+  device_info_->GetCapability(module2->CurrentDeviceName(), 0, capability2);
+#else
+  capability2.width = kTestWidth;
+  capability2.height = kTestHeight;
+  capability2.maxFPS = kTestFramerate;
+  capability2.videoType = webrtc::VideoType::kUnknown;
+#endif
+  capture_observer2.SetExpectedCapability(capability2);
+
+  ASSERT_NO_FATAL_FAILURE(StartCapture(module1.get(), capability1));
+  ASSERT_NO_FATAL_FAILURE(StartCapture(module2.get(), capability2));
+  EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut);
+  EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut);
+  EXPECT_EQ(0, module2->StopCapture());
+  EXPECT_EQ(0, module1->StopCapture());
+}
+
+#ifdef WEBRTC_MAC
+// No VideoCaptureImpl on Mac.
+#define MAYBE_ConcurrentAccess DISABLED_ConcurrentAccess
+#else
+#define MAYBE_ConcurrentAccess ConcurrentAccess
+#endif
+// Two modules opened on the SAME device: the second must be able to start
+// capturing once the first has stopped.
+TEST_F(VideoCaptureTest, MAYBE_ConcurrentAccess) {
+  TestVideoCaptureCallback capture_observer1;
+  rtc::scoped_refptr<VideoCaptureModule> module1(
+      OpenVideoCaptureDevice(0, &capture_observer1));
+  ASSERT_TRUE(module1.get() != NULL);
+  VideoCaptureCapability capability;
+  device_info_->GetCapability(module1->CurrentDeviceName(), 0, capability);
+  capture_observer1.SetExpectedCapability(capability);
+
+  TestVideoCaptureCallback capture_observer2;
+  rtc::scoped_refptr<VideoCaptureModule> module2(
+      OpenVideoCaptureDevice(0, &capture_observer2));
+  ASSERT_TRUE(module2.get() != NULL);
+  capture_observer2.SetExpectedCapability(capability);
+
+  // Starting module1 should work.
+  ASSERT_NO_FATAL_FAILURE(StartCapture(module1.get(), capability));
+  EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut);
+
+  // When module1 is stopped, starting module2 for the same device should work.
+  EXPECT_EQ(0, module1->StopCapture());
+  ASSERT_NO_FATAL_FAILURE(StartCapture(module2.get(), capability));
+  EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut);
+
+  EXPECT_EQ(0, module2->StopCapture());
+}
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture.h b/third_party/libwebrtc/modules/video_capture/video_capture.h
new file mode 100644
index 0000000000..7e181c538e
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_
+#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_
+
+#include "api/video/video_rotation.h"
+#include "api/video/video_sink_interface.h"
+#include "modules/desktop_capture/desktop_capture_types.h"
+#include "modules/video_capture/raw_video_sink_interface.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+#include <set>
+
+#if defined(ANDROID)
+#include <jni.h>
+#endif
+
+namespace webrtc {
+
+// Observer interface notified (via DeviceInfo::DeviceChange) when the set
+// of available capture devices changes.
+class VideoInputFeedBack
+{
+public:
+  // Called when a capture device is plugged in or removed.
+  virtual void OnDeviceChange() = 0;
+protected:
+  virtual ~VideoInputFeedBack(){}
+};
+
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+ int32_t SetCaptureAndroidVM(JavaVM* javaVM);
+#endif
+
+// Abstract interface to one capture device. Concrete implementations are
+// created through VideoCaptureFactory and are reference counted via
+// rtc::RefCountInterface.
+class VideoCaptureModule : public rtc::RefCountInterface {
+ public:
+  // Interface for receiving information about available camera devices.
+  class DeviceInfo {
+   public:
+    // Number of capture devices currently available.
+    virtual uint32_t NumberOfDevices() = 0;
+    // Re-enumerates the attached devices.
+    virtual int32_t Refresh() = 0;
+    // Notifies every registered VideoInputFeedBack observer that the device
+    // set changed. The observer set is guarded by _inputCallbacksMutex.
+    virtual void DeviceChange() {
+      MutexLock lock(&_inputCallbacksMutex);
+      for (auto inputCallBack : _inputCallBacks) {
+        inputCallBack->OnDeviceChange();
+      }
+    }
+    // Adds an observer for device-change notifications. The caller keeps
+    // ownership and must deregister before destroying the observer.
+    virtual void RegisterVideoInputFeedBack(VideoInputFeedBack* callBack) {
+      MutexLock lock(&_inputCallbacksMutex);
+      _inputCallBacks.insert(callBack);
+    }
+
+    // Removes a previously registered observer; no-op if not registered.
+    virtual void DeRegisterVideoInputFeedBack(VideoInputFeedBack* callBack) {
+      MutexLock lock(&_inputCallbacksMutex);
+      auto it = _inputCallBacks.find(callBack);
+      if (it != _inputCallBacks.end()) {
+        _inputCallBacks.erase(it);
+      }
+    }
+
+    // Returns the available capture devices.
+    // deviceNumber - Index of capture device.
+    // deviceNameUTF8 - Friendly name of the capture device.
+    // deviceUniqueIdUTF8 - Unique name of the capture device if it exists.
+    //                      Otherwise same as deviceNameUTF8.
+    // productUniqueIdUTF8 - Unique product id if it exists.
+    //                       Null terminated otherwise.
+    virtual int32_t GetDeviceName(uint32_t deviceNumber,
+                                  char* deviceNameUTF8,
+                                  uint32_t deviceNameLength,
+                                  char* deviceUniqueIdUTF8,
+                                  uint32_t deviceUniqueIdUTF8Length,
+                                  char* productUniqueIdUTF8 = 0,
+                                  uint32_t productUniqueIdUTF8Length = 0,
+                                  pid_t* pid = 0) = 0;
+
+    // Returns the number of capabilities of the named device.
+    virtual int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8) = 0;
+
+    // Gets the capabilities of the named device.
+    virtual int32_t GetCapability(const char* deviceUniqueIdUTF8,
+                                  uint32_t deviceCapabilityNumber,
+                                  VideoCaptureCapability& capability) = 0;
+
+    // Gets clockwise angle the captured frames should be rotated in order
+    // to be displayed correctly on a normally rotated display.
+    virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
+                                   VideoRotation& orientation) = 0;
+
+    // Gets the capability that best matches the requested width, height and
+    // frame rate.
+    // Returns the deviceCapabilityNumber on success.
+    virtual int32_t GetBestMatchedCapability(
+        const char* deviceUniqueIdUTF8,
+        const VideoCaptureCapability& requested,
+        VideoCaptureCapability& resulting) = 0;
+
+    // Display OS / capture device specific settings dialog
+    virtual int32_t DisplayCaptureSettingsDialogBox(
+        const char* deviceUniqueIdUTF8,
+        const char* dialogTitleUTF8,
+        void* parentWindow,
+        uint32_t positionX,
+        uint32_t positionY) = 0;
+
+    virtual ~DeviceInfo() {}
+   private:
+    Mutex _inputCallbacksMutex;
+    std::set<VideoInputFeedBack*> _inputCallBacks RTC_GUARDED_BY(_inputCallbacksMutex);
+  };
+
+  // Register capture data callback
+  virtual void RegisterCaptureDataCallback(
+      rtc::VideoSinkInterface<VideoFrame>* dataCallback) = 0;
+  virtual void RegisterCaptureDataCallback(
+      RawVideoSinkInterface* dataCallback) = 0;
+
+  // Remove capture data callback
+  virtual void DeRegisterCaptureDataCallback(
+      rtc::VideoSinkInterface<VideoFrame> *dataCallback) = 0;
+
+  // Start capture device
+  virtual int32_t StartCapture(const VideoCaptureCapability& capability) = 0;
+
+  // Stops the device once no VideoFrame sinks remain registered.
+  virtual int32_t StopCaptureIfAllClientsClose() = 0;
+
+  // Asks the device to focus on the selected source; the default
+  // implementation reports no support.
+  virtual bool FocusOnSelectedSource() { return false; }
+
+  virtual int32_t StopCapture() = 0;
+
+  // Returns the name of the device used by this module.
+  virtual const char* CurrentDeviceName() const = 0;
+
+  // Returns true if the capture device is running
+  virtual bool CaptureStarted() = 0;
+
+  // Gets the current configuration.
+  virtual int32_t CaptureSettings(VideoCaptureCapability& settings) = 0;
+
+  // Set the rotation of the captured frames.
+  // If the rotation is set to the same as returned by
+  // DeviceInfo::GetOrientation the captured frames are
+  // displayed correctly if rendered.
+  virtual int32_t SetCaptureRotation(VideoRotation rotation) = 0;
+
+  // Tells the capture module whether to apply the pending rotation. By default,
+  // the rotation is applied and the generated frame is up right. When set to
+  // false, generated frames will carry the rotation information from
+  // SetCaptureRotation. Return value indicates whether this operation succeeds.
+  virtual bool SetApplyRotation(bool enable) = 0;
+
+  // Return whether the rotation is applied or left pending.
+  virtual bool GetApplyRotation() = 0;
+
+  // Mozilla: TrackingId setter for use in profiler markers.
+  virtual void SetTrackingId(uint32_t aTrackingIdProcId) {}
+
+ protected:
+  ~VideoCaptureModule() override {}
+};
+
+} // namespace webrtc
+#endif // MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_config.h b/third_party/libwebrtc/modules/video_capture/video_capture_config.h
new file mode 100644
index 0000000000..f285b9eeb1
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_config.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
+#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
+
+namespace webrtc {
+namespace videocapturemodule {
+// Capture defaults used until StartCapture() supplies an explicit capability.
+enum { kDefaultWidth = 640 };  // Start width
+enum { kDefaultHeight = 480 };  // Start height
+enum { kDefaultFrameRate = 30 };  // Start frame rate
+
+enum { kMaxFrameRate = 60 };  // Max allowed frame rate of the start image
+
+enum { kDefaultCaptureDelay = 120 };
+enum {
+  kMaxCaptureDelay = 270
+};  // Max capture delay allowed in the precompiled capture delay values.
+
+// Frame-rate statistics configuration (see VideoCaptureImpl).
+enum { kFrameRateCallbackInterval = 1000 };
+enum { kFrameRateCountHistorySize = 90 };  // Number of frame timestamps kept.
+enum { kFrameRateHistoryWindowMs = 2000 };  // Sliding window length in ms.
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_defines.h b/third_party/libwebrtc/modules/video_capture/video_capture_defines.h
new file mode 100644
index 0000000000..63534600a9
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_defines.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_
+#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_
+
+#include "api/video/video_frame.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+namespace webrtc {
+
+// Size limits for the device identification strings used by DeviceInfo.
+enum {
+  kVideoCaptureUniqueNameLength = 1024
+};  // Max unique capture device name length
+enum { kVideoCaptureDeviceNameLength = 256 };  // Max capture device name length
+enum { kVideoCaptureProductIdLength = 128 };  // Max product id length
+
+// Describes one capture format: resolution, maximum frame rate, pixel
+// format and scan type. Plain value type with member-wise equality.
+struct VideoCaptureCapability {
+  int32_t width;
+  int32_t height;
+  int32_t maxFPS;
+  VideoType videoType;
+  bool interlaced;
+
+  // Default state: zero dimensions/rate, unknown pixel format, progressive.
+  VideoCaptureCapability() {
+    width = 0;
+    height = 0;
+    maxFPS = 0;
+    videoType = VideoType::kUnknown;
+    interlaced = false;
+  }
+  // Member-wise inequality over all five fields.
+  bool operator!=(const VideoCaptureCapability& other) const {
+    if (width != other.width)
+      return true;
+    if (height != other.height)
+      return true;
+    if (maxFPS != other.maxFPS)
+      return true;
+    if (videoType != other.videoType)
+      return true;
+    if (interlaced != other.interlaced)
+      return true;
+    return false;
+  }
+  bool operator==(const VideoCaptureCapability& other) const {
+    return !operator!=(other);
+  }
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_factory.cc b/third_party/libwebrtc/modules/video_capture/video_capture_factory.cc
new file mode 100644
index 0000000000..e4a46902e0
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_factory.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/video_capture_factory.h"
+
+#include "modules/video_capture/video_capture_impl.h"
+
+namespace webrtc {
+
+// Creates a platform-specific capture module for the device identified by
+// `deviceUniqueIdUTF8` (as reported by DeviceInfo::GetDeviceName). Delegates
+// to the per-platform VideoCaptureImpl::Create.
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureFactory::Create(
+    const char* deviceUniqueIdUTF8) {
+  return videocapturemodule::VideoCaptureImpl::Create(deviceUniqueIdUTF8);
+}
+
+// Creates a device enumerator; delegates to the per-platform implementation.
+VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo() {
+  return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo();
+}
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_factory.h b/third_party/libwebrtc/modules/video_capture/video_capture_factory.h
new file mode 100644
index 0000000000..1fe47d9fab
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_factory.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains interfaces used for creating the VideoCaptureModule
+// and DeviceInfo.
+
+#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_
+#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_
+
+#include "api/scoped_refptr.h"
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_defines.h"
+
+namespace webrtc {
+
+// Static factory for VideoCaptureModule instances and DeviceInfo
+// enumerators. Not instantiable: the destructor is private and all members
+// are static.
+class VideoCaptureFactory {
+ public:
+  // Create a video capture module object
+  // id - unique identifier of this video capture module object.
+  // deviceUniqueIdUTF8 - name of the device.
+  //                      Available names can be found by using GetDeviceName
+  static rtc::scoped_refptr<VideoCaptureModule> Create(
+      const char* deviceUniqueIdUTF8);
+
+  // Creates a device enumerator; the caller takes ownership.
+  static VideoCaptureModule::DeviceInfo* CreateDeviceInfo();
+
+ private:
+  ~VideoCaptureFactory();
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_factory_null.cc b/third_party/libwebrtc/modules/video_capture/video_capture_factory_null.cc
new file mode 100644
index 0000000000..7808d19851
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_factory_null.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/video_capture_impl.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// Stubs used on platforms without an internal capture implementation:
+// device enumeration and module creation both report "unavailable".
+
+// static
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
+  return nullptr;
+}
+
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
+    const char* device_id) {
+  return nullptr;
+}
+
+}  // namespace videocapturemodule
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_impl.cc b/third_party/libwebrtc/modules/video_capture/video_capture_impl.cc
new file mode 100644
index 0000000000..9d9a1471e8
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_impl.cc
@@ -0,0 +1,334 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/video_capture_impl.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame_buffer.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "modules/video_capture/video_capture_config.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
+#include "third_party/libyuv/include/libyuv.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// Returns the unique id of the device this module was created for, or NULL
+// if none has been assigned.
+const char* VideoCaptureImpl::CurrentDeviceName() const {
+  return _deviceUniqueId;
+}
+
+// static
+// Maps a rotation in degrees (0/90/180/270) to the corresponding
+// VideoRotation value. Returns 0 on success; returns -1 for any other
+// angle, in which case `rotation` is left untouched.
+int32_t VideoCaptureImpl::RotationFromDegrees(int degrees,
+                                              VideoRotation* rotation) {
+  switch (degrees) {
+    case 0:
+      *rotation = kVideoRotation_0;
+      return 0;
+    case 90:
+      *rotation = kVideoRotation_90;
+      return 0;
+    case 180:
+      *rotation = kVideoRotation_180;
+      return 0;
+    case 270:
+      *rotation = kVideoRotation_270;
+      return 0;
+    default:
+      return -1;
+  }
+}
+
+// static
+// Inverse of RotationFromDegrees(). Returns 0 on success; the trailing -1
+// is reached only for an out-of-range enum value, since all four
+// enumerators are handled in the switch.
+int32_t VideoCaptureImpl::RotationInDegrees(VideoRotation rotation,
+                                            int* degrees) {
+  switch (rotation) {
+    case kVideoRotation_0:
+      *degrees = 0;
+      return 0;
+    case kVideoRotation_90:
+      *degrees = 90;
+      return 0;
+    case kVideoRotation_180:
+      *degrees = 180;
+      return 0;
+    case kVideoRotation_270:
+      *degrees = 270;
+      return 0;
+  }
+  return -1;
+}
+
+// Initializes bookkeeping and seeds the requested capability with the
+// module-wide defaults from video_capture_config.h; platform code
+// overwrites it in StartCapture().
+VideoCaptureImpl::VideoCaptureImpl()
+    : _deviceUniqueId(NULL),
+      _requestedCapability(),
+      _lastProcessTimeNanos(rtc::TimeNanos()),
+      _lastFrameRateCallbackTimeNanos(rtc::TimeNanos()),
+      _rawDataCallBack(NULL),
+      _lastProcessFrameTimeNanos(rtc::TimeNanos()),
+      _rotateFrame(kVideoRotation_0),
+      apply_rotation_(false) {
+  _requestedCapability.width = kDefaultWidth;
+  _requestedCapability.height = kDefaultHeight;
+  // Use the named default rather than the magic number 30, matching the
+  // width/height initialization above (same value, no behavior change).
+  _requestedCapability.maxFPS = kDefaultFrameRate;
+  _requestedCapability.videoType = VideoType::kI420;
+  memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos));
+}
+
+VideoCaptureImpl::~VideoCaptureImpl() {
+  // delete[] on a null pointer is a no-op, so no null check is needed.
+  delete[] _deviceUniqueId;
+}
+
+// Registers a sink for converted I420 VideoFrames. Mutually exclusive with
+// the raw sink: a raw callback must not already be registered.
+void VideoCaptureImpl::RegisterCaptureDataCallback(
+    rtc::VideoSinkInterface<VideoFrame>* dataCallBack) {
+  MutexLock lock(&api_lock_);
+  RTC_DCHECK(!_rawDataCallBack);
+  _dataCallBacks.insert(dataCallBack);
+}
+
+// Registers the single sink that receives unconverted capture buffers.
+// Mutually exclusive with VideoFrame sinks; replaces any previous raw sink.
+void VideoCaptureImpl::RegisterCaptureDataCallback(
+    RawVideoSinkInterface* dataCallBack) {
+  MutexLock lock(&api_lock_);
+  RTC_DCHECK(_dataCallBacks.empty());
+  _rawDataCallBack = dataCallBack;
+}
+
+// Removes a VideoFrame sink (no-op if not registered). Also
+// unconditionally clears the raw sink even though the argument is a
+// VideoFrame sink. NOTE(review): presumably intentional since the two sink
+// kinds are mutually exclusive — confirm against callers.
+void VideoCaptureImpl::DeRegisterCaptureDataCallback(
+    rtc::VideoSinkInterface<VideoFrame>* dataCallBack) {
+  MutexLock lock(&api_lock_);
+  auto it = _dataCallBacks.find(dataCallBack);
+  if (it != _dataCallBacks.end()) {
+    _dataCallBacks.erase(it);
+  }
+  _rawDataCallBack = NULL;
+}
+
+// Stops the device once no VideoFrame sinks remain registered.
+// NOTE(review): reads _dataCallBacks without taking api_lock_ — confirm
+// that all callers serialize this with (de)registration.
+int32_t VideoCaptureImpl::StopCaptureIfAllClientsClose() {
+  if (_dataCallBacks.empty()) {
+    return StopCapture();
+  } else {
+    return 0;
+  }
+}
+
+// Fans a converted frame out to every registered VideoFrame sink and
+// updates the frame-rate bookkeeping. Called from IncomingFrame() with
+// api_lock_ held.
+int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) {
+  UpdateFrameCount();  // frame count used for local frame rate callback.
+
+  for (auto dataCallBack : _dataCallBacks) {
+    dataCallBack->OnFrame(captureFrame);
+  }
+
+  return 0;
+}
+
+// Hands the untouched capture buffer to the raw sink. Called from
+// IncomingFrame() with api_lock_ held and _rawDataCallBack checked non-null.
+void VideoCaptureImpl::DeliverRawFrame(uint8_t* videoFrame,
+                                       size_t videoFrameLength,
+                                       const VideoCaptureCapability& frameInfo,
+                                       int64_t captureTime) {
+  UpdateFrameCount();
+  _rawDataCallBack->OnRawFrame(videoFrame, videoFrameLength, frameInfo,
+                               _rotateFrame, captureTime);
+}
+
+// Entry point used by platform code for every captured buffer. Dispatches
+// the buffer to the raw sink if one is registered; otherwise converts it to
+// I420 (optionally rotating the pixels) and fans it out to the VideoFrame
+// sinks. `captureTime` is NTP time in milliseconds. Returns 0 on success,
+// -1 on a short buffer or failed conversion.
+int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
+                                        size_t videoFrameLength,
+                                        const VideoCaptureCapability& frameInfo,
+                                        int64_t captureTime /*=0*/) {
+  MutexLock lock(&api_lock_);
+
+  const int32_t width = frameInfo.width;
+  const int32_t height = frameInfo.height;
+
+  TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime);
+
+  if (_rawDataCallBack) {
+    DeliverRawFrame(videoFrame, videoFrameLength, frameInfo, captureTime);
+    return 0;
+  }
+
+  // Not encoded, convert to I420.
+  if (frameInfo.videoType != VideoType::kMJPEG) {
+    // Allow buffers larger than expected. On linux gstreamer allocates buffers
+    // page-aligned and v4l2loopback passes us the buffer size verbatim which
+    // for most cases is larger than expected.
+    // See https://github.com/umlaeute/v4l2loopback/issues/190.
+    if (auto size = CalcBufferSize(frameInfo.videoType, width, abs(height));
+        videoFrameLength < size) {
+      RTC_LOG(LS_ERROR) << "Wrong incoming frame length. Expected " << size
+                        << ", Got " << videoFrameLength << ".";
+      return -1;
+    }
+  }
+
+  int target_width = width;
+  int target_height = abs(height);
+
+  // SetApplyRotation doesn't take any lock. Make a local copy here.
+  bool apply_rotation = apply_rotation_;
+
+  if (apply_rotation) {
+    // Swap target dimensions for 90/270 degree rotations.
+    if (_rotateFrame == kVideoRotation_90 ||
+        _rotateFrame == kVideoRotation_270) {
+      target_width = abs(height);
+      target_height = width;
+    }
+  }
+
+  int stride_y = target_width;
+  int stride_uv = (target_width + 1) / 2;
+
+  // Setting absolute height (in case it was negative).
+  // In Windows, the image starts bottom left, instead of top left.
+  // Setting a negative source height, inverts the image (within LibYuv).
+  rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
+      target_width, target_height, stride_y, stride_uv, stride_uv);
+
+  libyuv::RotationMode rotation_mode = libyuv::kRotate0;
+  if (apply_rotation) {
+    switch (_rotateFrame) {
+      case kVideoRotation_0:
+        rotation_mode = libyuv::kRotate0;
+        break;
+      case kVideoRotation_90:
+        rotation_mode = libyuv::kRotate90;
+        break;
+      case kVideoRotation_180:
+        rotation_mode = libyuv::kRotate180;
+        break;
+      case kVideoRotation_270:
+        rotation_mode = libyuv::kRotate270;
+        break;
+    }
+  }
+
+  int dst_width = buffer->width();
+  int dst_height = buffer->height();
+
+  // LibYuv expects pre-rotation_mode values for dst.
+  // Stride values should correspond to the destination values.
+  if (rotation_mode == libyuv::kRotate90 || rotation_mode == libyuv::kRotate270) {
+    std::swap(dst_width, dst_height);
+  }
+
+  const int conversionResult = libyuv::ConvertToI420(
+      videoFrame, videoFrameLength, buffer.get()->MutableDataY(),
+      buffer.get()->StrideY(), buffer.get()->MutableDataU(),
+      buffer.get()->StrideU(), buffer.get()->MutableDataV(),
+      buffer.get()->StrideV(), 0, 0,  // No Cropping
+      width, height, dst_width, dst_height, rotation_mode,
+      ConvertVideoType(frameInfo.videoType));
+  if (conversionResult != 0) {
+    // Fixed log message: previously "to I420." was concatenated directly
+    // onto the integer video type (e.g. "3to I420.").
+    RTC_LOG(LS_ERROR) << "Failed to convert capture frame from type "
+                      << static_cast<int>(frameInfo.videoType) << " to I420.";
+    return -1;
+  }
+
+  VideoFrame captureFrame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(buffer)
+          .set_timestamp_rtp(0)
+          .set_timestamp_ms(rtc::TimeMillis())
+          .set_rotation(!apply_rotation ? _rotateFrame : kVideoRotation_0)
+          .build();
+  captureFrame.set_ntp_time_ms(captureTime);
+
+  // This is one ugly hack to let CamerasParent know what rotation
+  // the frame was captured at. Note that this goes against the intended
+  // meaning of rotation of the frame (how to rotate it before rendering).
+  // We do this so CamerasChild can scale to the proper dimensions
+  // later on in the pipe.
+  captureFrame.set_rotation(_rotateFrame);
+
+  DeliverCapturedFrame(captureFrame);
+
+  return 0;
+}
+
+// Base-class default: records the requested capability and reports
+// failure; platform subclasses override this to actually open the device.
+int32_t VideoCaptureImpl::StartCapture(
+    const VideoCaptureCapability& capability) {
+  _requestedCapability = capability;
+  return -1;
+}
+
+// Base-class default; overridden by platform implementations.
+int32_t VideoCaptureImpl::StopCapture() {
+  return -1;
+}
+
+// Base-class default; overridden by platform implementations.
+bool VideoCaptureImpl::CaptureStarted() {
+  return false;
+}
+
+// Base-class default; overridden by platform implementations.
+int32_t VideoCaptureImpl::CaptureSettings(
+    VideoCaptureCapability& /*settings*/) {
+  return -1;
+}
+
+// Records the rotation that IncomingFrame() will apply to (or tag onto)
+// subsequent frames.
+int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
+  MutexLock lock(&api_lock_);
+  _rotateFrame = rotation;
+  return 0;
+}
+
+// Chooses whether IncomingFrame() physically rotates the pixels (true) or
+// only tags frames with the pending rotation (false).
+bool VideoCaptureImpl::SetApplyRotation(bool enable) {
+  // We can't take any lock here as it'll cause deadlock with IncomingFrame.
+
+  // The effect of this is the last caller wins.
+  apply_rotation_ = enable;
+  return true;
+}
+
+bool VideoCaptureImpl::GetApplyRotation() {
+  return apply_rotation_;
+}
+
+// Pushes the current time onto the front of _incomingFrameTimesNanos,
+// shifting older entries back (the oldest falls off the end).
+void VideoCaptureImpl::UpdateFrameCount() {
+  if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) {
+    // First frame: the history is still empty, no shift needed.
+  } else {
+    // Shift every timestamp one slot back, dropping the oldest.
+    for (int i = (kFrameRateCountHistorySize - 2); i >= 0; --i) {
+      _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
+    }
+  }
+  _incomingFrameTimesNanos[0] = rtc::TimeNanos();
+}
+
+// Estimates the current frame rate (frames per second, rounded to the
+// nearest integer) from the timestamps recorded by UpdateFrameCount(),
+// looking back at most kFrameRateHistoryWindowMs from `now_ns`.
+uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) {
+  int32_t num = 0;
+  int32_t nrOfFrames = 0;
+  for (num = 1; num < (kFrameRateCountHistorySize - 1); ++num) {
+    if (_incomingFrameTimesNanos[num] <= 0 ||
+        (now_ns - _incomingFrameTimesNanos[num]) /
+                rtc::kNumNanosecsPerMillisec >
+            kFrameRateHistoryWindowMs) {  // don't use data older than 2sec
+      break;
+    } else {
+      nrOfFrames++;
+    }
+  }
+  if (num > 1) {
+    // Scale the counted frames by the time span they actually cover.
+    int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
+                   rtc::kNumNanosecsPerMillisec;
+    if (diff > 0) {
+      return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
+    }
+  }
+
+  return nrOfFrames;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_impl.h b/third_party/libwebrtc/modules/video_capture/video_capture_impl.h
new file mode 100644
index 0000000000..b9af5f2441
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_impl.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
+#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
+
+/*
+ * video_capture_impl.h
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_rotation.h"
+#include "api/video/video_sink_interface.h"
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_config.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+
+namespace videocapturemodule {
+// Class definitions
+// Platform-independent base for all capture implementations. Owns the sink
+// bookkeeping, frame-rate statistics and I420 conversion (IncomingFrame);
+// platform subclasses implement the actual device control.
+class VideoCaptureImpl : public VideoCaptureModule {
+ public:
+  /*
+   * Create a video capture module object
+   *
+   * id - unique identifier of this video capture module object
+   * deviceUniqueIdUTF8 - name of the device. Available names can be found by
+   *                      using GetDeviceName
+   */
+  static rtc::scoped_refptr<VideoCaptureModule> Create(
+      const char* deviceUniqueIdUTF8);
+
+  static DeviceInfo* CreateDeviceInfo();
+
+  // Helpers for converting between (integral) degrees and
+  // VideoRotation values. Return 0 on success.
+  static int32_t RotationFromDegrees(int degrees, VideoRotation* rotation);
+  static int32_t RotationInDegrees(VideoRotation rotation, int* degrees);
+
+  // Call backs
+  void RegisterCaptureDataCallback(
+      rtc::VideoSinkInterface<VideoFrame>* dataCallback) override;
+  virtual void RegisterCaptureDataCallback(
+      RawVideoSinkInterface* dataCallback) override;
+  void DeRegisterCaptureDataCallback(
+      rtc::VideoSinkInterface<VideoFrame>* dataCallback) override;
+
+  int32_t StopCaptureIfAllClientsClose() override;
+  int32_t SetCaptureRotation(VideoRotation rotation) override;
+  bool SetApplyRotation(bool enable) override;
+  bool GetApplyRotation() override;
+
+  const char* CurrentDeviceName() const override;
+
+  // `capture_time` must be specified in NTP time format in milliseconds.
+  int32_t IncomingFrame(uint8_t* videoFrame,
+                        size_t videoFrameLength,
+                        const VideoCaptureCapability& frameInfo,
+                        int64_t captureTime = 0);
+
+  // Platform dependent
+  int32_t StartCapture(const VideoCaptureCapability& capability) override;
+  int32_t StopCapture() override;
+  bool CaptureStarted() override;
+  int32_t CaptureSettings(VideoCaptureCapability& /*settings*/) override;
+
+ protected:
+  VideoCaptureImpl();
+  ~VideoCaptureImpl() override;
+
+  // moved DeliverCapturedFrame to protected for VideoCaptureAndroid (mjf)
+  int32_t DeliverCapturedFrame(VideoFrame& captureFrame);
+
+  char* _deviceUniqueId;  // current Device unique name;
+  Mutex api_lock_;
+  VideoCaptureCapability _requestedCapability;  // Should be set by platform
+                                                // dependent code in
+                                                // StartCapture.
+ private:
+  void UpdateFrameCount();
+  uint32_t CalculateFrameRate(int64_t now_ns);
+  void DeliverRawFrame(uint8_t* videoFrame,
+                       size_t videoFrameLength,
+                       const VideoCaptureCapability& frameInfo,
+                       int64_t captureTime);
+
+  // last time the module process function was called.
+  int64_t _lastProcessTimeNanos;
+  // last time the frame rate callback function was called.
+  int64_t _lastFrameRateCallbackTimeNanos;
+
+  // Registered frame sinks. NOTE(review): std::set is used here but <set>
+  // is only included transitively via video_capture.h — confirm whether a
+  // direct include is wanted.
+  std::set<rtc::VideoSinkInterface<VideoFrame>*> _dataCallBacks;
+  RawVideoSinkInterface* _rawDataCallBack;
+
+  int64_t _lastProcessFrameTimeNanos;
+  // Timestamps (rtc::TimeNanos) of recently delivered frames, newest first;
+  // consumed by CalculateFrameRate().
+  int64_t _incomingFrameTimesNanos[kFrameRateCountHistorySize];
+  VideoRotation _rotateFrame;  // Set if the frame should be rotated by the
+                               // capture module.
+
+  // Indicate whether rotation should be applied before delivered externally.
+  bool apply_rotation_;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build b/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build
new file mode 100644
index 0000000000..cd5557b826
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build
@@ -0,0 +1,254 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "GLESv2",
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+ UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc",
+ "/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc",
+ "/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc",
+ "/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc"
+ ]
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc",
+ "/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc",
+ "/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc",
+ "/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc"
+ ]
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "crypt32",
+ "iphlpapi",
+ "ole32",
+ "oleaut32",
+ "secur32",
+ "strmiids",
+ "user32",
+ "winmm"
+ ]
+
+ SOURCES += [
+ "/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc",
+ "/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc",
+ "/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc"
+ ]
+
+ UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc",
+ "/third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_capture_internal_impl_gn")
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build b/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build
new file mode 100644
index 0000000000..8aa245b127
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build
@@ -0,0 +1,237 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/media/libyuv/",
+ "/media/libyuv/libyuv/include/",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/modules/video_capture/device_info_impl.cc",
+ "/third_party/libwebrtc/modules/video_capture/video_capture_factory.cc",
+ "/third_party/libwebrtc/modules/video_capture/video_capture_impl.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "GLESv2",
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "crypt32",
+ "iphlpapi",
+ "secur32",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_capture_module_gn")
diff --git a/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc b/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc
new file mode 100644
index 0000000000..2b01fc6930
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc
@@ -0,0 +1,713 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/windows/device_info_ds.h"
+
+#include <dvdmedia.h>
+
+#include "modules/video_capture/video_capture_config.h"
+#include "modules/video_capture/windows/help_functions_ds.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/string_utils.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+BOOL isVideoDevice(DEV_BROADCAST_HDR *pHdr)
+{
+ if (pHdr == NULL) {
+ return FALSE;
+ }
+ if (pHdr->dbch_devicetype != DBT_DEVTYP_DEVICEINTERFACE) {
+ return FALSE;
+ }
+ DEV_BROADCAST_DEVICEINTERFACE* pDi = (DEV_BROADCAST_DEVICEINTERFACE*)pHdr;
+ return pDi->dbcc_classguid == KSCATEGORY_VIDEO_CAMERA;
+}
+
+LRESULT CALLBACK WndProc(HWND hWnd, UINT uiMsg, WPARAM wParam, LPARAM lParam)
+{
+ DeviceInfoDS* pParent;
+ if (uiMsg == WM_CREATE)
+ {
+ pParent = (DeviceInfoDS*)((LPCREATESTRUCT)lParam)->lpCreateParams;
+ SetWindowLongPtr(hWnd, GWLP_USERDATA, (LONG_PTR)pParent);
+ }
+ else if (uiMsg == WM_DESTROY)
+ {
+ SetWindowLongPtr(hWnd, GWLP_USERDATA, NULL);
+ }
+ else if (uiMsg == WM_DEVICECHANGE)
+ {
+ pParent = (DeviceInfoDS*)GetWindowLongPtr(hWnd, GWLP_USERDATA);
+ if (pParent && isVideoDevice((PDEV_BROADCAST_HDR)lParam))
+ {
+ pParent->DeviceChange();
+ }
+ }
+ return DefWindowProc(hWnd, uiMsg, wParam, lParam);
+}
+
+// static
+DeviceInfoDS* DeviceInfoDS::Create() {
+ DeviceInfoDS* dsInfo = new DeviceInfoDS();
+ if (!dsInfo || dsInfo->Init() != 0) {
+ delete dsInfo;
+ dsInfo = NULL;
+ }
+ return dsInfo;
+}
+
+DeviceInfoDS::DeviceInfoDS()
+ : _dsDevEnum(NULL),
+ _dsMonikerDevEnum(NULL),
+ _CoUninitializeIsRequired(true),
+ _hdevnotify(NULL) {
+ // 1) Initialize the COM library (make Windows load the DLLs).
+ //
+ // CoInitializeEx must be called at least once, and is usually called only
+ // once, for each thread that uses the COM library. Multiple calls to
+ // CoInitializeEx by the same thread are allowed as long as they pass the same
+ // concurrency flag, but subsequent valid calls return S_FALSE. To close the
+ // COM library gracefully on a thread, each successful call to CoInitializeEx,
+ // including any call that returns S_FALSE, must be balanced by a
+ // corresponding call to CoUninitialize.
+ //
+
+ /*Apartment-threading, while allowing for multiple threads of execution,
+ serializes all incoming calls by requiring that calls to methods of objects
+ created by this thread always run on the same thread the apartment/thread
+ that created them. In addition, calls can arrive only at message-queue
+ boundaries (i.e., only during a PeekMessage, SendMessage, DispatchMessage,
+ etc.). Because of this serialization, it is not typically necessary to write
+ concurrency control into the code for the object, other than to avoid calls
+ to PeekMessage and SendMessage during processing that must not be interrupted
+ by other method invocations or calls to other objects in the same
+ apartment/thread.*/
+
+ /// CoInitializeEx(NULL, COINIT_APARTMENTTHREADED ); //|
+ /// COINIT_SPEED_OVER_MEMORY
+ HRESULT hr = CoInitializeEx(
+ NULL, COINIT_MULTITHREADED); // Use COINIT_MULTITHREADED since Voice
+ // Engine uses COINIT_MULTITHREADED
+ if (FAILED(hr)) {
+ // Avoid calling CoUninitialize() since CoInitializeEx() failed.
+ _CoUninitializeIsRequired = FALSE;
+
+ if (hr == RPC_E_CHANGED_MODE) {
+ // Calling thread has already initialized COM to be used in a
+ // single-threaded apartment (STA). We are then prevented from using STA.
+ // Details: hr = 0x80010106 <=> "Cannot change thread mode after it is
+ // set".
+ //
+ RTC_DLOG(LS_INFO) << __FUNCTION__
+ << ": CoInitializeEx(NULL, COINIT_APARTMENTTHREADED)"
+ " => RPC_E_CHANGED_MODE, error 0x"
+ << rtc::ToHex(hr);
+ }
+ }
+
+ _hInstance = reinterpret_cast<HINSTANCE>(GetModuleHandle(NULL));
+ _wndClass = {0};
+ _wndClass.lpfnWndProc = &WndProc;
+ _wndClass.lpszClassName = TEXT("DeviceInfoDS");
+ _wndClass.hInstance = _hInstance;
+
+ if (RegisterClass(&_wndClass)) {
+ _hwnd = CreateWindow(_wndClass.lpszClassName, NULL, 0, CW_USEDEFAULT,
+ CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, NULL,
+ NULL, _hInstance, this);
+
+ DEV_BROADCAST_DEVICEINTERFACE di = { 0 };
+ di.dbcc_size = sizeof(di);
+ di.dbcc_devicetype = DBT_DEVTYP_DEVICEINTERFACE;
+ di.dbcc_classguid = KSCATEGORY_VIDEO_CAMERA;
+
+ _hdevnotify = RegisterDeviceNotification(_hwnd, &di,
+ DEVICE_NOTIFY_WINDOW_HANDLE);
+ }
+}
+
+DeviceInfoDS::~DeviceInfoDS() {
+ RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+ RELEASE_AND_CLEAR(_dsDevEnum);
+ if (_CoUninitializeIsRequired) {
+ CoUninitialize();
+ }
+ if (_hdevnotify)
+ {
+ UnregisterDeviceNotification(_hdevnotify);
+ }
+ if (_hwnd != NULL) {
+ DestroyWindow(_hwnd);
+ }
+ UnregisterClass(_wndClass.lpszClassName, _hInstance);
+}
+
+int32_t DeviceInfoDS::Init() {
+ HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
+ IID_ICreateDevEnum, (void**)&_dsDevEnum);
+ if (hr != NOERROR) {
+ RTC_LOG(LS_INFO) << "Failed to create CLSID_SystemDeviceEnum, error 0x"
+ << rtc::ToHex(hr);
+ return -1;
+ }
+ return 0;
+}
+uint32_t DeviceInfoDS::NumberOfDevices() {
+ MutexLock lock(&_apiLock);
+ return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0);
+}
+
+int32_t DeviceInfoDS::GetDeviceName(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length,
+ pid_t* pid) {
+ MutexLock lock(&_apiLock);
+ const int32_t result = GetDeviceInfo(
+ deviceNumber, deviceNameUTF8, deviceNameLength, deviceUniqueIdUTF8,
+ deviceUniqueIdUTF8Length, productUniqueIdUTF8, productUniqueIdUTF8Length);
+ return result > (int32_t)deviceNumber ? 0 : -1;
+}
+
+int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length)
+
+{
+ // enumerate all video capture devices
+ RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+ HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
+ &_dsMonikerDevEnum, 0);
+ if (hr != NOERROR) {
+ RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x"
+ << rtc::ToHex(hr) << ". No webcam exist?";
+ return 0;
+ }
+
+ _dsMonikerDevEnum->Reset();
+ ULONG cFetched;
+ IMoniker* pM;
+ int index = 0;
+ while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched)) {
+ IPropertyBag* pBag;
+ hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pBag);
+ if (S_OK == hr) {
+ // Find the description or friendly name.
+ VARIANT varName;
+ VariantInit(&varName);
+ hr = pBag->Read(L"Description", &varName, 0);
+ if (FAILED(hr)) {
+ hr = pBag->Read(L"FriendlyName", &varName, 0);
+ }
+ if (SUCCEEDED(hr)) {
+ // ignore all VFW drivers
+ if ((wcsstr(varName.bstrVal, (L"(VFW)")) == NULL) &&
+ (_wcsnicmp(varName.bstrVal, (L"Google Camera Adapter"), 21) != 0)) {
+ // Found a valid device.
+ if (index == static_cast<int>(deviceNumber)) {
+ int convResult = 0;
+ if (deviceNameLength > 0) {
+ convResult = WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1,
+ (char*)deviceNameUTF8,
+ deviceNameLength, NULL, NULL);
+ if (convResult == 0) {
+ RTC_LOG(LS_INFO) << "Failed to convert device name to UTF8, "
+ "error = "
+ << GetLastError();
+ return -1;
+ }
+ }
+ if (deviceUniqueIdUTF8Length > 0) {
+ hr = pBag->Read(L"DevicePath", &varName, 0);
+ if (FAILED(hr)) {
+ strncpy_s((char*)deviceUniqueIdUTF8, deviceUniqueIdUTF8Length,
+ (char*)deviceNameUTF8, convResult);
+ RTC_LOG(LS_INFO) << "Failed to get "
+ "deviceUniqueIdUTF8 using "
+ "deviceNameUTF8";
+ } else {
+ convResult = WideCharToMultiByte(
+ CP_UTF8, 0, varName.bstrVal, -1, (char*)deviceUniqueIdUTF8,
+ deviceUniqueIdUTF8Length, NULL, NULL);
+ if (convResult == 0) {
+ RTC_LOG(LS_INFO) << "Failed to convert device "
+ "name to UTF8, error = "
+ << GetLastError();
+ return -1;
+ }
+ if (productUniqueIdUTF8 && productUniqueIdUTF8Length > 0) {
+ GetProductId(deviceUniqueIdUTF8, productUniqueIdUTF8,
+ productUniqueIdUTF8Length);
+ }
+ }
+ }
+ }
+ ++index; // increase the number of valid devices
+ }
+ }
+ VariantClear(&varName);
+ pBag->Release();
+ pM->Release();
+ }
+ }
+ if (deviceNameLength) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << " " << deviceNameUTF8;
+ }
+ return index;
+}
+
+IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length) {
+ const int32_t deviceUniqueIdUTF8Length = (int32_t)strlen(
+ (char*)deviceUniqueIdUTF8); // UTF8 is also NULL terminated
+ if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) {
+ RTC_LOG(LS_INFO) << "Device name too long";
+ return NULL;
+ }
+
+ // enumerate all video capture devices
+ RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+ HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
+ &_dsMonikerDevEnum, 0);
+ if (hr != NOERROR) {
+ RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x"
+ << rtc::ToHex(hr) << ". No webcam exist?";
+ return 0;
+ }
+ _dsMonikerDevEnum->Reset();
+ ULONG cFetched;
+ IMoniker* pM;
+
+ IBaseFilter* captureFilter = NULL;
+ bool deviceFound = false;
+ while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched) && !deviceFound) {
+ IPropertyBag* pBag;
+ hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pBag);
+ if (S_OK == hr) {
+ // Find the description or friendly name.
+ VARIANT varName;
+ VariantInit(&varName);
+ if (deviceUniqueIdUTF8Length > 0) {
+ hr = pBag->Read(L"DevicePath", &varName, 0);
+ if (FAILED(hr)) {
+ hr = pBag->Read(L"Description", &varName, 0);
+ if (FAILED(hr)) {
+ hr = pBag->Read(L"FriendlyName", &varName, 0);
+ }
+ }
+ if (SUCCEEDED(hr)) {
+ char tempDevicePathUTF8[256];
+ tempDevicePathUTF8[0] = 0;
+ WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1,
+ tempDevicePathUTF8, sizeof(tempDevicePathUTF8),
+ NULL, NULL);
+ if (strncmp(tempDevicePathUTF8, (const char*)deviceUniqueIdUTF8,
+ deviceUniqueIdUTF8Length) == 0) {
+ // We have found the requested device
+ deviceFound = true;
+ hr =
+ pM->BindToObject(0, 0, IID_IBaseFilter, (void**)&captureFilter);
+ if
+ FAILED(hr) {
+ RTC_LOG(LS_ERROR) << "Failed to bind to the selected "
+ "capture device "
+ << hr;
+ }
+
+ if (productUniqueIdUTF8 &&
+ productUniqueIdUTF8Length > 0) // Get the device name
+ {
+ GetProductId(deviceUniqueIdUTF8, productUniqueIdUTF8,
+ productUniqueIdUTF8Length);
+ }
+ }
+ }
+ }
+ VariantClear(&varName);
+ pBag->Release();
+ }
+ pM->Release();
+ }
+ return captureFilter;
+}
+
+int32_t DeviceInfoDS::GetWindowsCapability(
+ const int32_t capabilityIndex,
+ VideoCaptureCapabilityWindows& windowsCapability) {
+ MutexLock lock(&_apiLock);
+
+ if (capabilityIndex < 0 || static_cast<size_t>(capabilityIndex) >=
+ _captureCapabilitiesWindows.size()) {
+ return -1;
+ }
+
+ windowsCapability = _captureCapabilitiesWindows[capabilityIndex];
+ return 0;
+}
+
+int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
+
+{
+ // Reset old capability list
+ _captureCapabilities.clear();
+
+ const int32_t deviceUniqueIdUTF8Length =
+ (int32_t)strlen((char*)deviceUniqueIdUTF8);
+ if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) {
+ RTC_LOG(LS_INFO) << "Device name too long";
+ return -1;
+ }
+ RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device "
+ << deviceUniqueIdUTF8;
+
+ char productId[kVideoCaptureProductIdLength];
+ IBaseFilter* captureDevice = DeviceInfoDS::GetDeviceFilter(
+ deviceUniqueIdUTF8, productId, kVideoCaptureProductIdLength);
+ if (!captureDevice)
+ return -1;
+ IPin* outputCapturePin = GetOutputPin(captureDevice, GUID_NULL);
+ if (!outputCapturePin) {
+ RTC_LOG(LS_INFO) << "Failed to get capture device output pin";
+ RELEASE_AND_CLEAR(captureDevice);
+ return -1;
+ }
+ IAMExtDevice* extDevice = NULL;
+ HRESULT hr =
+ captureDevice->QueryInterface(IID_IAMExtDevice, (void**)&extDevice);
+ if (SUCCEEDED(hr) && extDevice) {
+ RTC_LOG(LS_INFO) << "This is an external device";
+ extDevice->Release();
+ }
+
+ IAMStreamConfig* streamConfig = NULL;
+ hr = outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+ (void**)&streamConfig);
+ if (FAILED(hr)) {
+ RTC_LOG(LS_INFO) << "Failed to get IID_IAMStreamConfig interface "
+ "from capture device";
+ return -1;
+ }
+
+ // this gets the FPS
+ IAMVideoControl* videoControlConfig = NULL;
+ HRESULT hrVC = captureDevice->QueryInterface(IID_IAMVideoControl,
+ (void**)&videoControlConfig);
+ if (FAILED(hrVC)) {
+ RTC_LOG(LS_INFO) << "IID_IAMVideoControl Interface NOT SUPPORTED";
+ }
+
+ AM_MEDIA_TYPE* pmt = NULL;
+ VIDEO_STREAM_CONFIG_CAPS caps;
+ int count, size;
+
+ hr = streamConfig->GetNumberOfCapabilities(&count, &size);
+ if (FAILED(hr)) {
+ RTC_LOG(LS_INFO) << "Failed to GetNumberOfCapabilities";
+ RELEASE_AND_CLEAR(videoControlConfig);
+ RELEASE_AND_CLEAR(streamConfig);
+ RELEASE_AND_CLEAR(outputCapturePin);
+ RELEASE_AND_CLEAR(captureDevice);
+ return -1;
+ }
+
+ // Check if the device support formattype == FORMAT_VideoInfo2 and
+ // FORMAT_VideoInfo. Prefer FORMAT_VideoInfo since some cameras (ZureCam) has
+ // been seen having problem with MJPEG and FORMAT_VideoInfo2 Interlace flag is
+ // only supported in FORMAT_VideoInfo2
+ bool supportFORMAT_VideoInfo2 = false;
+ bool supportFORMAT_VideoInfo = false;
+ bool foundInterlacedFormat = false;
+ GUID preferedVideoFormat = FORMAT_VideoInfo;
+ for (int32_t tmp = 0; tmp < count; ++tmp) {
+ hr = streamConfig->GetStreamCaps(tmp, &pmt, reinterpret_cast<BYTE*>(&caps));
+ if (hr == S_OK) {
+ if (pmt->majortype == MEDIATYPE_Video &&
+ pmt->formattype == FORMAT_VideoInfo2) {
+ RTC_LOG(LS_INFO) << "Device support FORMAT_VideoInfo2";
+ supportFORMAT_VideoInfo2 = true;
+ VIDEOINFOHEADER2* h =
+ reinterpret_cast<VIDEOINFOHEADER2*>(pmt->pbFormat);
+ RTC_DCHECK(h);
+ foundInterlacedFormat |=
+ h->dwInterlaceFlags &
+ (AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOnly);
+ }
+ if (pmt->majortype == MEDIATYPE_Video &&
+ pmt->formattype == FORMAT_VideoInfo) {
+ RTC_LOG(LS_INFO) << "Device support FORMAT_VideoInfo2";
+ supportFORMAT_VideoInfo = true;
+ }
+
+ FreeMediaType(pmt);
+ pmt = NULL;
+ }
+ }
+ if (supportFORMAT_VideoInfo2) {
+ if (supportFORMAT_VideoInfo && !foundInterlacedFormat) {
+ preferedVideoFormat = FORMAT_VideoInfo;
+ } else {
+ preferedVideoFormat = FORMAT_VideoInfo2;
+ }
+ }
+
+ for (int32_t tmp = 0; tmp < count; ++tmp) {
+ hr = streamConfig->GetStreamCaps(tmp, &pmt, reinterpret_cast<BYTE*>(&caps));
+ if (hr != S_OK) {
+ RTC_LOG(LS_INFO) << "Failed to GetStreamCaps";
+ RELEASE_AND_CLEAR(videoControlConfig);
+ RELEASE_AND_CLEAR(streamConfig);
+ RELEASE_AND_CLEAR(outputCapturePin);
+ RELEASE_AND_CLEAR(captureDevice);
+ return -1;
+ }
+
+ if (pmt->majortype == MEDIATYPE_Video &&
+ pmt->formattype == preferedVideoFormat) {
+ VideoCaptureCapabilityWindows capability;
+ int64_t avgTimePerFrame = 0;
+
+ if (pmt->formattype == FORMAT_VideoInfo) {
+ VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
+ RTC_DCHECK(h);
+ capability.directShowCapabilityIndex = tmp;
+ capability.width = h->bmiHeader.biWidth;
+ capability.height = h->bmiHeader.biHeight;
+ avgTimePerFrame = h->AvgTimePerFrame;
+ }
+ if (pmt->formattype == FORMAT_VideoInfo2) {
+ VIDEOINFOHEADER2* h =
+ reinterpret_cast<VIDEOINFOHEADER2*>(pmt->pbFormat);
+ RTC_DCHECK(h);
+ capability.directShowCapabilityIndex = tmp;
+ capability.width = h->bmiHeader.biWidth;
+ capability.height = h->bmiHeader.biHeight;
+ capability.interlaced =
+ h->dwInterlaceFlags &
+ (AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOnly);
+ avgTimePerFrame = h->AvgTimePerFrame;
+ }
+
+ if (hrVC == S_OK) {
+ LONGLONG* frameDurationList = NULL;
+ LONGLONG maxFPS = 0;
+ long listSize = 0;
+ SIZE size;
+ size.cx = capability.width;
+ size.cy = capability.height;
+
+ // GetMaxAvailableFrameRate doesn't return max frame rate always
+ // eg: Logitech Notebook. This may be due to a bug in that API
+ // because GetFrameRateList array is reversed in the above camera. So
+ // a util method written. Can't assume the first value will return
+ // the max fps.
+ hrVC = videoControlConfig->GetFrameRateList(
+ outputCapturePin, tmp, size, &listSize, &frameDurationList);
+
+ if (hrVC == S_OK) {
+ maxFPS = GetMaxOfFrameArray(frameDurationList, listSize);
+ }
+
+ CoTaskMemFree(frameDurationList);
+ frameDurationList = NULL;
+ listSize = 0;
+
+ // On some odd cameras, you may get a 0 for duration. Some others may
+ // not update the out vars. GetMaxOfFrameArray returns the lowest
+ // duration (highest FPS), or 0 if there was no list with elements.
+ if (0 != maxFPS) {
+ capability.maxFPS = static_cast<int>(10000000 / maxFPS);
+ capability.supportFrameRateControl = true;
+ } else // use existing method
+ {
+ RTC_LOG(LS_INFO) << "GetMaxAvailableFrameRate NOT SUPPORTED";
+ if (avgTimePerFrame > 0)
+ capability.maxFPS = static_cast<int>(10000000 / avgTimePerFrame);
+ else
+ capability.maxFPS = 0;
+ }
+ } else // use existing method in case IAMVideoControl is not supported
+ {
+ if (avgTimePerFrame > 0)
+ capability.maxFPS = static_cast<int>(10000000 / avgTimePerFrame);
+ else
+ capability.maxFPS = 0;
+ }
+
+ // can't switch MEDIATYPE :~(
+ if (pmt->subtype == MEDIASUBTYPE_I420) {
+ capability.videoType = VideoType::kI420;
+ } else if (pmt->subtype == MEDIASUBTYPE_IYUV) {
+ capability.videoType = VideoType::kIYUV;
+ } else if (pmt->subtype == MEDIASUBTYPE_RGB24) {
+ capability.videoType = VideoType::kRGB24;
+ } else if (pmt->subtype == MEDIASUBTYPE_YUY2) {
+ capability.videoType = VideoType::kYUY2;
+ } else if (pmt->subtype == MEDIASUBTYPE_RGB565) {
+ capability.videoType = VideoType::kRGB565;
+ } else if (pmt->subtype == MEDIASUBTYPE_MJPG) {
+ capability.videoType = VideoType::kMJPEG;
+ } else if (pmt->subtype == MEDIASUBTYPE_dvsl ||
+ pmt->subtype == MEDIASUBTYPE_dvsd ||
+ pmt->subtype ==
+ MEDIASUBTYPE_dvhd) // If this is an external DV camera
+ {
+ capability.videoType =
+ VideoType::kYUY2; // MS DV filter seems to create this type
+ } else if (pmt->subtype ==
+ MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards
+ {
+ capability.videoType = VideoType::kUYVY;
+ } else if (pmt->subtype ==
+ MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses
+ // BT. 709 color. Not entiry correct to use
+ // UYVY. http://en.wikipedia.org/wiki/YCbCr
+ {
+ RTC_LOG(LS_INFO) << "Device support HDYC.";
+ capability.videoType = VideoType::kUYVY;
+ } else {
+ WCHAR strGuid[39];
+ StringFromGUID2(pmt->subtype, strGuid, 39);
+ RTC_LOG(LS_WARNING)
+ << "Device support unknown media type " << strGuid << ", width "
+ << capability.width << ", height " << capability.height;
+ continue;
+ }
+
+ _captureCapabilities.push_back(capability);
+ _captureCapabilitiesWindows.push_back(capability);
+ RTC_LOG(LS_INFO) << "Camera capability, width:" << capability.width
+ << " height:" << capability.height
+ << " type:" << static_cast<int>(capability.videoType)
+ << " fps:" << capability.maxFPS;
+ }
+ FreeMediaType(pmt);
+ pmt = NULL;
+ }
+ RELEASE_AND_CLEAR(streamConfig);
+ RELEASE_AND_CLEAR(videoControlConfig);
+ RELEASE_AND_CLEAR(outputCapturePin);
+ RELEASE_AND_CLEAR(captureDevice); // Release the capture device
+
+ // Store the new used device name
+ _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+ _lastUsedDeviceName =
+ (char*)realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1);
+ memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8,
+ _lastUsedDeviceNameLength + 1);
+ RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size();
+
+ return static_cast<int32_t>(_captureCapabilities.size());
+}
+
+// Constructs a product ID from the Windows DevicePath. on a USB device the
+// devicePath contains product id and vendor id. This seems to work for firewire
+// as well.
+// Example of device path:
+// "\\?\usb#vid_0408&pid_2010&mi_00#7&258e7aaf&0&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
+// "\\?\avc#sony&dv-vcr&camcorder&dv#65b2d50301460008#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
+void DeviceInfoDS::GetProductId(const char* devicePath,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length) {
+ *productUniqueIdUTF8 = '\0';
+ char* startPos = strstr((char*)devicePath, "\\\\?\\");
+ if (!startPos) {
+ strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+ RTC_LOG(LS_INFO) << "Failed to get the product Id";
+ return;
+ }
+ startPos += 4;
+
+ char* pos = strchr(startPos, '&');
+ if (!pos || pos >= (char*)devicePath + strlen((char*)devicePath)) {
+ strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+ RTC_LOG(LS_INFO) << "Failed to get the product Id";
+ return;
+ }
+ // Find the second occurrence.
+ pos = strchr(pos + 1, '&');
+ uint32_t bytesToCopy = (uint32_t)(pos - startPos);
+ if (pos && (bytesToCopy < productUniqueIdUTF8Length) &&
+ bytesToCopy <= kVideoCaptureProductIdLength) {
+ strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length,
+ (char*)startPos, bytesToCopy);
+ } else {
+ strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+ RTC_LOG(LS_INFO) << "Failed to get the product Id";
+ }
+}
+
+int32_t DeviceInfoDS::DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY) {
+ MutexLock lock(&_apiLock);
+ HWND window = (HWND)parentWindow;
+
+ IBaseFilter* filter = GetDeviceFilter(deviceUniqueIdUTF8, NULL, 0);
+ if (!filter)
+ return -1;
+
+ ISpecifyPropertyPages* pPages = NULL;
+ CAUUID uuid;
+ HRESULT hr = S_OK;
+
+ hr = filter->QueryInterface(IID_ISpecifyPropertyPages, (LPVOID*)&pPages);
+ if (!SUCCEEDED(hr)) {
+ filter->Release();
+ return -1;
+ }
+ hr = pPages->GetPages(&uuid);
+ if (!SUCCEEDED(hr)) {
+ filter->Release();
+ return -1;
+ }
+
+ WCHAR tempDialogTitleWide[256];
+ tempDialogTitleWide[0] = 0;
+ int size = 255;
+
+ // UTF-8 to wide char
+ MultiByteToWideChar(CP_UTF8, 0, (char*)dialogTitleUTF8, -1,
+ tempDialogTitleWide, size);
+
+ // Invoke a dialog box to display.
+
+ hr = OleCreatePropertyFrame(
+ window, // You must create the parent window.
+ positionX, // Horizontal position for the dialog box.
+ positionY, // Vertical position for the dialog box.
+ tempDialogTitleWide, // String used for the dialog box caption.
+ 1, // Number of pointers passed in pPlugin.
+ (LPUNKNOWN*)&filter, // Pointer to the filter.
+ uuid.cElems, // Number of property pages.
+ uuid.pElems, // Array of property page CLSIDs.
+ LOCALE_USER_DEFAULT, // Locale ID for the dialog box.
+ 0, NULL); // Reserved
+ // Release memory.
+ if (uuid.pElems) {
+ CoTaskMemFree(uuid.pElems);
+ }
+ filter->Release();
+ return 0;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.h b/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.h
new file mode 100644
index 0000000000..e6dfaed366
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_
+#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_
+
+#include <dshow.h>
+#include <Ks.h>
+#include <dbt.h>
+
+#include "modules/video_capture/device_info_impl.h"
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_impl.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+struct VideoCaptureCapabilityWindows : public VideoCaptureCapability {
+ uint32_t directShowCapabilityIndex;
+ bool supportFrameRateControl;
+ VideoCaptureCapabilityWindows() {
+ directShowCapabilityIndex = 0;
+ supportFrameRateControl = false;
+ }
+};
+
+class DeviceInfoDS : public DeviceInfoImpl {
+ public:
+ // Factory function.
+ static DeviceInfoDS* Create();
+
+ DeviceInfoDS();
+ ~DeviceInfoDS() override;
+
+ int32_t Init() override;
+ uint32_t NumberOfDevices() override;
+
+ /*
+ * Returns the available capture devices.
+ */
+ int32_t GetDeviceName(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length,
+ pid_t* pid) override;
+
+ /*
+ * Display OS /capture device specific settings dialog
+ */
+ int32_t DisplayCaptureSettingsDialogBox(const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY) override;
+
+ // Windows specific
+
+ /* Gets a capture device filter
+ The user of this API is responsible for releasing the filter when it not
+ needed.
+ */
+ IBaseFilter* GetDeviceFilter(const char* deviceUniqueIdUTF8,
+ char* productUniqueIdUTF8 = NULL,
+ uint32_t productUniqueIdUTF8Length = 0);
+
+ int32_t GetWindowsCapability(
+ int32_t capabilityIndex,
+ VideoCaptureCapabilityWindows& windowsCapability);
+
+ static void GetProductId(const char* devicePath,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length);
+
+ protected:
+ int32_t GetDeviceInfo(uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8,
+ uint32_t productUniqueIdUTF8Length);
+
+ int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
+
+ private:
+ ICreateDevEnum* _dsDevEnum;
+ IEnumMoniker* _dsMonikerDevEnum;
+ bool _CoUninitializeIsRequired;
+ std::vector<VideoCaptureCapabilityWindows> _captureCapabilitiesWindows;
+ HWND _hwnd;
+ WNDCLASS _wndClass;
+ HINSTANCE _hInstance;
+ HDEVNOTIFY _hdevnotify;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_
diff --git a/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc b/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc
new file mode 100644
index 0000000000..47fecfe4a1
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <initguid.h> // Must come before the help_functions_ds.h include so
+ // that DEFINE_GUID() entries will be defined in this
+ // object file.
+
+#include <cguid.h>
+
+#include "modules/video_capture/windows/help_functions_ds.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+// Returns the smallest entry of `maxFps`. The entries are frame periods
+// (100 ns units), so the minimum period corresponds to the maximum frame
+// rate -- hence the name. Returns 0 for a null or empty array.
+LONGLONG GetMaxOfFrameArray(LONGLONG* maxFps, long size) {
+  if (maxFps == nullptr || size <= 0) {
+    return 0;
+  }
+  LONGLONG smallest = maxFps[0];
+  for (long i = 1; i < size; ++i) {
+    if (maxFps[i] < smallest)
+      smallest = maxFps[i];
+  }
+  return smallest;
+}
+
+// Returns the first unconnected input pin on `filter`, or NULL if none
+// exists. The returned pin carries the reference obtained from
+// IEnumPins::Next(); the caller must Release() it.
+IPin* GetInputPin(IBaseFilter* filter) {
+  IPin* pin = NULL;
+  IEnumPins* pPinEnum = NULL;
+  filter->EnumPins(&pPinEnum);
+  if (pPinEnum == NULL) {
+    return NULL;
+  }
+
+  // get first unconnected pin
+  pPinEnum->Reset();  // set to first pin
+
+  while (S_OK == pPinEnum->Next(1, &pin, NULL)) {
+    PIN_DIRECTION pPinDir;
+    pin->QueryDirection(&pPinDir);
+    if (PINDIR_INPUT == pPinDir)  // This is an input pin
+    {
+      IPin* tempPin = NULL;
+      if (S_OK != pin->ConnectedTo(&tempPin))  // The pin is not connected
+      {
+        pPinEnum->Release();
+        return pin;
+      }
+      // The pin is already connected. Release the reference ConnectedTo()
+      // handed out (it was leaked before) and keep looking.
+      tempPin->Release();
+    }
+    pin->Release();
+  }
+  pPinEnum->Release();
+  return NULL;
+}
+
+// Returns the first output pin on `filter` whose category matches
+// `Category` (GUID_NULL matches any output pin). The returned pin carries
+// a reference the caller must Release(); returns NULL when nothing matches.
+IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category) {
+  IEnumPins* enum_pins = NULL;
+  filter->EnumPins(&enum_pins);
+  if (enum_pins == NULL) {
+    return NULL;
+  }
+  enum_pins->Reset();  // Start the enumeration at the first pin.
+  IPin* candidate = NULL;
+  while (S_OK == enum_pins->Next(1, &candidate, NULL)) {
+    PIN_DIRECTION direction;
+    candidate->QueryDirection(&direction);
+    if (PINDIR_OUTPUT == direction &&
+        (Category == GUID_NULL || PinMatchesCategory(candidate, Category))) {
+      // Found it; hand the Next() reference to the caller.
+      enum_pins->Release();
+      return candidate;
+    }
+    candidate->Release();
+    candidate = NULL;
+  }
+  enum_pins->Release();
+  return NULL;
+}
+
+// Returns TRUE when `pPin` exposes IKsPropertySet and reports a pin
+// category GUID equal to `Category`.
+BOOL PinMatchesCategory(IPin* pPin, REFGUID Category) {
+  IKsPropertySet* property_set = NULL;
+  if (FAILED(pPin->QueryInterface(IID_PPV_ARGS(&property_set))))
+    return FALSE;
+
+  GUID pin_category;
+  DWORD bytes_returned;
+  HRESULT hr =
+      property_set->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
+                        &pin_category, sizeof(GUID), &bytes_returned);
+  property_set->Release();
+
+  BOOL matches = FALSE;
+  if (SUCCEEDED(hr) && bytes_returned == sizeof(GUID))
+    matches = (pin_category == Category);
+  return matches;
+}
+
+// Frees the members owned by `media_type` (format block, pUnk reference)
+// without freeing the AM_MEDIA_TYPE struct itself. Safe on nullptr.
+void ResetMediaType(AM_MEDIA_TYPE* media_type) {
+  if (media_type == nullptr)
+    return;
+  if (media_type->cbFormat) {
+    CoTaskMemFree(media_type->pbFormat);
+    media_type->pbFormat = nullptr;
+    media_type->cbFormat = 0;
+  }
+  if (media_type->pUnk != nullptr) {
+    media_type->pUnk->Release();
+    media_type->pUnk = nullptr;
+  }
+}
+
+// Fully frees a CoTaskMemAlloc'd AM_MEDIA_TYPE: the owned members first,
+// then the struct allocation itself. Safe on nullptr.
+void FreeMediaType(AM_MEDIA_TYPE* media_type) {
+  if (media_type != nullptr) {
+    ResetMediaType(media_type);
+    CoTaskMemFree(media_type);
+  }
+}
+
+// Deep-copies `source` into `target`: duplicates the format block and takes
+// a reference on pUnk. On allocation failure returns E_OUTOFMEMORY with
+// target's format fields zeroed (and no pUnk reference taken).
+HRESULT CopyMediaType(AM_MEDIA_TYPE* target, const AM_MEDIA_TYPE* source) {
+  RTC_DCHECK_NE(source, target);
+  *target = *source;  // Shallow copy of every field first.
+  if (source->cbFormat) {
+    RTC_DCHECK(source->pbFormat);
+    BYTE* format_copy =
+        reinterpret_cast<BYTE*>(CoTaskMemAlloc(source->cbFormat));
+    if (format_copy == nullptr) {
+      target->pbFormat = nullptr;
+      target->cbFormat = 0;
+      return E_OUTOFMEMORY;
+    }
+    CopyMemory(format_copy, source->pbFormat, source->cbFormat);
+    target->pbFormat = format_copy;
+  }
+
+  // The shallow copy aliased source->pUnk; balance it with an AddRef().
+  if (target->pUnk != nullptr)
+    target->pUnk->AddRef();
+
+  return S_OK;
+}
+
+// Returns a CoTaskMemAlloc'd copy of `str` (free with CoTaskMemFree), or
+// nullptr when `str` is null or the allocation fails.
+wchar_t* DuplicateWideString(const wchar_t* str) {
+  if (!str)
+    return nullptr;
+  size_t len = lstrlenW(str);
+  wchar_t* ret =
+      reinterpret_cast<LPWSTR>(CoTaskMemAlloc((len + 1) * sizeof(wchar_t)));
+  if (!ret)
+    return nullptr;  // Previously dereferenced a null pointer on OOM.
+  lstrcpyW(ret, str);
+  return ret;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.h b/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.h
new file mode 100644
index 0000000000..29479157a8
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_
+#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_
+
+#include <dshow.h>
+
+#include <type_traits>
+#include <utility>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/ref_counter.h"
+
+DEFINE_GUID(MEDIASUBTYPE_I420,
+ 0x30323449,
+ 0x0000,
+ 0x0010,
+ 0x80,
+ 0x00,
+ 0x00,
+ 0xAA,
+ 0x00,
+ 0x38,
+ 0x9B,
+ 0x71);
+DEFINE_GUID(MEDIASUBTYPE_HDYC,
+ 0x43594448,
+ 0x0000,
+ 0x0010,
+ 0x80,
+ 0x00,
+ 0x00,
+ 0xAA,
+ 0x00,
+ 0x38,
+ 0x9B,
+ 0x71);
+
+#define RELEASE_AND_CLEAR(p) \
+ if (p) { \
+ (p)->Release(); \
+ (p) = NULL; \
+ }
+
+namespace webrtc {
+namespace videocapturemodule {
+LONGLONG GetMaxOfFrameArray(LONGLONG* maxFps, long size);
+
+IPin* GetInputPin(IBaseFilter* filter);
+IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category);
+BOOL PinMatchesCategory(IPin* pPin, REFGUID Category);
+void ResetMediaType(AM_MEDIA_TYPE* media_type);
+void FreeMediaType(AM_MEDIA_TYPE* media_type);
+HRESULT CopyMediaType(AM_MEDIA_TYPE* target, const AM_MEDIA_TYPE* source);
+
+// Helper function to make using scoped_refptr with COM interface pointers
+// a little less awkward. rtc::scoped_refptr doesn't support the & operator
+// or a way to receive values via an out ptr.
+// The function is intentionally not called QueryInterface to make things less
+// confusing for the compiler to figure out what the caller wants to do when
+// called from within the context of a class that also implements COM
+// interfaces.
+template <class T>
+HRESULT GetComInterface(IUnknown* object, rtc::scoped_refptr<T>* ptr) {
+  // This helper function is not meant to magically free ptr. If we do that
+  // we add code bloat to most places where it's not needed and make the code
+  // less readable since it's not clear at the call site that the pointer
+  // would get freed even if QI() fails.
+  RTC_DCHECK(!ptr->get());
+  void* new_ptr = nullptr;
+  HRESULT hr = object->QueryInterface(__uuidof(T), &new_ptr);
+  // On success QI() has already AddRef'd; swap transfers that reference
+  // into `ptr` without an extra AddRef/Release pair.
+  if (SUCCEEDED(hr))
+    ptr->swap(reinterpret_cast<T**>(&new_ptr));
+  return hr;
+}
+
+// Provides a reference count implementation for COM (IUnknown derived) classes.
+// The implementation uses atomics for managing the ref count.
+template <class T>
+class ComRefCount : public T {
+ public:
+  ComRefCount() {}
+
+  // Forwards a single constructor argument to the wrapped type T.
+  template <class P0>
+  explicit ComRefCount(P0&& p0) : T(std::forward<P0>(p0)) {}
+
+  // Note: AddRef/Release return 1 rather than the true count; COM only
+  // requires the value to be usable for debugging, not to be exact.
+  STDMETHOD_(ULONG, AddRef)() override {
+    ref_count_.IncRef();
+    return 1;
+  }
+
+  STDMETHOD_(ULONG, Release)() override {
+    const auto status = ref_count_.DecRef();
+    if (status == rtc::RefCountReleaseStatus::kDroppedLastRef) {
+      delete this;
+      return 0;
+    }
+    return 1;
+  }
+
+ protected:
+  // Non-virtual destructor is safe: deletion only happens via Release()
+  // on the most-derived ComRefCount instance.
+  ~ComRefCount() {}
+
+ private:
+  // Starts at 0; the creating code is expected to AddRef() once.
+  webrtc::webrtc_impl::RefCounter ref_count_{0};
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_
diff --git a/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc b/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc
new file mode 100644
index 0000000000..0c5acb668d
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc
@@ -0,0 +1,959 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/windows/sink_filter_ds.h"
+
+#include <dvdmedia.h> // VIDEOINFOHEADER2
+#include <initguid.h>
+
+#include <algorithm>
+#include <list>
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/string_utils.h"
+
+DEFINE_GUID(CLSID_SINKFILTER,
+ 0x88cdbbdc,
+ 0xa73b,
+ 0x4afa,
+ 0xac,
+ 0xbf,
+ 0x15,
+ 0xd5,
+ 0xe2,
+ 0xce,
+ 0x12,
+ 0xc3);
+
+namespace webrtc {
+namespace videocapturemodule {
+namespace {
+
+// Simple enumeration implementation that enumerates over a single pin :-/
+class EnumPins : public IEnumPins {
+ public:
+  EnumPins(IPin* pin) : pin_(pin) {}
+
+ protected:
+  virtual ~EnumPins() {}
+
+ private:
+  // IUnknown. Note: AddRef/Release themselves come from the ComRefCount
+  // wrapper this class is instantiated through.
+  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override {
+    if (riid == IID_IUnknown || riid == IID_IEnumPins) {
+      *ppv = static_cast<IEnumPins*>(this);
+      AddRef();
+      return S_OK;
+    }
+    return E_NOINTERFACE;
+  }
+
+  STDMETHOD(Clone)(IEnumPins** pins) {
+    RTC_DCHECK_NOTREACHED();
+    return E_NOTIMPL;
+  }
+
+  // Hands out the single pin (with a reference) on the first call; every
+  // subsequent call reports exhaustion via S_FALSE, per IEnumPins contract.
+  STDMETHOD(Next)(ULONG count, IPin** pins, ULONG* fetched) {
+    RTC_DCHECK(count > 0);
+    RTC_DCHECK(pins);
+    // fetched may be NULL.
+
+    if (pos_ > 0) {
+      if (fetched)
+        *fetched = 0;
+      return S_FALSE;
+    }
+
+    ++pos_;
+    pins[0] = pin_.get();
+    pins[0]->AddRef();
+    if (fetched)
+      *fetched = 1;
+
+    // S_OK only when the full requested count (1) was delivered.
+    return count == 1 ? S_OK : S_FALSE;
+  }
+
+  STDMETHOD(Skip)(ULONG count) {
+    RTC_DCHECK_NOTREACHED();
+    return E_NOTIMPL;
+  }
+
+  STDMETHOD(Reset)() {
+    pos_ = 0;
+    return S_OK;
+  }
+
+  rtc::scoped_refptr<IPin> pin_;
+  int pos_ = 0;  // 0 = pin not yet handed out.
+};
+
+// Returns true when `a` matches `b`, treating GUID_NULL fields in `b` as
+// wildcards. If `b` names a format type, the format blocks must match
+// byte-for-byte.
+bool IsMediaTypePartialMatch(const AM_MEDIA_TYPE& a, const AM_MEDIA_TYPE& b) {
+  const bool major_ok =
+      b.majortype == GUID_NULL || a.majortype == b.majortype;
+  const bool sub_ok = b.subtype == GUID_NULL || a.subtype == b.subtype;
+  if (!major_ok || !sub_ok)
+    return false;
+
+  if (b.formattype == GUID_NULL)
+    return true;
+
+  // A format type was specified, so it must match exactly.
+  if (a.formattype != b.formattype || a.cbFormat != b.cbFormat)
+    return false;
+  return a.cbFormat == 0 || memcmp(a.pbFormat, b.pbFormat, a.cbFormat) == 0;
+}
+
+// A media type is fully specified when both the major type and format type
+// are concrete, i.e. neither is the GUID_NULL wildcard.
+bool IsMediaTypeFullySpecified(const AM_MEDIA_TYPE& type) {
+  return !(type.majortype == GUID_NULL || type.formattype == GUID_NULL);
+}
+
+// Ensures `media_type` owns a format buffer of exactly `length` bytes and
+// returns it. Reuses the existing buffer when the size already matches;
+// otherwise frees the old buffer and allocates a fresh one. Returns nullptr
+// on allocation failure (media_type is left unchanged in that case).
+BYTE* AllocMediaTypeFormatBuffer(AM_MEDIA_TYPE* media_type, ULONG length) {
+  RTC_DCHECK(length);
+  if (media_type->cbFormat == length)
+    return media_type->pbFormat;
+
+  BYTE* buffer = static_cast<BYTE*>(CoTaskMemAlloc(length));
+  if (!buffer)
+    return nullptr;
+
+  if (media_type->pbFormat) {
+    RTC_DCHECK(media_type->cbFormat);
+    CoTaskMemFree(media_type->pbFormat);
+    media_type->pbFormat = nullptr;
+  }
+
+  media_type->cbFormat = length;
+  media_type->pbFormat = buffer;
+  return buffer;
+}
+
+// Fills `props` from `sample`. Uses the IMediaSample2 fast path when the
+// sample supports it; otherwise reconstructs the properties field by field
+// from the IMediaSample getters.
+void GetSampleProperties(IMediaSample* sample, AM_SAMPLE2_PROPERTIES* props) {
+  rtc::scoped_refptr<IMediaSample2> sample2;
+  if (SUCCEEDED(GetComInterface(sample, &sample2))) {
+    sample2->GetProperties(sizeof(*props), reinterpret_cast<BYTE*>(props));
+    return;
+  }
+
+  // Get the properties the hard way.
+  props->cbData = sizeof(*props);
+  props->dwTypeSpecificFlags = 0;
+  props->dwStreamId = AM_STREAM_MEDIA;
+  props->dwSampleFlags = 0;
+
+  if (sample->IsDiscontinuity() == S_OK)
+    props->dwSampleFlags |= AM_SAMPLE_DATADISCONTINUITY;
+
+  if (sample->IsPreroll() == S_OK)
+    props->dwSampleFlags |= AM_SAMPLE_PREROLL;
+
+  if (sample->IsSyncPoint() == S_OK)
+    props->dwSampleFlags |= AM_SAMPLE_SPLICEPOINT;
+
+  if (SUCCEEDED(sample->GetTime(&props->tStart, &props->tStop)))
+    props->dwSampleFlags |= AM_SAMPLE_TIMEVALID | AM_SAMPLE_STOPVALID;
+
+  // Note: when GetMediaType() returns S_OK the caller becomes responsible
+  // for freeing props->pMediaType.
+  if (sample->GetMediaType(&props->pMediaType) == S_OK)
+    props->dwSampleFlags |= AM_SAMPLE_TYPECHANGED;
+
+  sample->GetPointer(&props->pbBuffer);
+  props->lActual = sample->GetActualDataLength();
+  props->cbBuffer = sample->GetSize();
+}
+
+// Returns true if the media type is supported, false otherwise.
+// For supported types, the `capability` will be populated accordingly.
+bool TranslateMediaTypeToVideoCaptureCapability(
+    const AM_MEDIA_TYPE* media_type,
+    VideoCaptureCapability* capability) {
+  RTC_DCHECK(capability);
+  if (!media_type || media_type->majortype != MEDIATYPE_Video ||
+      !media_type->pbFormat) {
+    return false;
+  }
+
+  // Locate the BITMAPINFOHEADER within the format block. Only the two video
+  // info layouts are supported; any other format type is rejected.
+  // (The VideoInfo2 test previously used `!=`, which cast arbitrary format
+  // blocks to VIDEOINFOHEADER2 and rejected genuine FORMAT_VideoInfo2.)
+  const BITMAPINFOHEADER* bih = nullptr;
+  if (media_type->formattype == FORMAT_VideoInfo) {
+    bih = &reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat)->bmiHeader;
+  } else if (media_type->formattype == FORMAT_VideoInfo2) {
+    bih = &reinterpret_cast<VIDEOINFOHEADER2*>(media_type->pbFormat)->bmiHeader;
+  } else {
+    return false;
+  }
+
+  RTC_LOG(LS_INFO) << "TranslateMediaTypeToVideoCaptureCapability width:"
+                   << bih->biWidth << " height:" << bih->biHeight
+                   << " Compression:0x" << rtc::ToHex(bih->biCompression);
+
+  // Map subtype + FOURCC onto the internal VideoType enum.
+  const GUID& sub_type = media_type->subtype;
+  if (sub_type == MEDIASUBTYPE_MJPG &&
+      bih->biCompression == MAKEFOURCC('M', 'J', 'P', 'G')) {
+    capability->videoType = VideoType::kMJPEG;
+  } else if (sub_type == MEDIASUBTYPE_I420 &&
+             bih->biCompression == MAKEFOURCC('I', '4', '2', '0')) {
+    capability->videoType = VideoType::kI420;
+  } else if (sub_type == MEDIASUBTYPE_YUY2 &&
+             bih->biCompression == MAKEFOURCC('Y', 'U', 'Y', '2')) {
+    capability->videoType = VideoType::kYUY2;
+  } else if (sub_type == MEDIASUBTYPE_UYVY &&
+             bih->biCompression == MAKEFOURCC('U', 'Y', 'V', 'Y')) {
+    capability->videoType = VideoType::kUYVY;
+  } else if (sub_type == MEDIASUBTYPE_HDYC) {
+    capability->videoType = VideoType::kUYVY;
+  } else if (sub_type == MEDIASUBTYPE_RGB24 && bih->biCompression == BI_RGB) {
+    capability->videoType = VideoType::kRGB24;
+  } else {
+    return false;
+  }
+
+  // Store the incoming width and height
+  capability->width = bih->biWidth;
+
+  // Store the incoming height,
+  // for RGB24 we assume the frame to be upside down
+  if (sub_type == MEDIASUBTYPE_RGB24 && bih->biHeight > 0) {
+    capability->height = -(bih->biHeight);
+  } else {
+    capability->height = abs(bih->biHeight);
+  }
+
+  return true;
+}
+
+// Enumerates the media types this sink is willing to accept, one per entry
+// in `format_preference_order_`, with the capability's own type (when
+// supported) moved to the front so it is offered first.
+class MediaTypesEnum : public IEnumMediaTypes {
+ public:
+  MediaTypesEnum(const VideoCaptureCapability& capability)
+      : capability_(capability),
+        format_preference_order_(
+            {// Default preferences, sorted by cost-to-convert-to-i420.
+             VideoType::kI420, VideoType::kYUY2, VideoType::kRGB24,
+             VideoType::kUYVY, VideoType::kMJPEG}) {
+    // Use the preferred video type, if supported.
+    auto it = std::find(format_preference_order_.begin(),
+                        format_preference_order_.end(), capability_.videoType);
+    if (it != format_preference_order_.end()) {
+      RTC_LOG(LS_INFO) << "Selected video type: " << *it;
+      // Move it to the front of the list, if it isn't already there.
+      if (it != format_preference_order_.begin()) {
+        format_preference_order_.splice(format_preference_order_.begin(),
+                                        format_preference_order_, it,
+                                        std::next(it));
+      }
+    } else {
+      // Log the requested type itself; dereferencing `it` here was
+      // undefined behavior since it equals end() in this branch.
+      RTC_LOG(LS_WARNING) << "Unsupported video type: "
+                          << static_cast<int>(capability_.videoType)
+                          << ", using default preference list.";
+    }
+  }
+
+ protected:
+  virtual ~MediaTypesEnum() {}
+
+ private:
+  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override {
+    if (riid == IID_IUnknown || riid == IID_IEnumMediaTypes) {
+      *ppv = static_cast<IEnumMediaTypes*>(this);
+      AddRef();
+      return S_OK;
+    }
+    return E_NOINTERFACE;
+  }
+
+  // IEnumMediaTypes
+  STDMETHOD(Clone)(IEnumMediaTypes** pins) {
+    RTC_DCHECK_NOTREACHED();
+    return E_NOTIMPL;
+  }
+
+  // Builds one CoTaskMemAlloc'd AM_MEDIA_TYPE (with a VIDEOINFOHEADER
+  // format block) per remaining preference-list entry, up to `count`.
+  // Ownership of the returned types passes to the caller.
+  STDMETHOD(Next)(ULONG count, AM_MEDIA_TYPE** types, ULONG* fetched) {
+    RTC_DCHECK(count > 0);
+    RTC_DCHECK(types);
+    // fetched may be NULL.
+    if (fetched)
+      *fetched = 0;
+
+    for (ULONG i = 0;
+         i < count && pos_ < static_cast<int>(format_preference_order_.size());
+         ++i) {
+      AM_MEDIA_TYPE* media_type = reinterpret_cast<AM_MEDIA_TYPE*>(
+          CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE)));
+      ZeroMemory(media_type, sizeof(*media_type));
+      types[i] = media_type;
+      VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(
+          AllocMediaTypeFormatBuffer(media_type, sizeof(VIDEOINFOHEADER)));
+      ZeroMemory(vih, sizeof(*vih));
+      vih->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+      vih->bmiHeader.biPlanes = 1;
+      vih->bmiHeader.biClrImportant = 0;
+      vih->bmiHeader.biClrUsed = 0;
+      if (capability_.maxFPS != 0)
+        vih->AvgTimePerFrame = 10000000 / capability_.maxFPS;
+
+      SetRectEmpty(&vih->rcSource);  // we want the whole image area rendered.
+      SetRectEmpty(&vih->rcTarget);  // no particular destination rectangle
+
+      media_type->majortype = MEDIATYPE_Video;
+      media_type->formattype = FORMAT_VideoInfo;
+      media_type->bTemporalCompression = FALSE;
+
+      // Set format information.
+      auto format_it = std::next(format_preference_order_.begin(), pos_++);
+      SetMediaInfoFromVideoType(*format_it, &vih->bmiHeader, media_type);
+
+      vih->bmiHeader.biWidth = capability_.width;
+      vih->bmiHeader.biHeight = capability_.height;
+      // width * height * bits-per-pixel / 8, written to stay in integers.
+      vih->bmiHeader.biSizeImage = ((vih->bmiHeader.biBitCount / 4) *
+                                    capability_.height * capability_.width) /
+                                   2;
+
+      RTC_DCHECK(vih->bmiHeader.biSizeImage);
+      media_type->lSampleSize = vih->bmiHeader.biSizeImage;
+      media_type->bFixedSizeSamples = true;
+      if (fetched)
+        ++(*fetched);
+    }
+    return pos_ == static_cast<int>(format_preference_order_.size()) ? S_FALSE
+                                                                     : S_OK;
+  }
+
+  // Writes the FOURCC, bit depth and DirectShow subtype for `video_type`.
+  static void SetMediaInfoFromVideoType(VideoType video_type,
+                                        BITMAPINFOHEADER* bitmap_header,
+                                        AM_MEDIA_TYPE* media_type) {
+    switch (video_type) {
+      case VideoType::kI420:
+        bitmap_header->biCompression = MAKEFOURCC('I', '4', '2', '0');
+        bitmap_header->biBitCount = 12;  // bit per pixel
+        media_type->subtype = MEDIASUBTYPE_I420;
+        break;
+      case VideoType::kYUY2:
+        bitmap_header->biCompression = MAKEFOURCC('Y', 'U', 'Y', '2');
+        bitmap_header->biBitCount = 16;  // bit per pixel
+        media_type->subtype = MEDIASUBTYPE_YUY2;
+        break;
+      case VideoType::kRGB24:
+        bitmap_header->biCompression = BI_RGB;
+        bitmap_header->biBitCount = 24;  // bit per pixel
+        media_type->subtype = MEDIASUBTYPE_RGB24;
+        break;
+      case VideoType::kUYVY:
+        bitmap_header->biCompression = MAKEFOURCC('U', 'Y', 'V', 'Y');
+        bitmap_header->biBitCount = 16;  // bit per pixel
+        media_type->subtype = MEDIASUBTYPE_UYVY;
+        break;
+      case VideoType::kMJPEG:
+        bitmap_header->biCompression = MAKEFOURCC('M', 'J', 'P', 'G');
+        bitmap_header->biBitCount = 12;  // bit per pixel
+        media_type->subtype = MEDIASUBTYPE_MJPG;
+        break;
+      default:
+        RTC_DCHECK_NOTREACHED();
+    }
+  }
+
+  STDMETHOD(Skip)(ULONG count) {
+    RTC_DCHECK_NOTREACHED();
+    return E_NOTIMPL;
+  }
+
+  STDMETHOD(Reset)() {
+    pos_ = 0;
+    return S_OK;
+  }
+
+  int pos_ = 0;
+  const VideoCaptureCapability capability_;
+  std::list<VideoType> format_preference_order_;
+};
+
+} // namespace
+
+// Constructs the pin; called on the main thread before any capture starts.
+CaptureInputPin::CaptureInputPin(CaptureSinkFilter* filter) {
+  capture_checker_.Detach();
+  // No reference held to avoid circular references.
+  info_.pFilter = filter;
+  info_.dir = PINDIR_INPUT;
+}
+
+CaptureInputPin::~CaptureInputPin() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  ResetMediaType(&media_type_);
+}
+
+// Records the capability the client wants; must be called while the filter
+// graph is stopped. Clears any previously negotiated capability.
+HRESULT CaptureInputPin::SetRequestedCapability(
+    const VideoCaptureCapability& capability) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  RTC_DCHECK(Filter()->IsStopped());
+  requested_capability_ = capability;
+  resulting_capability_ = VideoCaptureCapability();
+  return S_OK;
+}
+
+// Called when the owning filter transitions to running: resets error/flush
+// state and re-arms the capture-thread checker for the new capture thread.
+void CaptureInputPin::OnFilterActivated() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  runtime_error_ = false;
+  flushing_ = false;
+  capture_checker_.Detach();
+  capture_thread_id_ = 0;
+}
+
+void CaptureInputPin::OnFilterDeactivated() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  // Expedite shutdown by raising the flushing flag so no further processing
+  // on the capture thread occurs. When the graph is stopped and all filters
+  // have been told to stop, the media controller (graph) will wait for the
+  // capture thread to stop.
+  flushing_ = true;
+  if (allocator_)
+    allocator_->Decommit();
+}
+
+// Returns the owning filter (non-owning pointer stored in info_.pFilter).
+CaptureSinkFilter* CaptureInputPin::Filter() const {
+  return static_cast<CaptureSinkFilter*>(info_.pFilter);
+}
+
+// Tries to connect to `receive_pin` with a single fully specified media
+// type. On success, stores the negotiated capability and a deep copy of
+// the media type. Returns VFW_E_TYPE_NOT_ACCEPTED if we can't translate
+// the type, or whatever ReceiveConnection() returned on the peer side.
+HRESULT CaptureInputPin::AttemptConnection(IPin* receive_pin,
+                                           const AM_MEDIA_TYPE* media_type) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  RTC_DCHECK(Filter()->IsStopped());
+
+  // Check that the connection is valid -- need to do this for every
+  // connect attempt since BreakConnect will undo it.
+  HRESULT hr = CheckDirection(receive_pin);
+  if (FAILED(hr))
+    return hr;
+
+  if (!TranslateMediaTypeToVideoCaptureCapability(media_type,
+                                                  &resulting_capability_)) {
+    ClearAllocator(true);
+    return VFW_E_TYPE_NOT_ACCEPTED;
+  }
+
+  // See if the other pin will accept this type.
+  hr = receive_pin->ReceiveConnection(static_cast<IPin*>(this), media_type);
+  if (FAILED(hr)) {
+    receive_pin_ = nullptr;  // Should already be null, but just in case.
+    return hr;
+  }
+
+  // Should have been set as part of the connect process.
+  RTC_DCHECK_EQ(receive_pin_, receive_pin);
+
+  // Keep our own deep copy of the agreed media type.
+  ResetMediaType(&media_type_);
+  CopyMediaType(&media_type_, media_type);
+
+  return S_OK;
+}
+
+// Collects fully specified media types that partially match `media_type`,
+// first from the peer pin's enumeration, then from our own. The returned
+// types are CoTaskMemAlloc'd; the caller must free each with FreeMediaType().
+std::vector<AM_MEDIA_TYPE*> CaptureInputPin::DetermineCandidateFormats(
+    IPin* receive_pin,
+    const AM_MEDIA_TYPE* media_type) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  RTC_DCHECK(receive_pin);
+  RTC_DCHECK(media_type);
+
+  std::vector<AM_MEDIA_TYPE*> ret;
+
+  for (int i = 0; i < 2; i++) {
+    IEnumMediaTypes* types = nullptr;
+    if (i == 0) {
+      // First time around, try types from receive_pin.
+      receive_pin->EnumMediaTypes(&types);
+    } else {
+      // Then try ours.
+      EnumMediaTypes(&types);
+    }
+
+    if (types) {
+      while (true) {
+        ULONG fetched = 0;
+        AM_MEDIA_TYPE* this_type = nullptr;
+        if (types->Next(1, &this_type, &fetched) != S_OK)
+          break;
+
+        if (IsMediaTypePartialMatch(*this_type, *media_type)) {
+          ret.push_back(this_type);
+        } else {
+          FreeMediaType(this_type);
+        }
+      }
+      types->Release();
+    }
+  }
+
+  return ret;
+}
+
+// Drops our reference to the shared memory allocator, optionally
+// decommitting its buffers first.
+void CaptureInputPin::ClearAllocator(bool decommit) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  if (!allocator_)
+    return;
+  if (decommit)
+    allocator_->Decommit();
+  allocator_ = nullptr;
+}
+
+// A connection is only valid between pins of opposite direction; `pin` is
+// the peer, so its direction must differ from ours.
+HRESULT CaptureInputPin::CheckDirection(IPin* pin) const {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  PIN_DIRECTION pd;
+  pin->QueryDirection(&pd);
+  // Fairly basic check, make sure we don't pair input with input etc.
+  return pd == info_.dir ? VFW_E_INVALID_DIRECTION : S_OK;
+}
+
+// IUnknown for the pin. Supports IMemInputPin (also answering IUnknown,
+// disambiguating the two IUnknown bases) and IPin.
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::QueryInterface(REFIID riid,
+                                                                  void** ppv) {
+  (*ppv) = nullptr;
+  if (riid == IID_IUnknown || riid == IID_IMemInputPin) {
+    *ppv = static_cast<IMemInputPin*>(this);
+  } else if (riid == IID_IPin) {
+    *ppv = static_cast<IPin*>(this);
+  }
+
+  if (!(*ppv))
+    return E_NOINTERFACE;
+
+  // Same object either way; AddRef through one base is sufficient.
+  static_cast<IMemInputPin*>(this)->AddRef();
+  return S_OK;
+}
+
+// Initiates a connection to `receive_pin`. A fully specified media type is
+// tried directly; a partial type is expanded into candidate formats which
+// are tried in order until one is accepted.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::Connect(IPin* receive_pin, const AM_MEDIA_TYPE* media_type) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  if (!media_type || !receive_pin)
+    return E_POINTER;
+
+  if (!Filter()->IsStopped())
+    return VFW_E_NOT_STOPPED;
+
+  if (receive_pin_) {
+    RTC_DCHECK_NOTREACHED();
+    return VFW_E_ALREADY_CONNECTED;
+  }
+
+  if (IsMediaTypeFullySpecified(*media_type))
+    return AttemptConnection(receive_pin, media_type);
+
+  auto types = DetermineCandidateFormats(receive_pin, media_type);
+  bool connected = false;
+  for (auto* type : types) {
+    // Try each fully specified candidate (`type`), not the partial
+    // `media_type` we were handed; keep iterating after success so the
+    // remaining candidates are still freed.
+    if (!connected && AttemptConnection(receive_pin, type) == S_OK)
+      connected = true;
+
+    FreeMediaType(type);
+  }
+
+  return connected ? S_OK : VFW_E_NO_ACCEPTABLE_TYPES;
+}
+
+// Called by the peer (output) pin to complete a connection it initiated.
+// Validates direction and the media type, then records the peer and a deep
+// copy of the agreed media type.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::ReceiveConnection(IPin* connector,
+                                   const AM_MEDIA_TYPE* media_type) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  RTC_DCHECK(Filter()->IsStopped());
+
+  if (receive_pin_) {
+    RTC_DCHECK_NOTREACHED();
+    return VFW_E_ALREADY_CONNECTED;
+  }
+
+  HRESULT hr = CheckDirection(connector);
+  if (FAILED(hr))
+    return hr;
+
+  if (!TranslateMediaTypeToVideoCaptureCapability(media_type,
+                                                  &resulting_capability_))
+    return VFW_E_TYPE_NOT_ACCEPTED;
+
+  // Complete the connection
+
+  receive_pin_ = connector;
+  ResetMediaType(&media_type_);
+  CopyMediaType(&media_type_, media_type);
+
+  return S_OK;
+}
+
+// Breaks the connection; only legal while the filter is stopped. Returns
+// S_FALSE when there was nothing to disconnect.
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::Disconnect() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  if (!Filter()->IsStopped())
+    return VFW_E_NOT_STOPPED;
+
+  if (!receive_pin_)
+    return S_FALSE;
+
+  ClearAllocator(true);
+  receive_pin_ = nullptr;
+
+  return S_OK;
+}
+
+// Returns the connected peer pin with a reference, or VFW_E_NOT_CONNECTED.
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::ConnectedTo(IPin** pin) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+
+  if (!receive_pin_)
+    return VFW_E_NOT_CONNECTED;
+
+  *pin = receive_pin_.get();
+  receive_pin_->AddRef();
+
+  return S_OK;
+}
+
+// Copies the negotiated media type into caller-owned storage; the caller
+// frees the copied members (e.g. via FreeMediaType/ResetMediaType).
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::ConnectionMediaType(AM_MEDIA_TYPE* media_type) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+
+  if (!receive_pin_)
+    return VFW_E_NOT_CONNECTED;
+
+  CopyMediaType(media_type, &media_type_);
+
+  return S_OK;
+}
+
+// Copies the pin info; per DirectShow contract the caller receives (and
+// must release) a reference to the owning filter.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::QueryPinInfo(PIN_INFO* info) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  *info = info_;
+  if (info_.pFilter)
+    info_.pFilter->AddRef();
+  return S_OK;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::QueryDirection(PIN_DIRECTION* pin_dir) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  *pin_dir = info_.dir;
+  return S_OK;
+}
+
+// Returns the pin name in a CoTaskMemAlloc'd buffer the caller frees.
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::QueryId(LPWSTR* id) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  size_t len = lstrlenW(info_.achName);
+  *id = reinterpret_cast<LPWSTR>(CoTaskMemAlloc((len + 1) * sizeof(wchar_t)));
+  if (!*id)
+    return E_OUTOFMEMORY;  // Previously dereferenced a null pointer on OOM.
+  lstrcpyW(*id, info_.achName);
+  return S_OK;
+}
+
+// Per the IPin contract, S_OK means the type is acceptable and S_FALSE
+// means it is not. Works on a copy so resulting_capability_ isn't
+// clobbered by a mere query.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::QueryAccept(const AM_MEDIA_TYPE* media_type) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  RTC_DCHECK(Filter()->IsStopped());
+  VideoCaptureCapability capability(resulting_capability_);
+  // Previously inverted: a supported type returned S_FALSE (reject).
+  return TranslateMediaTypeToVideoCaptureCapability(media_type, &capability)
+             ? S_OK
+             : S_FALSE;
+}
+
+// Hands out an enumerator over the media types we accept, seeded with the
+// client's requested capability. The enumerator starts at refcount 0, so
+// the explicit AddRef() gives the caller its reference.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::EnumMediaTypes(IEnumMediaTypes** types) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  *types = new ComRefCount<MediaTypesEnum>(requested_capability_);
+  (*types)->AddRef();
+  return S_OK;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::QueryInternalConnections(IPin** pins, ULONG* count) {
+  return E_NOTIMPL;
+}
+
+// No end-of-stream handling needed for a live capture sink.
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::EndOfStream() {
+  return S_OK;
+}
+
+// flushing_ is read (relaxed) by Receive() on the capture thread; setting
+// it here makes subsequent samples be dropped.
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::BeginFlush() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  flushing_ = true;
+  return S_OK;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::EndFlush() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  flushing_ = false;
+  runtime_error_ = false;
+  return S_OK;
+}
+
+// Segment boundaries carry no meaning for live capture; accept and ignore.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::NewSegment(REFERENCE_TIME start,
+                            REFERENCE_TIME stop,
+                            double rate) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  return S_OK;
+}
+
+// Returns our preferred allocator, lazily creating the standard DirectShow
+// memory allocator on first use. The caller gets its own reference; we
+// keep one in allocator_.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::GetAllocator(IMemAllocator** allocator) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  if (allocator_ == nullptr) {
+    HRESULT hr = CoCreateInstance(CLSID_MemoryAllocator, 0,
+                                  CLSCTX_INPROC_SERVER, IID_IMemAllocator,
+                                  reinterpret_cast<void**>(allocator));
+    if (FAILED(hr))
+      return hr;
+    // Transfer the CoCreateInstance reference into allocator_ (leaves
+    // *allocator null until reassigned below).
+    allocator_.swap(allocator);
+  }
+  *allocator = allocator_.get();
+  allocator_->AddRef();
+  return S_OK;
+}
+
+// Accepts the allocator chosen by the upstream pin. The swap/AddRef/Release
+// dance adopts the new allocator and drops our reference to the old one.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::NotifyAllocator(IMemAllocator* allocator, BOOL read_only) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  allocator_.swap(&allocator);
+  if (allocator_)
+    allocator_->AddRef();
+  if (allocator)
+    allocator->Release();
+  return S_OK;
+}
+
+// No special buffer requirements; the upstream pin decides.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::GetAllocatorRequirements(ALLOCATOR_PROPERTIES* props) {
+  return E_NOTIMPL;
+}
+
+// Delivery entry point for captured samples; runs on the capture thread.
+// Drops samples while flushing, fails fast after a runtime error, handles
+// in-band format changes, then forwards the raw buffer to the filter.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::Receive(IMediaSample* media_sample) {
+  RTC_DCHECK_RUN_ON(&capture_checker_);
+
+  CaptureSinkFilter* const filter = static_cast<CaptureSinkFilter*>(Filter());
+
+  // flushing_/runtime_error_ are atomics written on the main thread.
+  if (flushing_.load(std::memory_order_relaxed))
+    return S_FALSE;
+
+  if (runtime_error_.load(std::memory_order_relaxed))
+    return VFW_E_RUNTIME_ERROR;
+
+  if (!capture_thread_id_) {
+    // Make sure we set the thread name only once.
+    capture_thread_id_ = GetCurrentThreadId();
+    rtc::SetCurrentThreadName("webrtc_video_capture");
+  }
+
+  AM_SAMPLE2_PROPERTIES sample_props = {};
+  GetSampleProperties(media_sample, &sample_props);
+  // Has the format changed in this sample?
+  if (sample_props.dwSampleFlags & AM_SAMPLE_TYPECHANGED) {
+    // Check the derived class accepts the new format.
+    // This shouldn't fail as the source must call QueryAccept first.
+
+    // Note: This will modify resulting_capability_.
+    // That should be OK as long as resulting_capability_ is only modified
+    // on this thread while it is running (filter is not stopped), and only
+    // modified on the main thread when the filter is stopped (i.e. this thread
+    // is not running).
+    if (!TranslateMediaTypeToVideoCaptureCapability(sample_props.pMediaType,
+                                                    &resulting_capability_)) {
+      // Raise a runtime error if we fail the media type
+      runtime_error_ = true;
+      EndOfStream();
+      Filter()->NotifyEvent(EC_ERRORABORT, VFW_E_TYPE_NOT_ACCEPTED, 0);
+      return VFW_E_INVALIDMEDIATYPE;
+    }
+  }
+
+  filter->ProcessCapturedFrame(sample_props.pbBuffer, sample_props.lActual,
+                               resulting_capability_);
+
+  return S_OK;
+}
+
+// Delivers a batch of samples one at a time; stops at the first non-S_OK
+// result and reports how many samples were fully processed.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureInputPin::ReceiveMultiple(IMediaSample** samples,
+                                 long count,
+                                 long* processed) {
+  *processed = 0;
+  HRESULT result = S_OK;
+  for (long i = 0; i < count; ++i) {
+    result = Receive(samples[i]);
+    if (result != S_OK)
+      break;
+    *processed = i + 1;
+  }
+  return result;
+}
+
+// Receive() never blocks waiting for resources, so report S_FALSE.
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::ReceiveCanBlock() {
+  return S_FALSE;
+}
+
+// ----------------------------------------------------------------------------
+
+// Creates the filter and its single input pin. `capture_observer` is not
+// owned; it receives the decoded frames.
+CaptureSinkFilter::CaptureSinkFilter(VideoCaptureImpl* capture_observer)
+    : input_pin_(new ComRefCount<CaptureInputPin>(this)),
+      capture_observer_(capture_observer) {}
+
+CaptureSinkFilter::~CaptureSinkFilter() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+}
+
+// Forwards the requested capability to the input pin.
+HRESULT CaptureSinkFilter::SetRequestedCapability(
+    const VideoCaptureCapability& capability) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  // Called on the same thread as capture is started on.
+  return input_pin_->SetRequestedCapability(capability);
+}
+
+// State transitions never pend, so `msecs` is ignored.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::GetState(DWORD msecs, FILTER_STATE* state) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  *state = state_;
+  return S_OK;
+}
+
+// We don't use a reference clock; accept and ignore it.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::SetSyncSource(IReferenceClock* clock) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  return S_OK;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::GetSyncSource(IReferenceClock** clock) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  return E_NOTIMPL;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::Pause() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  state_ = State_Paused;
+  return S_OK;
+}
+
+// Stopped -> Running must pass through Paused per the DirectShow state
+// machine; the pin is then told capture is active.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::Run(REFERENCE_TIME tStart) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  if (state_ == State_Stopped)
+    Pause();
+
+  state_ = State_Running;
+  input_pin_->OnFilterActivated();
+
+  return S_OK;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::Stop() {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  if (state_ == State_Stopped)
+    return S_OK;
+
+  state_ = State_Stopped;
+  input_pin_->OnFilterDeactivated();
+
+  return S_OK;
+}
+
+// Hands out an enumerator over our single input pin. The enumerator starts
+// at refcount 0, so the explicit AddRef() gives the caller its reference.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::EnumPins(IEnumPins** pins) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  *pins = new ComRefCount<class EnumPins>(input_pin_.get());
+  (*pins)->AddRef();
+  return S_OK;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::FindPin(LPCWSTR id,
+                                                             IPin** pin) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  // There's no ID assigned to our input pin, so looking it up based on one
+  // is pointless (and in practice, this method isn't being used).
+  return VFW_E_NOT_FOUND;
+}
+
+// Copies the filter info; per contract the caller receives (and must
+// release) a reference to the graph, when set.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::QueryFilterInfo(FILTER_INFO* info) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  *info = info_;
+  if (info->pGraph)
+    info->pGraph->AddRef();
+  return S_OK;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::JoinFilterGraph(IFilterGraph* graph, LPCWSTR name) {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  RTC_DCHECK(IsStopped());
+
+  // Note, since a reference to the filter is held by the graph manager,
+  // filters must not hold a reference to the graph. If they would, we'd have
+  // a circular reference. Instead, a pointer to the graph can be held without
+  // reference. See documentation for IBaseFilter::JoinFilterGraph for more.
+  info_.pGraph = graph;  // No AddRef().
+  sink_ = nullptr;
+
+  if (info_.pGraph) {
+    // make sure we don't hold on to the reference we may receive.
+    // Note that this assumes the same object identity, but so be it.
+    rtc::scoped_refptr<IMediaEventSink> sink;
+    GetComInterface(info_.pGraph, &sink);
+    sink_ = sink.get();
+  }
+
+  info_.achName[0] = L'\0';
+  if (name)
+    lstrcpynW(info_.achName, name, arraysize(info_.achName));
+
+  return S_OK;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::QueryVendorInfo(LPWSTR* vendor_info) {
+  return E_NOTIMPL;
+}
+
+// Forwards a raw captured frame to the observer. Runs on the capture
+// thread; `buffer` is only valid for the duration of the call.
+void CaptureSinkFilter::ProcessCapturedFrame(
+    unsigned char* buffer,
+    size_t length,
+    const VideoCaptureCapability& frame_info) {
+  // Called on the capture thread.
+  capture_observer_->IncomingFrame(buffer, length, frame_info);
+}
+
+// Posts a graph event via the graph's IMediaEventSink, if one was found in
+// JoinFilterGraph(). No-op when not in a graph.
+void CaptureSinkFilter::NotifyEvent(long code,
+                                    LONG_PTR param1,
+                                    LONG_PTR param2) {
+  // Called on the capture thread.
+  if (!sink_)
+    return;
+
+  // EC_COMPLETE requires param2 to identify the sending filter.
+  if (EC_COMPLETE == code)
+    param2 = reinterpret_cast<LONG_PTR>(static_cast<IBaseFilter*>(this));
+  sink_->Notify(code, param1, param2);
+}
+
+bool CaptureSinkFilter::IsStopped() const {
+  RTC_DCHECK_RUN_ON(&main_checker_);
+  return state_ == State_Stopped;
+}
+
+// IUnknown for the filter: IUnknown, IPersist and IBaseFilter all resolve
+// to the same IBaseFilter subobject.
+COM_DECLSPEC_NOTHROW STDMETHODIMP
+CaptureSinkFilter::QueryInterface(REFIID riid, void** ppv) {
+  if (riid == IID_IUnknown || riid == IID_IPersist || riid == IID_IBaseFilter) {
+    *ppv = static_cast<IBaseFilter*>(this);
+    AddRef();
+    return S_OK;
+  }
+  return E_NOINTERFACE;
+}
+
+COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::GetClassID(CLSID* clsid) {
+  *clsid = CLSID_SINKFILTER;
+  return S_OK;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.h b/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.h
new file mode 100644
index 0000000000..b0fabda3cd
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
+#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
+
+#include <dshow.h>
+
+#include <atomic>
+#include <memory>
+#include <vector>
+
+#include "api/sequence_checker.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "modules/video_capture/windows/help_functions_ds.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+// forward declarations
+class CaptureSinkFilter;
+
+// Input pin for camera input
+// Implements IMemInputPin, IPin.
+// Input pin for camera input
+// Implements IMemInputPin, IPin. Receives media samples from the upstream
+// capture filter's output pin and hands them to the owning CaptureSinkFilter.
+class CaptureInputPin : public IMemInputPin, public IPin {
+ public:
+  CaptureInputPin(CaptureSinkFilter* filter);
+
+  // Records the capability (resolution/format/fps) the pin should negotiate
+  // during connection. Main thread.
+  HRESULT SetRequestedCapability(const VideoCaptureCapability& capability);
+
+  // Notifications from the filter.
+  void OnFilterActivated();
+  void OnFilterDeactivated();
+
+ protected:
+  virtual ~CaptureInputPin();
+
+ private:
+  // Returns the owning filter (stored referencelessly in info_.pFilter).
+  CaptureSinkFilter* Filter() const;
+
+  // Connection negotiation helpers used by Connect()/ReceiveConnection().
+  HRESULT AttemptConnection(IPin* receive_pin, const AM_MEDIA_TYPE* media_type);
+  std::vector<AM_MEDIA_TYPE*> DetermineCandidateFormats(
+      IPin* receive_pin,
+      const AM_MEDIA_TYPE* media_type);
+  // Releases the sample allocator; optionally decommits it first.
+  void ClearAllocator(bool decommit);
+  HRESULT CheckDirection(IPin* pin) const;
+
+  // IUnknown
+  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override;
+
+  // clang-format off
+  // clang isn't sure what to do with the longer STDMETHOD() function
+  // declarations.
+
+  // IPin
+  STDMETHOD(Connect)(IPin* receive_pin,
+                     const AM_MEDIA_TYPE* media_type) override;
+  STDMETHOD(ReceiveConnection)(IPin* connector,
+                               const AM_MEDIA_TYPE* media_type) override;
+  STDMETHOD(Disconnect)() override;
+  STDMETHOD(ConnectedTo)(IPin** pin) override;
+  STDMETHOD(ConnectionMediaType)(AM_MEDIA_TYPE* media_type) override;
+  STDMETHOD(QueryPinInfo)(PIN_INFO* info) override;
+  STDMETHOD(QueryDirection)(PIN_DIRECTION* pin_dir) override;
+  STDMETHOD(QueryId)(LPWSTR* id) override;
+  STDMETHOD(QueryAccept)(const AM_MEDIA_TYPE* media_type) override;
+  STDMETHOD(EnumMediaTypes)(IEnumMediaTypes** types) override;
+  STDMETHOD(QueryInternalConnections)(IPin** pins, ULONG* count) override;
+  STDMETHOD(EndOfStream)() override;
+  STDMETHOD(BeginFlush)() override;
+  STDMETHOD(EndFlush)() override;
+  STDMETHOD(NewSegment)(REFERENCE_TIME start, REFERENCE_TIME stop,
+                        double rate) override;
+
+  // IMemInputPin
+  STDMETHOD(GetAllocator)(IMemAllocator** allocator) override;
+  STDMETHOD(NotifyAllocator)(IMemAllocator* allocator, BOOL read_only) override;
+  STDMETHOD(GetAllocatorRequirements)(ALLOCATOR_PROPERTIES* props) override;
+  STDMETHOD(Receive)(IMediaSample* sample) override;
+  STDMETHOD(ReceiveMultiple)(IMediaSample** samples, long count,
+                             long* processed) override;
+  STDMETHOD(ReceiveCanBlock)() override;
+  // clang-format on
+
+  // Thread-affinity checkers: main (control) thread vs. DirectShow's
+  // streaming/capture thread.
+  SequenceChecker main_checker_;
+  SequenceChecker capture_checker_;
+
+  VideoCaptureCapability requested_capability_ RTC_GUARDED_BY(main_checker_);
+  // Accessed on the main thread when Filter()->IsStopped() (capture thread not
+  // running), otherwise accessed on the capture thread.
+  VideoCaptureCapability resulting_capability_;
+  DWORD capture_thread_id_ = 0;
+  rtc::scoped_refptr<IMemAllocator> allocator_ RTC_GUARDED_BY(main_checker_);
+  rtc::scoped_refptr<IPin> receive_pin_ RTC_GUARDED_BY(main_checker_);
+  // Flags toggled from the control thread and read on the capture thread.
+  std::atomic_bool flushing_{false};
+  std::atomic_bool runtime_error_{false};
+  // Holds a referenceless pointer to the owning filter, the name and
+  // direction of the pin. The filter pointer can be considered const.
+  PIN_INFO info_ = {};
+  AM_MEDIA_TYPE media_type_ RTC_GUARDED_BY(main_checker_) = {};
+};
+
+// Implement IBaseFilter (including IPersist and IMediaFilter).
+// Implement IBaseFilter (including IPersist and IMediaFilter).
+// A renderer-style DirectShow filter with a single input pin; delivers
+// captured frames to a VideoCaptureImpl observer.
+class CaptureSinkFilter : public IBaseFilter {
+ public:
+  CaptureSinkFilter(VideoCaptureImpl* capture_observer);
+
+  // Forwards the requested capability to the input pin. Main thread.
+  HRESULT SetRequestedCapability(const VideoCaptureCapability& capability);
+
+  // Called on the capture thread.
+  void ProcessCapturedFrame(unsigned char* buffer,
+                            size_t length,
+                            const VideoCaptureCapability& frame_info);
+
+  // Posts a graph event to the graph's event sink (capture thread).
+  void NotifyEvent(long code, LONG_PTR param1, LONG_PTR param2);
+  // True when state_ == State_Stopped. Main thread.
+  bool IsStopped() const;
+
+  // IUnknown
+  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override;
+
+  // IPersist
+  STDMETHOD(GetClassID)(CLSID* clsid) override;
+
+  // IMediaFilter.
+  STDMETHOD(GetState)(DWORD msecs, FILTER_STATE* state) override;
+  STDMETHOD(SetSyncSource)(IReferenceClock* clock) override;
+  STDMETHOD(GetSyncSource)(IReferenceClock** clock) override;
+  STDMETHOD(Pause)() override;
+  STDMETHOD(Run)(REFERENCE_TIME start) override;
+  STDMETHOD(Stop)() override;
+
+  // IBaseFilter
+  STDMETHOD(EnumPins)(IEnumPins** pins) override;
+  STDMETHOD(FindPin)(LPCWSTR id, IPin** pin) override;
+  STDMETHOD(QueryFilterInfo)(FILTER_INFO* info) override;
+  STDMETHOD(JoinFilterGraph)(IFilterGraph* graph, LPCWSTR name) override;
+  STDMETHOD(QueryVendorInfo)(LPWSTR* vendor_info) override;
+
+ protected:
+  virtual ~CaptureSinkFilter();
+
+ private:
+  SequenceChecker main_checker_;
+  // The single input pin; ref-counted COM wrapper, created in the ctor.
+  const rtc::scoped_refptr<ComRefCount<CaptureInputPin>> input_pin_;
+  // Receiver of captured frames. Not owned; must outlive this filter.
+  VideoCaptureImpl* const capture_observer_;
+  FILTER_INFO info_ RTC_GUARDED_BY(main_checker_) = {};
+  // Set/cleared in JoinFilterGraph. The filter must be stopped (no capture)
+  // at that time, so no lock is required. While the state is not stopped,
+  // the sink will be used from the capture thread.
+  IMediaEventSink* sink_ = nullptr;
+  FILTER_STATE state_ RTC_GUARDED_BY(main_checker_) = State_Stopped;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
diff --git a/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc b/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc
new file mode 100644
index 0000000000..b13ac074f8
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc
@@ -0,0 +1,322 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/windows/video_capture_ds.h"
+
+#include <dvdmedia.h> // VIDEOINFOHEADER2
+
+#include "modules/video_capture/video_capture_config.h"
+#include "modules/video_capture/windows/help_functions_ds.h"
+#include "modules/video_capture/windows/sink_filter_ds.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+// All DirectShow interface pointers start out null; they are created and
+// wired together in Init() / ConnectDVCamera().
+VideoCaptureDS::VideoCaptureDS()
+    : _captureFilter(NULL),
+      _graphBuilder(NULL),
+      _mediaControl(NULL),
+      _inputSendPin(NULL),
+      _outputCapturePin(NULL),
+      _dvFilter(NULL),
+      _inputDvPin(NULL),
+      _outputDvPin(NULL) {}
+
+// Tears the graph down in order: stop streaming, detach filters from the
+// graph, then release every COM interface we hold.
+VideoCaptureDS::~VideoCaptureDS() {
+  if (_mediaControl) {
+    _mediaControl->Stop();
+  }
+  if (_graphBuilder) {
+    // Remove filters before releasing them so the graph drops its references.
+    if (sink_filter_)
+      _graphBuilder->RemoveFilter(sink_filter_.get());
+    if (_captureFilter)
+      _graphBuilder->RemoveFilter(_captureFilter);
+    if (_dvFilter)
+      _graphBuilder->RemoveFilter(_dvFilter);
+  }
+  RELEASE_AND_CLEAR(_inputSendPin);
+  RELEASE_AND_CLEAR(_outputCapturePin);
+
+  RELEASE_AND_CLEAR(_captureFilter);  // release the capture device
+  RELEASE_AND_CLEAR(_dvFilter);
+
+  RELEASE_AND_CLEAR(_mediaControl);
+
+  RELEASE_AND_CLEAR(_inputDvPin);
+  RELEASE_AND_CLEAR(_outputDvPin);
+
+  // Release the graph last, after everything it referenced is gone.
+  RELEASE_AND_CLEAR(_graphBuilder);
+}
+
+// Builds the DirectShow capture graph for the device identified by
+// `deviceUniqueIdUTF8`: capture filter -> (graph) -> sink filter, and applies
+// the currently requested capability. Returns 0 on success, -1 on any
+// failure (partially-built state is cleaned up by the destructor).
+int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) {
+  const int32_t nameLength = (int32_t)strlen((char*)deviceUniqueIdUTF8);
+  if (nameLength >= kVideoCaptureUniqueNameLength)
+    return -1;
+
+  // Store the device name
+  _deviceUniqueId = new (std::nothrow) char[nameLength + 1];
+  memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+  if (_dsInfo.Init() != 0)
+    return -1;
+
+  _captureFilter = _dsInfo.GetDeviceFilter(deviceUniqueIdUTF8);
+  if (!_captureFilter) {
+    RTC_LOG(LS_INFO) << "Failed to create capture filter.";
+    return -1;
+  }
+
+  // Get the interface for DirectShow's GraphBuilder
+  HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
+                                IID_IGraphBuilder, (void**)&_graphBuilder);
+  if (FAILED(hr)) {
+    RTC_LOG(LS_INFO) << "Failed to create graph builder.";
+    return -1;
+  }
+
+  hr = _graphBuilder->QueryInterface(IID_IMediaControl, (void**)&_mediaControl);
+  if (FAILED(hr)) {
+    RTC_LOG(LS_INFO) << "Failed to create media control builder.";
+    return -1;
+  }
+  hr = _graphBuilder->AddFilter(_captureFilter, CAPTURE_FILTER_NAME);
+  if (FAILED(hr)) {
+    RTC_LOG(LS_INFO) << "Failed to add the capture device to the graph.";
+    return -1;
+  }
+
+  _outputCapturePin = GetOutputPin(_captureFilter, PIN_CATEGORY_CAPTURE);
+  if (!_outputCapturePin) {
+    RTC_LOG(LS_INFO) << "Failed to get output capture pin";
+    return -1;
+  }
+
+  // Create the sink filter used for receiving Captured frames.
+  sink_filter_ = new ComRefCount<CaptureSinkFilter>(this);
+
+  hr = _graphBuilder->AddFilter(sink_filter_.get(), SINK_FILTER_NAME);
+  if (FAILED(hr)) {
+    RTC_LOG(LS_INFO) << "Failed to add the send filter to the graph.";
+    return -1;
+  }
+
+  _inputSendPin = GetInputPin(sink_filter_.get());
+  if (!_inputSendPin) {
+    RTC_LOG(LS_INFO) << "Failed to get input send pin";
+    return -1;
+  }
+
+  // Configure the device and connect the pins with the stored capability.
+  if (SetCameraOutput(_requestedCapability) != 0) {
+    return -1;
+  }
+  RTC_LOG(LS_INFO) << "Capture device '" << deviceUniqueIdUTF8
+                   << "' initialized.";
+  return 0;
+}
+
+// Starts (or restarts) the capture graph. If the requested capability differs
+// from the one currently configured, the graph is disconnected and
+// reconfigured first. Returns 0 on success, -1 on failure.
+int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) {
+  if (capability != _requestedCapability) {
+    DisconnectGraph();
+
+    if (SetCameraOutput(capability) != 0) {
+      return -1;
+    }
+  }
+  // Pause first so a failure here (device busy) is reported before Run.
+  HRESULT hr = _mediaControl->Pause();
+  if (FAILED(hr)) {
+    RTC_LOG(LS_INFO)
+        << "Failed to Pause the Capture device. Is it already occupied? " << hr;
+    return -1;
+  }
+  hr = _mediaControl->Run();
+  if (FAILED(hr)) {
+    RTC_LOG(LS_INFO) << "Failed to start the Capture device.";
+    return -1;
+  }
+  return 0;
+}
+
+// Stops the capture graph, letting queued samples drain first
+// (StopWhenReady transitions through pause). Returns 0 on success.
+int32_t VideoCaptureDS::StopCapture() {
+  HRESULT hr = _mediaControl->StopWhenReady();
+  if (FAILED(hr)) {
+    RTC_LOG(LS_INFO) << "Failed to stop the capture graph. " << hr;
+    return -1;
+  }
+  return 0;
+}
+
+// Returns true if the capture graph reports State_Running. Waits up to one
+// second for a state transition to settle; VFW_S_CANT_CUE is tolerated since
+// the returned state is still usable.
+bool VideoCaptureDS::CaptureStarted() {
+  OAFilterState state = 0;
+  HRESULT hr = _mediaControl->GetState(1000, &state);
+  if (hr != S_OK && hr != VFW_S_CANT_CUE) {
+    RTC_LOG(LS_INFO) << "Failed to get the CaptureStarted status";
+  }
+  RTC_LOG(LS_INFO) << "CaptureStarted " << state;
+  return state == State_Running;
+}
+
+// Reports the capability that was last requested (not necessarily the exact
+// capability the device negotiated). Always succeeds.
+int32_t VideoCaptureDS::CaptureSettings(VideoCaptureCapability& settings) {
+  settings = _requestedCapability;
+  return 0;
+}
+
+// Configures the capture device's output format to the best match for
+// `requestedCapability`, then connects the capture pin to the sink pin
+// (directly, or through the MS DV decoder for DV cameras).
+// Returns 0 on success, -1 on failure.
+int32_t VideoCaptureDS::SetCameraOutput(
+    const VideoCaptureCapability& requestedCapability) {
+  // Get the best matching capability
+  VideoCaptureCapability capability;
+  int32_t capabilityIndex;
+
+  // Store the new requested size
+  _requestedCapability = requestedCapability;
+  // Match the requested capability with the supported.
+  if ((capabilityIndex = _dsInfo.GetBestMatchedCapability(
+           _deviceUniqueId, _requestedCapability, capability)) < 0) {
+    return -1;
+  }
+  // Reduce the frame rate if possible.
+  if (capability.maxFPS > requestedCapability.maxFPS) {
+    capability.maxFPS = requestedCapability.maxFPS;
+  } else if (capability.maxFPS <= 0) {
+    // Guard against a device reporting no/invalid FPS; fall back to 30.
+    capability.maxFPS = 30;
+  }
+
+  // Convert it to the windows capability index since they are not necessarily
+  // the same
+  VideoCaptureCapabilityWindows windowsCapability;
+  if (_dsInfo.GetWindowsCapability(capabilityIndex, windowsCapability) != 0) {
+    return -1;
+  }
+
+  IAMStreamConfig* streamConfig = NULL;
+  AM_MEDIA_TYPE* pmt = NULL;
+  VIDEO_STREAM_CONFIG_CAPS caps;
+
+  HRESULT hr = _outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+                                                 (void**)&streamConfig);
+  if (hr) {
+    RTC_LOG(LS_INFO) << "Can't get the Capture format settings.";
+    return -1;
+  }
+
+  // Get the windows capability from the capture device
+  bool isDVCamera = false;
+  hr = streamConfig->GetStreamCaps(windowsCapability.directShowCapabilityIndex,
+                                   &pmt, reinterpret_cast<BYTE*>(&caps));
+  if (hr == S_OK) {
+    // Patch the frame interval into the media type's video header
+    // (VIDEOINFOHEADER2 or VIDEOINFOHEADER), when the device supports
+    // frame-rate control. AvgTimePerFrame is in 100-ns units.
+    if (pmt->formattype == FORMAT_VideoInfo2) {
+      VIDEOINFOHEADER2* h = reinterpret_cast<VIDEOINFOHEADER2*>(pmt->pbFormat);
+      if (capability.maxFPS > 0 && windowsCapability.supportFrameRateControl) {
+        h->AvgTimePerFrame = REFERENCE_TIME(10000000.0 / capability.maxFPS);
+      }
+    } else {
+      VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
+      if (capability.maxFPS > 0 && windowsCapability.supportFrameRateControl) {
+        h->AvgTimePerFrame = REFERENCE_TIME(10000000.0 / capability.maxFPS);
+      }
+    }
+
+    // Set the sink filter to request this capability
+    sink_filter_->SetRequestedCapability(capability);
+    // Order the capture device to use this capability
+    // NOTE(review): accumulating HRESULTs with += is unconventional; since hr
+    // is S_OK (0) here it effectively takes SetFormat's result, but addition
+    // is not a well-defined way to combine HRESULTs — consider assignment.
+    hr += streamConfig->SetFormat(pmt);
+
+    // Check if this is a DV camera and we need to add MS DV Filter
+    if (pmt->subtype == MEDIASUBTYPE_dvsl ||
+        pmt->subtype == MEDIASUBTYPE_dvsd ||
+        pmt->subtype == MEDIASUBTYPE_dvhd) {
+      isDVCamera = true;  // This is a DV camera. Use MS DV filter
+    }
+
+    FreeMediaType(pmt);
+    pmt = NULL;
+  }
+  RELEASE_AND_CLEAR(streamConfig);
+
+  if (FAILED(hr)) {
+    RTC_LOG(LS_INFO) << "Failed to set capture device output format";
+    return -1;
+  }
+
+  if (isDVCamera) {
+    hr = ConnectDVCamera();
+  } else {
+    hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputSendPin, NULL);
+  }
+  if (hr != S_OK) {
+    RTC_LOG(LS_INFO) << "Failed to connect the Capture graph " << hr;
+    return -1;
+  }
+  return 0;
+}
+
+// Stops the graph and disconnects all pins so the device can be reconfigured
+// with a new capability. Returns 0 on success, -1 on failure.
+int32_t VideoCaptureDS::DisconnectGraph() {
+  // NOTE(review): HRESULTs are summed with += here; distinct failures can in
+  // principle cancel out. Works in practice since success is 0, but a
+  // FAILED() check per call would be more robust.
+  HRESULT hr = _mediaControl->Stop();
+  hr += _graphBuilder->Disconnect(_outputCapturePin);
+  hr += _graphBuilder->Disconnect(_inputSendPin);
+
+  // if the DV camera filter exist
+  if (_dvFilter) {
+    // DV pin disconnect results are intentionally not folded into hr.
+    _graphBuilder->Disconnect(_inputDvPin);
+    _graphBuilder->Disconnect(_outputDvPin);
+  }
+  if (hr != S_OK) {
+    RTC_LOG(LS_ERROR)
+        << "Failed to Stop the Capture device for reconfiguration " << hr;
+    return -1;
+  }
+  return 0;
+}
+
+// Inserts Microsoft's DV video decoder between the capture pin and the sink
+// pin (capture -> DV decoder -> sink), creating the decoder lazily on first
+// use. Returns S_OK on success, otherwise a failure HRESULT.
+HRESULT VideoCaptureDS::ConnectDVCamera() {
+  HRESULT hr = S_OK;
+
+  if (!_dvFilter) {
+    hr = CoCreateInstance(CLSID_DVVideoCodec, NULL, CLSCTX_INPROC,
+                          IID_IBaseFilter, (void**)&_dvFilter);
+    if (hr != S_OK) {
+      RTC_LOG(LS_INFO) << "Failed to create the dv decoder: " << hr;
+      return hr;
+    }
+    hr = _graphBuilder->AddFilter(_dvFilter, L"VideoDecoderDV");
+    if (hr != S_OK) {
+      RTC_LOG(LS_INFO) << "Failed to add the dv decoder to the graph: " << hr;
+      return hr;
+    }
+    _inputDvPin = GetInputPin(_dvFilter);
+    if (_inputDvPin == NULL) {
+      // NOTE(review): returns -1 (0xFFFFFFFF) rather than a named HRESULT;
+      // it is still a FAILED() value, but E_FAIL would be clearer.
+      RTC_LOG(LS_INFO) << "Failed to get input pin from DV decoder";
+      return -1;
+    }
+    _outputDvPin = GetOutputPin(_dvFilter, GUID_NULL);
+    if (_outputDvPin == NULL) {
+      RTC_LOG(LS_INFO) << "Failed to get output pin from DV decoder";
+      return -1;
+    }
+  }
+  hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputDvPin, NULL);
+  if (hr != S_OK) {
+    RTC_LOG(LS_INFO) << "Failed to connect capture device to the dv devoder: "
+                     << hr;
+    return hr;
+  }
+
+  hr = _graphBuilder->ConnectDirect(_outputDvPin, _inputSendPin, NULL);
+  if (hr != S_OK) {
+    if (hr == HRESULT_FROM_WIN32(ERROR_TOO_MANY_OPEN_FILES)) {
+      // The decoder can only be connected to one camera at a time.
+      RTC_LOG(LS_INFO) << "Failed to connect the capture device, busy";
+    } else {
+      RTC_LOG(LS_INFO) << "Failed to connect capture device to the send graph: "
+                       << hr;
+    }
+  }
+  return hr;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.h b/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.h
new file mode 100644
index 0000000000..0f01cfaf67
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
+#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
+
+#include "api/scoped_refptr.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "modules/video_capture/windows/device_info_ds.h"
+
+#define CAPTURE_FILTER_NAME L"VideoCaptureFilter"
+#define SINK_FILTER_NAME L"SinkFilter"
+
+namespace webrtc {
+namespace videocapturemodule {
+// Forward declaraion
+class CaptureSinkFilter;
+
+// DirectShow-based implementation of VideoCaptureImpl. Owns the filter graph
+// (capture filter -> optional DV decoder -> sink filter) and the COM
+// interfaces needed to control it.
+class VideoCaptureDS : public VideoCaptureImpl {
+ public:
+  VideoCaptureDS();
+
+  // Builds the capture graph for the given device. Must succeed before any
+  // other method is used. Returns 0 on success, -1 on failure.
+  virtual int32_t Init(const char* deviceUniqueIdUTF8);
+
+  /*************************************************************************
+   *
+   *   Start/Stop
+   *
+   *************************************************************************/
+  int32_t StartCapture(const VideoCaptureCapability& capability) override;
+  int32_t StopCapture() override;
+
+  /**************************************************************************
+   *
+   *   Properties of the set device
+   *
+   **************************************************************************/
+
+  bool CaptureStarted() override;
+  int32_t CaptureSettings(VideoCaptureCapability& settings) override;
+
+ protected:
+  ~VideoCaptureDS() override;
+
+  // Help functions
+
+  // Applies a capability to the device and (re)connects the graph.
+  int32_t SetCameraOutput(const VideoCaptureCapability& requestedCapability);
+  // Stops the graph and disconnects all pins for reconfiguration.
+  int32_t DisconnectGraph();
+  // Inserts the MS DV decoder between capture and sink pins (DV cameras).
+  HRESULT ConnectDVCamera();
+
+  DeviceInfoDS _dsInfo;
+
+  // Raw COM pointers released via RELEASE_AND_CLEAR in the destructor.
+  IBaseFilter* _captureFilter;
+  IGraphBuilder* _graphBuilder;
+  IMediaControl* _mediaControl;
+  rtc::scoped_refptr<CaptureSinkFilter> sink_filter_;
+  IPin* _inputSendPin;
+  IPin* _outputCapturePin;
+
+  // Microsoft DV interface (external DV cameras)
+  IBaseFilter* _dvFilter;
+  IPin* _inputDvPin;
+  IPin* _outputDvPin;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
diff --git a/third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc b/third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc
new file mode 100644
index 0000000000..481326c1d2
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/scoped_refptr.h"
+#include "modules/video_capture/windows/video_capture_ds.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// static
+// static
+// Windows factory for device enumeration; currently always DirectShow-backed.
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
+  // TODO(tommi): Use the Media Foundation version on Vista and up.
+  return DeviceInfoDS::Create();
+}
+
+// Windows factory for a capture module bound to `device_id`.
+// Returns nullptr when the id is null or graph initialization fails.
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
+    const char* device_id) {
+  if (device_id == nullptr)
+    return nullptr;
+
+  // TODO(tommi): Use Media Foundation implementation for Vista and up.
+  auto capture = rtc::make_ref_counted<VideoCaptureDS>();
+  if (capture->Init(device_id) != 0) {
+    // Init failed; the ref-counted object is released here.
+    return nullptr;
+  }
+
+  return capture;
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc