From 26a029d407be480d791972afb5975cf62c9360a6 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Fri, 19 Apr 2024 02:47:55 +0200 Subject: Adding upstream version 124.0.1. Signed-off-by: Daniel Baumann --- .../libwebrtc/modules/video_capture/BUILD.gn | 188 ++++ third_party/libwebrtc/modules/video_capture/DEPS | 6 + third_party/libwebrtc/modules/video_capture/OWNERS | 4 + .../modules/video_capture/device_info_impl.cc | 224 +++++ .../modules/video_capture/device_info_impl.h | 63 ++ .../modules/video_capture/linux/camera_portal.cc | 244 ++++++ .../modules/video_capture/linux/camera_portal.h | 47 + .../video_capture/linux/device_info_linux.cc | 59 ++ .../video_capture/linux/device_info_pipewire.cc | 113 +++ .../video_capture/linux/device_info_pipewire.h | 53 ++ .../video_capture/linux/device_info_v4l2.cc | 528 +++++++++++ .../modules/video_capture/linux/device_info_v4l2.h | 72 ++ .../video_capture/linux/pipewire_session.cc | 400 +++++++++ .../modules/video_capture/linux/pipewire_session.h | 145 ++++ .../video_capture/linux/video_capture_linux.cc | 76 ++ .../video_capture/linux/video_capture_pipewire.cc | 414 +++++++++ .../video_capture/linux/video_capture_pipewire.h | 60 ++ .../video_capture/linux/video_capture_v4l2.cc | 485 +++++++++++ .../video_capture/linux/video_capture_v4l2.h | 62 ++ .../video_capture/raw_video_sink_interface.h | 34 + .../video_capture/test/video_capture_unittest.cc | 376 ++++++++ .../modules/video_capture/video_capture.h | 166 ++++ .../modules/video_capture/video_capture_config.h | 33 + .../modules/video_capture/video_capture_defines.h | 59 ++ .../modules/video_capture/video_capture_factory.cc | 50 ++ .../modules/video_capture/video_capture_factory.h | 48 + .../video_capture/video_capture_factory_null.cc | 27 + .../modules/video_capture/video_capture_impl.cc | 345 ++++++++ .../modules/video_capture/video_capture_impl.h | 137 +++ .../video_capture_internal_impl_gn/moz.build | 376 ++++++++ .../video_capture_module_gn/moz.build | 241 ++++++ 
.../modules/video_capture/video_capture_options.cc | 55 ++ .../modules/video_capture/video_capture_options.h | 83 ++ .../video_capture/windows/device_info_ds.cc | 713 +++++++++++++++ .../modules/video_capture/windows/device_info_ds.h | 108 +++ .../video_capture/windows/help_functions_ds.cc | 158 ++++ .../video_capture/windows/help_functions_ds.h | 118 +++ .../video_capture/windows/sink_filter_ds.cc | 961 +++++++++++++++++++++ .../modules/video_capture/windows/sink_filter_ds.h | 162 ++++ .../video_capture/windows/video_capture_ds.cc | 337 ++++++++ .../video_capture/windows/video_capture_ds.h | 75 ++ .../windows/video_capture_factory_windows.cc | 38 + 42 files changed, 7943 insertions(+) create mode 100644 third_party/libwebrtc/modules/video_capture/BUILD.gn create mode 100644 third_party/libwebrtc/modules/video_capture/DEPS create mode 100644 third_party/libwebrtc/modules/video_capture/OWNERS create mode 100644 third_party/libwebrtc/modules/video_capture/device_info_impl.cc create mode 100644 third_party/libwebrtc/modules/video_capture/device_info_impl.h create mode 100644 third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc create mode 100644 third_party/libwebrtc/modules/video_capture/linux/camera_portal.h create mode 100644 third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc create mode 100644 third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc create mode 100644 third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h create mode 100644 third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc create mode 100644 third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h create mode 100644 third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc create mode 100644 third_party/libwebrtc/modules/video_capture/linux/pipewire_session.h create mode 100644 third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc create mode 100644 
third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc create mode 100644 third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.h create mode 100644 third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc create mode 100644 third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h create mode 100644 third_party/libwebrtc/modules/video_capture/raw_video_sink_interface.h create mode 100644 third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture.h create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_config.h create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_defines.h create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_factory.cc create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_factory.h create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_factory_null.cc create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_impl.cc create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_impl.h create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_options.cc create mode 100644 third_party/libwebrtc/modules/video_capture/video_capture_options.h create mode 100644 third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc create mode 100644 third_party/libwebrtc/modules/video_capture/windows/device_info_ds.h create mode 100644 third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc create mode 100644 third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.h create mode 100644 
third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc create mode 100644 third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.h create mode 100644 third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc create mode 100644 third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.h create mode 100644 third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc (limited to 'third_party/libwebrtc/modules/video_capture') diff --git a/third_party/libwebrtc/modules/video_capture/BUILD.gn b/third_party/libwebrtc/modules/video_capture/BUILD.gn new file mode 100644 index 0000000000..45a0272eee --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/BUILD.gn @@ -0,0 +1,188 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../webrtc.gni") + +# Note this target is missing an implementation for the video capture. +# Targets must link with either 'video_capture' or +# 'video_capture_internal_impl' depending on whether they want to +# use the internal capturer. 
+rtc_library("video_capture_module") { + visibility = [ "*" ] + sources = [ + "device_info_impl.cc", + "device_info_impl.h", + "raw_video_sink_interface.h", + "video_capture.h", + "video_capture_config.h", + "video_capture_defines.h", + "video_capture_factory.cc", + "video_capture_factory.h", + "video_capture_impl.cc", + "video_capture_impl.h", + ] + + deps = [ + "../../api:scoped_refptr", + "../../api:sequence_checker", + "../../api/video:video_frame", + "../../api/video:video_rtp_headers", + "../../common_video", + "../../media:rtc_media_base", + "../../rtc_base:event_tracer", + "../../rtc_base:logging", + "../../rtc_base:macromagic", + "../../rtc_base:race_checker", + "../../rtc_base:refcount", + "../../rtc_base:stringutils", + "../../rtc_base:timeutils", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/system:rtc_export", + "../../system_wrappers", + "//third_party/libyuv", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +if (!build_with_chromium || is_linux || is_chromeos) { + rtc_source_set("video_capture_internal_impl") { + visibility = [ "*" ] + deps = [ + ":video_capture_module", + "../../api:scoped_refptr", + "../../api:sequence_checker", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:macromagic", + "../../rtc_base:platform_thread", + "../../rtc_base:refcount", + "../../rtc_base:stringutils", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/system:rtc_export", + "../../system_wrappers", + ] + sources = [ + "video_capture_options.cc", + "video_capture_options.h", + ] + + if (is_linux || is_bsd || is_chromeos) { + sources += [ + "linux/device_info_linux.cc", + "linux/device_info_v4l2.cc", + "linux/device_info_v4l2.h", + "linux/video_capture_linux.cc", + "linux/video_capture_v4l2.cc", + "linux/video_capture_v4l2.h", + ] + deps += [ "../../media:rtc_media_base" ] + + if (rtc_use_pipewire) { + sources += [ + "linux/camera_portal.cc", + "linux/camera_portal.h", + "linux/device_info_pipewire.cc", 
+ "linux/device_info_pipewire.h", + "linux/pipewire_session.cc", + "linux/pipewire_session.h", + "linux/video_capture_pipewire.cc", + "linux/video_capture_pipewire.h", + ] + + configs += [ "../portal:pipewire_base" ] + + public_configs = [ "../portal:pipewire_config" ] + + deps += [ + "../../api:refcountedbase", + "../../common_video", + "../../media:rtc_media_base", + "../portal", + ] + if (build_with_mozilla) { + configs -= [ "../portal:pipewire_base" ] + public_deps = [ "//third_party/pipewire" ] + } + } + } + if (is_win) { + sources += [ + "windows/device_info_ds.cc", + "windows/device_info_ds.h", + "windows/help_functions_ds.cc", + "windows/help_functions_ds.h", + "windows/sink_filter_ds.cc", + "windows/sink_filter_ds.h", + "windows/video_capture_ds.cc", + "windows/video_capture_ds.h", + "windows/video_capture_factory_windows.cc", + ] + + libs = [ + "ole32.lib", + "oleaut32.lib", + "strmiids.lib", + "user32.lib", + ] + } + if (is_fuchsia) { + sources += [ "video_capture_factory_null.cc" ] + } + + if (!build_with_mozilla && is_android) { + include_dirs = [ + "/config/external/nspr", + "/nsprpub/lib/ds", + "/nsprpub/pr/include", + ] + + sources += [ + "android/device_info_android.cc", + "android/video_capture_android.cc", + ] + } + } + + if (!is_android && rtc_include_tests) { + rtc_test("video_capture_tests") { + sources = [ "test/video_capture_unittest.cc" ] + ldflags = [] + if (is_linux || is_chromeos || is_mac) { + ldflags += [ + "-lpthread", + "-lm", + ] + } + if (is_linux || is_chromeos) { + ldflags += [ + "-lrt", + "-lXext", + "-lX11", + ] + } + + deps = [ + ":video_capture_internal_impl", + ":video_capture_module", + "../../api:scoped_refptr", + "../../api/video:video_frame", + "../../api/video:video_rtp_headers", + "../../common_video", + "../../rtc_base:timeutils", + "../../rtc_base/synchronization:mutex", + "../../system_wrappers", + "../../test:frame_utils", + "../../test:test_main", + "../../test:test_support", + "../../test:video_test_common", + 
"//testing/gtest", + "//third_party/abseil-cpp/absl/memory", + ] + } + } +} diff --git a/third_party/libwebrtc/modules/video_capture/DEPS b/third_party/libwebrtc/modules/video_capture/DEPS new file mode 100644 index 0000000000..9ad1d576bc --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/DEPS @@ -0,0 +1,6 @@ +include_rules = [ + "+common_video", + "+media/base", + "+system_wrappers", + "+third_party/libyuv", +] diff --git a/third_party/libwebrtc/modules/video_capture/OWNERS b/third_party/libwebrtc/modules/video_capture/OWNERS new file mode 100644 index 0000000000..364d66d36f --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/OWNERS @@ -0,0 +1,4 @@ +ilnik@webrtc.org +mflodman@webrtc.org +perkj@webrtc.org +tkchin@webrtc.org diff --git a/third_party/libwebrtc/modules/video_capture/device_info_impl.cc b/third_party/libwebrtc/modules/video_capture/device_info_impl.cc new file mode 100644 index 0000000000..2a6afb3147 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/device_info_impl.cc @@ -0,0 +1,224 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/device_info_impl.h" + +#include + +#include "absl/strings/match.h" +#include "absl/strings/string_view.h" +#include "rtc_base/logging.h" + +#ifndef abs +#define abs(a) (a >= 0 ? 
a : -a) +#endif + +namespace webrtc { +namespace videocapturemodule { + +DeviceInfoImpl::DeviceInfoImpl() + : _lastUsedDeviceName(NULL), _lastUsedDeviceNameLength(0) {} + +DeviceInfoImpl::~DeviceInfoImpl(void) { + MutexLock lock(&_apiLock); + free(_lastUsedDeviceName); +} + +int32_t DeviceInfoImpl::NumberOfCapabilities(const char* deviceUniqueIdUTF8) { + if (!deviceUniqueIdUTF8) + return -1; + + MutexLock lock(&_apiLock); + + // Is it the same device that is asked for again. + if (absl::EqualsIgnoreCase( + deviceUniqueIdUTF8, + absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { + return static_cast(_captureCapabilities.size()); + } + + int32_t ret = CreateCapabilityMap(deviceUniqueIdUTF8); + return ret; +} + +int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8, + const uint32_t deviceCapabilityNumber, + VideoCaptureCapability& capability) { + RTC_DCHECK(deviceUniqueIdUTF8); + + MutexLock lock(&_apiLock); + + if (!absl::EqualsIgnoreCase( + deviceUniqueIdUTF8, + absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { + if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8)) { + return -1; + } + } + + // Make sure the number is valid + if (deviceCapabilityNumber >= (unsigned int)_captureCapabilities.size()) { + RTC_LOG(LS_ERROR) << deviceUniqueIdUTF8 << " Invalid deviceCapabilityNumber " + << deviceCapabilityNumber << ">= number of capabilities (" + << _captureCapabilities.size() << ")."; + return -1; + } + + capability = _captureCapabilities[deviceCapabilityNumber]; + return 0; +} + +int32_t DeviceInfoImpl::GetBestMatchedCapability( + const char* deviceUniqueIdUTF8, + const VideoCaptureCapability& requested, + VideoCaptureCapability& resulting) { + if (!deviceUniqueIdUTF8) + return -1; + + MutexLock lock(&_apiLock); + if (!absl::EqualsIgnoreCase( + deviceUniqueIdUTF8, + absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { + if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8)) { + return -1; + } + } + + 
int32_t bestformatIndex = -1; + int32_t bestWidth = 0; + int32_t bestHeight = 0; + int32_t bestFrameRate = 0; + VideoType bestVideoType = VideoType::kUnknown; + + const int32_t numberOfCapabilies = + static_cast(_captureCapabilities.size()); + + bool hasNonRGB24Capability = false; + for (int32_t tmp = 0; tmp < numberOfCapabilies; + ++tmp) // Loop through all capabilities + { + VideoCaptureCapability& capability = _captureCapabilities[tmp]; + if (capability.videoType != VideoType::kRGB24) { + hasNonRGB24Capability = true; + } + } + + for (int32_t tmp = 0; tmp < numberOfCapabilies; + ++tmp) // Loop through all capabilities + { + VideoCaptureCapability& capability = _captureCapabilities[tmp]; + if (hasNonRGB24Capability && capability.videoType == VideoType::kRGB24) { + continue; + } + + const int32_t diffWidth = capability.width - requested.width; + const int32_t diffHeight = capability.height - requested.height; + const int32_t diffFrameRate = capability.maxFPS - requested.maxFPS; + + const int32_t currentbestDiffWith = bestWidth - requested.width; + const int32_t currentbestDiffHeight = bestHeight - requested.height; + const int32_t currentbestDiffFrameRate = bestFrameRate - requested.maxFPS; + + if ((diffHeight >= 0 && + diffHeight <= abs(currentbestDiffHeight)) // Height better or equalt + // that previouse. + || (currentbestDiffHeight < 0 && diffHeight >= currentbestDiffHeight)) { + if (diffHeight == + currentbestDiffHeight) // Found best height. 
Care about the width) + { + if ((diffWidth >= 0 && + diffWidth <= abs(currentbestDiffWith)) // Width better or equal + || (currentbestDiffWith < 0 && diffWidth >= currentbestDiffWith)) { + if (diffWidth == currentbestDiffWith && + diffHeight == currentbestDiffHeight) // Same size as previously + { + // Also check the best frame rate if the diff is the same as + // previouse + if (((diffFrameRate >= 0 && + diffFrameRate <= + currentbestDiffFrameRate) // Frame rate to high but + // better match than previouse + // and we have not selected IUV + || (currentbestDiffFrameRate < 0 && + diffFrameRate >= + currentbestDiffFrameRate)) // Current frame rate is + // lower than requested. + // This is better. + ) { + if ((currentbestDiffFrameRate == + diffFrameRate) // Same frame rate as previous or frame rate + // allready good enough + || (currentbestDiffFrameRate >= 0)) { + if (bestVideoType != requested.videoType && + requested.videoType != VideoType::kUnknown && + (capability.videoType == requested.videoType || + capability.videoType == VideoType::kI420 || + capability.videoType == VideoType::kYUY2 || + capability.videoType == VideoType::kYV12 || + capability.videoType == VideoType::kNV12)) { + bestVideoType = capability.videoType; + bestformatIndex = tmp; + } + // If width height and frame rate is full filled we can use the + // camera for encoding if it is supported. 
+ if (capability.height == requested.height && + capability.width == requested.width && + capability.maxFPS >= requested.maxFPS) { + bestformatIndex = tmp; + } + } else // Better frame rate + { + bestWidth = capability.width; + bestHeight = capability.height; + bestFrameRate = capability.maxFPS; + bestVideoType = capability.videoType; + bestformatIndex = tmp; + } + } + } else // Better width than previously + { + bestWidth = capability.width; + bestHeight = capability.height; + bestFrameRate = capability.maxFPS; + bestVideoType = capability.videoType; + bestformatIndex = tmp; + } + } // else width no good + } else // Better height + { + bestWidth = capability.width; + bestHeight = capability.height; + bestFrameRate = capability.maxFPS; + bestVideoType = capability.videoType; + bestformatIndex = tmp; + } + } // else height not good + } // end for + + RTC_LOG(LS_VERBOSE) << "Best camera format: " << bestWidth << "x" + << bestHeight << "@" << bestFrameRate + << "fps, color format: " + << static_cast(bestVideoType); + + // Copy the capability + if (bestformatIndex < 0) + return -1; + resulting = _captureCapabilities[bestformatIndex]; + return bestformatIndex; +} + +// Default implementation. This should be overridden by Mobile implementations. +int32_t DeviceInfoImpl::GetOrientation(const char* deviceUniqueIdUTF8, + VideoRotation& orientation) { + orientation = kVideoRotation_0; + return -1; +} +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/device_info_impl.h b/third_party/libwebrtc/modules/video_capture/device_info_impl.h new file mode 100644 index 0000000000..8acbef6d69 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/device_info_impl.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_ +#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_ + +#include + +#include + +#include "api/video/video_rotation.h" +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_defines.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { +namespace videocapturemodule { +class DeviceInfoImpl : public VideoCaptureModule::DeviceInfo { + public: + DeviceInfoImpl(); + ~DeviceInfoImpl(void) override; + int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8) override; + int32_t GetCapability(const char* deviceUniqueIdUTF8, + uint32_t deviceCapabilityNumber, + VideoCaptureCapability& capability) override; + + int32_t GetBestMatchedCapability(const char* deviceUniqueIdUTF8, + const VideoCaptureCapability& requested, + VideoCaptureCapability& resulting) override; + int32_t GetOrientation(const char* deviceUniqueIdUTF8, + VideoRotation& orientation) override; + + protected: + /* Initialize this object*/ + + virtual int32_t Init() = 0; + int32_t Refresh() override { return 0; } + /* + * Fills the member variable _captureCapabilities with capabilities for the + * given device name. 
+ */ + virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock) = 0; + + protected: + // Data members + typedef std::vector VideoCaptureCapabilities; + VideoCaptureCapabilities _captureCapabilities RTC_GUARDED_BY(_apiLock); + Mutex _apiLock; + char* _lastUsedDeviceName RTC_GUARDED_BY(_apiLock); + uint32_t _lastUsedDeviceNameLength RTC_GUARDED_BY(_apiLock); +}; +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_ diff --git a/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc b/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc new file mode 100644 index 0000000000..85b9f20228 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc @@ -0,0 +1,244 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_capture/linux/camera_portal.h" + +#include +#include + +#include "modules/portal/pipewire_utils.h" +#include "modules/portal/xdg_desktop_portal_utils.h" + +namespace webrtc { + +using xdg_portal::RequestResponse; +using xdg_portal::RequestResponseFromPortalResponse; +using xdg_portal::RequestSessionProxy; + +constexpr char kCameraInterfaceName[] = "org.freedesktop.portal.Camera"; + +class CameraPortalPrivate { + public: + explicit CameraPortalPrivate(CameraPortal::PortalNotifier* notifier); + ~CameraPortalPrivate(); + + void Start(); + + private: + void OnPortalDone(xdg_portal::RequestResponse result, + int fd = kInvalidPipeWireFd); + + static void OnProxyRequested(GObject* object, + GAsyncResult* result, + gpointer user_data); + void ProxyRequested(GDBusProxy* proxy); + + static void OnAccessResponse(GDBusProxy* proxy, + GAsyncResult* result, + gpointer user_data); + static void OnResponseSignalEmitted(GDBusConnection* connection, + const char* sender_name, + const char* object_path, + const char* interface_name, + const char* signal_name, + GVariant* parameters, + gpointer user_data); + static void OnOpenResponse(GDBusProxy* proxy, + GAsyncResult* result, + gpointer user_data); + + CameraPortal::PortalNotifier* notifier_ = nullptr; + + GDBusConnection* connection_ = nullptr; + GDBusProxy* proxy_ = nullptr; + GCancellable* cancellable_ = nullptr; + guint access_request_signal_id_ = 0; +}; + +CameraPortalPrivate::CameraPortalPrivate(CameraPortal::PortalNotifier* notifier) + : notifier_(notifier) {} + +CameraPortalPrivate::~CameraPortalPrivate() { + if (access_request_signal_id_) { + g_dbus_connection_signal_unsubscribe(connection_, + access_request_signal_id_); + access_request_signal_id_ = 0; + } + if (cancellable_) { + g_cancellable_cancel(cancellable_); + g_object_unref(cancellable_); + cancellable_ = nullptr; + } + if (proxy_) { + g_object_unref(proxy_); + proxy_ = nullptr; + connection_ = nullptr; + } +} + +void 
CameraPortalPrivate::Start() { + cancellable_ = g_cancellable_new(); + Scoped error; + RequestSessionProxy(kCameraInterfaceName, OnProxyRequested, cancellable_, + this); +} + +// static +void CameraPortalPrivate::OnProxyRequested(GObject* gobject, + GAsyncResult* result, + gpointer user_data) { + CameraPortalPrivate* that = static_cast(user_data); + Scoped error; + GDBusProxy* proxy = g_dbus_proxy_new_finish(result, error.receive()); + if (!proxy) { + // Ignore the error caused by user cancelling the request via `cancellable_` + if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED)) + return; + RTC_LOG(LS_ERROR) << "Failed to get a proxy for the portal: " + << error->message; + that->OnPortalDone(RequestResponse::kError); + return; + } + + RTC_LOG(LS_VERBOSE) << "Successfully created proxy for the portal."; + that->ProxyRequested(proxy); +} + +void CameraPortalPrivate::ProxyRequested(GDBusProxy* proxy) { + GVariantBuilder builder; + Scoped variant_string; + std::string access_handle; + + proxy_ = proxy; + connection_ = g_dbus_proxy_get_connection(proxy); + + g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT); + variant_string = + g_strdup_printf("capture%d", g_random_int_range(0, G_MAXINT)); + g_variant_builder_add(&builder, "{sv}", "handle_token", + g_variant_new_string(variant_string.get())); + + access_handle = + xdg_portal::PrepareSignalHandle(variant_string.get(), connection_); + access_request_signal_id_ = xdg_portal::SetupRequestResponseSignal( + access_handle.c_str(), OnResponseSignalEmitted, this, connection_); + + RTC_LOG(LS_VERBOSE) << "Requesting camera access from the portal."; + g_dbus_proxy_call(proxy_, "AccessCamera", g_variant_new("(a{sv})", &builder), + G_DBUS_CALL_FLAGS_NONE, /*timeout_msec=*/-1, cancellable_, + reinterpret_cast(OnAccessResponse), + this); +} + +// static +void CameraPortalPrivate::OnAccessResponse(GDBusProxy* proxy, + GAsyncResult* result, + gpointer user_data) { + CameraPortalPrivate* that = 
static_cast(user_data); + RTC_DCHECK(that); + + Scoped error; + Scoped variant( + g_dbus_proxy_call_finish(proxy, result, error.receive())); + if (!variant) { + if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED)) + return; + RTC_LOG(LS_ERROR) << "Failed to access portal:" << error->message; + if (that->access_request_signal_id_) { + g_dbus_connection_signal_unsubscribe(that->connection_, + that->access_request_signal_id_); + that->access_request_signal_id_ = 0; + } + that->OnPortalDone(RequestResponse::kError); + } +} + +// static +void CameraPortalPrivate::OnResponseSignalEmitted(GDBusConnection* connection, + const char* sender_name, + const char* object_path, + const char* interface_name, + const char* signal_name, + GVariant* parameters, + gpointer user_data) { + CameraPortalPrivate* that = static_cast(user_data); + RTC_DCHECK(that); + + uint32_t portal_response; + g_variant_get(parameters, "(u@a{sv})", &portal_response, nullptr); + if (portal_response) { + RTC_LOG(LS_INFO) << "Camera access denied by the XDG portal."; + that->OnPortalDone(RequestResponseFromPortalResponse(portal_response)); + return; + } + + RTC_LOG(LS_VERBOSE) << "Camera access granted by the XDG portal."; + + GVariantBuilder builder; + g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT); + + g_dbus_proxy_call( + that->proxy_, "OpenPipeWireRemote", g_variant_new("(a{sv})", &builder), + G_DBUS_CALL_FLAGS_NONE, /*timeout_msec=*/-1, that->cancellable_, + reinterpret_cast(OnOpenResponse), that); +} + +void CameraPortalPrivate::OnOpenResponse(GDBusProxy* proxy, + GAsyncResult* result, + gpointer user_data) { + CameraPortalPrivate* that = static_cast(user_data); + RTC_DCHECK(that); + + Scoped error; + Scoped outlist; + Scoped variant(g_dbus_proxy_call_with_unix_fd_list_finish( + proxy, outlist.receive(), result, error.receive())); + if (!variant) { + if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED)) + return; + RTC_LOG(LS_ERROR) << "Failed to open PipeWire 
remote:" << error->message; + if (that->access_request_signal_id_) { + g_dbus_connection_signal_unsubscribe(that->connection_, + that->access_request_signal_id_); + that->access_request_signal_id_ = 0; + } + that->OnPortalDone(RequestResponse::kError); + return; + } + + int32_t index; + g_variant_get(variant.get(), "(h)", &index); + + int fd = g_unix_fd_list_get(outlist.get(), index, error.receive()); + + if (fd == kInvalidPipeWireFd) { + RTC_LOG(LS_ERROR) << "Failed to get file descriptor from the list: " + << error->message; + that->OnPortalDone(RequestResponse::kError); + return; + } + + that->OnPortalDone(RequestResponse::kSuccess, fd); +} + +void CameraPortalPrivate::OnPortalDone(RequestResponse result, int fd) { + notifier_->OnCameraRequestResult(result, fd); +} + +CameraPortal::CameraPortal(PortalNotifier* notifier) + : private_(std::make_unique(notifier)) {} + +CameraPortal::~CameraPortal() {} + +void CameraPortal::Start() { + private_->Start(); +} + +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/linux/camera_portal.h b/third_party/libwebrtc/modules/video_capture/linux/camera_portal.h new file mode 100644 index 0000000000..36f2ec8b8a --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/camera_portal.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CAPTURE_LINUX_CAMERA_PORTAL_H_ +#define MODULES_VIDEO_CAPTURE_LINUX_CAMERA_PORTAL_H_ + +#include +#include + +#include "modules/portal/portal_request_response.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class CameraPortalPrivate; + +class RTC_EXPORT CameraPortal { + public: + class PortalNotifier { + public: + virtual void OnCameraRequestResult(xdg_portal::RequestResponse result, + int fd) = 0; + + protected: + PortalNotifier() = default; + virtual ~PortalNotifier() = default; + }; + + explicit CameraPortal(PortalNotifier* notifier); + ~CameraPortal(); + + void Start(); + + private: + std::unique_ptr private_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_LINUX_CAMERA_PORTAL_H_ diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc new file mode 100644 index 0000000000..cae63c7c2d --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include +// v4l includes +#if defined(__NetBSD__) || defined(__OpenBSD__) // WEBRTC_BSD +#include +#elif defined(__sun) +#include +#else +#include +#endif + +#include + +#if defined(WEBRTC_USE_PIPEWIRE) +#include "modules/video_capture/linux/device_info_pipewire.h" +#endif +#include "modules/video_capture/linux/device_info_v4l2.h" +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_defines.h" +#include "modules/video_capture/video_capture_impl.h" +#include "modules/video_capture/video_capture_options.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace videocapturemodule { +VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() { + return new videocapturemodule::DeviceInfoV4l2(); +} + +VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo( + VideoCaptureOptions* options) { +#if defined(WEBRTC_USE_PIPEWIRE) + if (options->allow_pipewire()) { + return new videocapturemodule::DeviceInfoPipeWire(options); + } +#endif + if (options->allow_v4l2()) + return new videocapturemodule::DeviceInfoV4l2(); + + return nullptr; +} +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc new file mode 100644 index 0000000000..ad6cea57b8 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_capture/linux/device_info_pipewire.h" + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_defines.h" +#include "modules/video_capture/video_capture_impl.h" +#include "modules/video_capture/video_capture_options.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace videocapturemodule { +DeviceInfoPipeWire::DeviceInfoPipeWire(VideoCaptureOptions* options) + : DeviceInfoImpl(), pipewire_session_(options->pipewire_session()) {} + +int32_t DeviceInfoPipeWire::Init() { + return 0; +} + +DeviceInfoPipeWire::~DeviceInfoPipeWire() = default; + +uint32_t DeviceInfoPipeWire::NumberOfDevices() { + return pipewire_session_->nodes().size(); +} + +int32_t DeviceInfoPipeWire::GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8, + uint32_t productUniqueIdUTF8Length, + pid_t* pid, + bool* deviceIsPlaceholder) { + if (deviceNumber >= NumberOfDevices()) + return -1; + + const PipeWireNode& node = pipewire_session_->nodes().at(deviceNumber); + + if (deviceNameLength <= node.display_name().length()) { + RTC_LOG(LS_INFO) << "deviceNameUTF8 buffer passed is too small"; + return -1; + } + if (deviceUniqueIdUTF8Length <= node.unique_id().length()) { + RTC_LOG(LS_INFO) << "deviceUniqueIdUTF8 buffer passed is too small"; + return -1; + } + if (productUniqueIdUTF8 && + productUniqueIdUTF8Length <= node.model_id().length()) { + RTC_LOG(LS_INFO) << "productUniqueIdUTF8 buffer passed is too small"; + return -1; + } + + memset(deviceNameUTF8, 0, deviceNameLength); + node.display_name().copy(deviceNameUTF8, deviceNameLength); + + memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length); + node.unique_id().copy(deviceUniqueIdUTF8, deviceUniqueIdUTF8Length); + + if (productUniqueIdUTF8) { + 
memset(productUniqueIdUTF8, 0, productUniqueIdUTF8Length); + node.model_id().copy(productUniqueIdUTF8, productUniqueIdUTF8Length); + } + + return 0; +} + +int32_t DeviceInfoPipeWire::CreateCapabilityMap( + const char* deviceUniqueIdUTF8) { + for (auto& node : pipewire_session_->nodes()) { + if (node.unique_id().compare(deviceUniqueIdUTF8) != 0) + continue; + + _captureCapabilities = node.capabilities(); + _lastUsedDeviceNameLength = node.unique_id().length(); + _lastUsedDeviceName = static_cast( + realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1)); + memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, + _lastUsedDeviceNameLength + 1); + return _captureCapabilities.size(); + } + return -1; +} + +int32_t DeviceInfoPipeWire::DisplayCaptureSettingsDialogBox( + const char* /*deviceUniqueIdUTF8*/, + const char* /*dialogTitleUTF8*/, + void* /*parentWindow*/, + uint32_t /*positionX*/, + uint32_t /*positionY*/) { + return -1; +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h b/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h new file mode 100644 index 0000000000..1a1324e92b --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_PIPEWIRE_H_ +#define MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_PIPEWIRE_H_ + +#include + +#include "modules/video_capture/device_info_impl.h" +#include "modules/video_capture/linux/pipewire_session.h" + +namespace webrtc { +namespace videocapturemodule { +class DeviceInfoPipeWire : public DeviceInfoImpl { + public: + explicit DeviceInfoPipeWire(VideoCaptureOptions* options); + ~DeviceInfoPipeWire() override; + uint32_t NumberOfDevices() override; + int32_t GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8 = nullptr, + uint32_t productUniqueIdUTF8Length = 0, + pid_t* pid = 0, + bool* deviceIsPlaceholder = 0) override; + /* + * Fills the membervariable _captureCapabilities with capabilites for the + * given device name. + */ + int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); + int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/, + const char* /*dialogTitleUTF8*/, + void* /*parentWindow*/, + uint32_t /*positionX*/, + uint32_t /*positionY*/) override; + int32_t Init() override; + + private: + rtc::scoped_refptr pipewire_session_; +}; +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_PIPEWIRE_H_ diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc new file mode 100644 index 0000000000..eaeed26b7c --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc @@ -0,0 +1,528 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/linux/device_info_v4l2.h" + +#include +#include +#include +#include +#include +#include +#include +#include +// v4l includes +#if defined(__NetBSD__) || defined(__OpenBSD__) // WEBRTC_BSD +#include +#elif defined(__sun) +#include +#else +#include +#endif + +#include + +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_defines.h" +#include "modules/video_capture/video_capture_impl.h" +#include "rtc_base/logging.h" + +// These defines are here to support building on kernel 3.16 which some +// downstream projects, e.g. Firefox, use. +// TODO(apehrson): Remove them and their undefs when no longer needed. +#ifndef V4L2_PIX_FMT_ABGR32 +#define ABGR32_OVERRIDE 1 +#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_ARGB32 +#define ARGB32_OVERRIDE 1 +#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_RGBA32 +#define RGBA32_OVERRIDE 1 +#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') +#endif + +#ifdef WEBRTC_LINUX +#define EVENT_SIZE ( sizeof (struct inotify_event) ) +#define BUF_LEN ( 1024 * ( EVENT_SIZE + 16 ) ) +#endif + +// These defines are here to support building on kernel 3.16 which some +// downstream projects, e.g. Firefox, use. +// TODO(apehrson): Remove them and their undefs when no longer needed. 
+#ifndef V4L2_PIX_FMT_ABGR32 +#define ABGR32_OVERRIDE 1 +#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_ARGB32 +#define ARGB32_OVERRIDE 1 +#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_RGBA32 +#define RGBA32_OVERRIDE 1 +#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') +#endif + +namespace webrtc { +namespace videocapturemodule { +#ifdef WEBRTC_LINUX +void DeviceInfoV4l2::HandleEvent(inotify_event* event, int fd) +{ + if (event->mask & IN_CREATE) { + if (fd == _fd_v4l) { + DeviceChange(); + } else if ((event->mask & IN_ISDIR) && (fd == _fd_dev)) { + if (_wd_v4l < 0) { + // Sometimes inotify_add_watch failed if we call it immediately after receiving this event + // Adding 5ms delay to let file system settle down + usleep(5*1000); + _wd_v4l = inotify_add_watch(_fd_v4l, "/dev/v4l/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF); + if (_wd_v4l >= 0) { + DeviceChange(); + } + } + } + } else if (event->mask & IN_DELETE) { + if (fd == _fd_v4l) { + DeviceChange(); + } + } else if (event->mask & IN_DELETE_SELF) { + if (fd == _fd_v4l) { + inotify_rm_watch(_fd_v4l, _wd_v4l); + _wd_v4l = -1; + } else { + assert(false); + } + } +} + +int DeviceInfoV4l2::EventCheck(int fd) +{ + struct pollfd fds = { + .fd = fd, + .events = POLLIN, + .revents = 0 + }; + + return poll(&fds, 1, 100); +} + +int DeviceInfoV4l2::HandleEvents(int fd) +{ + char buffer[BUF_LEN]; + + ssize_t r = read(fd, buffer, BUF_LEN); + + if (r <= 0) { + return r; + } + + ssize_t buffer_i = 0; + inotify_event* pevent; + size_t eventSize; + int count = 0; + + while (buffer_i < r) + { + pevent = (inotify_event *) (&buffer[buffer_i]); + eventSize = sizeof(inotify_event) + pevent->len; + char event[sizeof(inotify_event) + FILENAME_MAX + 1] // null-terminated + __attribute__ ((aligned(__alignof__(struct inotify_event)))); + + memcpy(event, pevent, eventSize); + + HandleEvent((inotify_event*)(event), fd); + + buffer_i += 
eventSize; + count++; + } + + return count; +} + +int DeviceInfoV4l2::ProcessInotifyEvents() +{ + while (!_isShutdown) { + if (EventCheck(_fd_dev) > 0) { + if (HandleEvents(_fd_dev) < 0) { + break; + } + } + if (EventCheck(_fd_v4l) > 0) { + if (HandleEvents(_fd_v4l) < 0) { + break; + } + } + } + return 0; +} + +void DeviceInfoV4l2::InotifyProcess() +{ + _fd_v4l = inotify_init(); + _fd_dev = inotify_init(); + if (_fd_v4l >= 0 && _fd_dev >= 0) { + _wd_v4l = inotify_add_watch(_fd_v4l, "/dev/v4l/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF); + _wd_dev = inotify_add_watch(_fd_dev, "/dev/", IN_CREATE); + ProcessInotifyEvents(); + + if (_wd_v4l >= 0) { + inotify_rm_watch(_fd_v4l, _wd_v4l); + } + + if (_wd_dev >= 0) { + inotify_rm_watch(_fd_dev, _wd_dev); + } + + close(_fd_v4l); + close(_fd_dev); + } +} +#endif + +DeviceInfoV4l2::DeviceInfoV4l2() : DeviceInfoImpl() +#ifdef WEBRTC_LINUX + , _isShutdown(false) +#endif +{ +#ifdef WEBRTC_LINUX + _inotifyEventThread = rtc::PlatformThread::SpawnJoinable( + [this] { + InotifyProcess(); + }, "InotifyEventThread"); +#endif +} + +int32_t DeviceInfoV4l2::Init() { + return 0; +} + +DeviceInfoV4l2::~DeviceInfoV4l2() { +#ifdef WEBRTC_LINUX + _isShutdown = true; + + if (!_inotifyEventThread.empty()) { + _inotifyEventThread.Finalize(); + } +#endif +} + +uint32_t DeviceInfoV4l2::NumberOfDevices() { + uint32_t count = 0; + char device[20]; + int fd = -1; + struct v4l2_capability cap; + + /* detect /dev/video [0-63]VideoCaptureModule entries */ + for (int n = 0; n < 64; n++) { + snprintf(device, sizeof(device), "/dev/video%d", n); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities and make sure this is a video capture device + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !IsVideoCaptureDevice(&cap)) { + close(fd); + continue; + } + + close(fd); + count++; + } + } + + return count; +} + +int32_t DeviceInfoV4l2::GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* 
deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* /*productUniqueIdUTF8*/, + uint32_t /*productUniqueIdUTF8Length*/, + pid_t* /*pid*/, + bool* /*deviceIsPlaceholder*/) { + // Travel through /dev/video [0-63] + uint32_t count = 0; + char device[20]; + int fd = -1; + bool found = false; + struct v4l2_capability cap; + for (int n = 0; n < 64; n++) { + snprintf(device, sizeof(device), "/dev/video%d", n); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities and make sure this is a video capture device + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !IsVideoCaptureDevice(&cap)) { + close(fd); + continue; + } + if (count == deviceNumber) { + // Found the device + found = true; + break; + } else { + close(fd); + count++; + } + } + } + + if (!found) + return -1; + + // query device capabilities + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { + RTC_LOG(LS_INFO) << "error in querying the device capability for device " + << device << ". errno = " << errno; + close(fd); + return -1; + } + + close(fd); + + char cameraName[64]; + memset(deviceNameUTF8, 0, deviceNameLength); + memcpy(cameraName, cap.card, sizeof(cap.card)); + + if (deviceNameLength > strlen(cameraName)) { + memcpy(deviceNameUTF8, cameraName, strlen(cameraName)); + } else { + RTC_LOG(LS_INFO) << "buffer passed is too small"; + return -1; + } + + if (cap.bus_info[0] != 0) { // may not available in all drivers + // copy device id + size_t len = strlen(reinterpret_cast(cap.bus_info)); + if (deviceUniqueIdUTF8Length > len) { + memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length); + memcpy(deviceUniqueIdUTF8, cap.bus_info, len); + } else { + RTC_LOG(LS_INFO) << "buffer passed is too small"; + return -1; + } + } + + return 0; +} + +int32_t DeviceInfoV4l2::CreateCapabilityMap(const char* deviceUniqueIdUTF8) { + int fd; + char device[32]; + bool found = false; + + const int32_t deviceUniqueIdUTF8Length = strlen(deviceUniqueIdUTF8); + if (deviceUniqueIdUTF8Length >= 
kVideoCaptureUniqueNameLength) { + RTC_LOG(LS_INFO) << "Device name too long"; + return -1; + } + RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device " + << deviceUniqueIdUTF8; + + /* detect /dev/video [0-63] entries */ + for (int n = 0; n < 64; ++n) { + snprintf(device, sizeof(device), "/dev/video%d", n); + fd = open(device, O_RDONLY); + if (fd == -1) + continue; + + // query device capabilities + struct v4l2_capability cap; + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { + // skip devices without video capture capability + if (!IsVideoCaptureDevice(&cap)) { + close(fd); + continue; + } + + if (cap.bus_info[0] != 0) { + if (strncmp(reinterpret_cast(cap.bus_info), + deviceUniqueIdUTF8, + strlen(deviceUniqueIdUTF8)) == 0) { // match with device id + found = true; + break; // fd matches with device unique id supplied + } + } else { // match for device name + if (IsDeviceNameMatches(reinterpret_cast(cap.card), + deviceUniqueIdUTF8)) { + found = true; + break; + } + } + } + close(fd); // close since this is not the matching device + } + + if (!found) { + RTC_LOG(LS_INFO) << "no matching device found"; + return -1; + } + + // now fd will point to the matching device + // reset old capability list. 
+ _captureCapabilities.clear(); + + int size = FillCapabilities(fd); + close(fd); + + // Store the new used device name + _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length; + _lastUsedDeviceName = reinterpret_cast( + realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1)); + memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, + _lastUsedDeviceNameLength + 1); + + RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); + + return size; +} + +int32_t DeviceInfoV4l2::DisplayCaptureSettingsDialogBox( + const char* /*deviceUniqueIdUTF8*/, + const char* /*dialogTitleUTF8*/, + void* /*parentWindow*/, + uint32_t /*positionX*/, + uint32_t /*positionY*/) { + return -1; +} + +bool DeviceInfoV4l2::IsDeviceNameMatches(const char* name, + const char* deviceUniqueIdUTF8) { + if (strncmp(deviceUniqueIdUTF8, name, strlen(name)) == 0) + return true; + return false; +} + +bool DeviceInfoV4l2::IsVideoCaptureDevice(struct v4l2_capability* cap) +{ + if (cap->capabilities & V4L2_CAP_DEVICE_CAPS) { + return cap->device_caps & V4L2_CAP_VIDEO_CAPTURE; + } else { + return cap->capabilities & V4L2_CAP_VIDEO_CAPTURE; + } +} + +int32_t DeviceInfoV4l2::FillCapabilities(int fd) { + // set image format + struct v4l2_format video_fmt; + memset(&video_fmt, 0, sizeof(struct v4l2_format)); + + video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + video_fmt.fmt.pix.sizeimage = 0; + + unsigned int videoFormats[] = { + V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_JPEG, V4L2_PIX_FMT_YUV420, + V4L2_PIX_FMT_YVU420, V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY, + V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_BGR24, V4L2_PIX_FMT_RGB24, + V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_ABGR32, V4L2_PIX_FMT_ARGB32, + V4L2_PIX_FMT_RGBA32, V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_RGB32, + }; + constexpr int totalFmts = sizeof(videoFormats) / sizeof(unsigned int); + + int sizes = 13; + unsigned int size[][2] = {{128, 96}, {160, 120}, {176, 144}, {320, 240}, + {352, 288}, {640, 480}, {704, 576}, {800, 600}, + {960, 720}, {1280, 720}, {1024, 768}, {1440, 
1080}, + {1920, 1080}}; + + for (int fmts = 0; fmts < totalFmts; fmts++) { + for (int i = 0; i < sizes; i++) { + video_fmt.fmt.pix.pixelformat = videoFormats[fmts]; + video_fmt.fmt.pix.width = size[i][0]; + video_fmt.fmt.pix.height = size[i][1]; + + if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) >= 0) { + if ((video_fmt.fmt.pix.width == size[i][0]) && + (video_fmt.fmt.pix.height == size[i][1])) { + VideoCaptureCapability cap; + cap.width = video_fmt.fmt.pix.width; + cap.height = video_fmt.fmt.pix.height; + if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV) { + cap.videoType = VideoType::kYUY2; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420) { + cap.videoType = VideoType::kI420; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_YVU420) { + cap.videoType = VideoType::kYV12; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG || + videoFormats[fmts] == V4L2_PIX_FMT_JPEG) { + cap.videoType = VideoType::kMJPEG; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY) { + cap.videoType = VideoType::kUYVY; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_NV12) { + cap.videoType = VideoType::kNV12; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_BGR24) { + // NB that for RGB formats, `VideoType` follows naming conventions + // of libyuv[1], where e.g. the format for FOURCC "ARGB" stores + // pixels in BGRA order in memory. V4L2[2] on the other hand names + // its formats based on the order of the RGB components as stored in + // memory. Applies to all RGB formats below. 
+ // [1]https://chromium.googlesource.com/libyuv/libyuv/+/refs/heads/main/docs/formats.md#the-argb-fourcc + // [2]https://www.kernel.org/doc/html/v6.2/userspace-api/media/v4l/pixfmt-rgb.html#bits-per-component + cap.videoType = VideoType::kRGB24; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB24) { + cap.videoType = VideoType::kBGR24; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB565) { + cap.videoType = VideoType::kRGB565; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_ABGR32) { + cap.videoType = VideoType::kARGB; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_ARGB32) { + cap.videoType = VideoType::kBGRA; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_BGR32) { + cap.videoType = VideoType::kARGB; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB32) { + cap.videoType = VideoType::kBGRA; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGBA32) { + cap.videoType = VideoType::kABGR; + } else { + RTC_DCHECK_NOTREACHED(); + } + + // get fps of current camera mode + // V4l2 does not have a stable method of knowing so we just guess. 
+ if (cap.width >= 800 && cap.videoType != VideoType::kMJPEG) { + cap.maxFPS = 15; + } else { + cap.maxFPS = 30; + } + + _captureCapabilities.push_back(cap); + RTC_LOG(LS_VERBOSE) << "Camera capability, width:" << cap.width + << " height:" << cap.height + << " type:" << static_cast(cap.videoType) + << " fps:" << cap.maxFPS; + } + } + } + } + + RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); + return _captureCapabilities.size(); +} + +} // namespace videocapturemodule +} // namespace webrtc + +#ifdef ABGR32_OVERRIDE +#undef ABGR32_OVERRIDE +#undef V4L2_PIX_FMT_ABGR32 +#endif + +#ifdef ARGB32_OVERRIDE +#undef ARGB32_OVERRIDE +#undef V4L2_PIX_FMT_ARGB32 +#endif + +#ifdef RGBA32_OVERRIDE +#undef RGBA32_OVERRIDE +#undef V4L2_PIX_FMT_RGBA32 +#endif diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h new file mode 100644 index 0000000000..55415845ad --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_ +#define MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_ + +#include + +#include "modules/video_capture/device_info_impl.h" + +#include "rtc_base/platform_thread.h" +#ifdef WEBRTC_LINUX +#include +#endif + +struct v4l2_capability; + +namespace webrtc { +namespace videocapturemodule { +class DeviceInfoV4l2 : public DeviceInfoImpl { + public: + DeviceInfoV4l2(); + ~DeviceInfoV4l2() override; + uint32_t NumberOfDevices() override; + int32_t GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8 = 0, + uint32_t productUniqueIdUTF8Length = 0, + pid_t* pid = 0, + bool* deviceIsPlaceholder = 0) override; + /* + * Fills the membervariable _captureCapabilities with capabilites for the + * given device name. + */ + int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); + int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/, + const char* /*dialogTitleUTF8*/, + void* /*parentWindow*/, + uint32_t /*positionX*/, + uint32_t /*positionY*/) override; + int32_t FillCapabilities(int fd) RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); + int32_t Init() override; + + private: + bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8); + bool IsVideoCaptureDevice(struct v4l2_capability* cap); + +#ifdef WEBRTC_LINUX + void HandleEvent(inotify_event* event, int fd); + int EventCheck(int fd); + int HandleEvents(int fd); + int ProcessInotifyEvents(); + rtc::PlatformThread _inotifyEventThread; + void InotifyProcess(); + int _fd_v4l, _fd_dev, _wd_v4l, _wd_dev; /* accessed on InotifyEventThread thread */ + std::atomic _isShutdown; +#endif +}; +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_ diff --git 
a/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc b/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc new file mode 100644 index 0000000000..4d1b200aca --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc @@ -0,0 +1,400 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/linux/pipewire_session.h" + +#include +#include +#include +#include +#include + +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_capture/device_info_impl.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { +namespace videocapturemodule { + +VideoType PipeWireRawFormatToVideoType(uint32_t id) { + switch (id) { + case SPA_VIDEO_FORMAT_I420: + return VideoType::kI420; + case SPA_VIDEO_FORMAT_NV12: + return VideoType::kNV12; + case SPA_VIDEO_FORMAT_YUY2: + return VideoType::kYUY2; + case SPA_VIDEO_FORMAT_UYVY: + return VideoType::kUYVY; + case SPA_VIDEO_FORMAT_RGB: + return VideoType::kRGB24; + default: + return VideoType::kUnknown; + } +} + +PipeWireNode::PipeWireNode(PipeWireSession* session, + uint32_t id, + const spa_dict* props) + : session_(session), + id_(id), + display_name_(spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION)), + unique_id_(rtc::ToString(id)) { + RTC_LOG(LS_VERBOSE) << "Found Camera: " << display_name_; + + proxy_ = static_cast(pw_registry_bind( + session_->pw_registry_, id, PW_TYPE_INTERFACE_Node, PW_VERSION_NODE, 0)); + + static const pw_node_events node_events{ + .version = PW_VERSION_NODE_EVENTS, 
+ .info = OnNodeInfo, + .param = OnNodeParam, + }; + + pw_node_add_listener(proxy_, &node_listener_, &node_events, this); +} + +PipeWireNode::~PipeWireNode() { + pw_proxy_destroy(proxy_); + spa_hook_remove(&node_listener_); +} + +// static +void PipeWireNode::OnNodeInfo(void* data, const pw_node_info* info) { + PipeWireNode* that = static_cast(data); + + if (info->change_mask & PW_NODE_CHANGE_MASK_PROPS) { + const char* vid_str; + const char* pid_str; + absl::optional vid; + absl::optional pid; + + vid_str = spa_dict_lookup(info->props, SPA_KEY_DEVICE_VENDOR_ID); + pid_str = spa_dict_lookup(info->props, SPA_KEY_DEVICE_PRODUCT_ID); + vid = vid_str ? rtc::StringToNumber(vid_str) : absl::nullopt; + pid = pid_str ? rtc::StringToNumber(pid_str) : absl::nullopt; + + if (vid && pid) { + char model_str[10]; + snprintf(model_str, sizeof(model_str), "%04x:%04x", vid.value(), + pid.value()); + that->model_id_ = model_str; + } + } else if (info->change_mask & PW_NODE_CHANGE_MASK_PARAMS) { + for (uint32_t i = 0; i < info->n_params; i++) { + uint32_t id = info->params[i].id; + if (id == SPA_PARAM_EnumFormat && + info->params[i].flags & SPA_PARAM_INFO_READ) { + pw_node_enum_params(that->proxy_, 0, id, 0, UINT32_MAX, nullptr); + break; + } + } + that->session_->PipeWireSync(); + } +} + +// static +void PipeWireNode::OnNodeParam(void* data, + int seq, + uint32_t id, + uint32_t index, + uint32_t next, + const spa_pod* param) { + PipeWireNode* that = static_cast(data); + auto* obj = reinterpret_cast(param); + const spa_pod_prop* prop = nullptr; + VideoCaptureCapability cap; + spa_pod* val; + uint32_t n_items, choice; + + cap.videoType = VideoType::kUnknown; + cap.maxFPS = 0; + + prop = spa_pod_object_find_prop(obj, prop, SPA_FORMAT_VIDEO_framerate); + if (prop) { + val = spa_pod_get_values(&prop->value, &n_items, &choice); + if (val->type == SPA_TYPE_Fraction) { + spa_fraction* fract; + + fract = static_cast(SPA_POD_BODY(val)); + + if (choice == SPA_CHOICE_None) + cap.maxFPS = 1.0 * 
fract[0].num / fract[0].denom; + else if (choice == SPA_CHOICE_Range && fract[1].num > 0) + cap.maxFPS = 1.0 * fract[1].num / fract[1].denom; + } + } + + prop = spa_pod_object_find_prop(obj, prop, SPA_FORMAT_VIDEO_size); + if (!prop) + return; + + val = spa_pod_get_values(&prop->value, &n_items, &choice); + if (val->type != SPA_TYPE_Rectangle) + return; + + if (choice != SPA_CHOICE_None) + return; + + if (!ParseFormat(param, &cap)) + return; + + spa_rectangle* rect; + rect = static_cast(SPA_POD_BODY(val)); + cap.width = rect[0].width; + cap.height = rect[0].height; + + RTC_LOG(LS_VERBOSE) << "Found Format(" << that->display_name_ + << "): " << static_cast(cap.videoType) << "(" + << cap.width << "x" << cap.height << "@" << cap.maxFPS + << ")"; + + that->capabilities_.push_back(cap); +} + +// static +bool PipeWireNode::ParseFormat(const spa_pod* param, + VideoCaptureCapability* cap) { + auto* obj = reinterpret_cast(param); + uint32_t media_type, media_subtype; + + if (spa_format_parse(param, &media_type, &media_subtype) < 0) { + RTC_LOG(LS_ERROR) << "Failed to parse video format."; + return false; + } + + if (media_type != SPA_MEDIA_TYPE_video) + return false; + + if (media_subtype == SPA_MEDIA_SUBTYPE_raw) { + const spa_pod_prop* prop = nullptr; + uint32_t n_items, choice; + spa_pod* val; + uint32_t* id; + + prop = spa_pod_object_find_prop(obj, prop, SPA_FORMAT_VIDEO_format); + if (!prop) + return false; + + val = spa_pod_get_values(&prop->value, &n_items, &choice); + if (val->type != SPA_TYPE_Id) + return false; + + if (choice != SPA_CHOICE_None) + return false; + + id = static_cast(SPA_POD_BODY(val)); + + cap->videoType = PipeWireRawFormatToVideoType(id[0]); + if (cap->videoType == VideoType::kUnknown) { + RTC_LOG(LS_INFO) << "Unsupported PipeWire pixel format " << id[0]; + return false; + } + + } else if (media_subtype == SPA_MEDIA_SUBTYPE_mjpg) { + cap->videoType = VideoType::kMJPEG; + } else { + RTC_LOG(LS_INFO) << "Unsupported PipeWire media subtype " << 
media_subtype; + } + + return cap->videoType != VideoType::kUnknown; +} + +CameraPortalNotifier::CameraPortalNotifier(PipeWireSession* session) + : session_(session) {} + +void CameraPortalNotifier::OnCameraRequestResult( + xdg_portal::RequestResponse result, + int fd) { + if (result == xdg_portal::RequestResponse::kSuccess) { + session_->InitPipeWire(fd); + } else if (result == xdg_portal::RequestResponse::kUserCancelled) { + session_->Finish(VideoCaptureOptions::Status::DENIED); + } else { + session_->Finish(VideoCaptureOptions::Status::ERROR); + } +} + +PipeWireSession::PipeWireSession() + : status_(VideoCaptureOptions::Status::UNINITIALIZED) {} + +PipeWireSession::~PipeWireSession() { + Cleanup(); +} + +void PipeWireSession::Init(VideoCaptureOptions::Callback* callback, int fd) { + { + webrtc::MutexLock lock(&callback_lock_); + callback_ = callback; + } + + if (fd != kInvalidPipeWireFd) { + InitPipeWire(fd); + } else { + portal_notifier_ = std::make_unique(this); + portal_ = std::make_unique(portal_notifier_.get()); + portal_->Start(); + } +} + +void PipeWireSession::InitPipeWire(int fd) { + if (!InitializePipeWire()) + Finish(VideoCaptureOptions::Status::UNAVAILABLE); + + if (!StartPipeWire(fd)) + Finish(VideoCaptureOptions::Status::ERROR); +} + +bool PipeWireSession::StartPipeWire(int fd) { + pw_init(/*argc=*/nullptr, /*argv=*/nullptr); + + pw_main_loop_ = pw_thread_loop_new("pipewire-main-loop", nullptr); + + pw_context_ = + pw_context_new(pw_thread_loop_get_loop(pw_main_loop_), nullptr, 0); + if (!pw_context_) { + RTC_LOG(LS_ERROR) << "Failed to create PipeWire context"; + return false; + } + + pw_core_ = pw_context_connect_fd(pw_context_, fd, nullptr, 0); + if (!pw_core_) { + RTC_LOG(LS_ERROR) << "Failed to connect PipeWire context"; + return false; + } + + static const pw_core_events core_events{ + .version = PW_VERSION_CORE_EVENTS, + .done = &OnCoreDone, + .error = &OnCoreError, + }; + + pw_core_add_listener(pw_core_, &core_listener_, &core_events, 
this); + + static const pw_registry_events registry_events{ + .version = PW_VERSION_REGISTRY_EVENTS, + .global = OnRegistryGlobal, + .global_remove = OnRegistryGlobalRemove, + }; + + pw_registry_ = pw_core_get_registry(pw_core_, PW_VERSION_REGISTRY, 0); + pw_registry_add_listener(pw_registry_, ®istry_listener_, ®istry_events, + this); + + PipeWireSync(); + + if (pw_thread_loop_start(pw_main_loop_) < 0) { + RTC_LOG(LS_ERROR) << "Failed to start main PipeWire loop"; + return false; + } + + return true; +} + +void PipeWireSession::StopPipeWire() { + if (pw_main_loop_) + pw_thread_loop_stop(pw_main_loop_); + + if (pw_core_) { + pw_core_disconnect(pw_core_); + pw_core_ = nullptr; + } + + if (pw_context_) { + pw_context_destroy(pw_context_); + pw_context_ = nullptr; + } + + if (pw_main_loop_) { + pw_thread_loop_destroy(pw_main_loop_); + pw_main_loop_ = nullptr; + } +} + +void PipeWireSession::PipeWireSync() { + sync_seq_ = pw_core_sync(pw_core_, PW_ID_CORE, sync_seq_); +} + +// static +void PipeWireSession::OnCoreError(void* data, + uint32_t id, + int seq, + int res, + const char* message) { + RTC_LOG(LS_ERROR) << "PipeWire remote error: " << message; +} + +// static +void PipeWireSession::OnCoreDone(void* data, uint32_t id, int seq) { + PipeWireSession* that = static_cast(data); + + if (id == PW_ID_CORE) { + if (seq == that->sync_seq_) { + RTC_LOG(LS_VERBOSE) << "Enumerating PipeWire camera devices complete."; + that->Finish(VideoCaptureOptions::Status::SUCCESS); + } + } +} + +// static +void PipeWireSession::OnRegistryGlobal(void* data, + uint32_t id, + uint32_t permissions, + const char* type, + uint32_t version, + const spa_dict* props) { + PipeWireSession* that = static_cast(data); + + if (type != absl::string_view(PW_TYPE_INTERFACE_Node)) + return; + + if (!spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION)) + return; + + auto node_role = spa_dict_lookup(props, PW_KEY_MEDIA_ROLE); + if (!node_role || strcmp(node_role, "Camera")) + return; + + 
that->nodes_.emplace_back(that, id, props); + that->PipeWireSync(); +} + +// static +void PipeWireSession::OnRegistryGlobalRemove(void* data, uint32_t id) { + PipeWireSession* that = static_cast(data); + + for (auto it = that->nodes_.begin(); it != that->nodes().end(); ++it) { + if ((*it).id() == id) { + that->nodes_.erase(it); + break; + } + } +} + +void PipeWireSession::Finish(VideoCaptureOptions::Status status) { + webrtc::MutexLock lock(&callback_lock_); + + if (callback_) { + callback_->OnInitialized(status); + callback_ = nullptr; + } +} + +void PipeWireSession::Cleanup() { + webrtc::MutexLock lock(&callback_lock_); + callback_ = nullptr; + + StopPipeWire(); +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.h b/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.h new file mode 100644 index 0000000000..fdc06a6b2a --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.h @@ -0,0 +1,145 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CAPTURE_LINUX_PIPEWIRE_SESSION_H_ +#define MODULES_VIDEO_CAPTURE_LINUX_PIPEWIRE_SESSION_H_ + +#include +#include + +#include +#include +#include + +#include "api/ref_counted_base.h" +#include "api/scoped_refptr.h" +#include "modules/portal/pipewire_utils.h" +#include "modules/video_capture/linux/camera_portal.h" +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_options.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +namespace videocapturemodule { + +class PipeWireSession; +class VideoCaptureModulePipeWire; + +// PipeWireNode objects are the local representation of PipeWire node objects. +// The portal API ensured that only camera nodes are visible to the client. +// So they all represent one camera that is available via PipeWire. +class PipeWireNode { + public: + PipeWireNode(PipeWireSession* session, uint32_t id, const spa_dict* props); + ~PipeWireNode(); + + uint32_t id() const { return id_; } + std::string display_name() const { return display_name_; } + std::string unique_id() const { return unique_id_; } + std::string model_id() const { return model_id_; } + std::vector capabilities() const { + return capabilities_; + } + + private: + static void OnNodeInfo(void* data, const pw_node_info* info); + static void OnNodeParam(void* data, + int seq, + uint32_t id, + uint32_t index, + uint32_t next, + const spa_pod* param); + static bool ParseFormat(const spa_pod* param, VideoCaptureCapability* cap); + + pw_proxy* proxy_; + spa_hook node_listener_; + PipeWireSession* session_; + uint32_t id_; + std::string display_name_; + std::string unique_id_; + std::string model_id_; + std::vector capabilities_; +}; + +class CameraPortalNotifier : public CameraPortal::PortalNotifier { + public: + CameraPortalNotifier(PipeWireSession* session); + ~CameraPortalNotifier() = default; + + void OnCameraRequestResult(xdg_portal::RequestResponse result, + int fd) override; + + private: + 
PipeWireSession* session_; +}; + +class PipeWireSession : public rtc::RefCountedNonVirtual { + public: + PipeWireSession(); + ~PipeWireSession(); + + void Init(VideoCaptureOptions::Callback* callback, + int fd = kInvalidPipeWireFd); + + const std::deque& nodes() const { return nodes_; } + + friend class CameraPortalNotifier; + friend class PipeWireNode; + friend class VideoCaptureModulePipeWire; + + private: + void InitPipeWire(int fd); + bool StartPipeWire(int fd); + void StopPipeWire(); + void PipeWireSync(); + + static void OnCoreError(void* data, + uint32_t id, + int seq, + int res, + const char* message); + static void OnCoreDone(void* data, uint32_t id, int seq); + + static void OnRegistryGlobal(void* data, + uint32_t id, + uint32_t permissions, + const char* type, + uint32_t version, + const spa_dict* props); + static void OnRegistryGlobalRemove(void* data, uint32_t id); + + void Finish(VideoCaptureOptions::Status status); + void Cleanup(); + + webrtc::Mutex callback_lock_; + VideoCaptureOptions::Callback* callback_ RTC_GUARDED_BY(&callback_lock_) = + nullptr; + + VideoCaptureOptions::Status status_; + + struct pw_thread_loop* pw_main_loop_ = nullptr; + struct pw_context* pw_context_ = nullptr; + struct pw_core* pw_core_ = nullptr; + struct spa_hook core_listener_; + + struct pw_registry* pw_registry_ = nullptr; + struct spa_hook registry_listener_; + + int sync_seq_ = 0; + + std::deque nodes_; + std::unique_ptr portal_; + std::unique_ptr portal_notifier_; +}; + +} // namespace videocapturemodule +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_LINUX_PIPEWIRE_SESSION_H_ diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc new file mode 100644 index 0000000000..b2c206d775 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +// v4l includes +#if defined(__NetBSD__) || defined(__OpenBSD__) // WEBRTC_BSD +#include +#elif defined(__sun) +#include +#else +#include +#endif + +#include +#include + +#include "api/scoped_refptr.h" +#include "media/base/video_common.h" +#if defined(WEBRTC_USE_PIPEWIRE) +#include "modules/video_capture/linux/video_capture_pipewire.h" +#endif +#include "modules/video_capture/linux/video_capture_v4l2.h" +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_options.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace videocapturemodule { +rtc::scoped_refptr VideoCaptureImpl::Create( + const char* deviceUniqueId) { + auto implementation = rtc::make_ref_counted(); + + if (implementation->Init(deviceUniqueId) != 0) + return nullptr; + + return implementation; +} + +rtc::scoped_refptr VideoCaptureImpl::Create( + VideoCaptureOptions* options, + const char* deviceUniqueId) { +#if defined(WEBRTC_USE_PIPEWIRE) + if (options->allow_pipewire()) { + auto implementation = + rtc::make_ref_counted(options); + + if (implementation->Init(deviceUniqueId) == 0) + return implementation; + } +#endif + if (options->allow_v4l2()) { + auto implementation = rtc::make_ref_counted(); + + if (implementation->Init(deviceUniqueId) == 0) + return implementation; + } + return nullptr; +} +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc 
b/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc new file mode 100644 index 0000000000..8af483636a --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc @@ -0,0 +1,414 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/linux/video_capture_pipewire.h" + +#include +#include +#include +#include + +#include + +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/portal/pipewire_utils.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { +namespace videocapturemodule { + +struct { + uint32_t spa_format; + VideoType video_type; +} constexpr kSupportedFormats[] = { + {SPA_VIDEO_FORMAT_I420, VideoType::kI420}, + {SPA_VIDEO_FORMAT_NV12, VideoType::kNV12}, + {SPA_VIDEO_FORMAT_YUY2, VideoType::kYUY2}, + {SPA_VIDEO_FORMAT_UYVY, VideoType::kUYVY}, + {SPA_VIDEO_FORMAT_RGB, VideoType::kRGB24}, +}; + +VideoType VideoCaptureModulePipeWire::PipeWireRawFormatToVideoType( + uint32_t spa_format) { + for (const auto& spa_and_pixel_format : kSupportedFormats) { + if (spa_and_pixel_format.spa_format == spa_format) + return spa_and_pixel_format.video_type; + } + RTC_LOG(LS_INFO) << "Unsupported pixel format: " << spa_format; + return VideoType::kUnknown; +} + +VideoCaptureModulePipeWire::VideoCaptureModulePipeWire( + VideoCaptureOptions* options) + : VideoCaptureImpl(), + session_(options->pipewire_session()), + initialized_(false), + started_(false) {} + +VideoCaptureModulePipeWire::~VideoCaptureModulePipeWire() { + RTC_DCHECK_RUN_ON(&api_checker_); + + 
StopCapture(); +} + +int32_t VideoCaptureModulePipeWire::Init(const char* deviceUniqueId) { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + RTC_DCHECK_RUN_ON(&api_checker_); + + absl::optional id; + id = rtc::StringToNumber(deviceUniqueId); + if (id == absl::nullopt) + return -1; + + node_id_ = id.value(); + + const int len = strlen(deviceUniqueId); + _deviceUniqueId = new (std::nothrow) char[len + 1]; + memcpy(_deviceUniqueId, deviceUniqueId, len + 1); + + return 0; +} + +static spa_pod* BuildFormat(spa_pod_builder* builder, + uint32_t format, + uint32_t width, + uint32_t height, + float frame_rate) { + spa_pod_frame frames[2]; + + spa_pod_builder_push_object(builder, &frames[0], SPA_TYPE_OBJECT_Format, + SPA_PARAM_EnumFormat); + spa_pod_builder_add(builder, SPA_FORMAT_mediaType, + SPA_POD_Id(SPA_MEDIA_TYPE_video), SPA_FORMAT_mediaSubtype, + SPA_POD_Id(format), 0); + + if (format == SPA_MEDIA_SUBTYPE_raw) { + spa_pod_builder_prop(builder, SPA_FORMAT_VIDEO_format, 0); + spa_pod_builder_push_choice(builder, &frames[1], SPA_CHOICE_Enum, 0); + spa_pod_builder_id(builder, kSupportedFormats[0].spa_format); + for (const auto& spa_and_pixel_format : kSupportedFormats) + spa_pod_builder_id(builder, spa_and_pixel_format.spa_format); + spa_pod_builder_pop(builder, &frames[1]); + } + + spa_rectangle preferred_size = spa_rectangle{width, height}; + spa_rectangle min_size = spa_rectangle{1, 1}; + spa_rectangle max_size = spa_rectangle{4096, 4096}; + spa_pod_builder_add( + builder, SPA_FORMAT_VIDEO_size, + SPA_POD_CHOICE_RANGE_Rectangle(&preferred_size, &min_size, &max_size), 0); + + spa_fraction preferred_frame_rate = + spa_fraction{static_cast(frame_rate), 1}; + spa_fraction min_frame_rate = spa_fraction{0, 1}; + spa_fraction max_frame_rate = spa_fraction{INT32_MAX, 1}; + spa_pod_builder_add( + builder, SPA_FORMAT_VIDEO_framerate, + SPA_POD_CHOICE_RANGE_Fraction(&preferred_frame_rate, &min_frame_rate, + &max_frame_rate), + 0); + + return 
static_cast(spa_pod_builder_pop(builder, &frames[0])); +} + +int32_t VideoCaptureModulePipeWire::StartCapture( + const VideoCaptureCapability& capability) { + RTC_DCHECK_RUN_ON(&api_checker_); + + if (initialized_) { + if (capability == _requestedCapability) { + return 0; + } else { + StopCapture(); + } + } + + uint8_t buffer[1024] = {}; + + // We don't want members above to be guarded by capture_checker_ as + // it's meant to be for members that are accessed on the API thread + // only when we are not capturing. The code above can be called many + // times while sharing instance of VideoCapturePipeWire between + // websites and therefore it would not follow the requirements of this + // checker. + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + PipeWireThreadLoopLock thread_loop_lock(session_->pw_main_loop_); + + RTC_LOG(LS_VERBOSE) << "Creating new PipeWire stream for node " << node_id_; + + pw_properties* reuse_props = + pw_properties_new_string("pipewire.client.reuse=1"); + stream_ = pw_stream_new(session_->pw_core_, "camera-stream", reuse_props); + + if (!stream_) { + RTC_LOG(LS_ERROR) << "Failed to create camera stream!"; + return -1; + } + + static const pw_stream_events stream_events{ + .version = PW_VERSION_STREAM_EVENTS, + .state_changed = &OnStreamStateChanged, + .param_changed = &OnStreamParamChanged, + .process = &OnStreamProcess, + }; + + pw_stream_add_listener(stream_, &stream_listener_, &stream_events, this); + + spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)}; + std::vector params; + uint32_t width = capability.width; + uint32_t height = capability.height; + uint32_t frame_rate = capability.maxFPS; + bool prefer_jpeg = (width > 640) || (height > 480); + + params.push_back( + BuildFormat(&builder, SPA_MEDIA_SUBTYPE_raw, width, height, frame_rate)); + params.insert( + prefer_jpeg ? 
params.begin() : params.end(), + BuildFormat(&builder, SPA_MEDIA_SUBTYPE_mjpg, width, height, frame_rate)); + + int res = pw_stream_connect( + stream_, PW_DIRECTION_INPUT, node_id_, + static_cast(PW_STREAM_FLAG_AUTOCONNECT | + PW_STREAM_FLAG_DONT_RECONNECT | + PW_STREAM_FLAG_MAP_BUFFERS), + params.data(), params.size()); + if (res != 0) { + RTC_LOG(LS_ERROR) << "Could not connect to camera stream: " + << spa_strerror(res); + return -1; + } + + _requestedCapability = capability; + initialized_ = true; + + return 0; +} + +int32_t VideoCaptureModulePipeWire::StopCapture() { + RTC_DCHECK_RUN_ON(&api_checker_); + + PipeWireThreadLoopLock thread_loop_lock(session_->pw_main_loop_); + // PipeWireSession is guarded by API checker so just make sure we do + // race detection when the PipeWire loop is locked/stopped to not run + // any callback at this point. + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + if (stream_) { + pw_stream_destroy(stream_); + stream_ = nullptr; + } + + _requestedCapability = VideoCaptureCapability(); + return 0; +} + +bool VideoCaptureModulePipeWire::CaptureStarted() { + RTC_DCHECK_RUN_ON(&api_checker_); + MutexLock lock(&api_lock_); + + return started_; +} + +int32_t VideoCaptureModulePipeWire::CaptureSettings( + VideoCaptureCapability& settings) { + RTC_DCHECK_RUN_ON(&api_checker_); + + settings = _requestedCapability; + + return 0; +} + +void VideoCaptureModulePipeWire::OnStreamParamChanged( + void* data, + uint32_t id, + const struct spa_pod* format) { + VideoCaptureModulePipeWire* that = + static_cast(data); + RTC_DCHECK(that); + RTC_CHECK_RUNS_SERIALIZED(&that->capture_checker_); + + if (format && id == SPA_PARAM_Format) + that->OnFormatChanged(format); +} + +void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + + uint32_t media_type, media_subtype; + + if (spa_format_parse(format, &media_type, &media_subtype) < 0) { + RTC_LOG(LS_ERROR) << "Failed to parse video 
format."; + return; + } + + switch (media_subtype) { + case SPA_MEDIA_SUBTYPE_raw: { + struct spa_video_info_raw f; + spa_format_video_raw_parse(format, &f); + configured_capability_.width = f.size.width; + configured_capability_.height = f.size.height; + configured_capability_.videoType = PipeWireRawFormatToVideoType(f.format); + configured_capability_.maxFPS = f.framerate.num / f.framerate.denom; + break; + } + case SPA_MEDIA_SUBTYPE_mjpg: { + struct spa_video_info_mjpg f; + spa_format_video_mjpg_parse(format, &f); + configured_capability_.width = f.size.width; + configured_capability_.height = f.size.height; + configured_capability_.videoType = VideoType::kMJPEG; + configured_capability_.maxFPS = f.framerate.num / f.framerate.denom; + break; + } + default: + configured_capability_.videoType = VideoType::kUnknown; + } + + if (configured_capability_.videoType == VideoType::kUnknown) { + RTC_LOG(LS_ERROR) << "Unsupported video format."; + return; + } + + RTC_LOG(LS_VERBOSE) << "Configured capture format = " + << static_cast(configured_capability_.videoType); + + uint8_t buffer[1024] = {}; + auto builder = spa_pod_builder{buffer, sizeof(buffer)}; + + // Setup buffers and meta header for new format. + std::vector params; + spa_pod_frame frame; + spa_pod_builder_push_object(&builder, &frame, SPA_TYPE_OBJECT_ParamBuffers, + SPA_PARAM_Buffers); + + if (media_subtype == SPA_MEDIA_SUBTYPE_raw) { + // Enforce stride without padding. 
+ size_t stride; + switch (configured_capability_.videoType) { + case VideoType::kI420: + case VideoType::kNV12: + stride = configured_capability_.width; + break; + case VideoType::kYUY2: + case VideoType::kUYVY: + stride = configured_capability_.width * 2; + break; + case VideoType::kRGB24: + stride = configured_capability_.width * 3; + break; + default: + RTC_LOG(LS_ERROR) << "Unsupported video format."; + return; + } + spa_pod_builder_add(&builder, SPA_PARAM_BUFFERS_stride, SPA_POD_Int(stride), + 0); + } + + spa_pod_builder_add( + &builder, SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(8, 1, 32), + SPA_PARAM_BUFFERS_dataType, + SPA_POD_CHOICE_FLAGS_Int((1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr)), + 0); + params.push_back( + static_cast(spa_pod_builder_pop(&builder, &frame))); + + params.push_back(reinterpret_cast(spa_pod_builder_add_object( + &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, + SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size, + SPA_POD_Int(sizeof(struct spa_meta_header))))); + params.push_back(reinterpret_cast(spa_pod_builder_add_object( + &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, + SPA_POD_Id(SPA_META_VideoTransform), SPA_PARAM_META_size, + SPA_POD_Int(sizeof(struct spa_meta_videotransform))))); + pw_stream_update_params(stream_, params.data(), params.size()); +} + +void VideoCaptureModulePipeWire::OnStreamStateChanged( + void* data, + pw_stream_state old_state, + pw_stream_state state, + const char* error_message) { + VideoCaptureModulePipeWire* that = + static_cast(data); + RTC_DCHECK(that); + + MutexLock lock(&that->api_lock_); + switch (state) { + case PW_STREAM_STATE_STREAMING: + that->started_ = true; + break; + case PW_STREAM_STATE_ERROR: + RTC_LOG(LS_ERROR) << "PipeWire stream state error: " << error_message; + [[fallthrough]]; + case PW_STREAM_STATE_PAUSED: + case PW_STREAM_STATE_UNCONNECTED: + case PW_STREAM_STATE_CONNECTING: + that->started_ = false; + break; + } + 
RTC_LOG(LS_VERBOSE) << "PipeWire stream state change: " + << pw_stream_state_as_string(old_state) << " -> " + << pw_stream_state_as_string(state); +} + +void VideoCaptureModulePipeWire::OnStreamProcess(void* data) { + VideoCaptureModulePipeWire* that = + static_cast(data); + RTC_DCHECK(that); + RTC_CHECK_RUNS_SERIALIZED(&that->capture_checker_); + that->ProcessBuffers(); +} + +static VideoRotation VideorotationFromPipeWireTransform(uint32_t transform) { + switch (transform) { + case SPA_META_TRANSFORMATION_90: + return kVideoRotation_90; + case SPA_META_TRANSFORMATION_180: + return kVideoRotation_180; + case SPA_META_TRANSFORMATION_270: + return kVideoRotation_270; + default: + return kVideoRotation_0; + } +} + +void VideoCaptureModulePipeWire::ProcessBuffers() { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + + while (pw_buffer* buffer = pw_stream_dequeue_buffer(stream_)) { + struct spa_meta_header* h; + h = static_cast( + spa_buffer_find_meta_data(buffer->buffer, SPA_META_Header, sizeof(*h))); + + struct spa_meta_videotransform* videotransform; + videotransform = + static_cast(spa_buffer_find_meta_data( + buffer->buffer, SPA_META_VideoTransform, sizeof(*videotransform))); + if (videotransform) { + VideoRotation rotation = + VideorotationFromPipeWireTransform(videotransform->transform); + SetCaptureRotation(rotation); + SetApplyRotation(rotation != kVideoRotation_0); + } + + if (h->flags & SPA_META_HEADER_FLAG_CORRUPTED) { + RTC_LOG(LS_INFO) << "Dropping corruped frame."; + } else { + IncomingFrame(static_cast(buffer->buffer->datas[0].data), + buffer->buffer->datas[0].chunk->size, + configured_capability_); + } + pw_stream_queue_buffer(stream_, buffer); + } +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.h b/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.h new file mode 100644 index 0000000000..eeb3b9497c --- /dev/null +++ 
b/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_PIPEWIRE_H_ +#define MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_PIPEWIRE_H_ + +#include "modules/video_capture/linux/pipewire_session.h" +#include "modules/video_capture/video_capture_defines.h" +#include "modules/video_capture/video_capture_impl.h" + +namespace webrtc { +namespace videocapturemodule { +class VideoCaptureModulePipeWire : public VideoCaptureImpl { + public: + explicit VideoCaptureModulePipeWire(VideoCaptureOptions* options); + ~VideoCaptureModulePipeWire() override; + int32_t Init(const char* deviceUniqueId); + int32_t StartCapture(const VideoCaptureCapability& capability) override; + int32_t StopCapture() override; + bool CaptureStarted() override; + int32_t CaptureSettings(VideoCaptureCapability& settings) override; + + static VideoType PipeWireRawFormatToVideoType(uint32_t format); + + private: + static void OnStreamParamChanged(void* data, + uint32_t id, + const struct spa_pod* format); + static void OnStreamStateChanged(void* data, + pw_stream_state old_state, + pw_stream_state state, + const char* error_message); + + static void OnStreamProcess(void* data); + + void OnFormatChanged(const struct spa_pod* format); + void ProcessBuffers(); + + const rtc::scoped_refptr session_ + RTC_GUARDED_BY(api_checker_); + bool initialized_ RTC_GUARDED_BY(api_checker_); + bool started_ RTC_GUARDED_BY(api_lock_); + int node_id_ RTC_GUARDED_BY(capture_checker_); + VideoCaptureCapability configured_capability_ + 
RTC_GUARDED_BY(capture_checker_); + + struct pw_stream* stream_ RTC_GUARDED_BY(capture_checker_) = nullptr; + struct spa_hook stream_listener_ RTC_GUARDED_BY(capture_checker_); +}; +} // namespace videocapturemodule +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_PIPEWIRE_H_ diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc new file mode 100644 index 0000000000..c887683dc8 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc @@ -0,0 +1,485 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/linux/video_capture_v4l2.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +// v4l includes +#if defined(__NetBSD__) || defined(__OpenBSD__) // WEBRTC_BSD +#include +#elif defined(__sun) +#include +#else +#include +#endif + +#include +#include + +#include "api/scoped_refptr.h" +#include "media/base/video_common.h" +#include "modules/video_capture/video_capture.h" +#include "rtc_base/logging.h" + +// These defines are here to support building on kernel 3.16 which some +// downstream projects, e.g. Firefox, use. +// TODO(apehrson): Remove them and their undefs when no longer needed. 
+#ifndef V4L2_PIX_FMT_ABGR32 +#define ABGR32_OVERRIDE 1 +#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_ARGB32 +#define ARGB32_OVERRIDE 1 +#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_RGBA32 +#define RGBA32_OVERRIDE 1 +#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') +#endif + +namespace webrtc { +namespace videocapturemodule { +VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() + : VideoCaptureImpl(), + _deviceId(-1), + _deviceFd(-1), + _buffersAllocatedByDevice(-1), + _captureStarted(false), + _pool(NULL) {} + +int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) { + RTC_DCHECK_RUN_ON(&api_checker_); + + int len = strlen((const char*)deviceUniqueIdUTF8); + _deviceUniqueId = new (std::nothrow) char[len + 1]; + if (_deviceUniqueId) { + memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); + } + + int fd; + char device[32]; + bool found = false; + + /* detect /dev/video [0-63] entries */ + int n; + for (n = 0; n < 64; n++) { + snprintf(device, sizeof(device), "/dev/video%d", n); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities + struct v4l2_capability cap; + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { + if (cap.bus_info[0] != 0) { + if (strncmp((const char*)cap.bus_info, + (const char*)deviceUniqueIdUTF8, + strlen((const char*)deviceUniqueIdUTF8)) == + 0) { // match with device id + close(fd); + found = true; + break; // fd matches with device unique id supplied + } + } + } + close(fd); // close since this is not the matching device + } + } + if (!found) { + RTC_LOG(LS_INFO) << "no matching device found"; + return -1; + } + _deviceId = n; // store the device id + return 0; +} + +VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { + RTC_DCHECK_RUN_ON(&api_checker_); + + StopCapture(); + if (_deviceFd != -1) + close(_deviceFd); +} + +int32_t VideoCaptureModuleV4L2::StartCapture( + const VideoCaptureCapability& capability) { + 
RTC_DCHECK_RUN_ON(&api_checker_); + + if (_captureStarted) { + if (capability == _requestedCapability) { + return 0; + } else { + StopCapture(); + } + } + + // Set a baseline of configured parameters. It is updated here during + // configuration, then read from the capture thread. + configured_capability_ = capability; + + MutexLock lock(&capture_lock_); + // first open /dev/video device + char device[20]; + snprintf(device, sizeof(device), "/dev/video%d", _deviceId); + + if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) { + RTC_LOG(LS_INFO) << "error in opening " << device << " errono = " << errno; + return -1; + } + + // Supported video formats in preferred order. + // If the requested resolution is larger than VGA, we prefer MJPEG. Go for + // I420 otherwise. + unsigned int hdFmts[] = { + V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YVU420, + V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY, V4L2_PIX_FMT_NV12, + V4L2_PIX_FMT_ABGR32, V4L2_PIX_FMT_ARGB32, V4L2_PIX_FMT_RGBA32, + V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_BGR24, + V4L2_PIX_FMT_RGB24, V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_JPEG, + }; + unsigned int sdFmts[] = { + V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YVU420, V4L2_PIX_FMT_YUYV, + V4L2_PIX_FMT_UYVY, V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_ABGR32, + V4L2_PIX_FMT_ARGB32, V4L2_PIX_FMT_RGBA32, V4L2_PIX_FMT_BGR32, + V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_BGR24, V4L2_PIX_FMT_RGB24, + V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_JPEG, + }; + const bool isHd = capability.width > 640 || capability.height > 480; + unsigned int* fmts = isHd ? hdFmts : sdFmts; + static_assert(sizeof(hdFmts) == sizeof(sdFmts)); + constexpr int nFormats = sizeof(hdFmts) / sizeof(unsigned int); + + // Enumerate image formats. 
+ struct v4l2_fmtdesc fmt; + int fmtsIdx = nFormats; + memset(&fmt, 0, sizeof(fmt)); + fmt.index = 0; + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + RTC_LOG(LS_INFO) << "Video Capture enumerats supported image formats:"; + while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) { + RTC_LOG(LS_INFO) << " { pixelformat = " + << cricket::GetFourccName(fmt.pixelformat) + << ", description = '" << fmt.description << "' }"; + // Match the preferred order. + for (int i = 0; i < nFormats; i++) { + if (fmt.pixelformat == fmts[i] && i < fmtsIdx) + fmtsIdx = i; + } + // Keep enumerating. + fmt.index++; + } + + if (fmtsIdx == nFormats) { + RTC_LOG(LS_INFO) << "no supporting video formats found"; + return -1; + } else { + RTC_LOG(LS_INFO) << "We prefer format " + << cricket::GetFourccName(fmts[fmtsIdx]); + } + + struct v4l2_format video_fmt; + memset(&video_fmt, 0, sizeof(struct v4l2_format)); + video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + video_fmt.fmt.pix.sizeimage = 0; + video_fmt.fmt.pix.width = capability.width; + video_fmt.fmt.pix.height = capability.height; + video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx]; + + if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) + configured_capability_.videoType = VideoType::kYUY2; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) + configured_capability_.videoType = VideoType::kI420; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) + configured_capability_.videoType = VideoType::kYV12; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) + configured_capability_.videoType = VideoType::kUYVY; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_NV12) + configured_capability_.videoType = VideoType::kNV12; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_BGR24) + configured_capability_.videoType = VideoType::kRGB24; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) + configured_capability_.videoType = VideoType::kBGR24; + else if (video_fmt.fmt.pix.pixelformat == 
V4L2_PIX_FMT_RGB565) + configured_capability_.videoType = VideoType::kRGB565; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_ABGR32 || + video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_BGR32) + configured_capability_.videoType = VideoType::kARGB; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_ARGB32 || + video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB32) + configured_capability_.videoType = VideoType::kBGRA; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGBA32) + configured_capability_.videoType = VideoType::kABGR; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || + video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) + configured_capability_.videoType = VideoType::kMJPEG; + else + RTC_DCHECK_NOTREACHED(); + + // set format and frame size now + if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) { + RTC_LOG(LS_INFO) << "error in VIDIOC_S_FMT, errno = " << errno; + return -1; + } + + // initialize current width and height + configured_capability_.width = video_fmt.fmt.pix.width; + configured_capability_.height = video_fmt.fmt.pix.height; + + // Trying to set frame rate, before check driver capability. + bool driver_framerate_support = true; + struct v4l2_streamparm streamparms; + memset(&streamparms, 0, sizeof(streamparms)); + streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) { + RTC_LOG(LS_INFO) << "error in VIDIOC_G_PARM errno = " << errno; + driver_framerate_support = false; + // continue + } else { + // check the capability flag is set to V4L2_CAP_TIMEPERFRAME. + if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { + // driver supports the feature. Set required framerate. 
+ memset(&streamparms, 0, sizeof(streamparms)); + streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + streamparms.parm.capture.timeperframe.numerator = 1; + streamparms.parm.capture.timeperframe.denominator = capability.maxFPS; + if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) { + RTC_LOG(LS_INFO) << "Failed to set the framerate. errno=" << errno; + driver_framerate_support = false; + } + } + } + // If driver doesn't support framerate control, need to hardcode. + // Hardcoding the value based on the frame size. + if (!driver_framerate_support) { + if (configured_capability_.width >= 800 && + configured_capability_.videoType != VideoType::kMJPEG) { + configured_capability_.maxFPS = 15; + } else { + configured_capability_.maxFPS = 30; + } + } + + if (!AllocateVideoBuffers()) { + RTC_LOG(LS_INFO) << "failed to allocate video capture buffers"; + return -1; + } + + // Needed to start UVC camera - from the uvcview application + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) { + RTC_LOG(LS_INFO) << "Failed to turn on stream"; + return -1; + } + + _requestedCapability = capability; + _captureStarted = true; + + // start capture thread; + if (_captureThread.empty()) { + quit_ = false; + _captureThread = rtc::PlatformThread::SpawnJoinable( + [self = rtc::scoped_refptr(this)] { + while (self->CaptureProcess()) { + } + }, + "CaptureThread", + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh)); + } + return 0; +} + +int32_t VideoCaptureModuleV4L2::StopCapture() { + RTC_DCHECK_RUN_ON(&api_checker_); + + if (!_captureThread.empty()) { + { + MutexLock lock(&capture_lock_); + quit_ = true; + } + // Make sure the capture thread stops using the mutex. 
+ _captureThread.Finalize(); + } + + MutexLock lock(&capture_lock_); + if (_captureStarted) { + _captureStarted = false; + + DeAllocateVideoBuffers(); + close(_deviceFd); + _deviceFd = -1; + + _requestedCapability = configured_capability_ = VideoCaptureCapability(); + } + + return 0; +} + +// critical section protected by the caller + +bool VideoCaptureModuleV4L2::AllocateVideoBuffers() { + struct v4l2_requestbuffers rbuffer; + memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); + + rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + rbuffer.memory = V4L2_MEMORY_MMAP; + rbuffer.count = kNoOfV4L2Bufffers; + + if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) { + RTC_LOG(LS_INFO) << "Could not get buffers from device. errno = " << errno; + return false; + } + + if (rbuffer.count > kNoOfV4L2Bufffers) + rbuffer.count = kNoOfV4L2Bufffers; + + _buffersAllocatedByDevice = rbuffer.count; + + // Map the buffers + _pool = new Buffer[rbuffer.count]; + + for (unsigned int i = 0; i < rbuffer.count; i++) { + struct v4l2_buffer buffer; + memset(&buffer, 0, sizeof(v4l2_buffer)); + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + buffer.index = i; + + if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) { + return false; + } + + _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, + MAP_SHARED, _deviceFd, buffer.m.offset); + + if (MAP_FAILED == _pool[i].start) { + for (unsigned int j = 0; j < i; j++) + munmap(_pool[j].start, _pool[j].length); + return false; + } + + _pool[i].length = buffer.length; + + if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) { + return false; + } + } + return true; +} + +bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { + // unmap buffers + for (int i = 0; i < _buffersAllocatedByDevice; i++) + munmap(_pool[i].start, _pool[i].length); + + delete[] _pool; + + // turn off stream + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) { + RTC_LOG(LS_INFO) 
<< "VIDIOC_STREAMOFF error. errno: " << errno; + } + + return true; +} + +bool VideoCaptureModuleV4L2::CaptureStarted() { + return _captureStarted; +} + +bool VideoCaptureModuleV4L2::CaptureProcess() { + + int retVal = 0; + struct pollfd rSet; + + rSet.fd = _deviceFd; + rSet.events = POLLIN; + rSet.revents = 0; + + retVal = poll(&rSet, 1, 1000); + + { + MutexLock lock(&capture_lock_); + + if (quit_) { + return false; + } + + if (retVal < 0 && errno != EINTR) { // continue if interrupted + // poll failed + return false; + } else if (retVal == 0) { + // poll timed out + return true; + } else if (!(rSet.revents & POLLIN)) { + // not event on camera handle + return true; + } + + if (_captureStarted) { + struct v4l2_buffer buf; + memset(&buf, 0, sizeof(struct v4l2_buffer)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + // dequeue a buffer - repeat until dequeued properly! + while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) { + if (errno != EINTR) { + RTC_LOG(LS_INFO) << "could not sync on a buffer on device " + << strerror(errno); + return true; + } + } + + // convert to to I420 if needed + IncomingFrame(reinterpret_cast(_pool[buf.index].start), + buf.bytesused, configured_capability_); + // enqueue the buffer again + if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) { + RTC_LOG(LS_INFO) << "Failed to enqueue capture buffer"; + } + } + } + usleep(0); + return true; +} + +int32_t VideoCaptureModuleV4L2::CaptureSettings( + VideoCaptureCapability& settings) { + RTC_DCHECK_RUN_ON(&api_checker_); + settings = _requestedCapability; + + return 0; +} +} // namespace videocapturemodule +} // namespace webrtc + +#ifdef ABGR32_OVERRIDE +#undef ABGR32_OVERRIDE +#undef V4L2_PIX_FMT_ABGR32 +#endif + +#ifdef ARGB32_OVERRIDE +#undef ARGB32_OVERRIDE +#undef V4L2_PIX_FMT_ARGB32 +#endif + +#ifdef RGBA32_OVERRIDE +#undef RGBA32_OVERRIDE +#undef V4L2_PIX_FMT_RGBA32 +#endif diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h 
b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h new file mode 100644 index 0000000000..61358d0325 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_ +#define MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_ + +#include +#include + +#include + +#include "modules/video_capture/video_capture_defines.h" +#include "modules/video_capture/video_capture_impl.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +namespace videocapturemodule { +class VideoCaptureModuleV4L2 : public VideoCaptureImpl { + public: + VideoCaptureModuleV4L2(); + ~VideoCaptureModuleV4L2() override; + int32_t Init(const char* deviceUniqueId); + int32_t StartCapture(const VideoCaptureCapability& capability) override; + int32_t StopCapture() override; + bool CaptureStarted() override; + int32_t CaptureSettings(VideoCaptureCapability& settings) override; + + private: + enum { kNoOfV4L2Bufffers = 4 }; + + static void CaptureThread(void*); + bool CaptureProcess(); + bool AllocateVideoBuffers() RTC_EXCLUSIVE_LOCKS_REQUIRED(capture_lock_); + bool DeAllocateVideoBuffers() RTC_EXCLUSIVE_LOCKS_REQUIRED(capture_lock_); + + rtc::PlatformThread _captureThread RTC_GUARDED_BY(api_checker_); + Mutex capture_lock_ RTC_ACQUIRED_BEFORE(api_lock_); + bool quit_ RTC_GUARDED_BY(capture_lock_); + int32_t _deviceId RTC_GUARDED_BY(api_checker_); + int32_t _deviceFd; + + int32_t _buffersAllocatedByDevice 
RTC_GUARDED_BY(capture_lock_); + VideoCaptureCapability configured_capability_; + bool _captureStarted; + struct Buffer { + void* start; + size_t length; + }; + Buffer* _pool RTC_GUARDED_BY(capture_lock_); +}; +} // namespace videocapturemodule +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_ diff --git a/third_party/libwebrtc/modules/video_capture/raw_video_sink_interface.h b/third_party/libwebrtc/modules/video_capture/raw_video_sink_interface.h new file mode 100644 index 0000000000..094e9e20bd --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/raw_video_sink_interface.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains interfaces used for creating the VideoCaptureModule +// and DeviceInfo. 
+ +#ifndef MODULES_VIDEO_CAPTURE_RAW_VIDEO_SINK_INTERFACE_H_ +#define MODULES_VIDEO_CAPTURE_RAW_VIDEO_SINK_INTERFACE_H_ + +#include "modules/video_capture/video_capture_defines.h" + +namespace webrtc { + +class RawVideoSinkInterface { + public: + virtual ~RawVideoSinkInterface() = default; + + virtual int32_t OnRawFrame(uint8_t* videoFrame, + size_t videoFrameLength, + const webrtc::VideoCaptureCapability& frameInfo, + VideoRotation rotation, + int64_t captureTime) = 0; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_RAW_VIDEO_SINK_INTERFACE_H_ diff --git a/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc b/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc new file mode 100644 index 0000000000..c8af222b57 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc @@ -0,0 +1,376 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_capture/video_capture.h" + +#include + +#include +#include +#include + +#include "absl/memory/memory.h" +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_capture/video_capture_factory.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/sleep.h" +#include "test/frame_utils.h" +#include "test/gtest.h" + +using webrtc::SleepMs; +using webrtc::VideoCaptureCapability; +using webrtc::VideoCaptureFactory; +using webrtc::VideoCaptureModule; + +#define WAIT_(ex, timeout, res) \ + do { \ + res = (ex); \ + int64_t start = rtc::TimeMillis(); \ + while (!res && rtc::TimeMillis() < start + timeout) { \ + SleepMs(5); \ + res = (ex); \ + } \ + } while (0) + +#define EXPECT_TRUE_WAIT(ex, timeout) \ + do { \ + bool res; \ + WAIT_(ex, timeout, res); \ + if (!res) \ + EXPECT_TRUE(ex); \ + } while (0) + +static const int kTimeOut = 5000; +#ifdef WEBRTC_MAC +static const int kTestHeight = 288; +static const int kTestWidth = 352; +static const int kTestFramerate = 30; +#endif + +class TestVideoCaptureCallback + : public rtc::VideoSinkInterface { + public: + TestVideoCaptureCallback() + : last_render_time_ms_(0), + incoming_frames_(0), + timing_warnings_(0), + rotate_frame_(webrtc::kVideoRotation_0) {} + + ~TestVideoCaptureCallback() override { + if (timing_warnings_ > 0) + printf("No of timing warnings %d\n", timing_warnings_); + } + + void OnFrame(const webrtc::VideoFrame& videoFrame) override { + webrtc::MutexLock lock(&capture_lock_); + int height = videoFrame.height(); + int width = videoFrame.width(); +#if defined(WEBRTC_ANDROID) && WEBRTC_ANDROID + // Android camera frames may be rotated depending on test device + // orientation. 
+ EXPECT_TRUE(height == capability_.height || height == capability_.width); + EXPECT_TRUE(width == capability_.width || width == capability_.height); +#else + EXPECT_EQ(height, capability_.height); + EXPECT_EQ(width, capability_.width); + EXPECT_EQ(rotate_frame_, videoFrame.rotation()); +#endif + // RenderTimstamp should be the time now. + EXPECT_TRUE(videoFrame.render_time_ms() >= rtc::TimeMillis() - 30 && + videoFrame.render_time_ms() <= rtc::TimeMillis()); + + if ((videoFrame.render_time_ms() > + last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS && + last_render_time_ms_ > 0) || + (videoFrame.render_time_ms() < + last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS && + last_render_time_ms_ > 0)) { + timing_warnings_++; + } + + incoming_frames_++; + last_render_time_ms_ = videoFrame.render_time_ms(); + last_frame_ = videoFrame.video_frame_buffer(); + } + + void SetExpectedCapability(VideoCaptureCapability capability) { + webrtc::MutexLock lock(&capture_lock_); + capability_ = capability; + incoming_frames_ = 0; + last_render_time_ms_ = 0; + } + int incoming_frames() { + webrtc::MutexLock lock(&capture_lock_); + return incoming_frames_; + } + + int timing_warnings() { + webrtc::MutexLock lock(&capture_lock_); + return timing_warnings_; + } + VideoCaptureCapability capability() { + webrtc::MutexLock lock(&capture_lock_); + return capability_; + } + + bool CompareLastFrame(const webrtc::VideoFrame& frame) { + webrtc::MutexLock lock(&capture_lock_); + return webrtc::test::FrameBufsEqual(last_frame_, + frame.video_frame_buffer()); + } + + void SetExpectedCaptureRotation(webrtc::VideoRotation rotation) { + webrtc::MutexLock lock(&capture_lock_); + rotate_frame_ = rotation; + } + + private: + webrtc::Mutex capture_lock_; + VideoCaptureCapability capability_; + int64_t last_render_time_ms_; + int incoming_frames_; + int timing_warnings_; + rtc::scoped_refptr last_frame_; + webrtc::VideoRotation rotate_frame_; +}; + +class VideoCaptureTest : public 
::testing::Test { + public: + VideoCaptureTest() : number_of_devices_(0) {} + + void SetUp() override { + device_info_.reset(VideoCaptureFactory::CreateDeviceInfo()); + RTC_DCHECK(device_info_.get()); + number_of_devices_ = device_info_->NumberOfDevices(); + ASSERT_GT(number_of_devices_, 0u); + } + + rtc::scoped_refptr OpenVideoCaptureDevice( + unsigned int device, + rtc::VideoSinkInterface* callback) { + char device_name[256]; + char unique_name[256]; + + EXPECT_EQ(0, device_info_->GetDeviceName(device, device_name, 256, + unique_name, 256)); + + rtc::scoped_refptr module( + VideoCaptureFactory::Create(unique_name)); + if (module.get() == NULL) + return nullptr; + + EXPECT_FALSE(module->CaptureStarted()); + + module->RegisterCaptureDataCallback(callback); + return module; + } + + void StartCapture(VideoCaptureModule* capture_module, + VideoCaptureCapability capability) { + ASSERT_EQ(0, capture_module->StartCapture(capability)); + EXPECT_TRUE(capture_module->CaptureStarted()); + + VideoCaptureCapability resulting_capability; + EXPECT_EQ(0, capture_module->CaptureSettings(resulting_capability)); + EXPECT_EQ(capability.width, resulting_capability.width); + EXPECT_EQ(capability.height, resulting_capability.height); + } + + std::unique_ptr device_info_; + unsigned int number_of_devices_; +}; + +#ifdef WEBRTC_MAC +// Currently fails on Mac 64-bit, see +// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406 +#define MAYBE_CreateDelete DISABLED_CreateDelete +#else +#define MAYBE_CreateDelete CreateDelete +#endif +TEST_F(VideoCaptureTest, MAYBE_CreateDelete) { + for (int i = 0; i < 5; ++i) { + int64_t start_time = rtc::TimeMillis(); + TestVideoCaptureCallback capture_observer; + rtc::scoped_refptr module( + OpenVideoCaptureDevice(0, &capture_observer)); + ASSERT_TRUE(module.get() != NULL); + + VideoCaptureCapability capability; +#ifndef WEBRTC_MAC + device_info_->GetCapability(module->CurrentDeviceName(), 0, capability); +#else + capability.width = kTestWidth; + 
capability.height = kTestHeight; + capability.maxFPS = kTestFramerate; + capability.videoType = webrtc::VideoType::kUnknown; +#endif + capture_observer.SetExpectedCapability(capability); + ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability)); + + // Less than 4s to start the camera. + EXPECT_LE(rtc::TimeMillis() - start_time, 4000); + + // Make sure 5 frames are captured. + EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut); + + int64_t stop_time = rtc::TimeMillis(); + EXPECT_EQ(0, module->StopCapture()); + EXPECT_FALSE(module->CaptureStarted()); + + // Less than 3s to stop the camera. + EXPECT_LE(rtc::TimeMillis() - stop_time, 3000); + } +} + +#ifdef WEBRTC_MAC +// Currently fails on Mac 64-bit, see +// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406 +#define MAYBE_Capabilities DISABLED_Capabilities +#else +#define MAYBE_Capabilities Capabilities +#endif +TEST_F(VideoCaptureTest, MAYBE_Capabilities) { + TestVideoCaptureCallback capture_observer; + + rtc::scoped_refptr module( + OpenVideoCaptureDevice(0, &capture_observer)); + ASSERT_TRUE(module.get() != NULL); + + int number_of_capabilities = + device_info_->NumberOfCapabilities(module->CurrentDeviceName()); + EXPECT_GT(number_of_capabilities, 0); + // Key is x, value is vector of maxFPS values at that + // resolution. 
+ typedef std::map > FrameRatesByResolution; + FrameRatesByResolution frame_rates_by_resolution; + for (int i = 0; i < number_of_capabilities; ++i) { + VideoCaptureCapability capability; + EXPECT_EQ(0, device_info_->GetCapability(module->CurrentDeviceName(), i, + capability)); + std::ostringstream resolutionStream; + resolutionStream << capability.width << "x" << capability.height; + resolutionStream.flush(); + std::string resolution = resolutionStream.str(); + frame_rates_by_resolution[resolution].push_back(capability.maxFPS); + + // Since Android presents so many resolution/FPS combinations and the test + // runner imposes a timeout, we only actually start the capture and test + // that a frame was captured for 2 frame-rates at each resolution. + if (frame_rates_by_resolution[resolution].size() > 2) + continue; + + capture_observer.SetExpectedCapability(capability); + ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability)); + // Make sure at least one frame is captured. + EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 1, kTimeOut); + + EXPECT_EQ(0, module->StopCapture()); + } + +#if defined(WEBRTC_ANDROID) && WEBRTC_ANDROID + // There's no reason for this to _necessarily_ be true, but in practice all + // Android devices this test runs on in fact do support multiple capture + // resolutions and multiple frame-rates per captured resolution, so we assert + // this fact here as a regression-test against the time that we only noticed a + // single frame-rate per resolution (bug 2974). If this test starts being run + // on devices for which this is untrue (e.g. Nexus4) then the following should + // probably be wrapped in a base::android::BuildInfo::model()/device() check. 
+ EXPECT_GT(frame_rates_by_resolution.size(), 1U); + for (FrameRatesByResolution::const_iterator it = + frame_rates_by_resolution.begin(); + it != frame_rates_by_resolution.end(); ++it) { + EXPECT_GT(it->second.size(), 1U) << it->first; + } +#endif // WEBRTC_ANDROID +} + +// NOTE: flaky, crashes sometimes. +// http://code.google.com/p/webrtc/issues/detail?id=777 +TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) { + if (number_of_devices_ < 2) { + printf("There are not two cameras available. Aborting test. \n"); + return; + } + + TestVideoCaptureCallback capture_observer1; + rtc::scoped_refptr module1( + OpenVideoCaptureDevice(0, &capture_observer1)); + ASSERT_TRUE(module1.get() != NULL); + VideoCaptureCapability capability1; +#ifndef WEBRTC_MAC + device_info_->GetCapability(module1->CurrentDeviceName(), 0, capability1); +#else + capability1.width = kTestWidth; + capability1.height = kTestHeight; + capability1.maxFPS = kTestFramerate; + capability1.videoType = webrtc::VideoType::kUnknown; +#endif + capture_observer1.SetExpectedCapability(capability1); + + TestVideoCaptureCallback capture_observer2; + rtc::scoped_refptr module2( + OpenVideoCaptureDevice(1, &capture_observer2)); + ASSERT_TRUE(module1.get() != NULL); + + VideoCaptureCapability capability2; +#ifndef WEBRTC_MAC + device_info_->GetCapability(module2->CurrentDeviceName(), 0, capability2); +#else + capability2.width = kTestWidth; + capability2.height = kTestHeight; + capability2.maxFPS = kTestFramerate; + capability2.videoType = webrtc::VideoType::kUnknown; +#endif + capture_observer2.SetExpectedCapability(capability2); + + ASSERT_NO_FATAL_FAILURE(StartCapture(module1.get(), capability1)); + ASSERT_NO_FATAL_FAILURE(StartCapture(module2.get(), capability2)); + EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut); + EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut); + EXPECT_EQ(0, module2->StopCapture()); + EXPECT_EQ(0, module1->StopCapture()); +} + +#ifdef WEBRTC_MAC +// 
No VideoCaptureImpl on Mac. +#define MAYBE_ConcurrentAccess DISABLED_ConcurrentAccess +#else +#define MAYBE_ConcurrentAccess ConcurrentAccess +#endif +TEST_F(VideoCaptureTest, MAYBE_ConcurrentAccess) { + TestVideoCaptureCallback capture_observer1; + rtc::scoped_refptr module1( + OpenVideoCaptureDevice(0, &capture_observer1)); + ASSERT_TRUE(module1.get() != NULL); + VideoCaptureCapability capability; + device_info_->GetCapability(module1->CurrentDeviceName(), 0, capability); + capture_observer1.SetExpectedCapability(capability); + + TestVideoCaptureCallback capture_observer2; + rtc::scoped_refptr module2( + OpenVideoCaptureDevice(0, &capture_observer2)); + ASSERT_TRUE(module2.get() != NULL); + capture_observer2.SetExpectedCapability(capability); + + // Starting module1 should work. + ASSERT_NO_FATAL_FAILURE(StartCapture(module1.get(), capability)); + EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut); + + // When module1 is stopped, starting module2 for the same device should work. + EXPECT_EQ(0, module1->StopCapture()); + ASSERT_NO_FATAL_FAILURE(StartCapture(module2.get(), capability)); + EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut); + + EXPECT_EQ(0, module2->StopCapture()); +} diff --git a/third_party/libwebrtc/modules/video_capture/video_capture.h b/third_party/libwebrtc/modules/video_capture/video_capture.h new file mode 100644 index 0000000000..378a53b4d2 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture.h @@ -0,0 +1,166 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_ +#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_ + +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" +#include "modules/desktop_capture/desktop_capture_types.h" +#include "modules/video_capture/raw_video_sink_interface.h" +#include "modules/video_capture/video_capture_defines.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include + +#if defined(ANDROID) +#include +#endif + +namespace webrtc { + +class VideoInputFeedBack +{ +public: + virtual void OnDeviceChange() = 0; +protected: + virtual ~VideoInputFeedBack(){} +}; + +class VideoCaptureModule : public rtc::RefCountInterface { + public: + // Interface for receiving information about available camera devices. + class DeviceInfo { + public: + virtual uint32_t NumberOfDevices() = 0; + virtual int32_t Refresh() = 0; + virtual void DeviceChange() { + MutexLock lock(&_inputCallbacksMutex); + for (auto inputCallBack : _inputCallBacks) { + inputCallBack->OnDeviceChange(); + } + } + virtual void RegisterVideoInputFeedBack(VideoInputFeedBack* callBack) { + MutexLock lock(&_inputCallbacksMutex); + _inputCallBacks.insert(callBack); + } + + virtual void DeRegisterVideoInputFeedBack(VideoInputFeedBack* callBack) { + MutexLock lock(&_inputCallbacksMutex); + auto it = _inputCallBacks.find(callBack); + if (it != _inputCallBacks.end()) { + _inputCallBacks.erase(it); + } + } + + // Returns the available capture devices. + // deviceNumber - Index of capture device. + // deviceNameUTF8 - Friendly name of the capture device. + // deviceUniqueIdUTF8 - Unique name of the capture device if it exist. + // Otherwise same as deviceNameUTF8. + // productUniqueIdUTF8 - Unique product id if it exist. + // Null terminated otherwise. 
+ virtual int32_t GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8 = 0, + uint32_t productUniqueIdUTF8Length = 0, + pid_t* pid = 0, + bool* deviceIsPlaceholder = 0) = 0; + + // Returns the number of capabilities this device. + virtual int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8) = 0; + + // Gets the capabilities of the named device. + virtual int32_t GetCapability(const char* deviceUniqueIdUTF8, + uint32_t deviceCapabilityNumber, + VideoCaptureCapability& capability) = 0; + + // Gets clockwise angle the captured frames should be rotated in order + // to be displayed correctly on a normally rotated display. + virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8, + VideoRotation& orientation) = 0; + + // Gets the capability that best matches the requested width, height and + // frame rate. + // Returns the deviceCapabilityNumber on success. 
+ virtual int32_t GetBestMatchedCapability( + const char* deviceUniqueIdUTF8, + const VideoCaptureCapability& requested, + VideoCaptureCapability& resulting) = 0; + + // Display OS /capture device specific settings dialog + virtual int32_t DisplayCaptureSettingsDialogBox( + const char* deviceUniqueIdUTF8, + const char* dialogTitleUTF8, + void* parentWindow, + uint32_t positionX, + uint32_t positionY) = 0; + + virtual ~DeviceInfo() {} + private: + Mutex _inputCallbacksMutex; + std::set _inputCallBacks RTC_GUARDED_BY(_inputCallbacksMutex); + }; + + // Register capture data callback + virtual void RegisterCaptureDataCallback( + rtc::VideoSinkInterface* dataCallback) = 0; + virtual void RegisterCaptureDataCallback( + RawVideoSinkInterface* dataCallback) = 0; + + // Remove capture data callback + virtual void DeRegisterCaptureDataCallback( + rtc::VideoSinkInterface *dataCallback) = 0; + + // Start capture device + virtual int32_t StartCapture(const VideoCaptureCapability& capability) = 0; + + virtual int32_t StopCaptureIfAllClientsClose() = 0; + + virtual bool FocusOnSelectedSource() { return false; } + + virtual int32_t StopCapture() = 0; + + // Returns the name of the device used by this module. + virtual const char* CurrentDeviceName() const = 0; + + // Returns true if the capture device is running + virtual bool CaptureStarted() = 0; + + // Gets the current configuration. + virtual int32_t CaptureSettings(VideoCaptureCapability& settings) = 0; + + // Set the rotation of the captured frames. + // If the rotation is set to the same as returned by + // DeviceInfo::GetOrientation the captured frames are + // displayed correctly if rendered. + virtual int32_t SetCaptureRotation(VideoRotation rotation) = 0; + + // Tells the capture module whether to apply the pending rotation. By default, + // the rotation is applied and the generated frame is up right. When set to + // false, generated frames will carry the rotation information from + // SetCaptureRotation. 
Return value indicates whether this operation succeeds. + virtual bool SetApplyRotation(bool enable) = 0; + + // Return whether the rotation is applied or left pending. + virtual bool GetApplyRotation() = 0; + + // Mozilla: TrackingId setter for use in profiler markers. + virtual void SetTrackingId(uint32_t aTrackingIdProcId) {} + + protected: + ~VideoCaptureModule() override {} +}; + +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_ diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_config.h b/third_party/libwebrtc/modules/video_capture/video_capture_config.h new file mode 100644 index 0000000000..f285b9eeb1 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_config.h @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_ +#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_ + +namespace webrtc { +namespace videocapturemodule { +enum { kDefaultWidth = 640 }; // Start width +enum { kDefaultHeight = 480 }; // Start heigt +enum { kDefaultFrameRate = 30 }; // Start frame rate + +enum { kMaxFrameRate = 60 }; // Max allowed frame rate of the start image + +enum { kDefaultCaptureDelay = 120 }; +enum { + kMaxCaptureDelay = 270 +}; // Max capture delay allowed in the precompiled capture delay values. 
+ +enum { kFrameRateCallbackInterval = 1000 }; +enum { kFrameRateCountHistorySize = 90 }; +enum { kFrameRateHistoryWindowMs = 2000 }; +} // namespace videocapturemodule +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_ diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_defines.h b/third_party/libwebrtc/modules/video_capture/video_capture_defines.h new file mode 100644 index 0000000000..63534600a9 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_defines.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_ +#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_ + +#include "api/video/video_frame.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" + +namespace webrtc { + +enum { + kVideoCaptureUniqueNameLength = 1024 +}; // Max unique capture device name lenght +enum { kVideoCaptureDeviceNameLength = 256 }; // Max capture device name lenght +enum { kVideoCaptureProductIdLength = 128 }; // Max product id length + +struct VideoCaptureCapability { + int32_t width; + int32_t height; + int32_t maxFPS; + VideoType videoType; + bool interlaced; + + VideoCaptureCapability() { + width = 0; + height = 0; + maxFPS = 0; + videoType = VideoType::kUnknown; + interlaced = false; + } + bool operator!=(const VideoCaptureCapability& other) const { + if (width != other.width) + return true; + if (height != other.height) + return true; + if (maxFPS != other.maxFPS) + return true; + if (videoType != other.videoType) + return true; + if (interlaced != 
other.interlaced) + return true; + return false; + } + bool operator==(const VideoCaptureCapability& other) const { + return !operator!=(other); + } +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_ diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_factory.cc b/third_party/libwebrtc/modules/video_capture/video_capture_factory.cc new file mode 100644 index 0000000000..2790fbbe1c --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_factory.cc @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/video_capture_factory.h" + +#include "modules/video_capture/video_capture_impl.h" + +namespace webrtc { + +rtc::scoped_refptr VideoCaptureFactory::Create( + const char* deviceUniqueIdUTF8) { + return videocapturemodule::VideoCaptureImpl::Create(deviceUniqueIdUTF8); +} + +rtc::scoped_refptr VideoCaptureFactory::Create( + VideoCaptureOptions* options, + const char* deviceUniqueIdUTF8) { +// This is only implemented on pure Linux and WEBRTC_LINUX is defined for +// Android as well +#if (!defined(WEBRTC_LINUX) && !defined(WEBRTC_BSD)) || defined(WEBRTC_ANDROID) + return nullptr; +#else + return videocapturemodule::VideoCaptureImpl::Create(options, + deviceUniqueIdUTF8); +#endif +} + +VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo() { + return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(); +} + +VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo( + VideoCaptureOptions* options) { +// This is only implemented on pure Linux and WEBRTC_LINUX 
is defined for +// Android as well +#if (!defined(WEBRTC_LINUX) && !defined(WEBRTC_BSD)) || defined(WEBRTC_ANDROID) + return nullptr; +#else + return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(options); +#endif +} + +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_factory.h b/third_party/libwebrtc/modules/video_capture/video_capture_factory.h new file mode 100644 index 0000000000..62b4067c3a --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_factory.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains interfaces used for creating the VideoCaptureModule +// and DeviceInfo. + +#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_ +#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_ + +#include "api/scoped_refptr.h" +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_defines.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class VideoCaptureOptions; + +class RTC_EXPORT VideoCaptureFactory { + public: + // Create a video capture module object + // id - unique identifier of this video capture module object. + // deviceUniqueIdUTF8 - name of the device. 
+ // Available names can be found by using GetDeviceName + static rtc::scoped_refptr Create( + const char* deviceUniqueIdUTF8); + static rtc::scoped_refptr Create( + VideoCaptureOptions* options, + const char* deviceUniqueIdUTF8); + + static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(); + static VideoCaptureModule::DeviceInfo* CreateDeviceInfo( + VideoCaptureOptions* options); + + private: + ~VideoCaptureFactory(); +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_ diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_factory_null.cc b/third_party/libwebrtc/modules/video_capture/video_capture_factory_null.cc new file mode 100644 index 0000000000..7808d19851 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_factory_null.cc @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/video_capture_impl.h" + +namespace webrtc { +namespace videocapturemodule { + +// static +VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() { + return nullptr; +} + +rtc::scoped_refptr VideoCaptureImpl::Create( + const char* device_id) { + return nullptr; +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_impl.cc b/third_party/libwebrtc/modules/video_capture/video_capture_impl.cc new file mode 100644 index 0000000000..15dfb7fe1f --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_impl.cc @@ -0,0 +1,345 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/video_capture_impl.h" + +#include +#include + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_capture/video_capture_config.h" +#include "rtc_base/logging.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" +#include "third_party/libyuv/include/libyuv.h" + +namespace webrtc { +namespace videocapturemodule { + +const char* VideoCaptureImpl::CurrentDeviceName() const { + RTC_DCHECK_RUN_ON(&api_checker_); + return _deviceUniqueId; +} + +// static +int32_t VideoCaptureImpl::RotationFromDegrees(int degrees, + VideoRotation* rotation) { + switch (degrees) { + case 0: + *rotation = kVideoRotation_0; + return 0; + case 90: + *rotation = kVideoRotation_90; + return 0; + case 180: + *rotation = kVideoRotation_180; + return 0; + case 270: + *rotation = kVideoRotation_270; + return 0; + default: + return -1; + ; + } +} + +// static +int32_t VideoCaptureImpl::RotationInDegrees(VideoRotation rotation, + int* degrees) { + switch (rotation) { + case kVideoRotation_0: + *degrees = 0; + return 0; + case kVideoRotation_90: + *degrees = 90; + return 0; + case kVideoRotation_180: + *degrees = 180; + return 0; + case kVideoRotation_270: + *degrees = 270; + return 0; + } + return -1; +} + +VideoCaptureImpl::VideoCaptureImpl() + : _deviceUniqueId(NULL), + _requestedCapability(), + _lastProcessTimeNanos(rtc::TimeNanos()), + _lastFrameRateCallbackTimeNanos(rtc::TimeNanos()), + _rawDataCallBack(NULL), + _lastProcessFrameTimeNanos(rtc::TimeNanos()), + 
_rotateFrame(kVideoRotation_0), + apply_rotation_(false) { + _requestedCapability.width = kDefaultWidth; + _requestedCapability.height = kDefaultHeight; + _requestedCapability.maxFPS = 30; + _requestedCapability.videoType = VideoType::kI420; + memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos)); +} + +VideoCaptureImpl::~VideoCaptureImpl() { + RTC_DCHECK_RUN_ON(&api_checker_); + if (_deviceUniqueId) + delete[] _deviceUniqueId; +} + +void VideoCaptureImpl::RegisterCaptureDataCallback( + rtc::VideoSinkInterface* dataCallBack) { + MutexLock lock(&api_lock_); + RTC_DCHECK(!_rawDataCallBack); + _dataCallBacks.insert(dataCallBack); +} + +void VideoCaptureImpl::RegisterCaptureDataCallback( + RawVideoSinkInterface* dataCallBack) { + MutexLock lock(&api_lock_); + RTC_DCHECK(_dataCallBacks.empty()); + _rawDataCallBack = dataCallBack; +} + +void VideoCaptureImpl::DeRegisterCaptureDataCallback( + rtc::VideoSinkInterface* dataCallBack) { + MutexLock lock(&api_lock_); + auto it = _dataCallBacks.find(dataCallBack); + if (it != _dataCallBacks.end()) { + _dataCallBacks.erase(it); + } + _rawDataCallBack = NULL; +} + +int32_t VideoCaptureImpl::StopCaptureIfAllClientsClose() { + RTC_DCHECK_RUN_ON(&api_checker_); + { + MutexLock lock(&api_lock_); + if (!_dataCallBacks.empty()) { + return 0; + } + } + return StopCapture(); +} + +int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + + UpdateFrameCount(); // frame count used for local frame rate callback. 
+ + for (auto* dataCallBack : _dataCallBacks) { + dataCallBack->OnFrame(captureFrame); + } + + return 0; +} + +void VideoCaptureImpl::DeliverRawFrame(uint8_t* videoFrame, + size_t videoFrameLength, + const VideoCaptureCapability& frameInfo, + int64_t captureTime) { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + + UpdateFrameCount(); + _rawDataCallBack->OnRawFrame(videoFrame, videoFrameLength, frameInfo, + _rotateFrame, captureTime); +} + +int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, + size_t videoFrameLength, + const VideoCaptureCapability& frameInfo, + int64_t captureTime /*=0*/) { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + MutexLock lock(&api_lock_); + + const int32_t width = frameInfo.width; + const int32_t height = frameInfo.height; + + TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime); + + if (_rawDataCallBack) { + DeliverRawFrame(videoFrame, videoFrameLength, frameInfo, captureTime); + return 0; + } + + // Not encoded, convert to I420. + if (frameInfo.videoType != VideoType::kMJPEG) { + // Allow buffers larger than expected. On linux gstreamer allocates buffers + // page-aligned and v4l2loopback passes us the buffer size verbatim which + // for most cases is larger than expected. + // See https://github.com/umlaeute/v4l2loopback/issues/190. + if (auto size = CalcBufferSize(frameInfo.videoType, width, abs(height)); + videoFrameLength < size) { + RTC_LOG(LS_ERROR) << "Wrong incoming frame length. Expected " << size + << ", Got " << videoFrameLength << "."; + return -1; + } + } + + int target_width = width; + int target_height = abs(height); + + if (apply_rotation_) { + // Rotating resolution when for 90/270 degree rotations. + if (_rotateFrame == kVideoRotation_90 || + _rotateFrame == kVideoRotation_270) { + target_width = abs(height); + target_height = width; + } + } + + int stride_y = target_width; + int stride_uv = (target_width + 1) / 2; + + // Setting absolute height (in case it was negative). 
+ // In Windows, the image starts bottom left, instead of top left. + // Setting a negative source height, inverts the image (within LibYuv). + rtc::scoped_refptr buffer = I420Buffer::Create( + target_width, target_height, stride_y, stride_uv, stride_uv); + + libyuv::RotationMode rotation_mode = libyuv::kRotate0; + if (apply_rotation_) { + switch (_rotateFrame) { + case kVideoRotation_0: + rotation_mode = libyuv::kRotate0; + break; + case kVideoRotation_90: + rotation_mode = libyuv::kRotate90; + break; + case kVideoRotation_180: + rotation_mode = libyuv::kRotate180; + break; + case kVideoRotation_270: + rotation_mode = libyuv::kRotate270; + break; + } + } + + int dst_width = buffer->width(); + int dst_height = buffer->height(); + + // LibYuv expects pre-rotation_mode values for dst. + // Stride values should correspond to the destination values. + if (rotation_mode == libyuv::kRotate90 || rotation_mode == libyuv::kRotate270) { + std::swap(dst_width, dst_height); + } + + const int conversionResult = libyuv::ConvertToI420( + videoFrame, videoFrameLength, buffer.get()->MutableDataY(), + buffer.get()->StrideY(), buffer.get()->MutableDataU(), + buffer.get()->StrideU(), buffer.get()->MutableDataV(), + buffer.get()->StrideV(), 0, 0, // No Cropping + width, height, dst_width, dst_height, rotation_mode, + ConvertVideoType(frameInfo.videoType)); + if (conversionResult != 0) { + RTC_LOG(LS_ERROR) << "Failed to convert capture frame from type " + << static_cast(frameInfo.videoType) << "to I420."; + return -1; + } + + VideoFrame captureFrame = + VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(0) + .set_timestamp_ms(rtc::TimeMillis()) + .set_rotation(!apply_rotation_ ? _rotateFrame : kVideoRotation_0) + .build(); + captureFrame.set_ntp_time_ms(captureTime); + + // This is one ugly hack to let CamerasParent know what rotation + // the frame was captured at. 
Note that this goes against the intended + // meaning of rotation of the frame (how to rotate it before rendering). + // We do this so CamerasChild can scale to the proper dimensions + // later on in the pipe. + captureFrame.set_rotation(_rotateFrame); + + DeliverCapturedFrame(captureFrame); + + return 0; +} + +int32_t VideoCaptureImpl::StartCapture( + const VideoCaptureCapability& capability) { + RTC_DCHECK_RUN_ON(&api_checker_); + _requestedCapability = capability; + return -1; +} + +int32_t VideoCaptureImpl::StopCapture() { + return -1; +} + +bool VideoCaptureImpl::CaptureStarted() { + return false; +} + +int32_t VideoCaptureImpl::CaptureSettings( + VideoCaptureCapability& /*settings*/) { + return -1; +} + +int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) { + MutexLock lock(&api_lock_); + _rotateFrame = rotation; + return 0; +} + +bool VideoCaptureImpl::SetApplyRotation(bool enable) { + MutexLock lock(&api_lock_); + apply_rotation_ = enable; + return true; +} + +bool VideoCaptureImpl::GetApplyRotation() { + MutexLock lock(&api_lock_); + return apply_rotation_; +} + +void VideoCaptureImpl::UpdateFrameCount() { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + + if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) { + // first no shift + } else { + // shift + for (int i = (kFrameRateCountHistorySize - 2); i >= 0; --i) { + _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i]; + } + } + _incomingFrameTimesNanos[0] = rtc::TimeNanos(); +} + +uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + + int32_t num = 0; + int32_t nrOfFrames = 0; + for (num = 1; num < (kFrameRateCountHistorySize - 1); ++num) { + if (_incomingFrameTimesNanos[num] <= 0 || + (now_ns - _incomingFrameTimesNanos[num]) / + rtc::kNumNanosecsPerMillisec > + kFrameRateHistoryWindowMs) { // don't use data older than 2sec + break; + } else { + nrOfFrames++; + } + } + if (num > 1) { + int64_t diff = 
(now_ns - _incomingFrameTimesNanos[num - 1]) / + rtc::kNumNanosecsPerMillisec; + if (diff > 0) { + return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f); + } + } + + return nrOfFrames; +} +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_impl.h b/third_party/libwebrtc/modules/video_capture/video_capture_impl.h new file mode 100644 index 0000000000..dcc93fd3a0 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_impl.h @@ -0,0 +1,137 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_ +#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_ + +/* + * video_capture_impl.h + */ + +#include +#include + +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/video/video_frame.h" +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_config.h" +#include "modules/video_capture/video_capture_defines.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class VideoCaptureOptions; + +namespace videocapturemodule { +// Class definitions +class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { + public: + /* + * Create a video capture module object + * + * id - unique identifier of this video capture module object + * deviceUniqueIdUTF8 - name of the device. 
Available names can be found by + * using GetDeviceName + */ + static rtc::scoped_refptr Create( + const char* deviceUniqueIdUTF8); + static rtc::scoped_refptr Create( + VideoCaptureOptions* options, + const char* deviceUniqueIdUTF8); + + static DeviceInfo* CreateDeviceInfo(); + static DeviceInfo* CreateDeviceInfo(VideoCaptureOptions* options); + + // Helpers for converting between (integral) degrees and + // VideoRotation values. Return 0 on success. + static int32_t RotationFromDegrees(int degrees, VideoRotation* rotation); + static int32_t RotationInDegrees(VideoRotation rotation, int* degrees); + + // Call backs + void RegisterCaptureDataCallback( + rtc::VideoSinkInterface* dataCallback) override; + virtual void RegisterCaptureDataCallback( + RawVideoSinkInterface* dataCallback) override; + void DeRegisterCaptureDataCallback( + rtc::VideoSinkInterface* dataCallback) override; + + int32_t StopCaptureIfAllClientsClose() override; + int32_t SetCaptureRotation(VideoRotation rotation) override; + bool SetApplyRotation(bool enable) override; + bool GetApplyRotation() override; + + const char* CurrentDeviceName() const override; + + // `capture_time` must be specified in NTP time format in milliseconds. + int32_t IncomingFrame(uint8_t* videoFrame, + size_t videoFrameLength, + const VideoCaptureCapability& frameInfo, + int64_t captureTime = 0); + + // Platform dependent + int32_t StartCapture(const VideoCaptureCapability& capability) override; + int32_t StopCapture() override; + bool CaptureStarted() override; + int32_t CaptureSettings(VideoCaptureCapability& /*settings*/) override; + + protected: + VideoCaptureImpl(); + ~VideoCaptureImpl() override; + + // Calls to the public API must happen on a single thread. + SequenceChecker api_checker_; + // RaceChecker for members that can be accessed on the API thread while + // capture is not happening, and on a callback thread otherwise. 
+ rtc::RaceChecker capture_checker_; + // current Device unique name; + char* _deviceUniqueId RTC_GUARDED_BY(api_checker_); + + // moved DeliverCapturedFrame to protected for VideoCaptureAndroid (mjf) + int32_t DeliverCapturedFrame(VideoFrame& captureFrame) + RTC_EXCLUSIVE_LOCKS_REQUIRED(api_lock_); + Mutex api_lock_; + // Should be set by platform dependent code in StartCapture. + VideoCaptureCapability _requestedCapability RTC_GUARDED_BY(api_checker_); + + private: + void UpdateFrameCount(); + uint32_t CalculateFrameRate(int64_t now_ns); + void DeliverRawFrame(uint8_t* videoFrame, + size_t videoFrameLength, + const VideoCaptureCapability& frameInfo, + int64_t captureTime) + RTC_EXCLUSIVE_LOCKS_REQUIRED(api_lock_); + + // last time the module process function was called. + int64_t _lastProcessTimeNanos RTC_GUARDED_BY(capture_checker_); + // last time the frame rate callback function was called. + int64_t _lastFrameRateCallbackTimeNanos RTC_GUARDED_BY(capture_checker_); + + std::set*> _dataCallBacks RTC_GUARDED_BY(api_lock_); + RawVideoSinkInterface* _rawDataCallBack RTC_GUARDED_BY(api_lock_); + + int64_t _lastProcessFrameTimeNanos RTC_GUARDED_BY(capture_checker_); + // timestamp for local captured frames + int64_t _incomingFrameTimesNanos[kFrameRateCountHistorySize] RTC_GUARDED_BY( + capture_checker_); + // Set if the frame should be rotated by the capture module. + VideoRotation _rotateFrame RTC_GUARDED_BY(api_lock_); + + // Indicate whether rotation should be applied before delivered externally. 
+ bool apply_rotation_ RTC_GUARDED_BY(api_lock_); +}; +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_ diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build b/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build new file mode 100644 index 0000000000..24988a1ffc --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build @@ -0,0 +1,376 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + + ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### + ### DO NOT edit it by hand. ### +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + CXXFLAGS += CONFIG["MOZ_GTK3_CFLAGS"] + +COMPILE_FLAGS["OS_INCLUDES"] = [] +AllowCompilerWarnings() + +DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" +DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True +DEFINES["RTC_ENABLE_VP9"] = True +DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" +DEFINES["WEBRTC_LIBRARY_IMPL"] = True +DEFINES["WEBRTC_MOZILLA_BUILD"] = True +DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" +DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" + +FINAL_LIBRARY = "webrtc" + + +LOCAL_INCLUDES += [ + "!/ipc/ipdl/_ipdlheaders", + "!/third_party/libwebrtc/gen", + "/ipc/chromium/src", + "/third_party/libwebrtc/", + "/third_party/libwebrtc/third_party/abseil-cpp/", + "/tools/profiler/public" +] + +UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/video_capture_options.cc" +] + +if not CONFIG["MOZ_DEBUG"]: + + DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" + DEFINES["NDEBUG"] = True + DEFINES["NVALGRIND"] = True + +if CONFIG["MOZ_DEBUG"] == "1": + + DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" + +if CONFIG["OS_TARGET"] == "Android": + + DEFINES["ANDROID"] = True + 
DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" + DEFINES["HAVE_SYS_UIO_H"] = True + DEFINES["WEBRTC_ANDROID"] = True + DEFINES["WEBRTC_ANDROID_OPENSLES"] = True + DEFINES["WEBRTC_ENABLE_LIBEVENT"] = True + DEFINES["WEBRTC_LINUX"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_GNU_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + + OS_LIBS += [ + "GLESv2", + "log" + ] + +if CONFIG["OS_TARGET"] == "Darwin": + + DEFINES["WEBRTC_MAC"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True + DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + +if CONFIG["OS_TARGET"] == "Linux": + + DEFINES["USE_AURA"] = "1" + DEFINES["USE_GLIB"] = "1" + DEFINES["USE_NSS_CERTS"] = "1" + DEFINES["USE_OZONE"] = "1" + DEFINES["USE_UDEV"] = True + DEFINES["WEBRTC_ENABLE_LIBEVENT"] = True + DEFINES["WEBRTC_LINUX"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_FILE_OFFSET_BITS"] = "64" + DEFINES["_LARGEFILE64_SOURCE"] = True + DEFINES["_LARGEFILE_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + + OS_LIBS += [ + "rt" + ] + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc", + "/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc" + ] + +if CONFIG["OS_TARGET"] == "OpenBSD": + + DEFINES["USE_GLIB"] = "1" + DEFINES["USE_OZONE"] = "1" + DEFINES["USE_X11"] = "1" + DEFINES["WEBRTC_BSD"] = True + DEFINES["WEBRTC_ENABLE_LIBEVENT"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_FILE_OFFSET_BITS"] = "64" + DEFINES["_LARGEFILE64_SOURCE"] = True + DEFINES["_LARGEFILE_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + 
DEFINES["__STDC_FORMAT_MACROS"] = True + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc", + "/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc" + ] + +if CONFIG["OS_TARGET"] == "WINNT": + + DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True + DEFINES["NOMINMAX"] = True + DEFINES["NTDDI_VERSION"] = "0x0A000000" + DEFINES["PSAPI_VERSION"] = "2" + DEFINES["RTC_ENABLE_WIN_WGC"] = True + DEFINES["UNICODE"] = True + DEFINES["USE_AURA"] = "1" + DEFINES["WEBRTC_WIN"] = True + DEFINES["WIN32"] = True + DEFINES["WIN32_LEAN_AND_MEAN"] = True + DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP" + DEFINES["WINVER"] = "0x0A00" + DEFINES["_ATL_NO_OPENGL"] = True + DEFINES["_CRT_RAND_S"] = True + DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True + DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True + DEFINES["_HAS_EXCEPTIONS"] = "0" + DEFINES["_HAS_NODISCARD"] = True + DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True + DEFINES["_SECURE_ATL"] = True + DEFINES["_UNICODE"] = True + DEFINES["_WIN32_WINNT"] = "0x0A00" + DEFINES["_WINDOWS"] = True + DEFINES["__STD_C"] = True + + OS_LIBS += [ + "crypt32", + "iphlpapi", + "ole32", + "oleaut32", + "secur32", + "strmiids", + "user32", + "winmm" + ] + + SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc", + "/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc", + "/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc" + ] + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc", + "/third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc" + ] + +if CONFIG["TARGET_CPU"] == "aarch64": + + DEFINES["WEBRTC_ARCH_ARM64"] = True + DEFINES["WEBRTC_HAS_NEON"] = True + +if 
CONFIG["TARGET_CPU"] == "arm": + + CXXFLAGS += [ + "-mfpu=neon" + ] + + DEFINES["WEBRTC_ARCH_ARM"] = True + DEFINES["WEBRTC_ARCH_ARM_V7"] = True + DEFINES["WEBRTC_HAS_NEON"] = True + +if CONFIG["TARGET_CPU"] == "mips32": + + DEFINES["MIPS32_LE"] = True + DEFINES["MIPS_FPU_LE"] = True + DEFINES["WEBRTC_USE_PIPEWIRE"] = True + DEFINES["_GNU_SOURCE"] = True + + LOCAL_INCLUDES += [ + "/gfx/angle/checkout/include/", + "/third_party/drm/drm/", + "/third_party/drm/drm/include/", + "/third_party/drm/drm/include/libdrm/", + "/third_party/gbm/gbm/", + "/third_party/libepoxy/libepoxy/include/", + "/third_party/pipewire/" + ] + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc", + "/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc", + "/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc" + ] + +if CONFIG["TARGET_CPU"] == "mips64": + + DEFINES["WEBRTC_USE_PIPEWIRE"] = True + DEFINES["_GNU_SOURCE"] = True + + LOCAL_INCLUDES += [ + "/gfx/angle/checkout/include/", + "/third_party/drm/drm/", + "/third_party/drm/drm/include/", + "/third_party/drm/drm/include/libdrm/", + "/third_party/gbm/gbm/", + "/third_party/libepoxy/libepoxy/include/", + "/third_party/pipewire/" + ] + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc", + "/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc", + "/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc" + ] + +if CONFIG["TARGET_CPU"] == "x86": + + DEFINES["WEBRTC_ENABLE_AVX2"] = True + +if CONFIG["TARGET_CPU"] == "x86_64": + + DEFINES["WEBRTC_ENABLE_AVX2"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == 
"Darwin": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT": + + DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0" + +if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["USE_X11"] = "1" + +if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm": + + OS_LIBS += [ + "android_support", + "unwind" + ] + +if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86": + + CXXFLAGS += [ + "-msse2" + ] + + OS_LIBS += [ + "android_support" + ] + +if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64": + + DEFINES["WEBRTC_USE_PIPEWIRE"] = True + DEFINES["_GNU_SOURCE"] = True + + LOCAL_INCLUDES += [ + "/gfx/angle/checkout/include/", + "/third_party/drm/drm/", + "/third_party/drm/drm/include/", + "/third_party/drm/drm/include/libdrm/", + "/third_party/gbm/gbm/", + "/third_party/libepoxy/libepoxy/include/", + "/third_party/pipewire/" + ] + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc", + "/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc", + "/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc" + ] + +if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "arm": + + DEFINES["WEBRTC_USE_PIPEWIRE"] = True + DEFINES["_GNU_SOURCE"] = True + + LOCAL_INCLUDES += [ + "/gfx/angle/checkout/include/", + "/third_party/drm/drm/", + "/third_party/drm/drm/include/", + "/third_party/drm/drm/include/libdrm/", + "/third_party/gbm/gbm/", + "/third_party/libepoxy/libepoxy/include/", + "/third_party/pipewire/" + ] + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc", + 
"/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc", + "/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc" + ] + +if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "x86": + + CXXFLAGS += [ + "-msse2" + ] + + DEFINES["WEBRTC_USE_PIPEWIRE"] = True + DEFINES["_GNU_SOURCE"] = True + + LOCAL_INCLUDES += [ + "/gfx/angle/checkout/include/", + "/third_party/drm/drm/", + "/third_party/drm/drm/include/", + "/third_party/drm/drm/include/libdrm/", + "/third_party/gbm/gbm/", + "/third_party/libepoxy/libepoxy/include/", + "/third_party/pipewire/" + ] + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc", + "/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc", + "/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc" + ] + +if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "x86_64": + + DEFINES["WEBRTC_USE_PIPEWIRE"] = True + DEFINES["_GNU_SOURCE"] = True + + LOCAL_INCLUDES += [ + "/gfx/angle/checkout/include/", + "/third_party/drm/drm/", + "/third_party/drm/drm/include/", + "/third_party/drm/drm/include/libdrm/", + "/third_party/gbm/gbm/", + "/third_party/libepoxy/libepoxy/include/", + "/third_party/pipewire/" + ] + + UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc", + "/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc", + "/third_party/libwebrtc/modules/video_capture/linux/pipewire_session.cc", + "/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc" + ] + +Library("video_capture_internal_impl_gn") diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build b/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build new file 
mode 100644 index 0000000000..49c62d5cf6 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build @@ -0,0 +1,241 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + + ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### + ### DO NOT edit it by hand. ### + +COMPILE_FLAGS["OS_INCLUDES"] = [] +AllowCompilerWarnings() + +DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" +DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True +DEFINES["RTC_ENABLE_VP9"] = True +DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" +DEFINES["WEBRTC_LIBRARY_IMPL"] = True +DEFINES["WEBRTC_MOZILLA_BUILD"] = True +DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" +DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" + +FINAL_LIBRARY = "webrtc" + + +LOCAL_INCLUDES += [ + "!/ipc/ipdl/_ipdlheaders", + "!/third_party/libwebrtc/gen", + "/ipc/chromium/src", + "/media/libyuv/", + "/media/libyuv/libyuv/include/", + "/third_party/libwebrtc/", + "/third_party/libwebrtc/third_party/abseil-cpp/", + "/tools/profiler/public" +] + +UNIFIED_SOURCES += [ + "/third_party/libwebrtc/modules/video_capture/device_info_impl.cc", + "/third_party/libwebrtc/modules/video_capture/video_capture_factory.cc", + "/third_party/libwebrtc/modules/video_capture/video_capture_impl.cc" +] + +if not CONFIG["MOZ_DEBUG"]: + + DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" + DEFINES["NDEBUG"] = True + DEFINES["NVALGRIND"] = True + +if CONFIG["MOZ_DEBUG"] == "1": + + DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" + +if CONFIG["OS_TARGET"] == "Android": + + DEFINES["ANDROID"] = True + DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" + DEFINES["HAVE_SYS_UIO_H"] = True + DEFINES["WEBRTC_ANDROID"] = True + DEFINES["WEBRTC_ANDROID_OPENSLES"] = True + DEFINES["WEBRTC_ENABLE_LIBEVENT"] = True + DEFINES["WEBRTC_LINUX"] = True + DEFINES["WEBRTC_POSIX"] = True + 
DEFINES["_GNU_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + + OS_LIBS += [ + "GLESv2", + "log" + ] + +if CONFIG["OS_TARGET"] == "Darwin": + + DEFINES["WEBRTC_MAC"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True + DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + +if CONFIG["OS_TARGET"] == "Linux": + + DEFINES["USE_AURA"] = "1" + DEFINES["USE_GLIB"] = "1" + DEFINES["USE_NSS_CERTS"] = "1" + DEFINES["USE_OZONE"] = "1" + DEFINES["USE_UDEV"] = True + DEFINES["WEBRTC_ENABLE_LIBEVENT"] = True + DEFINES["WEBRTC_LINUX"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_FILE_OFFSET_BITS"] = "64" + DEFINES["_LARGEFILE64_SOURCE"] = True + DEFINES["_LARGEFILE_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + + OS_LIBS += [ + "rt" + ] + +if CONFIG["OS_TARGET"] == "OpenBSD": + + DEFINES["USE_GLIB"] = "1" + DEFINES["USE_OZONE"] = "1" + DEFINES["USE_X11"] = "1" + DEFINES["WEBRTC_BSD"] = True + DEFINES["WEBRTC_ENABLE_LIBEVENT"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_FILE_OFFSET_BITS"] = "64" + DEFINES["_LARGEFILE64_SOURCE"] = True + DEFINES["_LARGEFILE_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + +if CONFIG["OS_TARGET"] == "WINNT": + + DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True + DEFINES["NOMINMAX"] = True + DEFINES["NTDDI_VERSION"] = "0x0A000000" + DEFINES["PSAPI_VERSION"] = "2" + DEFINES["RTC_ENABLE_WIN_WGC"] = True + DEFINES["UNICODE"] = True + DEFINES["USE_AURA"] = "1" + DEFINES["WEBRTC_WIN"] = True + DEFINES["WIN32"] = True + DEFINES["WIN32_LEAN_AND_MEAN"] = True + DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP" + DEFINES["WINVER"] = "0x0A00" + DEFINES["_ATL_NO_OPENGL"] = True + DEFINES["_CRT_RAND_S"] = True + 
DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True + DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True + DEFINES["_HAS_EXCEPTIONS"] = "0" + DEFINES["_HAS_NODISCARD"] = True + DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True + DEFINES["_SECURE_ATL"] = True + DEFINES["_UNICODE"] = True + DEFINES["_WIN32_WINNT"] = "0x0A00" + DEFINES["_WINDOWS"] = True + DEFINES["__STD_C"] = True + + OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", + "winmm" + ] + +if CONFIG["TARGET_CPU"] == "aarch64": + + DEFINES["WEBRTC_ARCH_ARM64"] = True + DEFINES["WEBRTC_HAS_NEON"] = True + +if CONFIG["TARGET_CPU"] == "arm": + + CXXFLAGS += [ + "-mfpu=neon" + ] + + DEFINES["WEBRTC_ARCH_ARM"] = True + DEFINES["WEBRTC_ARCH_ARM_V7"] = True + DEFINES["WEBRTC_HAS_NEON"] = True + +if CONFIG["TARGET_CPU"] == "mips32": + + DEFINES["MIPS32_LE"] = True + DEFINES["MIPS_FPU_LE"] = True + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["TARGET_CPU"] == "mips64": + + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["TARGET_CPU"] == "x86": + + DEFINES["WEBRTC_ENABLE_AVX2"] = True + +if CONFIG["TARGET_CPU"] == "x86_64": + + DEFINES["WEBRTC_ENABLE_AVX2"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT": + + DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0" + +if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["USE_X11"] = "1" + +if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm": + + OS_LIBS += [ + "android_support", + "unwind" + ] + +if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86": + + CXXFLAGS += [ + "-msse2" + ] + + OS_LIBS += [ + "android_support" + ] + +if 
CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64": + + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "arm": + + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "x86": + + CXXFLAGS += [ + "-msse2" + ] + + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "x86_64": + + DEFINES["_GNU_SOURCE"] = True + +Library("video_capture_module_gn") diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_options.cc b/third_party/libwebrtc/modules/video_capture/video_capture_options.cc new file mode 100644 index 0000000000..203d0a604b --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_options.cc @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_capture/video_capture_options.h" + +#if defined(WEBRTC_USE_PIPEWIRE) +#include "modules/video_capture/linux/pipewire_session.h" +#endif + +namespace webrtc { + +VideoCaptureOptions::VideoCaptureOptions() {} +VideoCaptureOptions::VideoCaptureOptions(const VideoCaptureOptions& options) = + default; +VideoCaptureOptions::VideoCaptureOptions(VideoCaptureOptions&& options) = + default; +VideoCaptureOptions::~VideoCaptureOptions() {} + +VideoCaptureOptions& VideoCaptureOptions::operator=( + const VideoCaptureOptions& options) = default; +VideoCaptureOptions& VideoCaptureOptions::operator=( + VideoCaptureOptions&& options) = default; + +void VideoCaptureOptions::Init(Callback* callback) { +#if defined(WEBRTC_USE_PIPEWIRE) + if (allow_pipewire_) { + pipewire_session_ = + rtc::make_ref_counted(); + pipewire_session_->Init(callback, pipewire_fd_); + return; + } +#endif +#if defined(WEBRTC_LINUX) + if (!allow_v4l2_) + callback->OnInitialized(Status::UNAVAILABLE); + else +#endif + callback->OnInitialized(Status::SUCCESS); +} + +#if defined(WEBRTC_USE_PIPEWIRE) +rtc::scoped_refptr +VideoCaptureOptions::pipewire_session() { + return pipewire_session_; +} +#endif + +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_options.h b/third_party/libwebrtc/modules/video_capture/video_capture_options.h new file mode 100644 index 0000000000..37965305d9 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/video_capture_options.h @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_OPTIONS_H_ +#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_OPTIONS_H_ + +#include "api/scoped_refptr.h" +#include "rtc_base/system/rtc_export.h" + +#if defined(WEBRTC_USE_PIPEWIRE) +#include "modules/portal/pipewire_utils.h" +#endif + +namespace webrtc { + +#if defined(WEBRTC_USE_PIPEWIRE) +namespace videocapturemodule { +class PipeWireSession; +} +#endif + +// An object that stores initialization parameters for video capturers +class RTC_EXPORT VideoCaptureOptions { + public: + VideoCaptureOptions(); + VideoCaptureOptions(const VideoCaptureOptions& options); + VideoCaptureOptions(VideoCaptureOptions&& options); + ~VideoCaptureOptions(); + + VideoCaptureOptions& operator=(const VideoCaptureOptions& options); + VideoCaptureOptions& operator=(VideoCaptureOptions&& options); + + enum class Status { + SUCCESS, + UNINITIALIZED, + UNAVAILABLE, + DENIED, + ERROR, + MAX_VALUE = ERROR + }; + + class Callback { + public: + virtual void OnInitialized(Status status) = 0; + + protected: + virtual ~Callback() = default; + }; + + void Init(Callback* callback); + +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) + bool allow_v4l2() const { return allow_v4l2_; } + void set_allow_v4l2(bool allow) { allow_v4l2_ = allow; } +#endif + +#if defined(WEBRTC_USE_PIPEWIRE) + bool allow_pipewire() const { return allow_pipewire_; } + void set_allow_pipewire(bool allow) { allow_pipewire_ = allow; } + void set_pipewire_fd(int fd) { pipewire_fd_ = fd; } + rtc::scoped_refptr pipewire_session(); +#endif + + private: +#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) + bool allow_v4l2_ = false; +#endif +#if defined(WEBRTC_USE_PIPEWIRE) + bool allow_pipewire_ = false; + int pipewire_fd_ = kInvalidPipeWireFd; + rtc::scoped_refptr pipewire_session_; +#endif +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_OPTIONS_H_ diff --git a/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc 
b/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc new file mode 100644 index 0000000000..8ca741239c --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.cc @@ -0,0 +1,713 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/windows/device_info_ds.h" + +#include + +#include "modules/video_capture/video_capture_config.h" +#include "modules/video_capture/windows/help_functions_ds.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_utils.h" + +namespace webrtc { +namespace videocapturemodule { + +BOOL isVideoDevice(DEV_BROADCAST_HDR *pHdr) +{ + if (pHdr == NULL) { + return FALSE; + } + if (pHdr->dbch_devicetype != DBT_DEVTYP_DEVICEINTERFACE) { + return FALSE; + } + DEV_BROADCAST_DEVICEINTERFACE* pDi = (DEV_BROADCAST_DEVICEINTERFACE*)pHdr; + return pDi->dbcc_classguid == KSCATEGORY_VIDEO_CAMERA; +} + +LRESULT CALLBACK WndProc(HWND hWnd, UINT uiMsg, WPARAM wParam, LPARAM lParam) +{ + DeviceInfoDS* pParent; + if (uiMsg == WM_CREATE) + { + pParent = (DeviceInfoDS*)((LPCREATESTRUCT)lParam)->lpCreateParams; + SetWindowLongPtr(hWnd, GWLP_USERDATA, (LONG_PTR)pParent); + } + else if (uiMsg == WM_DESTROY) + { + SetWindowLongPtr(hWnd, GWLP_USERDATA, NULL); + } + else if (uiMsg == WM_DEVICECHANGE) + { + pParent = (DeviceInfoDS*)GetWindowLongPtr(hWnd, GWLP_USERDATA); + if (pParent && isVideoDevice((PDEV_BROADCAST_HDR)lParam)) + { + pParent->DeviceChange(); + } + } + return DefWindowProc(hWnd, uiMsg, wParam, lParam); +} + +// static +DeviceInfoDS* DeviceInfoDS::Create() { + DeviceInfoDS* dsInfo = new DeviceInfoDS(); + if 
(!dsInfo || dsInfo->Init() != 0) { + delete dsInfo; + dsInfo = NULL; + } + return dsInfo; +} + +DeviceInfoDS::DeviceInfoDS() + : _dsDevEnum(NULL), + _dsMonikerDevEnum(NULL), + _CoUninitializeIsRequired(true), + _hdevnotify(NULL) { + // 1) Initialize the COM library (make Windows load the DLLs). + // + // CoInitializeEx must be called at least once, and is usually called only + // once, for each thread that uses the COM library. Multiple calls to + // CoInitializeEx by the same thread are allowed as long as they pass the same + // concurrency flag, but subsequent valid calls return S_FALSE. To close the + // COM library gracefully on a thread, each successful call to CoInitializeEx, + // including any call that returns S_FALSE, must be balanced by a + // corresponding call to CoUninitialize. + // + + /*Apartment-threading, while allowing for multiple threads of execution, + serializes all incoming calls by requiring that calls to methods of objects + created by this thread always run on the same thread the apartment/thread + that created them. In addition, calls can arrive only at message-queue + boundaries (i.e., only during a PeekMessage, SendMessage, DispatchMessage, + etc.). Because of this serialization, it is not typically necessary to write + concurrency control into the code for the object, other than to avoid calls + to PeekMessage and SendMessage during processing that must not be interrupted + by other method invocations or calls to other objects in the same + apartment/thread.*/ + + /// CoInitializeEx(NULL, COINIT_APARTMENTTHREADED ); //| + /// COINIT_SPEED_OVER_MEMORY + HRESULT hr = CoInitializeEx( + NULL, COINIT_MULTITHREADED); // Use COINIT_MULTITHREADED since Voice + // Engine uses COINIT_MULTITHREADED + if (FAILED(hr)) { + // Avoid calling CoUninitialize() since CoInitializeEx() failed. 
+ _CoUninitializeIsRequired = FALSE; + + if (hr == RPC_E_CHANGED_MODE) { + // Calling thread has already initialized COM to be used in a + // single-threaded apartment (STA). We are then prevented from using STA. + // Details: hr = 0x80010106 <=> "Cannot change thread mode after it is + // set". + // + RTC_DLOG(LS_INFO) << __FUNCTION__ + << ": CoInitializeEx(NULL, COINIT_APARTMENTTHREADED)" + " => RPC_E_CHANGED_MODE, error 0x" + << rtc::ToHex(hr); + } + } + + _hInstance = reinterpret_cast(GetModuleHandle(NULL)); + _wndClass = {0}; + _wndClass.lpfnWndProc = &WndProc; + _wndClass.lpszClassName = TEXT("DeviceInfoDS"); + _wndClass.hInstance = _hInstance; + + if (RegisterClass(&_wndClass)) { + _hwnd = CreateWindow(_wndClass.lpszClassName, NULL, 0, CW_USEDEFAULT, + CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, NULL, + NULL, _hInstance, this); + + DEV_BROADCAST_DEVICEINTERFACE di = { 0 }; + di.dbcc_size = sizeof(di); + di.dbcc_devicetype = DBT_DEVTYP_DEVICEINTERFACE; + di.dbcc_classguid = KSCATEGORY_VIDEO_CAMERA; + + _hdevnotify = RegisterDeviceNotification(_hwnd, &di, + DEVICE_NOTIFY_WINDOW_HANDLE); + } +} + +DeviceInfoDS::~DeviceInfoDS() { + RELEASE_AND_CLEAR(_dsMonikerDevEnum); + RELEASE_AND_CLEAR(_dsDevEnum); + if (_CoUninitializeIsRequired) { + CoUninitialize(); + } + if (_hdevnotify) + { + UnregisterDeviceNotification(_hdevnotify); + } + if (_hwnd != NULL) { + DestroyWindow(_hwnd); + } + UnregisterClass(_wndClass.lpszClassName, _hInstance); +} + +int32_t DeviceInfoDS::Init() { + HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, + IID_ICreateDevEnum, (void**)&_dsDevEnum); + if (hr != NOERROR) { + RTC_LOG(LS_INFO) << "Failed to create CLSID_SystemDeviceEnum, error 0x" + << rtc::ToHex(hr); + return -1; + } + return 0; +} +uint32_t DeviceInfoDS::NumberOfDevices() { + MutexLock lock(&_apiLock); + return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0); +} + +int32_t DeviceInfoDS::GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t 
deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8, + uint32_t productUniqueIdUTF8Length, + pid_t* pid, + bool* deviceIsPlaceholder) { + MutexLock lock(&_apiLock); + const int32_t result = GetDeviceInfo( + deviceNumber, deviceNameUTF8, deviceNameLength, deviceUniqueIdUTF8, + deviceUniqueIdUTF8Length, productUniqueIdUTF8, productUniqueIdUTF8Length); + return result > (int32_t)deviceNumber ? 0 : -1; +} + +int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8, + uint32_t productUniqueIdUTF8Length) + +{ + // enumerate all video capture devices + RELEASE_AND_CLEAR(_dsMonikerDevEnum); + HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, + &_dsMonikerDevEnum, 0); + if (hr != NOERROR) { + RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x" + << rtc::ToHex(hr) << ". No webcam exist?"; + return 0; + } + + _dsMonikerDevEnum->Reset(); + ULONG cFetched; + IMoniker* pM; + int index = 0; + while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched)) { + IPropertyBag* pBag; + hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pBag); + if (S_OK == hr) { + // Find the description or friendly name. + VARIANT varName; + VariantInit(&varName); + hr = pBag->Read(L"Description", &varName, 0); + if (FAILED(hr)) { + hr = pBag->Read(L"FriendlyName", &varName, 0); + } + if (SUCCEEDED(hr)) { + // ignore all VFW drivers + if ((wcsstr(varName.bstrVal, (L"(VFW)")) == NULL) && + (_wcsnicmp(varName.bstrVal, (L"Google Camera Adapter"), 21) != 0)) { + // Found a valid device. 
+ if (index == static_cast(deviceNumber)) { + int convResult = 0; + if (deviceNameLength > 0) { + convResult = WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1, + (char*)deviceNameUTF8, + deviceNameLength, NULL, NULL); + if (convResult == 0) { + RTC_LOG(LS_INFO) << "Failed to convert device name to UTF8, " + "error = " + << GetLastError(); + return -1; + } + } + if (deviceUniqueIdUTF8Length > 0) { + hr = pBag->Read(L"DevicePath", &varName, 0); + if (FAILED(hr)) { + strncpy_s((char*)deviceUniqueIdUTF8, deviceUniqueIdUTF8Length, + (char*)deviceNameUTF8, convResult); + RTC_LOG(LS_INFO) << "Failed to get " + "deviceUniqueIdUTF8 using " + "deviceNameUTF8"; + } else { + convResult = WideCharToMultiByte( + CP_UTF8, 0, varName.bstrVal, -1, (char*)deviceUniqueIdUTF8, + deviceUniqueIdUTF8Length, NULL, NULL); + if (convResult == 0) { + RTC_LOG(LS_INFO) << "Failed to convert device " + "name to UTF8, error = " + << GetLastError(); + return -1; + } + if (productUniqueIdUTF8 && productUniqueIdUTF8Length > 0) { + GetProductId(deviceUniqueIdUTF8, productUniqueIdUTF8, + productUniqueIdUTF8Length); + } + } + } + } + ++index; // increase the number of valid devices + } + } + VariantClear(&varName); + pBag->Release(); + pM->Release(); + } + } + if (deviceNameLength) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << " " << deviceNameUTF8; + } + return index; +} + +IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8, + char* productUniqueIdUTF8, + uint32_t productUniqueIdUTF8Length) { + const int32_t deviceUniqueIdUTF8Length = (int32_t)strlen( + (char*)deviceUniqueIdUTF8); // UTF8 is also NULL terminated + if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) { + RTC_LOG(LS_INFO) << "Device name too long"; + return NULL; + } + + // enumerate all video capture devices + RELEASE_AND_CLEAR(_dsMonikerDevEnum); + HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, + &_dsMonikerDevEnum, 0); + if (hr != NOERROR) { + RTC_LOG(LS_INFO) << 
"Failed to enumerate CLSID_SystemDeviceEnum, error 0x" + << rtc::ToHex(hr) << ". No webcam exist?"; + return 0; + } + _dsMonikerDevEnum->Reset(); + ULONG cFetched; + IMoniker* pM; + + IBaseFilter* captureFilter = NULL; + bool deviceFound = false; + while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched) && !deviceFound) { + IPropertyBag* pBag; + hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pBag); + if (S_OK == hr) { + // Find the description or friendly name. + VARIANT varName; + VariantInit(&varName); + if (deviceUniqueIdUTF8Length > 0) { + hr = pBag->Read(L"DevicePath", &varName, 0); + if (FAILED(hr)) { + hr = pBag->Read(L"Description", &varName, 0); + if (FAILED(hr)) { + hr = pBag->Read(L"FriendlyName", &varName, 0); + } + } + if (SUCCEEDED(hr)) { + char tempDevicePathUTF8[256]; + tempDevicePathUTF8[0] = 0; + WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1, + tempDevicePathUTF8, sizeof(tempDevicePathUTF8), + NULL, NULL); + if (strncmp(tempDevicePathUTF8, (const char*)deviceUniqueIdUTF8, + deviceUniqueIdUTF8Length) == 0) { + // We have found the requested device + deviceFound = true; + hr = + pM->BindToObject(0, 0, IID_IBaseFilter, (void**)&captureFilter); + if FAILED (hr) { + RTC_LOG(LS_ERROR) << "Failed to bind to the selected " + "capture device " + << hr; + } + + if (productUniqueIdUTF8 && + productUniqueIdUTF8Length > 0) // Get the device name + { + GetProductId(deviceUniqueIdUTF8, productUniqueIdUTF8, + productUniqueIdUTF8Length); + } + } + } + } + VariantClear(&varName); + pBag->Release(); + } + pM->Release(); + } + return captureFilter; +} + +int32_t DeviceInfoDS::GetWindowsCapability( + const int32_t capabilityIndex, + VideoCaptureCapabilityWindows& windowsCapability) { + MutexLock lock(&_apiLock); + + if (capabilityIndex < 0 || static_cast(capabilityIndex) >= + _captureCapabilitiesWindows.size()) { + return -1; + } + + windowsCapability = _captureCapabilitiesWindows[capabilityIndex]; + return 0; +} + +int32_t 
DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8) + +{ + // Reset old capability list + _captureCapabilities.clear(); + + const int32_t deviceUniqueIdUTF8Length = + (int32_t)strlen((char*)deviceUniqueIdUTF8); + if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) { + RTC_LOG(LS_INFO) << "Device name too long"; + return -1; + } + RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device " + << deviceUniqueIdUTF8; + + char productId[kVideoCaptureProductIdLength]; + IBaseFilter* captureDevice = DeviceInfoDS::GetDeviceFilter( + deviceUniqueIdUTF8, productId, kVideoCaptureProductIdLength); + if (!captureDevice) + return -1; + IPin* outputCapturePin = GetOutputPin(captureDevice, GUID_NULL); + if (!outputCapturePin) { + RTC_LOG(LS_INFO) << "Failed to get capture device output pin"; + RELEASE_AND_CLEAR(captureDevice); + return -1; + } + IAMExtDevice* extDevice = NULL; + HRESULT hr = + captureDevice->QueryInterface(IID_IAMExtDevice, (void**)&extDevice); + if (SUCCEEDED(hr) && extDevice) { + RTC_LOG(LS_INFO) << "This is an external device"; + extDevice->Release(); + } + + IAMStreamConfig* streamConfig = NULL; + hr = outputCapturePin->QueryInterface(IID_IAMStreamConfig, + (void**)&streamConfig); + if (FAILED(hr)) { + RTC_LOG(LS_INFO) << "Failed to get IID_IAMStreamConfig interface " + "from capture device"; + return -1; + } + + // this gets the FPS + IAMVideoControl* videoControlConfig = NULL; + HRESULT hrVC = captureDevice->QueryInterface(IID_IAMVideoControl, + (void**)&videoControlConfig); + if (FAILED(hrVC)) { + RTC_LOG(LS_INFO) << "IID_IAMVideoControl Interface NOT SUPPORTED"; + } + + AM_MEDIA_TYPE* pmt = NULL; + VIDEO_STREAM_CONFIG_CAPS caps; + int count, size; + + hr = streamConfig->GetNumberOfCapabilities(&count, &size); + if (FAILED(hr)) { + RTC_LOG(LS_INFO) << "Failed to GetNumberOfCapabilities"; + RELEASE_AND_CLEAR(videoControlConfig); + RELEASE_AND_CLEAR(streamConfig); + RELEASE_AND_CLEAR(outputCapturePin); + 
RELEASE_AND_CLEAR(captureDevice); + return -1; + } + + // Check if the device support formattype == FORMAT_VideoInfo2 and + // FORMAT_VideoInfo. Prefer FORMAT_VideoInfo since some cameras (ZureCam) has + // been seen having problem with MJPEG and FORMAT_VideoInfo2 Interlace flag is + // only supported in FORMAT_VideoInfo2 + bool supportFORMAT_VideoInfo2 = false; + bool supportFORMAT_VideoInfo = false; + bool foundInterlacedFormat = false; + GUID preferedVideoFormat = FORMAT_VideoInfo; + for (int32_t tmp = 0; tmp < count; ++tmp) { + hr = streamConfig->GetStreamCaps(tmp, &pmt, reinterpret_cast(&caps)); + if (hr == S_OK) { + if (pmt->majortype == MEDIATYPE_Video && + pmt->formattype == FORMAT_VideoInfo2) { + RTC_LOG(LS_INFO) << "Device support FORMAT_VideoInfo2"; + supportFORMAT_VideoInfo2 = true; + VIDEOINFOHEADER2* h = + reinterpret_cast(pmt->pbFormat); + RTC_DCHECK(h); + foundInterlacedFormat |= + h->dwInterlaceFlags & + (AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOnly); + } + if (pmt->majortype == MEDIATYPE_Video && + pmt->formattype == FORMAT_VideoInfo) { + RTC_LOG(LS_INFO) << "Device support FORMAT_VideoInfo2"; + supportFORMAT_VideoInfo = true; + } + + FreeMediaType(pmt); + pmt = NULL; + } + } + if (supportFORMAT_VideoInfo2) { + if (supportFORMAT_VideoInfo && !foundInterlacedFormat) { + preferedVideoFormat = FORMAT_VideoInfo; + } else { + preferedVideoFormat = FORMAT_VideoInfo2; + } + } + + for (int32_t tmp = 0; tmp < count; ++tmp) { + hr = streamConfig->GetStreamCaps(tmp, &pmt, reinterpret_cast(&caps)); + if (hr != S_OK) { + RTC_LOG(LS_INFO) << "Failed to GetStreamCaps"; + RELEASE_AND_CLEAR(videoControlConfig); + RELEASE_AND_CLEAR(streamConfig); + RELEASE_AND_CLEAR(outputCapturePin); + RELEASE_AND_CLEAR(captureDevice); + return -1; + } + + if (pmt->majortype == MEDIATYPE_Video && + pmt->formattype == preferedVideoFormat) { + VideoCaptureCapabilityWindows capability; + int64_t avgTimePerFrame = 0; + + if (pmt->formattype == FORMAT_VideoInfo) { + 
VIDEOINFOHEADER* h = reinterpret_cast(pmt->pbFormat); + RTC_DCHECK(h); + capability.directShowCapabilityIndex = tmp; + capability.width = h->bmiHeader.biWidth; + capability.height = h->bmiHeader.biHeight; + avgTimePerFrame = h->AvgTimePerFrame; + } + if (pmt->formattype == FORMAT_VideoInfo2) { + VIDEOINFOHEADER2* h = + reinterpret_cast(pmt->pbFormat); + RTC_DCHECK(h); + capability.directShowCapabilityIndex = tmp; + capability.width = h->bmiHeader.biWidth; + capability.height = h->bmiHeader.biHeight; + capability.interlaced = + h->dwInterlaceFlags & + (AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOnly); + avgTimePerFrame = h->AvgTimePerFrame; + } + + if (hrVC == S_OK) { + LONGLONG* frameDurationList = NULL; + LONGLONG maxFPS = 0; + long listSize = 0; + SIZE size; + size.cx = capability.width; + size.cy = capability.height; + + // GetMaxAvailableFrameRate doesn't return max frame rate always + // eg: Logitech Notebook. This may be due to a bug in that API + // because GetFrameRateList array is reversed in the above camera. So + // a util method written. Can't assume the first value will return + // the max fps. + hrVC = videoControlConfig->GetFrameRateList( + outputCapturePin, tmp, size, &listSize, &frameDurationList); + + if (hrVC == S_OK) { + maxFPS = GetMaxOfFrameArray(frameDurationList, listSize); + } + + CoTaskMemFree(frameDurationList); + frameDurationList = NULL; + listSize = 0; + + // On some odd cameras, you may get a 0 for duration. Some others may + // not update the out vars. GetMaxOfFrameArray returns the lowest + // duration (highest FPS), or 0 if there was no list with elements. 
+ if (0 != maxFPS) { + capability.maxFPS = static_cast(10000000 / maxFPS); + capability.supportFrameRateControl = true; + } else // use existing method + { + RTC_LOG(LS_INFO) << "GetMaxAvailableFrameRate NOT SUPPORTED"; + if (avgTimePerFrame > 0) + capability.maxFPS = static_cast(10000000 / avgTimePerFrame); + else + capability.maxFPS = 0; + } + } else // use existing method in case IAMVideoControl is not supported + { + if (avgTimePerFrame > 0) + capability.maxFPS = static_cast(10000000 / avgTimePerFrame); + else + capability.maxFPS = 0; + } + + // can't switch MEDIATYPE :~( + if (pmt->subtype == MEDIASUBTYPE_I420) { + capability.videoType = VideoType::kI420; + } else if (pmt->subtype == MEDIASUBTYPE_IYUV) { + capability.videoType = VideoType::kIYUV; + } else if (pmt->subtype == MEDIASUBTYPE_RGB24) { + capability.videoType = VideoType::kRGB24; + } else if (pmt->subtype == MEDIASUBTYPE_YUY2) { + capability.videoType = VideoType::kYUY2; + } else if (pmt->subtype == MEDIASUBTYPE_RGB565) { + capability.videoType = VideoType::kRGB565; + } else if (pmt->subtype == MEDIASUBTYPE_MJPG) { + capability.videoType = VideoType::kMJPEG; + } else if (pmt->subtype == MEDIASUBTYPE_dvsl || + pmt->subtype == MEDIASUBTYPE_dvsd || + pmt->subtype == + MEDIASUBTYPE_dvhd) // If this is an external DV camera + { + capability.videoType = + VideoType::kYUY2; // MS DV filter seems to create this type + } else if (pmt->subtype == + MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards + { + capability.videoType = VideoType::kUYVY; + } else if (pmt->subtype == + MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses + // BT. 709 color. Not entiry correct to use + // UYVY. 
http://en.wikipedia.org/wiki/YCbCr + { + RTC_LOG(LS_INFO) << "Device support HDYC."; + capability.videoType = VideoType::kUYVY; + } else { + WCHAR strGuid[39]; + StringFromGUID2(pmt->subtype, strGuid, 39); + RTC_LOG(LS_WARNING) + << "Device support unknown media type " << strGuid << ", width " + << capability.width << ", height " << capability.height; + continue; + } + + _captureCapabilities.push_back(capability); + _captureCapabilitiesWindows.push_back(capability); + RTC_LOG(LS_INFO) << "Camera capability, width:" << capability.width + << " height:" << capability.height + << " type:" << static_cast(capability.videoType) + << " fps:" << capability.maxFPS; + } + FreeMediaType(pmt); + pmt = NULL; + } + RELEASE_AND_CLEAR(streamConfig); + RELEASE_AND_CLEAR(videoControlConfig); + RELEASE_AND_CLEAR(outputCapturePin); + RELEASE_AND_CLEAR(captureDevice); // Release the capture device + + // Store the new used device name + _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length; + _lastUsedDeviceName = + (char*)realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1); + memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, + _lastUsedDeviceNameLength + 1); + RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); + + return static_cast(_captureCapabilities.size()); +} + +// Constructs a product ID from the Windows DevicePath. on a USB device the +// devicePath contains product id and vendor id. This seems to work for firewire +// as well. 
+// Example of device path: +// "\\?\usb#vid_0408&pid_2010&mi_00#7&258e7aaf&0&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global" +// "\\?\avc#sony&dv-vcr&camcorder&dv#65b2d50301460008#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global" +void DeviceInfoDS::GetProductId(const char* devicePath, + char* productUniqueIdUTF8, + uint32_t productUniqueIdUTF8Length) { + *productUniqueIdUTF8 = '\0'; + char* startPos = strstr((char*)devicePath, "\\\\?\\"); + if (!startPos) { + strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1); + RTC_LOG(LS_INFO) << "Failed to get the product Id"; + return; + } + startPos += 4; + + char* pos = strchr(startPos, '&'); + if (!pos || pos >= (char*)devicePath + strlen((char*)devicePath)) { + strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1); + RTC_LOG(LS_INFO) << "Failed to get the product Id"; + return; + } + // Find the second occurrence. + pos = strchr(pos + 1, '&'); + uint32_t bytesToCopy = (uint32_t)(pos - startPos); + if (pos && (bytesToCopy < productUniqueIdUTF8Length) && + bytesToCopy <= kVideoCaptureProductIdLength) { + strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, + (char*)startPos, bytesToCopy); + } else { + strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1); + RTC_LOG(LS_INFO) << "Failed to get the product Id"; + } +} + +int32_t DeviceInfoDS::DisplayCaptureSettingsDialogBox( + const char* deviceUniqueIdUTF8, + const char* dialogTitleUTF8, + void* parentWindow, + uint32_t positionX, + uint32_t positionY) { + MutexLock lock(&_apiLock); + HWND window = (HWND)parentWindow; + + IBaseFilter* filter = GetDeviceFilter(deviceUniqueIdUTF8, NULL, 0); + if (!filter) + return -1; + + ISpecifyPropertyPages* pPages = NULL; + CAUUID uuid; + HRESULT hr = S_OK; + + hr = filter->QueryInterface(IID_ISpecifyPropertyPages, (LPVOID*)&pPages); + if (!SUCCEEDED(hr)) { + filter->Release(); + return -1; + } + hr = pPages->GetPages(&uuid); + if (!SUCCEEDED(hr)) { + filter->Release(); + 
return -1; + } + + WCHAR tempDialogTitleWide[256]; + tempDialogTitleWide[0] = 0; + int size = 255; + + // UTF-8 to wide char + MultiByteToWideChar(CP_UTF8, 0, (char*)dialogTitleUTF8, -1, + tempDialogTitleWide, size); + + // Invoke a dialog box to display. + + hr = OleCreatePropertyFrame( + window, // You must create the parent window. + positionX, // Horizontal position for the dialog box. + positionY, // Vertical position for the dialog box. + tempDialogTitleWide, // String used for the dialog box caption. + 1, // Number of pointers passed in pPlugin. + (LPUNKNOWN*)&filter, // Pointer to the filter. + uuid.cElems, // Number of property pages. + uuid.pElems, // Array of property page CLSIDs. + LOCALE_USER_DEFAULT, // Locale ID for the dialog box. + 0, NULL); // Reserved + // Release memory. + if (uuid.pElems) { + CoTaskMemFree(uuid.pElems); + } + filter->Release(); + return 0; +} +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.h b/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.h new file mode 100644 index 0000000000..a9a1449b99 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/device_info_ds.h @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_ +#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_ + +#include +#include +#include + +#include "modules/video_capture/device_info_impl.h" +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_impl.h" + +namespace webrtc { +namespace videocapturemodule { +struct VideoCaptureCapabilityWindows : public VideoCaptureCapability { + uint32_t directShowCapabilityIndex; + bool supportFrameRateControl; + VideoCaptureCapabilityWindows() { + directShowCapabilityIndex = 0; + supportFrameRateControl = false; + } +}; + +class DeviceInfoDS : public DeviceInfoImpl { + public: + // Factory function. + static DeviceInfoDS* Create(); + + DeviceInfoDS(); + ~DeviceInfoDS() override; + + int32_t Init() override; + uint32_t NumberOfDevices() override; + + /* + * Returns the available capture devices. + */ + int32_t GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8, + uint32_t productUniqueIdUTF8Length, + pid_t* pid, + bool* deviceIsPlaceholder) override; + + /* + * Display OS /capture device specific settings dialog + */ + int32_t DisplayCaptureSettingsDialogBox(const char* deviceUniqueIdUTF8, + const char* dialogTitleUTF8, + void* parentWindow, + uint32_t positionX, + uint32_t positionY) override; + + // Windows specific + + /* Gets a capture device filter + The user of this API is responsible for releasing the filter when it not + needed. 
+ */ + IBaseFilter* GetDeviceFilter(const char* deviceUniqueIdUTF8, + char* productUniqueIdUTF8 = NULL, + uint32_t productUniqueIdUTF8Length = 0); + + int32_t GetWindowsCapability( + int32_t capabilityIndex, + VideoCaptureCapabilityWindows& windowsCapability); + + static void GetProductId(const char* devicePath, + char* productUniqueIdUTF8, + uint32_t productUniqueIdUTF8Length); + + protected: + int32_t GetDeviceInfo(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8, + uint32_t productUniqueIdUTF8Length); + + int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); + + private: + ICreateDevEnum* _dsDevEnum; + IEnumMoniker* _dsMonikerDevEnum; + bool _CoUninitializeIsRequired; + std::vector _captureCapabilitiesWindows; + HWND _hwnd; + WNDCLASS _wndClass; + HINSTANCE _hInstance; + HDEVNOTIFY _hdevnotify; +}; +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_ diff --git a/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc b/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc new file mode 100644 index 0000000000..47fecfe4a1 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.cc @@ -0,0 +1,158 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include // Must come before the help_functions_ds.h include so + // that DEFINE_GUID() entries will be defined in this + // object file. 
+ +#include + +#include "modules/video_capture/windows/help_functions_ds.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace videocapturemodule { +// This returns minimum :), which will give max frame rate... +LONGLONG GetMaxOfFrameArray(LONGLONG* maxFps, long size) { + if (!maxFps || size <= 0) { + return 0; + } + LONGLONG maxFPS = maxFps[0]; + for (int i = 0; i < size; i++) { + if (maxFPS > maxFps[i]) + maxFPS = maxFps[i]; + } + return maxFPS; +} + +IPin* GetInputPin(IBaseFilter* filter) { + IPin* pin = NULL; + IEnumPins* pPinEnum = NULL; + filter->EnumPins(&pPinEnum); + if (pPinEnum == NULL) { + return NULL; + } + + // get first unconnected pin + pPinEnum->Reset(); // set to first pin + + while (S_OK == pPinEnum->Next(1, &pin, NULL)) { + PIN_DIRECTION pPinDir; + pin->QueryDirection(&pPinDir); + if (PINDIR_INPUT == pPinDir) // This is an input pin + { + IPin* tempPin = NULL; + if (S_OK != pin->ConnectedTo(&tempPin)) // The pint is not connected + { + pPinEnum->Release(); + return pin; + } + } + pin->Release(); + } + pPinEnum->Release(); + return NULL; +} + +IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category) { + IPin* pin = NULL; + IEnumPins* pPinEnum = NULL; + filter->EnumPins(&pPinEnum); + if (pPinEnum == NULL) { + return NULL; + } + // get first unconnected pin + pPinEnum->Reset(); // set to first pin + while (S_OK == pPinEnum->Next(1, &pin, NULL)) { + PIN_DIRECTION pPinDir; + pin->QueryDirection(&pPinDir); + if (PINDIR_OUTPUT == pPinDir) // This is an output pin + { + if (Category == GUID_NULL || PinMatchesCategory(pin, Category)) { + pPinEnum->Release(); + return pin; + } + } + pin->Release(); + pin = NULL; + } + pPinEnum->Release(); + return NULL; +} + +BOOL PinMatchesCategory(IPin* pPin, REFGUID Category) { + BOOL bFound = FALSE; + IKsPropertySet* pKs = NULL; + HRESULT hr = pPin->QueryInterface(IID_PPV_ARGS(&pKs)); + if (SUCCEEDED(hr)) { + GUID PinCategory; + DWORD cbReturned; + hr = pKs->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, 
NULL, 0, + &PinCategory, sizeof(GUID), &cbReturned); + if (SUCCEEDED(hr) && (cbReturned == sizeof(GUID))) { + bFound = (PinCategory == Category); + } + pKs->Release(); + } + return bFound; +} + +void ResetMediaType(AM_MEDIA_TYPE* media_type) { + if (!media_type) + return; + if (media_type->cbFormat != 0) { + CoTaskMemFree(media_type->pbFormat); + media_type->cbFormat = 0; + media_type->pbFormat = nullptr; + } + if (media_type->pUnk) { + media_type->pUnk->Release(); + media_type->pUnk = nullptr; + } +} + +void FreeMediaType(AM_MEDIA_TYPE* media_type) { + if (!media_type) + return; + ResetMediaType(media_type); + CoTaskMemFree(media_type); +} + +HRESULT CopyMediaType(AM_MEDIA_TYPE* target, const AM_MEDIA_TYPE* source) { + RTC_DCHECK_NE(source, target); + *target = *source; + if (source->cbFormat != 0) { + RTC_DCHECK(source->pbFormat); + target->pbFormat = + reinterpret_cast(CoTaskMemAlloc(source->cbFormat)); + if (target->pbFormat == nullptr) { + target->cbFormat = 0; + return E_OUTOFMEMORY; + } else { + CopyMemory(target->pbFormat, source->pbFormat, target->cbFormat); + } + } + + if (target->pUnk != nullptr) + target->pUnk->AddRef(); + + return S_OK; +} + +wchar_t* DuplicateWideString(const wchar_t* str) { + size_t len = lstrlenW(str); + wchar_t* ret = + reinterpret_cast(CoTaskMemAlloc((len + 1) * sizeof(wchar_t))); + lstrcpyW(ret, str); + return ret; +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.h b/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.h new file mode 100644 index 0000000000..29479157a8 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/help_functions_ds.h @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_ +#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_ + +#include + +#include +#include + +#include "api/scoped_refptr.h" +#include "rtc_base/ref_counter.h" + +DEFINE_GUID(MEDIASUBTYPE_I420, + 0x30323449, + 0x0000, + 0x0010, + 0x80, + 0x00, + 0x00, + 0xAA, + 0x00, + 0x38, + 0x9B, + 0x71); +DEFINE_GUID(MEDIASUBTYPE_HDYC, + 0x43594448, + 0x0000, + 0x0010, + 0x80, + 0x00, + 0x00, + 0xAA, + 0x00, + 0x38, + 0x9B, + 0x71); + +#define RELEASE_AND_CLEAR(p) \ + if (p) { \ + (p)->Release(); \ + (p) = NULL; \ + } + +namespace webrtc { +namespace videocapturemodule { +LONGLONG GetMaxOfFrameArray(LONGLONG* maxFps, long size); + +IPin* GetInputPin(IBaseFilter* filter); +IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category); +BOOL PinMatchesCategory(IPin* pPin, REFGUID Category); +void ResetMediaType(AM_MEDIA_TYPE* media_type); +void FreeMediaType(AM_MEDIA_TYPE* media_type); +HRESULT CopyMediaType(AM_MEDIA_TYPE* target, const AM_MEDIA_TYPE* source); + +// Helper function to make using scoped_refptr with COM interface pointers +// a little less awkward. rtc::scoped_refptr doesn't support the & operator +// or a way to receive values via an out ptr. +// The function is intentionally not called QueryInterface to make things less +// confusing for the compiler to figure out what the caller wants to do when +// called from within the context of a class that also implements COM +// interfaces. +template +HRESULT GetComInterface(IUnknown* object, rtc::scoped_refptr* ptr) { + // This helper function is not meant to magically free ptr. 
If we do that + // we add code bloat to most places where it's not needed and make the code + // less readable since it's not clear at the call site that the pointer + // would get freed even inf QI() fails. + RTC_DCHECK(!ptr->get()); + void* new_ptr = nullptr; + HRESULT hr = object->QueryInterface(__uuidof(T), &new_ptr); + if (SUCCEEDED(hr)) + ptr->swap(reinterpret_cast(&new_ptr)); + return hr; +} + +// Provides a reference count implementation for COM (IUnknown derived) classes. +// The implementation uses atomics for managing the ref count. +template +class ComRefCount : public T { + public: + ComRefCount() {} + + template + explicit ComRefCount(P0&& p0) : T(std::forward(p0)) {} + + STDMETHOD_(ULONG, AddRef)() override { + ref_count_.IncRef(); + return 1; + } + + STDMETHOD_(ULONG, Release)() override { + const auto status = ref_count_.DecRef(); + if (status == rtc::RefCountReleaseStatus::kDroppedLastRef) { + delete this; + return 0; + } + return 1; + } + + protected: + ~ComRefCount() {} + + private: + webrtc::webrtc_impl::RefCounter ref_count_{0}; +}; + +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_ diff --git a/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc b/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc new file mode 100644 index 0000000000..290a45affb --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.cc @@ -0,0 +1,961 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_capture/windows/sink_filter_ds.h" + +#include // VIDEOINFOHEADER2 +#include + +#include +#include + +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/string_utils.h" + +DEFINE_GUID(CLSID_SINKFILTER, + 0x88cdbbdc, + 0xa73b, + 0x4afa, + 0xac, + 0xbf, + 0x15, + 0xd5, + 0xe2, + 0xce, + 0x12, + 0xc3); + +namespace webrtc { +namespace videocapturemodule { +namespace { + +// Simple enumeration implementation that enumerates over a single pin :-/ +class EnumPins : public IEnumPins { + public: + EnumPins(IPin* pin) : pin_(pin) {} + + protected: + virtual ~EnumPins() {} + + private: + STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override { + if (riid == IID_IUnknown || riid == IID_IEnumPins) { + *ppv = static_cast(this); + AddRef(); + return S_OK; + } + return E_NOINTERFACE; + } + + STDMETHOD(Clone)(IEnumPins** pins) { + RTC_DCHECK_NOTREACHED(); + return E_NOTIMPL; + } + + STDMETHOD(Next)(ULONG count, IPin** pins, ULONG* fetched) { + RTC_DCHECK(count > 0); + RTC_DCHECK(pins); + // fetched may be NULL. + + if (pos_ > 0) { + if (fetched) + *fetched = 0; + return S_FALSE; + } + + ++pos_; + pins[0] = pin_.get(); + pins[0]->AddRef(); + if (fetched) + *fetched = 1; + + return count == 1 ? 
S_OK : S_FALSE; + } + + STDMETHOD(Skip)(ULONG count) { + RTC_DCHECK_NOTREACHED(); + return E_NOTIMPL; + } + + STDMETHOD(Reset)() { + pos_ = 0; + return S_OK; + } + + rtc::scoped_refptr pin_; + int pos_ = 0; +}; + +bool IsMediaTypePartialMatch(const AM_MEDIA_TYPE& a, const AM_MEDIA_TYPE& b) { + if (b.majortype != GUID_NULL && a.majortype != b.majortype) + return false; + + if (b.subtype != GUID_NULL && a.subtype != b.subtype) + return false; + + if (b.formattype != GUID_NULL) { + // if the format block is specified then it must match exactly + if (a.formattype != b.formattype) + return false; + + if (a.cbFormat != b.cbFormat) + return false; + + if (a.cbFormat != 0 && memcmp(a.pbFormat, b.pbFormat, a.cbFormat) != 0) + return false; + } + + return true; +} + +bool IsMediaTypeFullySpecified(const AM_MEDIA_TYPE& type) { + return type.majortype != GUID_NULL && type.formattype != GUID_NULL; +} + +BYTE* AllocMediaTypeFormatBuffer(AM_MEDIA_TYPE* media_type, ULONG length) { + RTC_DCHECK(length); + if (media_type->cbFormat == length) + return media_type->pbFormat; + + BYTE* buffer = static_cast(CoTaskMemAlloc(length)); + if (!buffer) + return nullptr; + + if (media_type->pbFormat) { + RTC_DCHECK(media_type->cbFormat); + CoTaskMemFree(media_type->pbFormat); + media_type->pbFormat = nullptr; + } + + media_type->cbFormat = length; + media_type->pbFormat = buffer; + return buffer; +} + +void GetSampleProperties(IMediaSample* sample, AM_SAMPLE2_PROPERTIES* props) { + rtc::scoped_refptr sample2; + if (SUCCEEDED(GetComInterface(sample, &sample2))) { + sample2->GetProperties(sizeof(*props), reinterpret_cast(props)); + return; + } + + // Get the properties the hard way. 
+ props->cbData = sizeof(*props); + props->dwTypeSpecificFlags = 0; + props->dwStreamId = AM_STREAM_MEDIA; + props->dwSampleFlags = 0; + + if (sample->IsDiscontinuity() == S_OK) + props->dwSampleFlags |= AM_SAMPLE_DATADISCONTINUITY; + + if (sample->IsPreroll() == S_OK) + props->dwSampleFlags |= AM_SAMPLE_PREROLL; + + if (sample->IsSyncPoint() == S_OK) + props->dwSampleFlags |= AM_SAMPLE_SPLICEPOINT; + + if (SUCCEEDED(sample->GetTime(&props->tStart, &props->tStop))) + props->dwSampleFlags |= AM_SAMPLE_TIMEVALID | AM_SAMPLE_STOPVALID; + + if (sample->GetMediaType(&props->pMediaType) == S_OK) + props->dwSampleFlags |= AM_SAMPLE_TYPECHANGED; + + sample->GetPointer(&props->pbBuffer); + props->lActual = sample->GetActualDataLength(); + props->cbBuffer = sample->GetSize(); +} + +// Returns true if the media type is supported, false otherwise. +// For supported types, the `capability` will be populated accordingly. +bool TranslateMediaTypeToVideoCaptureCapability( + const AM_MEDIA_TYPE* media_type, + VideoCaptureCapability* capability) { + RTC_DCHECK(capability); + if (!media_type || media_type->majortype != MEDIATYPE_Video || + !media_type->pbFormat) { + return false; + } + + const BITMAPINFOHEADER* bih = nullptr; + if (media_type->formattype == FORMAT_VideoInfo) { + bih = &reinterpret_cast(media_type->pbFormat)->bmiHeader; + } else if (media_type->formattype != FORMAT_VideoInfo2) { + bih = &reinterpret_cast(media_type->pbFormat)->bmiHeader; + } else { + return false; + } + + RTC_LOG(LS_INFO) << "TranslateMediaTypeToVideoCaptureCapability width:" + << bih->biWidth << " height:" << bih->biHeight + << " Compression:0x" << rtc::ToHex(bih->biCompression); + + const GUID& sub_type = media_type->subtype; + if (sub_type == MEDIASUBTYPE_MJPG && + bih->biCompression == MAKEFOURCC('M', 'J', 'P', 'G')) { + capability->videoType = VideoType::kMJPEG; + } else if (sub_type == MEDIASUBTYPE_I420 && + bih->biCompression == MAKEFOURCC('I', '4', '2', '0')) { + capability->videoType = 
VideoType::kI420; + } else if (sub_type == MEDIASUBTYPE_YUY2 && + bih->biCompression == MAKEFOURCC('Y', 'U', 'Y', '2')) { + capability->videoType = VideoType::kYUY2; + } else if (sub_type == MEDIASUBTYPE_UYVY && + bih->biCompression == MAKEFOURCC('U', 'Y', 'V', 'Y')) { + capability->videoType = VideoType::kUYVY; + } else if (sub_type == MEDIASUBTYPE_HDYC) { + capability->videoType = VideoType::kUYVY; + } else if (sub_type == MEDIASUBTYPE_RGB24 && bih->biCompression == BI_RGB) { + capability->videoType = VideoType::kRGB24; + } else { + return false; + } + + // Store the incoming width and height + capability->width = bih->biWidth; + + // Store the incoming height, + // for RGB24 we assume the frame to be upside down + if (sub_type == MEDIASUBTYPE_RGB24 && bih->biHeight > 0) { + capability->height = -(bih->biHeight); + } else { + capability->height = abs(bih->biHeight); + } + + return true; +} + +class MediaTypesEnum : public IEnumMediaTypes { + public: + MediaTypesEnum(const VideoCaptureCapability& capability) + : capability_(capability), + format_preference_order_( + {// Default preferences, sorted by cost-to-convert-to-i420. + VideoType::kI420, VideoType::kYUY2, VideoType::kRGB24, + VideoType::kUYVY, VideoType::kMJPEG}) { + // Use the preferred video type, if supported. + auto it = std::find(format_preference_order_.begin(), + format_preference_order_.end(), capability_.videoType); + if (it != format_preference_order_.end()) { + RTC_LOG(LS_INFO) << "Selected video type: " << *it; + // Move it to the front of the list, if it isn't already there. 
+ if (it != format_preference_order_.begin()) { + format_preference_order_.splice(format_preference_order_.begin(), + format_preference_order_, it, + std::next(it)); + } + } else { + RTC_LOG(LS_WARNING) << "Unsupported video type: " + << rtc::ToString( + static_cast(capability_.videoType)) + << ", using default preference list."; + } + } + + protected: + virtual ~MediaTypesEnum() {} + + private: + STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override { + if (riid == IID_IUnknown || riid == IID_IEnumMediaTypes) { + *ppv = static_cast(this); + AddRef(); + return S_OK; + } + return E_NOINTERFACE; + } + + // IEnumMediaTypes + STDMETHOD(Clone)(IEnumMediaTypes** pins) { + RTC_DCHECK_NOTREACHED(); + return E_NOTIMPL; + } + + STDMETHOD(Next)(ULONG count, AM_MEDIA_TYPE** types, ULONG* fetched) { + RTC_DCHECK(count > 0); + RTC_DCHECK(types); + // fetched may be NULL. + if (fetched) + *fetched = 0; + + for (ULONG i = 0; + i < count && pos_ < static_cast(format_preference_order_.size()); + ++i) { + AM_MEDIA_TYPE* media_type = reinterpret_cast( + CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE))); + ZeroMemory(media_type, sizeof(*media_type)); + types[i] = media_type; + VIDEOINFOHEADER* vih = reinterpret_cast( + AllocMediaTypeFormatBuffer(media_type, sizeof(VIDEOINFOHEADER))); + ZeroMemory(vih, sizeof(*vih)); + vih->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); + vih->bmiHeader.biPlanes = 1; + vih->bmiHeader.biClrImportant = 0; + vih->bmiHeader.biClrUsed = 0; + if (capability_.maxFPS != 0) + vih->AvgTimePerFrame = 10000000 / capability_.maxFPS; + + SetRectEmpty(&vih->rcSource); // we want the whole image area rendered. + SetRectEmpty(&vih->rcTarget); // no particular destination rectangle + + media_type->majortype = MEDIATYPE_Video; + media_type->formattype = FORMAT_VideoInfo; + media_type->bTemporalCompression = FALSE; + + // Set format information. 
+ auto format_it = std::next(format_preference_order_.begin(), pos_++); + SetMediaInfoFromVideoType(*format_it, &vih->bmiHeader, media_type); + + vih->bmiHeader.biWidth = capability_.width; + vih->bmiHeader.biHeight = capability_.height; + vih->bmiHeader.biSizeImage = ((vih->bmiHeader.biBitCount / 4) * + capability_.height * capability_.width) / + 2; + + RTC_DCHECK(vih->bmiHeader.biSizeImage); + media_type->lSampleSize = vih->bmiHeader.biSizeImage; + media_type->bFixedSizeSamples = true; + if (fetched) + ++(*fetched); + } + return pos_ == static_cast(format_preference_order_.size()) ? S_FALSE + : S_OK; + } + + static void SetMediaInfoFromVideoType(VideoType video_type, + BITMAPINFOHEADER* bitmap_header, + AM_MEDIA_TYPE* media_type) { + switch (video_type) { + case VideoType::kI420: + bitmap_header->biCompression = MAKEFOURCC('I', '4', '2', '0'); + bitmap_header->biBitCount = 12; // bit per pixel + media_type->subtype = MEDIASUBTYPE_I420; + break; + case VideoType::kYUY2: + bitmap_header->biCompression = MAKEFOURCC('Y', 'U', 'Y', '2'); + bitmap_header->biBitCount = 16; // bit per pixel + media_type->subtype = MEDIASUBTYPE_YUY2; + break; + case VideoType::kRGB24: + bitmap_header->biCompression = BI_RGB; + bitmap_header->biBitCount = 24; // bit per pixel + media_type->subtype = MEDIASUBTYPE_RGB24; + break; + case VideoType::kUYVY: + bitmap_header->biCompression = MAKEFOURCC('U', 'Y', 'V', 'Y'); + bitmap_header->biBitCount = 16; // bit per pixel + media_type->subtype = MEDIASUBTYPE_UYVY; + break; + case VideoType::kMJPEG: + bitmap_header->biCompression = MAKEFOURCC('M', 'J', 'P', 'G'); + bitmap_header->biBitCount = 12; // bit per pixel + media_type->subtype = MEDIASUBTYPE_MJPG; + break; + default: + RTC_DCHECK_NOTREACHED(); + } + } + + STDMETHOD(Skip)(ULONG count) { + RTC_DCHECK_NOTREACHED(); + return E_NOTIMPL; + } + + STDMETHOD(Reset)() { + pos_ = 0; + return S_OK; + } + + int pos_ = 0; + const VideoCaptureCapability capability_; + std::list format_preference_order_; 
+}; + +} // namespace + +CaptureInputPin::CaptureInputPin(CaptureSinkFilter* filter) { + capture_checker_.Detach(); + // No reference held to avoid circular references. + info_.pFilter = filter; + info_.dir = PINDIR_INPUT; +} + +CaptureInputPin::~CaptureInputPin() { + RTC_DCHECK_RUN_ON(&main_checker_); + ResetMediaType(&media_type_); +} + +HRESULT CaptureInputPin::SetRequestedCapability( + const VideoCaptureCapability& capability) { + RTC_DCHECK_RUN_ON(&main_checker_); + RTC_DCHECK(Filter()->IsStopped()); + requested_capability_ = capability; + resulting_capability_ = VideoCaptureCapability(); + return S_OK; +} + +void CaptureInputPin::OnFilterActivated() { + RTC_DCHECK_RUN_ON(&main_checker_); + runtime_error_ = false; + flushing_ = false; + capture_checker_.Detach(); + capture_thread_id_ = 0; +} + +void CaptureInputPin::OnFilterDeactivated() { + RTC_DCHECK_RUN_ON(&main_checker_); + // Expedite shutdown by raising the flushing flag so no further processing + // on the capture thread occurs. When the graph is stopped and all filters + // have been told to stop, the media controller (graph) will wait for the + // capture thread to stop. + flushing_ = true; + if (allocator_) + allocator_->Decommit(); +} + +CaptureSinkFilter* CaptureInputPin::Filter() const { + return static_cast(info_.pFilter); +} + +HRESULT CaptureInputPin::AttemptConnection(IPin* receive_pin, + const AM_MEDIA_TYPE* media_type) { + RTC_DCHECK_RUN_ON(&main_checker_); + RTC_DCHECK(Filter()->IsStopped()); + + // Check that the connection is valid -- need to do this for every + // connect attempt since BreakConnect will undo it. + HRESULT hr = CheckDirection(receive_pin); + if (FAILED(hr)) + return hr; + + if (!TranslateMediaTypeToVideoCaptureCapability(media_type, + &resulting_capability_)) { + ClearAllocator(true); + return VFW_E_TYPE_NOT_ACCEPTED; + } + + // See if the other pin will accept this type. 
+ hr = receive_pin->ReceiveConnection(static_cast(this), media_type); + if (FAILED(hr)) { + receive_pin_ = nullptr; // Should already be null, but just in case. + return hr; + } + + // Should have been set as part of the connect process. + RTC_DCHECK_EQ(receive_pin_, receive_pin); + + ResetMediaType(&media_type_); + CopyMediaType(&media_type_, media_type); + + return S_OK; +} + +std::vector CaptureInputPin::DetermineCandidateFormats( + IPin* receive_pin, + const AM_MEDIA_TYPE* media_type) { + RTC_DCHECK_RUN_ON(&main_checker_); + RTC_DCHECK(receive_pin); + RTC_DCHECK(media_type); + + std::vector ret; + + for (int i = 0; i < 2; i++) { + IEnumMediaTypes* types = nullptr; + if (i == 0) { + // First time around, try types from receive_pin. + receive_pin->EnumMediaTypes(&types); + } else { + // Then try ours. + EnumMediaTypes(&types); + } + + if (types) { + while (true) { + ULONG fetched = 0; + AM_MEDIA_TYPE* this_type = nullptr; + if (types->Next(1, &this_type, &fetched) != S_OK) + break; + + if (IsMediaTypePartialMatch(*this_type, *media_type)) { + ret.push_back(this_type); + } else { + FreeMediaType(this_type); + } + } + types->Release(); + } + } + + return ret; +} + +void CaptureInputPin::ClearAllocator(bool decommit) { + RTC_DCHECK_RUN_ON(&main_checker_); + if (!allocator_) + return; + if (decommit) + allocator_->Decommit(); + allocator_ = nullptr; +} + +HRESULT CaptureInputPin::CheckDirection(IPin* pin) const { + RTC_DCHECK_RUN_ON(&main_checker_); + PIN_DIRECTION pd; + pin->QueryDirection(&pd); + // Fairly basic check, make sure we don't pair input with input etc. + return pd == info_.dir ? 
VFW_E_INVALID_DIRECTION : S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::QueryInterface(REFIID riid, + void** ppv) { + (*ppv) = nullptr; + if (riid == IID_IUnknown || riid == IID_IMemInputPin) { + *ppv = static_cast(this); + } else if (riid == IID_IPin) { + *ppv = static_cast(this); + } + + if (!(*ppv)) + return E_NOINTERFACE; + + static_cast(this)->AddRef(); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::Connect(IPin* receive_pin, const AM_MEDIA_TYPE* media_type) { + RTC_DCHECK_RUN_ON(&main_checker_); + if (!media_type || !receive_pin) + return E_POINTER; + + if (!Filter()->IsStopped()) + return VFW_E_NOT_STOPPED; + + if (receive_pin_) { + RTC_DCHECK_NOTREACHED(); + return VFW_E_ALREADY_CONNECTED; + } + + if (IsMediaTypeFullySpecified(*media_type)) + return AttemptConnection(receive_pin, media_type); + + auto types = DetermineCandidateFormats(receive_pin, media_type); + bool connected = false; + for (auto* type : types) { + if (!connected && AttemptConnection(receive_pin, media_type) == S_OK) + connected = true; + + FreeMediaType(type); + } + + return connected ? 
S_OK : VFW_E_NO_ACCEPTABLE_TYPES; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::ReceiveConnection(IPin* connector, + const AM_MEDIA_TYPE* media_type) { + RTC_DCHECK_RUN_ON(&main_checker_); + RTC_DCHECK(Filter()->IsStopped()); + + if (receive_pin_) { + RTC_DCHECK_NOTREACHED(); + return VFW_E_ALREADY_CONNECTED; + } + + HRESULT hr = CheckDirection(connector); + if (FAILED(hr)) + return hr; + + if (!TranslateMediaTypeToVideoCaptureCapability(media_type, + &resulting_capability_)) + return VFW_E_TYPE_NOT_ACCEPTED; + + // Complete the connection + + receive_pin_ = connector; + ResetMediaType(&media_type_); + CopyMediaType(&media_type_, media_type); + + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::Disconnect() { + RTC_DCHECK_RUN_ON(&main_checker_); + if (!Filter()->IsStopped()) + return VFW_E_NOT_STOPPED; + + if (!receive_pin_) + return S_FALSE; + + ClearAllocator(true); + receive_pin_ = nullptr; + + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::ConnectedTo(IPin** pin) { + RTC_DCHECK_RUN_ON(&main_checker_); + + if (!receive_pin_) + return VFW_E_NOT_CONNECTED; + + *pin = receive_pin_.get(); + receive_pin_->AddRef(); + + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::ConnectionMediaType(AM_MEDIA_TYPE* media_type) { + RTC_DCHECK_RUN_ON(&main_checker_); + + if (!receive_pin_) + return VFW_E_NOT_CONNECTED; + + CopyMediaType(media_type, &media_type_); + + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::QueryPinInfo(PIN_INFO* info) { + RTC_DCHECK_RUN_ON(&main_checker_); + *info = info_; + if (info_.pFilter) + info_.pFilter->AddRef(); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::QueryDirection(PIN_DIRECTION* pin_dir) { + RTC_DCHECK_RUN_ON(&main_checker_); + *pin_dir = info_.dir; + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::QueryId(LPWSTR* id) { + RTC_DCHECK_RUN_ON(&main_checker_); + size_t len = lstrlenW(info_.achName); 
+ *id = reinterpret_cast(CoTaskMemAlloc((len + 1) * sizeof(wchar_t))); + lstrcpyW(*id, info_.achName); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::QueryAccept(const AM_MEDIA_TYPE* media_type) { + RTC_DCHECK_RUN_ON(&main_checker_); + RTC_DCHECK(Filter()->IsStopped()); + VideoCaptureCapability capability(resulting_capability_); + return TranslateMediaTypeToVideoCaptureCapability(media_type, &capability) + ? S_FALSE + : S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::EnumMediaTypes(IEnumMediaTypes** types) { + RTC_DCHECK_RUN_ON(&main_checker_); + *types = new ComRefCount(requested_capability_); + (*types)->AddRef(); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::QueryInternalConnections(IPin** pins, ULONG* count) { + return E_NOTIMPL; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::EndOfStream() { + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::BeginFlush() { + RTC_DCHECK_RUN_ON(&main_checker_); + flushing_ = true; + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::EndFlush() { + RTC_DCHECK_RUN_ON(&main_checker_); + flushing_ = false; + runtime_error_ = false; + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::NewSegment(REFERENCE_TIME start, + REFERENCE_TIME stop, + double rate) { + RTC_DCHECK_RUN_ON(&main_checker_); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::GetAllocator(IMemAllocator** allocator) { + RTC_DCHECK_RUN_ON(&main_checker_); + if (allocator_ == nullptr) { + HRESULT hr = CoCreateInstance(CLSID_MemoryAllocator, 0, + CLSCTX_INPROC_SERVER, IID_IMemAllocator, + reinterpret_cast(allocator)); + if (FAILED(hr)) + return hr; + allocator_.swap(allocator); + } + *allocator = allocator_.get(); + allocator_->AddRef(); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::NotifyAllocator(IMemAllocator* allocator, BOOL read_only) { + RTC_DCHECK_RUN_ON(&main_checker_); + 
allocator_.swap(&allocator); + if (allocator_) + allocator_->AddRef(); + if (allocator) + allocator->Release(); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::GetAllocatorRequirements(ALLOCATOR_PROPERTIES* props) { + return E_NOTIMPL; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::Receive(IMediaSample* media_sample) { + RTC_DCHECK_RUN_ON(&capture_checker_); + + CaptureSinkFilter* const filter = static_cast(Filter()); + + if (flushing_.load(std::memory_order_relaxed)) + return S_FALSE; + + if (runtime_error_.load(std::memory_order_relaxed)) + return VFW_E_RUNTIME_ERROR; + + if (!capture_thread_id_) { + // Make sure we set the thread name only once. + capture_thread_id_ = GetCurrentThreadId(); + rtc::SetCurrentThreadName("webrtc_video_capture"); + } + + AM_SAMPLE2_PROPERTIES sample_props = {}; + GetSampleProperties(media_sample, &sample_props); + // Has the format changed in this sample? + if (sample_props.dwSampleFlags & AM_SAMPLE_TYPECHANGED) { + // Check the derived class accepts the new format. + // This shouldn't fail as the source must call QueryAccept first. + + // Note: This will modify resulting_capability_. + // That should be OK as long as resulting_capability_ is only modified + // on this thread while it is running (filter is not stopped), and only + // modified on the main thread when the filter is stopped (i.e. this thread + // is not running). 
+ if (!TranslateMediaTypeToVideoCaptureCapability(sample_props.pMediaType, + &resulting_capability_)) { + // Raise a runtime error if we fail the media type + runtime_error_ = true; + EndOfStream(); + Filter()->NotifyEvent(EC_ERRORABORT, VFW_E_TYPE_NOT_ACCEPTED, 0); + return VFW_E_INVALIDMEDIATYPE; + } + } + + filter->ProcessCapturedFrame(sample_props.pbBuffer, sample_props.lActual, + resulting_capability_); + + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureInputPin::ReceiveMultiple(IMediaSample** samples, + long count, + long* processed) { + HRESULT hr = S_OK; + *processed = 0; + while (count-- > 0) { + hr = Receive(samples[*processed]); + if (hr != S_OK) + break; + ++(*processed); + } + return hr; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::ReceiveCanBlock() { + return S_FALSE; +} + +// ---------------------------------------------------------------------------- + +CaptureSinkFilter::CaptureSinkFilter(VideoCaptureImpl* capture_observer) + : input_pin_(new ComRefCount(this)), + capture_observer_(capture_observer) {} + +CaptureSinkFilter::~CaptureSinkFilter() { + RTC_DCHECK_RUN_ON(&main_checker_); +} + +HRESULT CaptureSinkFilter::SetRequestedCapability( + const VideoCaptureCapability& capability) { + RTC_DCHECK_RUN_ON(&main_checker_); + // Called on the same thread as capture is started on. 
+ return input_pin_->SetRequestedCapability(capability); +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::GetState(DWORD msecs, FILTER_STATE* state) { + RTC_DCHECK_RUN_ON(&main_checker_); + *state = state_; + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::SetSyncSource(IReferenceClock* clock) { + RTC_DCHECK_RUN_ON(&main_checker_); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::GetSyncSource(IReferenceClock** clock) { + RTC_DCHECK_RUN_ON(&main_checker_); + return E_NOTIMPL; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::Pause() { + RTC_DCHECK_RUN_ON(&main_checker_); + state_ = State_Paused; + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::Run(REFERENCE_TIME tStart) { + RTC_DCHECK_RUN_ON(&main_checker_); + if (state_ == State_Stopped) + Pause(); + + state_ = State_Running; + input_pin_->OnFilterActivated(); + + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::Stop() { + RTC_DCHECK_RUN_ON(&main_checker_); + if (state_ == State_Stopped) + return S_OK; + + state_ = State_Stopped; + input_pin_->OnFilterDeactivated(); + + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::EnumPins(IEnumPins** pins) { + RTC_DCHECK_RUN_ON(&main_checker_); + *pins = new ComRefCount(input_pin_.get()); + (*pins)->AddRef(); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::FindPin(LPCWSTR id, + IPin** pin) { + RTC_DCHECK_RUN_ON(&main_checker_); + // There's no ID assigned to our input pin, so looking it up based on one + // is pointless (and in practice, this method isn't being used). 
+ return VFW_E_NOT_FOUND; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::QueryFilterInfo(FILTER_INFO* info) { + RTC_DCHECK_RUN_ON(&main_checker_); + *info = info_; + if (info->pGraph) + info->pGraph->AddRef(); + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::JoinFilterGraph(IFilterGraph* graph, LPCWSTR name) { + RTC_DCHECK_RUN_ON(&main_checker_); + RTC_DCHECK(IsStopped()); + + // Note, since a reference to the filter is held by the graph manager, + // filters must not hold a reference to the graph. If they would, we'd have + // a circular reference. Instead, a pointer to the graph can be held without + // reference. See documentation for IBaseFilter::JoinFilterGraph for more. + info_.pGraph = graph; // No AddRef(). + sink_ = nullptr; + + if (info_.pGraph) { + // make sure we don't hold on to the reference we may receive. + // Note that this assumes the same object identity, but so be it. + rtc::scoped_refptr sink; + GetComInterface(info_.pGraph, &sink); + sink_ = sink.get(); + } + + info_.achName[0] = L'\0'; + if (name) + lstrcpynW(info_.achName, name, arraysize(info_.achName)); + + return S_OK; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::QueryVendorInfo(LPWSTR* vendor_info) { + return E_NOTIMPL; +} + +void CaptureSinkFilter::ProcessCapturedFrame( + unsigned char* buffer, + size_t length, + const VideoCaptureCapability& frame_info) { + // Called on the capture thread. + capture_observer_->IncomingFrame(buffer, length, frame_info); +} + +void CaptureSinkFilter::NotifyEvent(long code, + LONG_PTR param1, + LONG_PTR param2) { + // Called on the capture thread. 
+ if (!sink_) + return; + + if (EC_COMPLETE == code) + param2 = reinterpret_cast(static_cast(this)); + sink_->Notify(code, param1, param2); +} + +bool CaptureSinkFilter::IsStopped() const { + RTC_DCHECK_RUN_ON(&main_checker_); + return state_ == State_Stopped; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP +CaptureSinkFilter::QueryInterface(REFIID riid, void** ppv) { + if (riid == IID_IUnknown || riid == IID_IPersist || riid == IID_IBaseFilter) { + *ppv = static_cast(this); + AddRef(); + return S_OK; + } + return E_NOINTERFACE; +} + +COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::GetClassID(CLSID* clsid) { + *clsid = CLSID_SINKFILTER; + return S_OK; +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.h b/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.h new file mode 100644 index 0000000000..b0fabda3cd --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/sink_filter_ds.h @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_ +#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_ + +#include + +#include +#include +#include + +#include "api/sequence_checker.h" +#include "modules/video_capture/video_capture_impl.h" +#include "modules/video_capture/windows/help_functions_ds.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { +namespace videocapturemodule { +// forward declarations +class CaptureSinkFilter; + +// Input pin for camera input +// Implements IMemInputPin, IPin. 
+class CaptureInputPin : public IMemInputPin, public IPin { + public: + CaptureInputPin(CaptureSinkFilter* filter); + + HRESULT SetRequestedCapability(const VideoCaptureCapability& capability); + + // Notifications from the filter. + void OnFilterActivated(); + void OnFilterDeactivated(); + + protected: + virtual ~CaptureInputPin(); + + private: + CaptureSinkFilter* Filter() const; + + HRESULT AttemptConnection(IPin* receive_pin, const AM_MEDIA_TYPE* media_type); + std::vector DetermineCandidateFormats( + IPin* receive_pin, + const AM_MEDIA_TYPE* media_type); + void ClearAllocator(bool decommit); + HRESULT CheckDirection(IPin* pin) const; + + // IUnknown + STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override; + + // clang-format off + // clang isn't sure what to do with the longer STDMETHOD() function + // declarations. + + // IPin + STDMETHOD(Connect)(IPin* receive_pin, + const AM_MEDIA_TYPE* media_type) override; + STDMETHOD(ReceiveConnection)(IPin* connector, + const AM_MEDIA_TYPE* media_type) override; + STDMETHOD(Disconnect)() override; + STDMETHOD(ConnectedTo)(IPin** pin) override; + STDMETHOD(ConnectionMediaType)(AM_MEDIA_TYPE* media_type) override; + STDMETHOD(QueryPinInfo)(PIN_INFO* info) override; + STDMETHOD(QueryDirection)(PIN_DIRECTION* pin_dir) override; + STDMETHOD(QueryId)(LPWSTR* id) override; + STDMETHOD(QueryAccept)(const AM_MEDIA_TYPE* media_type) override; + STDMETHOD(EnumMediaTypes)(IEnumMediaTypes** types) override; + STDMETHOD(QueryInternalConnections)(IPin** pins, ULONG* count) override; + STDMETHOD(EndOfStream)() override; + STDMETHOD(BeginFlush)() override; + STDMETHOD(EndFlush)() override; + STDMETHOD(NewSegment)(REFERENCE_TIME start, REFERENCE_TIME stop, + double rate) override; + + // IMemInputPin + STDMETHOD(GetAllocator)(IMemAllocator** allocator) override; + STDMETHOD(NotifyAllocator)(IMemAllocator* allocator, BOOL read_only) override; + STDMETHOD(GetAllocatorRequirements)(ALLOCATOR_PROPERTIES* props) override; + 
STDMETHOD(Receive)(IMediaSample* sample) override; + STDMETHOD(ReceiveMultiple)(IMediaSample** samples, long count, + long* processed) override; + STDMETHOD(ReceiveCanBlock)() override; + // clang-format on + + SequenceChecker main_checker_; + SequenceChecker capture_checker_; + + VideoCaptureCapability requested_capability_ RTC_GUARDED_BY(main_checker_); + // Accessed on the main thread when Filter()->IsStopped() (capture thread not + // running), otherwise accessed on the capture thread. + VideoCaptureCapability resulting_capability_; + DWORD capture_thread_id_ = 0; + rtc::scoped_refptr allocator_ RTC_GUARDED_BY(main_checker_); + rtc::scoped_refptr receive_pin_ RTC_GUARDED_BY(main_checker_); + std::atomic_bool flushing_{false}; + std::atomic_bool runtime_error_{false}; + // Holds a referenceless pointer to the owning filter, the name and + // direction of the pin. The filter pointer can be considered const. + PIN_INFO info_ = {}; + AM_MEDIA_TYPE media_type_ RTC_GUARDED_BY(main_checker_) = {}; +}; + +// Implement IBaseFilter (including IPersist and IMediaFilter). +class CaptureSinkFilter : public IBaseFilter { + public: + CaptureSinkFilter(VideoCaptureImpl* capture_observer); + + HRESULT SetRequestedCapability(const VideoCaptureCapability& capability); + + // Called on the capture thread. + void ProcessCapturedFrame(unsigned char* buffer, + size_t length, + const VideoCaptureCapability& frame_info); + + void NotifyEvent(long code, LONG_PTR param1, LONG_PTR param2); + bool IsStopped() const; + + // IUnknown + STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override; + + // IPersist + STDMETHOD(GetClassID)(CLSID* clsid) override; + + // IMediaFilter. 
+ STDMETHOD(GetState)(DWORD msecs, FILTER_STATE* state) override; + STDMETHOD(SetSyncSource)(IReferenceClock* clock) override; + STDMETHOD(GetSyncSource)(IReferenceClock** clock) override; + STDMETHOD(Pause)() override; + STDMETHOD(Run)(REFERENCE_TIME start) override; + STDMETHOD(Stop)() override; + + // IBaseFilter + STDMETHOD(EnumPins)(IEnumPins** pins) override; + STDMETHOD(FindPin)(LPCWSTR id, IPin** pin) override; + STDMETHOD(QueryFilterInfo)(FILTER_INFO* info) override; + STDMETHOD(JoinFilterGraph)(IFilterGraph* graph, LPCWSTR name) override; + STDMETHOD(QueryVendorInfo)(LPWSTR* vendor_info) override; + + protected: + virtual ~CaptureSinkFilter(); + + private: + SequenceChecker main_checker_; + const rtc::scoped_refptr> input_pin_; + VideoCaptureImpl* const capture_observer_; + FILTER_INFO info_ RTC_GUARDED_BY(main_checker_) = {}; + // Set/cleared in JoinFilterGraph. The filter must be stopped (no capture) + // at that time, so no lock is required. While the state is not stopped, + // the sink will be used from the capture thread. + IMediaEventSink* sink_ = nullptr; + FILTER_STATE state_ RTC_GUARDED_BY(main_checker_) = State_Stopped; +}; +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_ diff --git a/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc b/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc new file mode 100644 index 0000000000..37ee0fde01 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.cc @@ -0,0 +1,337 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_capture/windows/video_capture_ds.h" + +#include // VIDEOINFOHEADER2 + +#include "modules/video_capture/video_capture_config.h" +#include "modules/video_capture/windows/help_functions_ds.h" +#include "modules/video_capture/windows/sink_filter_ds.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace videocapturemodule { +VideoCaptureDS::VideoCaptureDS() + : _captureFilter(NULL), + _graphBuilder(NULL), + _mediaControl(NULL), + _inputSendPin(NULL), + _outputCapturePin(NULL), + _dvFilter(NULL), + _inputDvPin(NULL), + _outputDvPin(NULL) {} + +VideoCaptureDS::~VideoCaptureDS() { + if (_mediaControl) { + _mediaControl->Stop(); + } + if (_graphBuilder) { + if (sink_filter_) + _graphBuilder->RemoveFilter(sink_filter_.get()); + if (_captureFilter) + _graphBuilder->RemoveFilter(_captureFilter); + if (_dvFilter) + _graphBuilder->RemoveFilter(_dvFilter); + } + RELEASE_AND_CLEAR(_inputSendPin); + RELEASE_AND_CLEAR(_outputCapturePin); + + RELEASE_AND_CLEAR(_captureFilter); // release the capture device + RELEASE_AND_CLEAR(_dvFilter); + + RELEASE_AND_CLEAR(_mediaControl); + + RELEASE_AND_CLEAR(_inputDvPin); + RELEASE_AND_CLEAR(_outputDvPin); + + RELEASE_AND_CLEAR(_graphBuilder); +} + +int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) { + RTC_DCHECK_RUN_ON(&api_checker_); + + const int32_t nameLength = (int32_t)strlen((char*)deviceUniqueIdUTF8); + if (nameLength >= kVideoCaptureUniqueNameLength) + return -1; + + // Store the device name + _deviceUniqueId = new (std::nothrow) char[nameLength + 1]; + memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1); + + if (_dsInfo.Init() != 0) + return -1; + + _captureFilter = _dsInfo.GetDeviceFilter(deviceUniqueIdUTF8); + if (!_captureFilter) { + RTC_LOG(LS_INFO) << "Failed to create capture filter."; + return -1; + } + + // Get the interface for DirectShow's GraphBuilder + HRESULT 
hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
+ IID_IGraphBuilder, (void**)&_graphBuilder);
+ if (FAILED(hr)) {
+ RTC_LOG(LS_INFO) << "Failed to create graph builder.";
+ return -1;
+ }
+
+ hr = _graphBuilder->QueryInterface(IID_IMediaControl, (void**)&_mediaControl);
+ if (FAILED(hr)) {
+ RTC_LOG(LS_INFO) << "Failed to create media control builder.";
+ return -1;
+ }
+ hr = _graphBuilder->AddFilter(_captureFilter, CAPTURE_FILTER_NAME);
+ if (FAILED(hr)) {
+ RTC_LOG(LS_INFO) << "Failed to add the capture device to the graph.";
+ return -1;
+ }
+
+ _outputCapturePin = GetOutputPin(_captureFilter, PIN_CATEGORY_CAPTURE);
+ if (!_outputCapturePin) {
+ RTC_LOG(LS_INFO) << "Failed to get output capture pin";
+ return -1;
+ }
+
+ // Create the sink filter used for receiving Captured frames.
+ sink_filter_ = new ComRefCount(this);
+
+ hr = _graphBuilder->AddFilter(sink_filter_.get(), SINK_FILTER_NAME);
+ if (FAILED(hr)) {
+ RTC_LOG(LS_INFO) << "Failed to add the send filter to the graph.";
+ return -1;
+ }
+
+ _inputSendPin = GetInputPin(sink_filter_.get());
+ if (!_inputSendPin) {
+ RTC_LOG(LS_INFO) << "Failed to get input send pin";
+ return -1;
+ }
+
+ if (SetCameraOutput(_requestedCapability) != 0) {
+ return -1;
+ }
+ RTC_LOG(LS_INFO) << "Capture device '" << deviceUniqueIdUTF8
+ << "' initialized.";
+ return 0;
+}
+
+int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) {
+ RTC_DCHECK_RUN_ON(&api_checker_);
+
+ if (capability != _requestedCapability) {
+ DisconnectGraph();
+
+ if (SetCameraOutput(capability) != 0) {
+ return -1;
+ }
+ }
+ HRESULT hr = _mediaControl->Pause();
+ if (FAILED(hr)) {
+ RTC_LOG(LS_INFO)
+ << "Failed to Pause the Capture device. Is it already occupied? 
" << hr; + return -1; + } + hr = _mediaControl->Run(); + if (FAILED(hr)) { + RTC_LOG(LS_INFO) << "Failed to start the Capture device."; + return -1; + } + return 0; +} + +int32_t VideoCaptureDS::StopCapture() { + RTC_DCHECK_RUN_ON(&api_checker_); + + HRESULT hr = _mediaControl->StopWhenReady(); + if (FAILED(hr)) { + RTC_LOG(LS_INFO) << "Failed to stop the capture graph. " << hr; + return -1; + } + return 0; +} + +bool VideoCaptureDS::CaptureStarted() { + RTC_DCHECK_RUN_ON(&api_checker_); + + OAFilterState state = 0; + HRESULT hr = _mediaControl->GetState(1000, &state); + if (hr != S_OK && hr != VFW_S_CANT_CUE) { + RTC_LOG(LS_INFO) << "Failed to get the CaptureStarted status"; + } + RTC_LOG(LS_INFO) << "CaptureStarted " << state; + return state == State_Running; +} + +int32_t VideoCaptureDS::CaptureSettings(VideoCaptureCapability& settings) { + RTC_DCHECK_RUN_ON(&api_checker_); + settings = _requestedCapability; + return 0; +} + +int32_t VideoCaptureDS::SetCameraOutput( + const VideoCaptureCapability& requestedCapability) { + RTC_DCHECK_RUN_ON(&api_checker_); + + // Get the best matching capability + VideoCaptureCapability capability; + int32_t capabilityIndex; + + // Store the new requested size + _requestedCapability = requestedCapability; + // Match the requested capability with the supported. + if ((capabilityIndex = _dsInfo.GetBestMatchedCapability( + _deviceUniqueId, _requestedCapability, capability)) < 0) { + return -1; + } + // Reduce the frame rate if possible. 
+ if (capability.maxFPS > requestedCapability.maxFPS) {
+ capability.maxFPS = requestedCapability.maxFPS;
+ } else if (capability.maxFPS <= 0) {
+ capability.maxFPS = 30;
+ }
+
+ // Convert it to the windows capability index since they are not necessarily
+ // the same
+ VideoCaptureCapabilityWindows windowsCapability;
+ if (_dsInfo.GetWindowsCapability(capabilityIndex, windowsCapability) != 0) {
+ return -1;
+ }
+
+ IAMStreamConfig* streamConfig = NULL;
+ AM_MEDIA_TYPE* pmt = NULL;
+ VIDEO_STREAM_CONFIG_CAPS caps;
+
+ HRESULT hr = _outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+ (void**)&streamConfig);
+ if (hr) {
+ RTC_LOG(LS_INFO) << "Can't get the Capture format settings.";
+ return -1;
+ }
+
+ // Get the windows capability from the capture device
+ bool isDVCamera = false;
+ hr = streamConfig->GetStreamCaps(windowsCapability.directShowCapabilityIndex,
+ &pmt, reinterpret_cast(&caps));
+ if (hr == S_OK) {
+ if (pmt->formattype == FORMAT_VideoInfo2) {
+ VIDEOINFOHEADER2* h = reinterpret_cast(pmt->pbFormat);
+ if (capability.maxFPS > 0 && windowsCapability.supportFrameRateControl) {
+ h->AvgTimePerFrame = REFERENCE_TIME(10000000.0 / capability.maxFPS);
+ }
+ } else {
+ VIDEOINFOHEADER* h = reinterpret_cast(pmt->pbFormat);
+ if (capability.maxFPS > 0 && windowsCapability.supportFrameRateControl) {
+ h->AvgTimePerFrame = REFERENCE_TIME(10000000.0 / capability.maxFPS);
+ }
+ }
+
+ // Set the sink filter to request this capability
+ sink_filter_->SetRequestedCapability(capability);
+ // Order the capture device to use this capability
+ hr += streamConfig->SetFormat(pmt);
+
+ // Check if this is a DV camera and we need to add MS DV Filter
+ if (pmt->subtype == MEDIASUBTYPE_dvsl ||
+ pmt->subtype == MEDIASUBTYPE_dvsd ||
+ pmt->subtype == MEDIASUBTYPE_dvhd) {
+ isDVCamera = true; // This is a DV camera. 
Use MS DV filter + } + + FreeMediaType(pmt); + pmt = NULL; + } + RELEASE_AND_CLEAR(streamConfig); + + if (FAILED(hr)) { + RTC_LOG(LS_INFO) << "Failed to set capture device output format"; + return -1; + } + + if (isDVCamera) { + hr = ConnectDVCamera(); + } else { + hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputSendPin, NULL); + } + if (hr != S_OK) { + RTC_LOG(LS_INFO) << "Failed to connect the Capture graph " << hr; + return -1; + } + return 0; +} + +int32_t VideoCaptureDS::DisconnectGraph() { + RTC_DCHECK_RUN_ON(&api_checker_); + + HRESULT hr = _mediaControl->Stop(); + hr += _graphBuilder->Disconnect(_outputCapturePin); + hr += _graphBuilder->Disconnect(_inputSendPin); + + // if the DV camera filter exist + if (_dvFilter) { + _graphBuilder->Disconnect(_inputDvPin); + _graphBuilder->Disconnect(_outputDvPin); + } + if (hr != S_OK) { + RTC_LOG(LS_ERROR) + << "Failed to Stop the Capture device for reconfiguration " << hr; + return -1; + } + return 0; +} + +HRESULT VideoCaptureDS::ConnectDVCamera() { + RTC_DCHECK_RUN_ON(&api_checker_); + + HRESULT hr = S_OK; + + if (!_dvFilter) { + hr = CoCreateInstance(CLSID_DVVideoCodec, NULL, CLSCTX_INPROC, + IID_IBaseFilter, (void**)&_dvFilter); + if (hr != S_OK) { + RTC_LOG(LS_INFO) << "Failed to create the dv decoder: " << hr; + return hr; + } + hr = _graphBuilder->AddFilter(_dvFilter, L"VideoDecoderDV"); + if (hr != S_OK) { + RTC_LOG(LS_INFO) << "Failed to add the dv decoder to the graph: " << hr; + return hr; + } + _inputDvPin = GetInputPin(_dvFilter); + if (_inputDvPin == NULL) { + RTC_LOG(LS_INFO) << "Failed to get input pin from DV decoder"; + return -1; + } + _outputDvPin = GetOutputPin(_dvFilter, GUID_NULL); + if (_outputDvPin == NULL) { + RTC_LOG(LS_INFO) << "Failed to get output pin from DV decoder"; + return -1; + } + } + hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputDvPin, NULL); + if (hr != S_OK) { + RTC_LOG(LS_INFO) << "Failed to connect capture device to the dv devoder: " + << hr; + return 
hr; + } + + hr = _graphBuilder->ConnectDirect(_outputDvPin, _inputSendPin, NULL); + if (hr != S_OK) { + if (hr == HRESULT_FROM_WIN32(ERROR_TOO_MANY_OPEN_FILES)) { + RTC_LOG(LS_INFO) << "Failed to connect the capture device, busy"; + } else { + RTC_LOG(LS_INFO) << "Failed to connect capture device to the send graph: " + << hr; + } + } + return hr; +} +} // namespace videocapturemodule +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.h b/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.h new file mode 100644 index 0000000000..d6897155b0 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/video_capture_ds.h @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
+#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
+
+#include "api/scoped_refptr.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "modules/video_capture/windows/device_info_ds.h"
+
+#define CAPTURE_FILTER_NAME L"VideoCaptureFilter"
+#define SINK_FILTER_NAME L"SinkFilter"
+
+namespace webrtc {
+namespace videocapturemodule {
+// Forward declaration
+class CaptureSinkFilter;
+
+class VideoCaptureDS : public VideoCaptureImpl {
+ public:
+ VideoCaptureDS();
+
+ virtual int32_t Init(const char* deviceUniqueIdUTF8);
+
+ /*************************************************************************
+ *
+ * Start/Stop
+ *
+ *************************************************************************/
+ int32_t StartCapture(const VideoCaptureCapability& capability) override;
+ int32_t StopCapture() override;
+
+ /**************************************************************************
+ *
+ * Properties of the set device
+ *
+ **************************************************************************/
+
+ bool CaptureStarted() override;
+ int32_t CaptureSettings(VideoCaptureCapability& settings) override;
+
+ protected:
+ ~VideoCaptureDS() override;
+
+ // Help functions
+
+ int32_t SetCameraOutput(const VideoCaptureCapability& requestedCapability);
+ int32_t DisconnectGraph();
+ HRESULT ConnectDVCamera();
+
+ DeviceInfoDS _dsInfo RTC_GUARDED_BY(api_checker_);
+
+ IBaseFilter* _captureFilter RTC_GUARDED_BY(api_checker_);
+ IGraphBuilder* _graphBuilder RTC_GUARDED_BY(api_checker_);
+ IMediaControl* _mediaControl RTC_GUARDED_BY(api_checker_);
+ rtc::scoped_refptr sink_filter_
+ RTC_GUARDED_BY(api_checker_);
+ IPin* _inputSendPin RTC_GUARDED_BY(api_checker_);
+ IPin* _outputCapturePin RTC_GUARDED_BY(api_checker_);
+
+ // Microsoft DV interface (external DV cameras)
+ IBaseFilter* _dvFilter RTC_GUARDED_BY(api_checker_);
+ IPin* _inputDvPin 
RTC_GUARDED_BY(api_checker_); + IPin* _outputDvPin RTC_GUARDED_BY(api_checker_); +}; +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_ diff --git a/third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc b/third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc new file mode 100644 index 0000000000..481326c1d2 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/windows/video_capture_factory_windows.cc @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/scoped_refptr.h" +#include "modules/video_capture/windows/video_capture_ds.h" + +namespace webrtc { +namespace videocapturemodule { + +// static +VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() { + // TODO(tommi): Use the Media Foundation version on Vista and up. + return DeviceInfoDS::Create(); +} + +rtc::scoped_refptr VideoCaptureImpl::Create( + const char* device_id) { + if (device_id == nullptr) + return nullptr; + + // TODO(tommi): Use Media Foundation implementation for Vista and up. + auto capture = rtc::make_ref_counted(); + if (capture->Init(device_id) != 0) { + return nullptr; + } + + return capture; +} + +} // namespace videocapturemodule +} // namespace webrtc -- cgit v1.2.3