Diffstat (limited to 'third_party/libwebrtc/modules/video_capture/linux')
6 files changed, 1235 insertions, 0 deletions
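
For orientation, the sketch below shows how the classes added by this patch are typically driven: VideoCaptureImpl::CreateDeviceInfo() (device_info_linux.cc) enumerates capture devices, GetDeviceName() returns a unique id derived from the V4L2 bus_info, and VideoCaptureImpl::Create() (video_capture_linux.cc) builds a VideoCaptureModuleV4L2 from that id. This is an illustrative sketch only, not part of the patch; the 640x480@30 I420 capability, the buffer sizes, and the omitted frame-sink registration step are assumptions.

// Illustrative only: a minimal consumer of the factories added in this patch.
#include <memory>

#include "api/scoped_refptr.h"
#include "modules/video_capture/video_capture.h"
#include "modules/video_capture/video_capture_defines.h"
#include "modules/video_capture/video_capture_impl.h"

namespace {

rtc::scoped_refptr<webrtc::VideoCaptureModule> OpenFirstCamera() {
  std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
      webrtc::videocapturemodule::VideoCaptureImpl::CreateDeviceInfo());
  if (!info || info->NumberOfDevices() == 0)
    return nullptr;

  char name[256] = {0};
  char unique_id[256] = {0};
  // GetDeviceName() fills the V4L2 bus_info (or a "fake_N" fallback) into
  // unique_id; that string is what Create() matches against /dev/video*.
  if (info->GetDeviceName(0, name, sizeof(name), unique_id,
                          sizeof(unique_id)) != 0)
    return nullptr;

  auto module =
      webrtc::videocapturemodule::VideoCaptureImpl::Create(unique_id);
  if (!module)
    return nullptr;

  webrtc::VideoCaptureCapability capability;  // assumed example settings
  capability.width = 640;
  capability.height = 480;
  capability.maxFPS = 30;
  capability.videoType = webrtc::VideoType::kI420;
  // A real caller would register a frame sink on `module` before starting;
  // that API is outside this patch, so it is omitted here.
  if (module->StartCapture(capability) != 0)
    return nullptr;
  return module;
}

}  // namespace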
diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc
new file mode 100644
index 0000000000..4821cbccd5
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_linux.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+// v4l includes
+#if defined(__NetBSD__) || defined(__OpenBSD__)  // WEBRTC_BSD
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
+#include <vector>
+
+#include "modules/video_capture/linux/device_info_v4l2.h"
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
+  return new videocapturemodule::DeviceInfoV4l2();
+}
+}  // namespace videocapturemodule
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc
new file mode 100644
index 0000000000..ccd4b2bd2a
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc
@@ -0,0 +1,517 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/linux/device_info_v4l2.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+// v4l includes
+#if defined(__NetBSD__) || defined(__OpenBSD__)  // WEBRTC_BSD
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
+#include <vector>
+
+#include "modules/video_capture/video_capture.h"
+#include "modules/video_capture/video_capture_defines.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "rtc_base/logging.h"
+
+#ifdef WEBRTC_LINUX
+#define EVENT_SIZE ( sizeof (struct inotify_event) )
+#define BUF_LEN ( 1024 * ( EVENT_SIZE + 16 ) )
+#endif
+
+// These defines are here to support building on kernel 3.16 which some
+// downstream projects, e.g. Firefox, use.
+// TODO(apehrson): Remove them and their undefs when no longer needed.
+#ifndef V4L2_PIX_FMT_ABGR32 +#define ABGR32_OVERRIDE 1 +#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_ARGB32 +#define ARGB32_OVERRIDE 1 +#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_RGBA32 +#define RGBA32_OVERRIDE 1 +#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') +#endif + +namespace webrtc { +namespace videocapturemodule { +#ifdef WEBRTC_LINUX +void DeviceInfoV4l2::HandleEvent(inotify_event* event, int fd) +{ + if (event->mask & IN_CREATE) { + if (fd == _fd_v4l) { + DeviceChange(); + } else if ((event->mask & IN_ISDIR) && (fd == _fd_dev)) { + if (_wd_v4l < 0) { + // Sometimes inotify_add_watch failed if we call it immediately after receiving this event + // Adding 5ms delay to let file system settle down + usleep(5*1000); + _wd_v4l = inotify_add_watch(_fd_v4l, "/dev/v4l/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF); + if (_wd_v4l >= 0) { + DeviceChange(); + } + } + } + } else if (event->mask & IN_DELETE) { + if (fd == _fd_v4l) { + DeviceChange(); + } + } else if (event->mask & IN_DELETE_SELF) { + if (fd == _fd_v4l) { + inotify_rm_watch(_fd_v4l, _wd_v4l); + _wd_v4l = -1; + } else { + assert(false); + } + } +} + +int DeviceInfoV4l2::EventCheck(int fd) +{ + struct pollfd fds = { + .fd = fd, + .events = POLLIN, + .revents = 0 + }; + + return poll(&fds, 1, 100); +} + +int DeviceInfoV4l2::HandleEvents(int fd) +{ + char buffer[BUF_LEN]; + + ssize_t r = read(fd, buffer, BUF_LEN); + + if (r <= 0) { + return r; + } + + ssize_t buffer_i = 0; + inotify_event* pevent; + size_t eventSize; + int count = 0; + + while (buffer_i < r) + { + pevent = (inotify_event *) (&buffer[buffer_i]); + eventSize = sizeof(inotify_event) + pevent->len; + char event[sizeof(inotify_event) + FILENAME_MAX + 1] // null-terminated + __attribute__ ((aligned(__alignof__(struct inotify_event)))); + + memcpy(event, pevent, eventSize); + + HandleEvent((inotify_event*)(event), fd); + + buffer_i += eventSize; + count++; + } + + return count; +} + +int DeviceInfoV4l2::ProcessInotifyEvents() +{ + while (!_isShutdown) { + if (EventCheck(_fd_dev) > 0) { + if (HandleEvents(_fd_dev) < 0) { + break; + } + } + if (EventCheck(_fd_v4l) > 0) { + if (HandleEvents(_fd_v4l) < 0) { + break; + } + } + } + return 0; +} + +void DeviceInfoV4l2::InotifyProcess() +{ + _fd_v4l = inotify_init(); + _fd_dev = inotify_init(); + if (_fd_v4l >= 0 && _fd_dev >= 0) { + _wd_v4l = inotify_add_watch(_fd_v4l, "/dev/v4l/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF); + _wd_dev = inotify_add_watch(_fd_dev, "/dev/", IN_CREATE); + ProcessInotifyEvents(); + + if (_wd_v4l >= 0) { + inotify_rm_watch(_fd_v4l, _wd_v4l); + } + + if (_wd_dev >= 0) { + inotify_rm_watch(_fd_dev, _wd_dev); + } + + close(_fd_v4l); + close(_fd_dev); + } +} +#endif + +DeviceInfoV4l2::DeviceInfoV4l2() : DeviceInfoImpl() +#ifdef WEBRTC_LINUX + , _isShutdown(false) +#endif +{ +#ifdef WEBRTC_LINUX + _inotifyEventThread = rtc::PlatformThread::SpawnJoinable( + [this] { + InotifyProcess(); + }, "InotifyEventThread"); +#endif +} + +int32_t DeviceInfoV4l2::Init() { + return 0; +} + +DeviceInfoV4l2::~DeviceInfoV4l2() { +#ifdef WEBRTC_LINUX + _isShutdown = true; + + if (!_inotifyEventThread.empty()) { + _inotifyEventThread.Finalize(); + } +#endif +} + +uint32_t DeviceInfoV4l2::NumberOfDevices() { + uint32_t count = 0; + char device[20]; + int fd = -1; + struct v4l2_capability cap; + + /* detect /dev/video [0-63]VideoCaptureModule entries */ + for (int n = 0; n < 64; n++) { + snprintf(device, 
sizeof(device), "/dev/video%d", n); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities and make sure this is a video capture device + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !IsVideoCaptureDevice(&cap)) { + close(fd); + continue; + } + + close(fd); + count++; + } + } + + return count; +} + +int32_t DeviceInfoV4l2::GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* /*productUniqueIdUTF8*/, + uint32_t /*productUniqueIdUTF8Length*/, + pid_t* /*pid*/) { + // Travel through /dev/video [0-63] + uint32_t count = 0; + char device[20]; + int fd = -1; + bool found = false; + struct v4l2_capability cap; + int device_index; + for (device_index = 0; device_index < 64; device_index++) { + sprintf(device, "/dev/video%d", device_index); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities and make sure this is a video capture device + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !IsVideoCaptureDevice(&cap)) { + close(fd); + continue; + } + if (count == deviceNumber) { + // Found the device + found = true; + break; + } else { + close(fd); + count++; + } + } + } + + if (!found) + return -1; + + // query device capabilities + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { + RTC_LOG(LS_INFO) << "error in querying the device capability for device " + << device << ". errno = " << errno; + close(fd); + return -1; + } + + close(fd); + + char cameraName[64]; + memset(deviceNameUTF8, 0, deviceNameLength); + memcpy(cameraName, cap.card, sizeof(cap.card)); + + if (deviceNameLength > strlen(cameraName)) { + memcpy(deviceNameUTF8, cameraName, strlen(cameraName)); + } else { + RTC_LOG(LS_INFO) << "buffer passed is too small"; + return -1; + } + + if (cap.bus_info[0] != 0) { // may not available in all drivers + // copy device id + size_t len = strlen(reinterpret_cast<const char*>(cap.bus_info)); + if (deviceUniqueIdUTF8Length > len) { + memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length); + memcpy(deviceUniqueIdUTF8, cap.bus_info, len); + } else { + RTC_LOG(LS_INFO) << "buffer passed is too small"; + return -1; + } + } else { + // if there's no bus info to use for uniqueId, invent one - and it has to be repeatable + if (snprintf(deviceUniqueIdUTF8, + deviceUniqueIdUTF8Length, "fake_%u", device_index) >= + (int) deviceUniqueIdUTF8Length) + { + return -1; + } + } + return 0; +} + +int32_t DeviceInfoV4l2::CreateCapabilityMap(const char* deviceUniqueIdUTF8) { + int fd; + char device[32]; + bool found = false; + + const int32_t deviceUniqueIdUTF8Length = strlen(deviceUniqueIdUTF8); + if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) { + RTC_LOG(LS_INFO) << "Device name too long"; + return -1; + } + RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device " + << deviceUniqueIdUTF8; + + /* detect /dev/video [0-63] entries */ + for (int n = 0; n < 64; ++n) { + snprintf(device, sizeof(device), "/dev/video%d", n); + fd = open(device, O_RDONLY); + if (fd == -1) + continue; + + // query device capabilities + struct v4l2_capability cap; + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { + // skip devices without video capture capability + if (!IsVideoCaptureDevice(&cap)) { + close(fd); + continue; + } + + if (cap.bus_info[0] != 0) { + if (strncmp(reinterpret_cast<const char*>(cap.bus_info), + deviceUniqueIdUTF8, + strlen(deviceUniqueIdUTF8)) == 0) { // match with device id + found = true; + break; // fd matches with device unique id supplied + } + } else { // 
match for device name + if (IsDeviceNameMatches(reinterpret_cast<const char*>(cap.card), + deviceUniqueIdUTF8)) { + found = true; + break; + } + } + } + close(fd); // close since this is not the matching device + } + + if (!found) { + RTC_LOG(LS_INFO) << "no matching device found"; + return -1; + } + + // now fd will point to the matching device + // reset old capability list. + _captureCapabilities.clear(); + + int size = FillCapabilities(fd); + close(fd); + + // Store the new used device name + _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length; + _lastUsedDeviceName = reinterpret_cast<char*>( + realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1)); + memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, + _lastUsedDeviceNameLength + 1); + + RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); + + return size; +} + +int32_t DeviceInfoV4l2::DisplayCaptureSettingsDialogBox( + const char* /*deviceUniqueIdUTF8*/, + const char* /*dialogTitleUTF8*/, + void* /*parentWindow*/, + uint32_t /*positionX*/, + uint32_t /*positionY*/) { + return -1; +} + +bool DeviceInfoV4l2::IsDeviceNameMatches(const char* name, + const char* deviceUniqueIdUTF8) { + if (strncmp(deviceUniqueIdUTF8, name, strlen(name)) == 0) + return true; + return false; +} + +bool DeviceInfoV4l2::IsVideoCaptureDevice(struct v4l2_capability* cap) +{ + if (cap->capabilities & V4L2_CAP_DEVICE_CAPS) { + return cap->device_caps & V4L2_CAP_VIDEO_CAPTURE; + } else { + return cap->capabilities & V4L2_CAP_VIDEO_CAPTURE; + } +} + +int32_t DeviceInfoV4l2::FillCapabilities(int fd) { + // set image format + struct v4l2_format video_fmt; + memset(&video_fmt, 0, sizeof(struct v4l2_format)); + + video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + video_fmt.fmt.pix.sizeimage = 0; + + unsigned int videoFormats[] = { + V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_JPEG, V4L2_PIX_FMT_YUV420, + V4L2_PIX_FMT_YVU420, V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY, + V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_BGR24, V4L2_PIX_FMT_RGB24, + V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_ABGR32, V4L2_PIX_FMT_ARGB32, + V4L2_PIX_FMT_RGBA32, V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_RGB32, + }; + constexpr int totalFmts = sizeof(videoFormats) / sizeof(unsigned int); + + int sizes = 13; + unsigned int size[][2] = {{128, 96}, {160, 120}, {176, 144}, {320, 240}, + {352, 288}, {640, 480}, {704, 576}, {800, 600}, + {960, 720}, {1280, 720}, {1024, 768}, {1440, 1080}, + {1920, 1080}}; + + for (int fmts = 0; fmts < totalFmts; fmts++) { + for (int i = 0; i < sizes; i++) { + video_fmt.fmt.pix.pixelformat = videoFormats[fmts]; + video_fmt.fmt.pix.width = size[i][0]; + video_fmt.fmt.pix.height = size[i][1]; + + if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) >= 0) { + if ((video_fmt.fmt.pix.width == size[i][0]) && + (video_fmt.fmt.pix.height == size[i][1])) { + VideoCaptureCapability cap; + cap.width = video_fmt.fmt.pix.width; + cap.height = video_fmt.fmt.pix.height; + if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV) { + cap.videoType = VideoType::kYUY2; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420) { + cap.videoType = VideoType::kI420; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_YVU420) { + cap.videoType = VideoType::kYV12; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG || + videoFormats[fmts] == V4L2_PIX_FMT_JPEG) { + cap.videoType = VideoType::kMJPEG; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY) { + cap.videoType = VideoType::kUYVY; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_NV12) { + cap.videoType = VideoType::kNV12; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_BGR24) { + // NB 
that for RGB formats, `VideoType` follows naming conventions + // of libyuv[1], where e.g. the format for FOURCC "ARGB" stores + // pixels in BGRA order in memory. V4L2[2] on the other hand names + // its formats based on the order of the RGB components as stored in + // memory. Applies to all RGB formats below. + // [1]https://chromium.googlesource.com/libyuv/libyuv/+/refs/heads/main/docs/formats.md#the-argb-fourcc + // [2]https://www.kernel.org/doc/html/v6.2/userspace-api/media/v4l/pixfmt-rgb.html#bits-per-component + cap.videoType = VideoType::kRGB24; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB24) { + cap.videoType = VideoType::kBGR24; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB565) { + cap.videoType = VideoType::kRGB565; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_ABGR32) { + cap.videoType = VideoType::kARGB; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_ARGB32) { + cap.videoType = VideoType::kBGRA; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_BGR32) { + cap.videoType = VideoType::kARGB; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGB32) { + cap.videoType = VideoType::kBGRA; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_RGBA32) { + cap.videoType = VideoType::kABGR; + } else { + RTC_DCHECK_NOTREACHED(); + } + + // get fps of current camera mode + // V4l2 does not have a stable method of knowing so we just guess. + if (cap.width >= 800 && cap.videoType != VideoType::kMJPEG) { + cap.maxFPS = 15; + } else { + cap.maxFPS = 30; + } + + _captureCapabilities.push_back(cap); + RTC_LOG(LS_VERBOSE) << "Camera capability, width:" << cap.width + << " height:" << cap.height + << " type:" << static_cast<int32_t>(cap.videoType) + << " fps:" << cap.maxFPS; + } + } + } + } + + RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); + return _captureCapabilities.size(); +} + +} // namespace videocapturemodule +} // namespace webrtc + +#ifdef ABGR32_OVERRIDE +#undef ABGR32_OVERRIDE +#undef V4L2_PIX_FMT_ABGR32 +#endif + +#ifdef ARGB32_OVERRIDE +#undef ARGB32_OVERRIDE +#undef V4L2_PIX_FMT_ARGB32 +#endif + +#ifdef RGBA32_OVERRIDE +#undef RGBA32_OVERRIDE +#undef V4L2_PIX_FMT_RGBA32 +#endif diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h new file mode 100644 index 0000000000..0bec3eb765 --- /dev/null +++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.h @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_
+#define MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_
+
+#include <stdint.h>
+
+#include "modules/video_capture/device_info_impl.h"
+
+#include "rtc_base/platform_thread.h"
+#ifdef WEBRTC_LINUX
+#include <sys/inotify.h>
+#endif
+
+struct v4l2_capability;
+
+namespace webrtc {
+namespace videocapturemodule {
+class DeviceInfoV4l2 : public DeviceInfoImpl {
+ public:
+  DeviceInfoV4l2();
+  ~DeviceInfoV4l2() override;
+  uint32_t NumberOfDevices() override;
+  int32_t GetDeviceName(uint32_t deviceNumber,
+                        char* deviceNameUTF8,
+                        uint32_t deviceNameLength,
+                        char* deviceUniqueIdUTF8,
+                        uint32_t deviceUniqueIdUTF8Length,
+                        char* productUniqueIdUTF8 = 0,
+                        uint32_t productUniqueIdUTF8Length = 0,
+                        pid_t* pid = 0) override;
+  /*
+   * Fills the member variable _captureCapabilities with capabilities for the
+   * given device name.
+   */
+  int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
+  int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/,
+                                          const char* /*dialogTitleUTF8*/,
+                                          void* /*parentWindow*/,
+                                          uint32_t /*positionX*/,
+                                          uint32_t /*positionY*/) override;
+  int32_t FillCapabilities(int fd) RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
+  int32_t Init() override;
+
+ private:
+  bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
+  bool IsVideoCaptureDevice(struct v4l2_capability* cap);
+
+#ifdef WEBRTC_LINUX
+  void HandleEvent(inotify_event* event, int fd);
+  int EventCheck(int fd);
+  int HandleEvents(int fd);
+  int ProcessInotifyEvents();
+  rtc::PlatformThread _inotifyEventThread;
+  void InotifyProcess();
+  int _fd_v4l, _fd_dev, _wd_v4l, _wd_dev; /* accessed on InotifyEventThread thread */
+  std::atomic<bool> _isShutdown;
+#endif
+};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+#endif  // MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_
diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc
new file mode 100644
index 0000000000..f3324a8e68
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_linux.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+#include <time.h>
+#include <unistd.h>
+// v4l includes
+#if defined(__NetBSD__) || defined(__OpenBSD__)  // WEBRTC_BSD
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
+#include <new>
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "media/base/video_common.h"
+#include "modules/video_capture/linux/video_capture_v4l2.h"
+#include "modules/video_capture/video_capture.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
+    const char* deviceUniqueId) {
+  auto implementation = rtc::make_ref_counted<VideoCaptureModuleV4L2>();
+
+  if (implementation->Init(deviceUniqueId) != 0)
+    return nullptr;
+
+  return implementation;
+}
+}  // namespace videocapturemodule
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc
new file mode 100644
index 0000000000..2935cd027d
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc
@@ -0,0 +1,489 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/linux/video_capture_v4l2.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+#include <time.h>
+#include <unistd.h>
+// v4l includes
+#if defined(__NetBSD__) || defined(__OpenBSD__)  // WEBRTC_BSD
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
+#include <new>
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "media/base/video_common.h"
+#include "modules/video_capture/video_capture.h"
+#include "rtc_base/logging.h"
+
+// These defines are here to support building on kernel 3.16 which some
+// downstream projects, e.g. Firefox, use.
+// TODO(apehrson): Remove them and their undefs when no longer needed.
+#ifndef V4L2_PIX_FMT_ABGR32 +#define ABGR32_OVERRIDE 1 +#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_ARGB32 +#define ARGB32_OVERRIDE 1 +#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') +#endif + +#ifndef V4L2_PIX_FMT_RGBA32 +#define RGBA32_OVERRIDE 1 +#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') +#endif + +namespace webrtc { +namespace videocapturemodule { +VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() + : VideoCaptureImpl(), + _deviceId(-1), + _deviceFd(-1), + _buffersAllocatedByDevice(-1), + _currentWidth(-1), + _currentHeight(-1), + _currentFrameRate(-1), + _captureStarted(false), + _captureVideoType(VideoType::kI420), + _pool(NULL) {} + +int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) { + int len = strlen((const char*)deviceUniqueIdUTF8); + _deviceUniqueId = new (std::nothrow) char[len + 1]; + if (_deviceUniqueId) { + memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); + } + + int device_index; + if (sscanf(deviceUniqueIdUTF8,"fake_%d", &device_index) == 1) + { + _deviceId = device_index; + return 0; + } + + int fd; + char device[32]; + bool found = false; + + /* detect /dev/video [0-63] entries */ + int n; + for (n = 0; n < 64; n++) { + snprintf(device, sizeof(device), "/dev/video%d", n); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities + struct v4l2_capability cap; + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { + if (cap.bus_info[0] != 0) { + if (strncmp((const char*)cap.bus_info, + (const char*)deviceUniqueIdUTF8, + strlen((const char*)deviceUniqueIdUTF8)) == + 0) { // match with device id + close(fd); + found = true; + break; // fd matches with device unique id supplied + } + } + } + close(fd); // close since this is not the matching device + } + } + if (!found) { + RTC_LOG(LS_INFO) << "no matching device found"; + return -1; + } + _deviceId = n; // store the device id + return 0; +} + +VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { + StopCapture(); + if (_deviceFd != -1) + close(_deviceFd); +} + +int32_t VideoCaptureModuleV4L2::StartCapture( + const VideoCaptureCapability& capability) { + if (_captureStarted) { + if (capability.width == _currentWidth && + capability.height == _currentHeight && + _captureVideoType == capability.videoType) { + return 0; + } else { + StopCapture(); + } + } + + MutexLock lock(&capture_lock_); + // first open /dev/video device + char device[20]; + snprintf(device, sizeof(device), "/dev/video%d", _deviceId); + + if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) { + RTC_LOG(LS_INFO) << "error in opening " << device << " errono = " << errno; + return -1; + } + + // Supported video formats in preferred order. + // If the requested resolution is larger than VGA, we prefer MJPEG. Go for + // I420 otherwise. 
+ unsigned int hdFmts[] = { + V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YVU420, + V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY, V4L2_PIX_FMT_NV12, + V4L2_PIX_FMT_ABGR32, V4L2_PIX_FMT_ARGB32, V4L2_PIX_FMT_RGBA32, + V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_BGR24, + V4L2_PIX_FMT_RGB24, V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_JPEG, + }; + unsigned int sdFmts[] = { + V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YVU420, V4L2_PIX_FMT_YUYV, + V4L2_PIX_FMT_UYVY, V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_ABGR32, + V4L2_PIX_FMT_ARGB32, V4L2_PIX_FMT_RGBA32, V4L2_PIX_FMT_BGR32, + V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_BGR24, V4L2_PIX_FMT_RGB24, + V4L2_PIX_FMT_RGB565, V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_JPEG, + }; + const bool isHd = capability.width > 640 || capability.height > 480; + unsigned int* fmts = isHd ? hdFmts : sdFmts; + static_assert(sizeof(hdFmts) == sizeof(sdFmts)); + constexpr int nFormats = sizeof(hdFmts) / sizeof(unsigned int); + + // Enumerate image formats. + struct v4l2_fmtdesc fmt; + int fmtsIdx = nFormats; + memset(&fmt, 0, sizeof(fmt)); + fmt.index = 0; + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + RTC_LOG(LS_INFO) << "Video Capture enumerats supported image formats:"; + while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) { + RTC_LOG(LS_INFO) << " { pixelformat = " + << cricket::GetFourccName(fmt.pixelformat) + << ", description = '" << fmt.description << "' }"; + // Match the preferred order. + for (int i = 0; i < nFormats; i++) { + if (fmt.pixelformat == fmts[i] && i < fmtsIdx) + fmtsIdx = i; + } + // Keep enumerating. + fmt.index++; + } + + if (fmtsIdx == nFormats) { + RTC_LOG(LS_INFO) << "no supporting video formats found"; + return -1; + } else { + RTC_LOG(LS_INFO) << "We prefer format " + << cricket::GetFourccName(fmts[fmtsIdx]); + } + + struct v4l2_format video_fmt; + memset(&video_fmt, 0, sizeof(struct v4l2_format)); + video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + video_fmt.fmt.pix.sizeimage = 0; + video_fmt.fmt.pix.width = capability.width; + video_fmt.fmt.pix.height = capability.height; + video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx]; + + if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) + _captureVideoType = VideoType::kYUY2; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) + _captureVideoType = VideoType::kI420; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) + _captureVideoType = VideoType::kYV12; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) + _captureVideoType = VideoType::kUYVY; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_NV12) + _captureVideoType = VideoType::kNV12; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_BGR24) + _captureVideoType = VideoType::kRGB24; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) + _captureVideoType = VideoType::kBGR24; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB565) + _captureVideoType = VideoType::kRGB565; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_ABGR32 || + video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_BGR32) + _captureVideoType = VideoType::kARGB; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_ARGB32 || + video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB32) + _captureVideoType = VideoType::kBGRA; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_RGBA32) + _captureVideoType = VideoType::kABGR; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || + video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) + _captureVideoType = VideoType::kMJPEG; + else + RTC_DCHECK_NOTREACHED(); + + // set format and 
frame size now + if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) { + RTC_LOG(LS_INFO) << "error in VIDIOC_S_FMT, errno = " << errno; + return -1; + } + + // initialize current width and height + _currentWidth = video_fmt.fmt.pix.width; + _currentHeight = video_fmt.fmt.pix.height; + + // Trying to set frame rate, before check driver capability. + bool driver_framerate_support = true; + struct v4l2_streamparm streamparms; + memset(&streamparms, 0, sizeof(streamparms)); + streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) { + RTC_LOG(LS_INFO) << "error in VIDIOC_G_PARM errno = " << errno; + driver_framerate_support = false; + // continue + } else { + // check the capability flag is set to V4L2_CAP_TIMEPERFRAME. + if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { + // driver supports the feature. Set required framerate. + memset(&streamparms, 0, sizeof(streamparms)); + streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + streamparms.parm.capture.timeperframe.numerator = 1; + streamparms.parm.capture.timeperframe.denominator = capability.maxFPS; + if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) { + RTC_LOG(LS_INFO) << "Failed to set the framerate. errno=" << errno; + driver_framerate_support = false; + } else { + _currentFrameRate = capability.maxFPS; + } + } + } + // If driver doesn't support framerate control, need to hardcode. + // Hardcoding the value based on the frame size. + if (!driver_framerate_support) { + if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) { + _currentFrameRate = 15; + } else { + _currentFrameRate = 30; + } + } + + if (!AllocateVideoBuffers()) { + RTC_LOG(LS_INFO) << "failed to allocate video capture buffers"; + return -1; + } + + // start capture thread; + if (_captureThread.empty()) { + quit_ = false; + _captureThread = rtc::PlatformThread::SpawnJoinable( + [self = rtc::scoped_refptr(this)] { + while (self->CaptureProcess()) { + } + }, + "CaptureThread", + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh)); + } + + // Needed to start UVC camera - from the uvcview application + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) { + RTC_LOG(LS_INFO) << "Failed to turn on stream"; + return -1; + } + + _captureStarted = true; + return 0; +} + +int32_t VideoCaptureModuleV4L2::StopCapture() { + if (!_captureThread.empty()) { + { + MutexLock lock(&capture_lock_); + quit_ = true; + } + // Make sure the capture thread stops using the mutex. + _captureThread.Finalize(); + } + + MutexLock lock(&capture_lock_); + if (_captureStarted) { + _captureStarted = false; + + DeAllocateVideoBuffers(); + close(_deviceFd); + _deviceFd = -1; + } + + return 0; +} + +// critical section protected by the caller + +bool VideoCaptureModuleV4L2::AllocateVideoBuffers() { + struct v4l2_requestbuffers rbuffer; + memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); + + rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + rbuffer.memory = V4L2_MEMORY_MMAP; + rbuffer.count = kNoOfV4L2Bufffers; + + if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) { + RTC_LOG(LS_INFO) << "Could not get buffers from device. 
errno = " << errno; + return false; + } + + if (rbuffer.count > kNoOfV4L2Bufffers) + rbuffer.count = kNoOfV4L2Bufffers; + + _buffersAllocatedByDevice = rbuffer.count; + + // Map the buffers + _pool = new Buffer[rbuffer.count]; + + for (unsigned int i = 0; i < rbuffer.count; i++) { + struct v4l2_buffer buffer; + memset(&buffer, 0, sizeof(v4l2_buffer)); + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + buffer.index = i; + + if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) { + return false; + } + + _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, + MAP_SHARED, _deviceFd, buffer.m.offset); + + if (MAP_FAILED == _pool[i].start) { + for (unsigned int j = 0; j < i; j++) + munmap(_pool[j].start, _pool[j].length); + return false; + } + + _pool[i].length = buffer.length; + + if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) { + return false; + } + } + return true; +} + +bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { + // unmap buffers + for (int i = 0; i < _buffersAllocatedByDevice; i++) + munmap(_pool[i].start, _pool[i].length); + + delete[] _pool; + + // turn off stream + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) { + RTC_LOG(LS_INFO) << "VIDIOC_STREAMOFF error. errno: " << errno; + } + + return true; +} + +bool VideoCaptureModuleV4L2::CaptureStarted() { + return _captureStarted; +} + +bool VideoCaptureModuleV4L2::CaptureProcess() { + int retVal = 0; + struct pollfd rSet; + + rSet.fd = _deviceFd; + rSet.events = POLLIN; + rSet.revents = 0; + + retVal = poll(&rSet, 1, 1000); + + { + MutexLock lock(&capture_lock_); + + if (quit_) { + return false; + } + + if (retVal < 0 && errno != EINTR) { // continue if interrupted + // poll failed + return false; + } else if (retVal == 0) { + // poll timed out + return true; + } else if (!(rSet.revents & POLLIN)) { + // not event on camera handle + return true; + } + + if (_captureStarted) { + struct v4l2_buffer buf; + memset(&buf, 0, sizeof(struct v4l2_buffer)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + // dequeue a buffer - repeat until dequeued properly! 
+      while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) {
+        if (errno != EINTR) {
+          RTC_LOG(LS_INFO) << "could not sync on a buffer on device "
+                           << strerror(errno);
+          return true;
+        }
+      }
+      VideoCaptureCapability frameInfo;
+      frameInfo.width = _currentWidth;
+      frameInfo.height = _currentHeight;
+      frameInfo.videoType = _captureVideoType;
+
+      // convert to I420 if needed
+      IncomingFrame(reinterpret_cast<uint8_t*>(_pool[buf.index].start),
+                    buf.bytesused, frameInfo);
+      // enqueue the buffer again
+      if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) {
+        RTC_LOG(LS_INFO) << "Failed to enqueue capture buffer";
+      }
+    }
+  }
+  usleep(0);
+  return true;
+}
+
+int32_t VideoCaptureModuleV4L2::CaptureSettings(
+    VideoCaptureCapability& settings) {
+  settings.width = _currentWidth;
+  settings.height = _currentHeight;
+  settings.maxFPS = _currentFrameRate;
+  settings.videoType = _captureVideoType;
+
+  return 0;
+}
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#ifdef ABGR32_OVERRIDE
+#undef ABGR32_OVERRIDE
+#undef V4L2_PIX_FMT_ABGR32
+#endif
+
+#ifdef ARGB32_OVERRIDE
+#undef ARGB32_OVERRIDE
+#undef V4L2_PIX_FMT_ARGB32
+#endif
+
+#ifdef RGBA32_OVERRIDE
+#undef RGBA32_OVERRIDE
+#undef V4L2_PIX_FMT_RGBA32
+#endif
diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h
new file mode 100644
index 0000000000..65e89e2daa
--- /dev/null
+++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_
+#define MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+
+#include "modules/video_capture/video_capture_defines.h"
+#include "modules/video_capture/video_capture_impl.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace videocapturemodule {
+class VideoCaptureModuleV4L2 : public VideoCaptureImpl {
+ public:
+  VideoCaptureModuleV4L2();
+  ~VideoCaptureModuleV4L2() override;
+  int32_t Init(const char* deviceUniqueId);
+  int32_t StartCapture(const VideoCaptureCapability& capability) override;
+  int32_t StopCapture() override;
+  bool CaptureStarted() override;
+  int32_t CaptureSettings(VideoCaptureCapability& settings) override;
+
+ private:
+  enum { kNoOfV4L2Bufffers = 4 };
+
+  static void CaptureThread(void*);
+  bool CaptureProcess();
+  bool AllocateVideoBuffers();
+  bool DeAllocateVideoBuffers();
+
+  rtc::PlatformThread _captureThread;
+  Mutex capture_lock_;
+  bool quit_ RTC_GUARDED_BY(capture_lock_);
+  int32_t _deviceId;
+  int32_t _deviceFd;
+
+  int32_t _buffersAllocatedByDevice;
+  int32_t _currentWidth;
+  int32_t _currentHeight;
+  int32_t _currentFrameRate;
+  bool _captureStarted;
+  VideoType _captureVideoType;
+  struct Buffer {
+    void* start;
+    size_t length;
+  };
+  Buffer* _pool;
+};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif  // MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_
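
The capability enumeration in DeviceInfoV4l2::FillCapabilities() above relies on VIDIOC_TRY_FMT, which asks the driver to validate a pixel format and resolution without changing device state, and keeps only combinations the driver returns unmodified. The standalone sketch below illustrates that probing pattern; it is not part of the patch, assumes a Linux-only build with a /dev/video0 node, and probes a single YUYV format for brevity.

// Minimal sketch of the VIDIOC_TRY_FMT probing pattern (illustrative only).
#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>

#include <cstdio>
#include <cstring>

#include <linux/videodev2.h>

int main() {
  const int fd = open("/dev/video0", O_RDONLY);  // assumed device node
  if (fd < 0)
    return 1;

  const unsigned int sizes[][2] = {{320, 240}, {640, 480}, {1280, 720},
                                   {1920, 1080}};
  for (const auto& wh : sizes) {
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.width = wh[0];
    fmt.fmt.pix.height = wh[1];
    // VIDIOC_TRY_FMT does not change driver state; the driver may adjust the
    // fields, so a capability only counts if width/height come back intact.
    if (ioctl(fd, VIDIOC_TRY_FMT, &fmt) >= 0 &&
        fmt.fmt.pix.width == wh[0] && fmt.fmt.pix.height == wh[1]) {
      printf("supported: YUYV %ux%u\n", wh[0], wh[1]);
    }
  }
  close(fd);
  return 0;
}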