From 26a029d407be480d791972afb5975cf62c9360a6 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Fri, 19 Apr 2024 02:47:55 +0200
Subject: Adding upstream version 124.0.1.

Signed-off-by: Daniel Baumann
---
 .../objc/AppRTCMobile/ARDCaptureController.m  | 116 +++++++++++++++++++++
 1 file changed, 116 insertions(+)
 create mode 100644 third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m

diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m
new file mode 100644
index 0000000000..26cce9fdaa
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDCaptureController.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDSettingsModel.h"
+
+const Float64 kFramerateLimit = 30.0;
+
+@implementation ARDCaptureController {
+  RTC_OBJC_TYPE(RTCCameraVideoCapturer) * _capturer;
+  ARDSettingsModel *_settings;
+  BOOL _usingFrontCamera;
+}
+
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
+                        settings:(ARDSettingsModel *)settings {
+  if (self = [super init]) {
+    _capturer = capturer;
+    _settings = settings;
+    _usingFrontCamera = YES;
+  }
+
+  return self;
+}
+
+- (void)startCapture {
+  [self startCapture:nil];
+}
+
+- (void)startCapture:(void (^)(NSError *))completion {
+  AVCaptureDevicePosition position =
+      _usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+  AVCaptureDevice *device = [self findDeviceForPosition:position];
+  AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
+
+  if (format == nil) {
+    RTCLogError(@"No valid formats for device %@", device);
+    NSAssert(NO, @"");
+
+    return;
+  }
+
+  NSInteger fps = [self selectFpsForFormat:format];
+
+  [_capturer startCaptureWithDevice:device format:format fps:fps completionHandler:completion];
+}
+
+- (void)stopCapture {
+  [_capturer stopCapture];
+}
+
+- (void)switchCamera {
+  _usingFrontCamera = !_usingFrontCamera;
+  [self startCapture:nil];
+}
+
+- (void)switchCamera:(void (^)(NSError *))completion {
+  _usingFrontCamera = !_usingFrontCamera;
+  [self startCapture:completion];
+}
+
+#pragma mark - Private
+
+- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
+  NSArray<AVCaptureDevice *> *captureDevices =
+      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
+  for (AVCaptureDevice *device in captureDevices) {
+    if (device.position == position) {
+      return device;
+    }
+  }
+  return captureDevices[0];
+}
+
+- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
+  NSArray<AVCaptureDeviceFormat *> *formats =
+      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device];
+  int targetWidth = [_settings currentVideoResolutionWidthFromStore];
+  int targetHeight = [_settings currentVideoResolutionHeightFromStore];
+  AVCaptureDeviceFormat *selectedFormat = nil;
+  int currentDiff = INT_MAX;
+
+  for (AVCaptureDeviceFormat *format in formats) {
+    CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+    int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
+    if (diff < currentDiff) {
+      selectedFormat = format;
+      currentDiff = diff;
+    } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+      selectedFormat = format;
+    }
+  }
+
+  return selectedFormat;
+}
+
+- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format {
+  Float64 maxSupportedFramerate = 0;
+  for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) {
+    maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
+  }
+  return fmin(maxSupportedFramerate, kFramerateLimit);
+}
+
+@end
--
cgit v1.2.3
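
Usage sketch (illustrative, hedged): the controller added above pairs an RTC_OBJC_TYPE(RTCCameraVideoCapturer) with an ARDSettingsModel, selects the capture device, format, and frame rate, and exposes start/stop/switch-camera calls. Assuming a video source object that conforms to the capturer delegate protocol, a caller might wire it up roughly as below; the method name, the videoSource parameter, and the _captureController ivar are placeholder names for this sketch, not names taken from this patch.

  // Illustrative sketch only: wiring ARDCaptureController to a camera capturer.
  // videoSource and _captureController are assumed to exist in the calling
  // view controller; they are not defined by this patch.
  - (void)setUpLocalCaptureWithVideoSource:(RTC_OBJC_TYPE(RTCVideoSource) *)videoSource {
    RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer =
        [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:videoSource];
    ARDSettingsModel *settings = [[ARDSettingsModel alloc] init];
    _captureController = [[ARDCaptureController alloc] initWithCapturer:capturer
                                                               settings:settings];
    [_captureController startCapture];  // starts on the front camera by default
  }

  // Later, in response to UI events:
  //   [_captureController switchCamera];  // flips the camera and restarts capture
  //   [_captureController stopCapture];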