Diffstat (limited to 'third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m')
-rw-r--r--  third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m  215
1 file changed, 215 insertions, 0 deletions
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m
new file mode 100644
index 0000000000..bcf1506259
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m
@@ -0,0 +1,215 @@
+/**
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCFileVideoCapturer.h"
+
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#include "rtc_base/system/gcd_helpers.h"
+
+NSString *const kRTCFileVideoCapturerErrorDomain =
+ @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)";
+
+typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
+ RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
+ RTCFileVideoCapturerErrorCode_FileNotFound
+};
+
+typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
+ RTCFileVideoCapturerStatusNotInitialized,
+ RTCFileVideoCapturerStatusStarted,
+ RTCFileVideoCapturerStatusStopped
+};
+
+@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) ()
+@property(nonatomic, assign) CMTime lastPresentationTime;
+@property(nonatomic, strong) NSURL *fileURL;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) {
+ AVAssetReader *_reader;
+ AVAssetReaderTrackOutput *_outTrack;
+ RTCFileVideoCapturerStatus _status;
+ dispatch_queue_t _frameQueue;
+}
+
+@synthesize lastPresentationTime = _lastPresentationTime;
+@synthesize fileURL = _fileURL;
+
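+// Starts reading `nameOfFile` from the app bundle on a background queue.
+// `errorBlock` fires if the capturer is already running or the file is missing.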
+- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
+ onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
+  if (_status == RTCFileVideoCapturerStatusStarted) {
+    // NSUnderlyingErrorKey expects an NSError value; use the localized
+    // description for the human-readable message instead.
+    NSError *error = [NSError
+        errorWithDomain:kRTCFileVideoCapturerErrorDomain
+                   code:RTCFileVideoCapturerErrorCode_CapturerRunning
+               userInfo:@{NSLocalizedDescriptionKey : @"Capturer has already been started."}];
+    errorBlock(error);
+    return;
+  }
+  _status = RTCFileVideoCapturerStatusStarted;
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ NSString *pathForFile = [self pathForFileName:nameOfFile];
+ if (!pathForFile) {
+ NSString *errorString =
+ [NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile];
+      NSError *error =
+          [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
+                              code:RTCFileVideoCapturerErrorCode_FileNotFound
+                          userInfo:@{NSLocalizedDescriptionKey : errorString}];
+ errorBlock(error);
+ return;
+ }
+
+    // Start from a valid zero time; a 0 timescale makes the CMTime invalid,
+    // so the first frame's presentation delta would be NaN.
+    self.lastPresentationTime = CMTimeMake(0, 1);
+
+ self.fileURL = [NSURL fileURLWithPath:pathForFile];
+ [self setupReaderOnError:errorBlock];
+ });
+}
+
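+// (Re)creates the asset reader for the current fileURL and begins pulling
+// sample buffers; called again with a nil `errorBlock` to loop the file.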
+- (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock {
+ AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_fileURL options:nil];
+
+ NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
+ NSError *error = nil;
+
+ _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
+  if (error) {
+    // `errorBlock` may be nil when the reader is restarted for looping.
+    if (errorBlock) {
+      errorBlock(error);
+    }
+    return;
+  }
+
+ NSDictionary *options = @{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+ };
+  // Guard against assets with no video track; passing a nil track to
+  // AVAssetReaderTrackOutput would raise an exception.
+  AVAssetTrack *videoTrack = allTracks.firstObject;
+  if (!videoTrack) {
+    RTCLogError(@"File capturer: no video track found.");
+    return;
+  }
+  _outTrack = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack
+                                               outputSettings:options];
+  [_reader addOutput:_outTrack];
+
+ [_reader startReading];
+ RTCLog(@"File capturer started reading");
+ [self readNextBuffer];
+}
+
+- (void)stopCapture {
+ _status = RTCFileVideoCapturerStatusStopped;
+ RTCLog(@"File capturer stopped.");
+}
+
+#pragma mark - Private
+
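+// Resolves "name.extension" against the main bundle; returns nil if the name
+// is not of that form or the resource does not exist.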
+- (nullable NSString *)pathForFileName:(NSString *)fileName {
+ NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
+ if (nameComponents.count != 2) {
+ return nil;
+ }
+
+ NSString *path =
+ [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
+ return path;
+}
+
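+// Serial queue, lazily created, on which the frame-pacing timers fire.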
+- (dispatch_queue_t)frameQueue {
+ if (!_frameQueue) {
+ _frameQueue = RTCDispatchQueueCreateWithTarget(
+ "org.webrtc.filecapturer.video",
+ DISPATCH_QUEUE_SERIAL,
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
+ }
+ return _frameQueue;
+}
+
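+// Pulls the next sample buffer, restarting the reader at end of file so the
+// clip loops, and skipping buffers that are empty, invalid, or not yet ready.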
+- (void)readNextBuffer {
+ if (_status == RTCFileVideoCapturerStatusStopped) {
+ [_reader cancelReading];
+ _reader = nil;
+ return;
+ }
+
+ if (_reader.status == AVAssetReaderStatusCompleted) {
+ [_reader cancelReading];
+ _reader = nil;
+ [self setupReaderOnError:nil];
+ return;
+ }
+
+ CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
+ if (!sampleBuffer) {
+ [self readNextBuffer];
+ return;
+ }
+ if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+ !CMSampleBufferDataIsReady(sampleBuffer)) {
+ CFRelease(sampleBuffer);
+ [self readNextBuffer];
+ return;
+ }
+
+ [self publishSampleBuffer:sampleBuffer];
+}
+
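+// Delivers `sampleBuffer` to the delegate after waiting out the presentation-
+// time gap from the previous frame, so frames are paced at the file's rate.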
+- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+ CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+ Float64 presentationDifference =
+ CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
+ _lastPresentationTime = presentationTime;
+  // Use lround, not lroundf: the float conversion loses precision at
+  // nanosecond scale.
+  int64_t presentationDifferenceRound = lround(presentationDifference * NSEC_PER_SEC);
+
+ __block dispatch_source_t timer = [self createStrictTimer];
+ // Strict timer that will fire `presentationDifferenceRound` ns from now and never again.
+ dispatch_source_set_timer(timer,
+ dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
+ DISPATCH_TIME_FOREVER,
+ 0);
+ dispatch_source_set_event_handler(timer, ^{
+ dispatch_source_cancel(timer);
+ timer = nil;
+
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (!pixelBuffer) {
+ CFRelease(sampleBuffer);
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ [self readNextBuffer];
+ });
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
+ NSTimeInterval timeStampSeconds = CACurrentMediaTime();
+    int64_t timeStampNs = lround(timeStampSeconds * NSEC_PER_SEC);
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+ rotation:0
+ timeStampNs:timeStampNs];
+ CFRelease(sampleBuffer);
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ [self readNextBuffer];
+ });
+
+ [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+ });
+ dispatch_activate(timer);
+}
+
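+// One-shot timer on the frame queue; DISPATCH_TIMER_STRICT opts out of system
+// timer coalescing so frame pacing stays accurate.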
+- (dispatch_source_t)createStrictTimer {
+ dispatch_source_t timer = dispatch_source_create(
+ DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
+ return timer;
+}
+
+- (void)dealloc {
+ [self stopCapture];
+}
+
+@end
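
Usage sketch (not part of this change): the capturer is typically paired with an
RTC_OBJC_TYPE(RTCVideoSource) acting as its RTCVideoCapturerDelegate. The
`factory` variable and the bundled clip name "foo.mp4" below are illustrative
assumptions, not anything defined by this file.

#import "RTCFileVideoCapturer.h"

// `factory` is an existing RTC_OBJC_TYPE(RTCPeerConnectionFactory); "foo.mp4"
// is assumed to ship in the app bundle.
RTC_OBJC_TYPE(RTCVideoSource) *source = [factory videoSource];
RTC_OBJC_TYPE(RTCFileVideoCapturer) *capturer =
    [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:source];
[capturer startCapturingFromFileNamed:@"foo.mp4"
                              onError:^(NSError *error) {
                                RTCLogError(@"File capture failed: %@", error);
                              }];
// ... later, stop delivering frames:
[capturer stopCapture];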