From 26a029d407be480d791972afb5975cf62c9360a6 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Fri, 19 Apr 2024 02:47:55 +0200
Subject: Adding upstream version 124.0.1.

Signed-off-by: Daniel Baumann
---
 .../components/renderer/metal/RTCMTLI420Renderer.h | 17 ++
 .../renderer/metal/RTCMTLI420Renderer.mm | 177 +++++++++++
 .../components/renderer/metal/RTCMTLNSVideoView.h | 22 ++
 .../components/renderer/metal/RTCMTLNSVideoView.m | 122 ++++++++
 .../components/renderer/metal/RTCMTLNV12Renderer.h | 18 ++
 .../renderer/metal/RTCMTLNV12Renderer.mm | 164 +++++++++++
 .../components/renderer/metal/RTCMTLRGBRenderer.h | 22 ++
 .../components/renderer/metal/RTCMTLRGBRenderer.mm | 164 +++++++++++
 .../renderer/metal/RTCMTLRenderer+Private.h | 33 +++
 .../components/renderer/metal/RTCMTLRenderer.h | 61 ++++
 .../components/renderer/metal/RTCMTLRenderer.mm | 328 +++++++++++++++++++++
 .../components/renderer/metal/RTCMTLVideoView.h | 44 +++
 .../components/renderer/metal/RTCMTLVideoView.m | 265 +++++++++++++++++
 .../components/renderer/opengl/RTCDefaultShader.h | 23 ++
 .../components/renderer/opengl/RTCDefaultShader.mm | 201 +++++++++++++
 .../renderer/opengl/RTCDisplayLinkTimer.h | 24 ++
 .../renderer/opengl/RTCDisplayLinkTimer.m | 59 ++++
 .../components/renderer/opengl/RTCEAGLVideoView.h | 45 +++
 .../components/renderer/opengl/RTCEAGLVideoView.m | 295 ++++++++++++++++++
 .../renderer/opengl/RTCI420TextureCache.h | 25 ++
 .../renderer/opengl/RTCI420TextureCache.mm | 149 ++++++++++
 .../renderer/opengl/RTCNV12TextureCache.h | 33 +++
 .../renderer/opengl/RTCNV12TextureCache.m | 113 +++++++
 .../components/renderer/opengl/RTCOpenGLDefines.h | 23 ++
 .../objc/components/renderer/opengl/RTCShader.h | 21 ++
 .../objc/components/renderer/opengl/RTCShader.mm | 178 +++++++++++
 .../renderer/opengl/RTCVideoViewShading.h | 39 +++
 27 files changed, 2665 insertions(+)
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm
 create mode 100644 third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h

(limited to 'third_party/libwebrtc/sdk/objc/components/renderer')

diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h
new file mode 100644
index 0000000000..e5987fe22a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import
+
+#import "RTCMTLRenderer.h"
+
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLI420Renderer : RTCMTLRenderer
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
new file mode 100644
index 0000000000..eba8800240
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#import "RTCMTLI420Renderer.h" + +#import +#import + +#import "base/RTCI420Buffer.h" +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" +#import "base/RTCVideoFrameBuffer.h" + +#import "RTCMTLRenderer+Private.h" + +static NSString *const shaderSource = MTL_STRINGIFY( + using namespace metal; + + typedef struct { + packed_float2 position; + packed_float2 texcoord; + } Vertex; + + typedef struct { + float4 position[[position]]; + float2 texcoord; + } Varyings; + + vertex Varyings vertexPassthrough(constant Vertex *verticies[[buffer(0)]], + unsigned int vid[[vertex_id]]) { + Varyings out; + constant Vertex &v = verticies[vid]; + out.position = float4(float2(v.position), 0.0, 1.0); + out.texcoord = v.texcoord; + + return out; + } + + fragment half4 fragmentColorConversion( + Varyings in[[stage_in]], + texture2d textureY[[texture(0)]], + texture2d textureU[[texture(1)]], + texture2d textureV[[texture(2)]]) { + constexpr sampler s(address::clamp_to_edge, filter::linear); + float y; + float u; + float v; + float r; + float g; + float b; + // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php + y = textureY.sample(s, in.texcoord).r; + u = textureU.sample(s, in.texcoord).r; + v = textureV.sample(s, in.texcoord).r; + u = u - 0.5; + v = v - 0.5; + r = y + 1.403 * v; + g = y - 0.344 * u - 0.714 * v; + b = y + 1.770 * u; + + float4 out = float4(r, g, b, 1.0); + + return half4(out); + }); + +@implementation RTCMTLI420Renderer { + // Textures. + id _yTexture; + id _uTexture; + id _vTexture; + + MTLTextureDescriptor *_descriptor; + MTLTextureDescriptor *_chromaDescriptor; + + int _width; + int _height; + int _chromaWidth; + int _chromaHeight; +} + +#pragma mark - Virtual + +- (NSString *)shaderSource { + return shaderSource; +} + +- (void)getWidth:(nonnull int *)width + height:(nonnull int *)height + cropWidth:(nonnull int *)cropWidth + cropHeight:(nonnull int *)cropHeight + cropX:(nonnull int *)cropX + cropY:(nonnull int *)cropY + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + *width = frame.width; + *height = frame.height; + *cropWidth = frame.width; + *cropHeight = frame.height; + *cropX = 0; + *cropY = 0; +} + +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + if (![super setupTexturesForFrame:frame]) { + return NO; + } + + id device = [self currentMetalDevice]; + if (!device) { + return NO; + } + + // Chroma size must be >= 1 as per the Apple documentation, so skip ?x1 + // and 1x? frames. + // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=14892 + if (frame.width < 2 || frame.height < 2) { + return NO; + } + + id buffer = [frame.buffer toI420]; + + // Luma (y) texture. 
+ if (!_descriptor || _width != frame.width || _height != frame.height) { + _width = frame.width; + _height = frame.height; + _descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm + width:_width + height:_height + mipmapped:NO]; + _descriptor.usage = MTLTextureUsageShaderRead; + _yTexture = [device newTextureWithDescriptor:_descriptor]; + } + + // Chroma (u,v) textures + [_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height) + mipmapLevel:0 + withBytes:buffer.dataY + bytesPerRow:buffer.strideY]; + + if (!_chromaDescriptor || _chromaWidth != frame.width / 2 || _chromaHeight != frame.height / 2) { + _chromaWidth = frame.width / 2; + _chromaHeight = frame.height / 2; + _chromaDescriptor = + [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm + width:_chromaWidth + height:_chromaHeight + mipmapped:NO]; + _chromaDescriptor.usage = MTLTextureUsageShaderRead; + _uTexture = [device newTextureWithDescriptor:_chromaDescriptor]; + _vTexture = [device newTextureWithDescriptor:_chromaDescriptor]; + } + + [_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight) + mipmapLevel:0 + withBytes:buffer.dataU + bytesPerRow:buffer.strideU]; + [_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight) + mipmapLevel:0 + withBytes:buffer.dataV + bytesPerRow:buffer.strideV]; + + return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil); +} + +- (void)uploadTexturesToRenderEncoder:(id)renderEncoder { + [renderEncoder setFragmentTexture:_yTexture atIndex:0]; + [renderEncoder setFragmentTexture:_uTexture atIndex:1]; + [renderEncoder setFragmentTexture:_vTexture atIndex:2]; +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h new file mode 100644 index 0000000000..5a2e7d380f --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h @@ -0,0 +1,22 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#import "RTCVideoRenderer.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView + +@property(nonatomic, weak) id delegate; + ++ (BOOL)isMetalAvailable; + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m new file mode 100644 index 0000000000..625fb1caa7 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m @@ -0,0 +1,122 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "RTCMTLNSVideoView.h" + +#import +#import + +#import "base/RTCVideoFrame.h" + +#import "RTCMTLI420Renderer.h" + +@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) +() @property(nonatomic) id renderer; +@property(nonatomic, strong) MTKView *metalView; +@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; +@end + +@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) { + id _renderer; +} + +@synthesize delegate = _delegate; +@synthesize renderer = _renderer; +@synthesize metalView = _metalView; +@synthesize videoFrame = _videoFrame; + +- (instancetype)initWithFrame:(CGRect)frameRect { + self = [super initWithFrame:frameRect]; + if (self) { + [self configure]; + } + return self; +} + +- (instancetype)initWithCoder:(NSCoder *)aCoder { + self = [super initWithCoder:aCoder]; + if (self) { + [self configure]; + } + return self; +} + +#pragma mark - Private + ++ (BOOL)isMetalAvailable { + return [MTLCopyAllDevices() count] > 0; +} + +- (void)configure { + if ([[self class] isMetalAvailable]) { + _metalView = [[MTKView alloc] initWithFrame:self.bounds]; + [self addSubview:_metalView]; + _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; + _metalView.translatesAutoresizingMaskIntoConstraints = NO; + _metalView.framebufferOnly = YES; + _metalView.delegate = self; + + _renderer = [[RTCMTLI420Renderer alloc] init]; + if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) { + _renderer = nil; + }; + } +} + +- (void)updateConstraints { + NSDictionary *views = NSDictionaryOfVariableBindings(_metalView); + + NSArray *constraintsHorizontal = + [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|" + options:0 + metrics:nil + views:views]; + [self addConstraints:constraintsHorizontal]; + + NSArray *constraintsVertical = + [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|" + options:0 + metrics:nil + views:views]; + [self addConstraints:constraintsVertical]; + [super updateConstraints]; +} + +#pragma mark - MTKViewDelegate methods +- (void)drawInMTKView:(nonnull MTKView *)view { + if (self.videoFrame == nil) { + return; + } + if (view == self.metalView) { + [_renderer drawFrame:self.videoFrame]; + } +} + +- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { +} + +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) + +- (void)setSize:(CGSize)size { + _metalView.drawableSize = size; + dispatch_async(dispatch_get_main_queue(), ^{ + [self.delegate videoView:self didChangeVideoSize:size]; + }); + [_metalView draw]; +} + +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + if (frame == nil) { + return; + } + self.videoFrame = [frame newI420VideoFrame]; +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h new file mode 100644 index 0000000000..866b7ea17e --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h @@ -0,0 +1,18 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import + +#import "RTCMTLRenderer.h" + +NS_AVAILABLE(10_11, 9_0) +@interface RTCMTLNV12Renderer : RTCMTLRenderer + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm new file mode 100644 index 0000000000..7b037c6dbc --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm @@ -0,0 +1,164 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCMTLNV12Renderer.h" + +#import +#import + +#import "RTCMTLRenderer+Private.h" +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" +#import "base/RTCVideoFrameBuffer.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +#include "rtc_base/checks.h" + +static NSString *const shaderSource = MTL_STRINGIFY( + using namespace metal; + + typedef struct { + packed_float2 position; + packed_float2 texcoord; + } Vertex; + + typedef struct { + float4 position[[position]]; + float2 texcoord; + } Varyings; + + vertex Varyings vertexPassthrough(constant Vertex *verticies[[buffer(0)]], + unsigned int vid[[vertex_id]]) { + Varyings out; + constant Vertex &v = verticies[vid]; + out.position = float4(float2(v.position), 0.0, 1.0); + out.texcoord = v.texcoord; + return out; + } + + // Receiving YCrCb textures. + fragment half4 fragmentColorConversion( + Varyings in[[stage_in]], + texture2d textureY[[texture(0)]], + texture2d textureCbCr[[texture(1)]]) { + constexpr sampler s(address::clamp_to_edge, filter::linear); + float y; + float2 uv; + y = textureY.sample(s, in.texcoord).r; + uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5); + + // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php + float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0); + + return half4(out); + }); + +@implementation RTCMTLNV12Renderer { + // Textures. + CVMetalTextureCacheRef _textureCache; + id _yTexture; + id _CrCbTexture; +} + +- (BOOL)addRenderingDestination:(__kindof MTKView *)view { + if ([super addRenderingDestination:view]) { + return [self initializeTextureCache]; + } + return NO; +} + +- (BOOL)initializeTextureCache { + CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice], + nil, &_textureCache); + if (status != kCVReturnSuccess) { + RTCLogError(@"Metal: Failed to initialize metal texture cache. 
Return status is %d", status); + return NO; + } + + return YES; +} + +- (NSString *)shaderSource { + return shaderSource; +} + +- (void)getWidth:(nonnull int *)width + height:(nonnull int *)height + cropWidth:(nonnull int *)cropWidth + cropHeight:(nonnull int *)cropHeight + cropX:(nonnull int *)cropX + cropY:(nonnull int *)cropY + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer); + *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer); + *cropWidth = pixelBuffer.cropWidth; + *cropHeight = pixelBuffer.cropHeight; + *cropX = pixelBuffer.cropX; + *cropY = pixelBuffer.cropY; +} + +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]); + if (![super setupTexturesForFrame:frame]) { + return NO; + } + CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer; + + id lumaTexture = nil; + id chromaTexture = nil; + CVMetalTextureRef outTexture = nullptr; + + // Luma (y) texture. + int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); + int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); + + int indexPlane = 0; + CVReturn result = CVMetalTextureCacheCreateTextureFromImage( + kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth, + lumaHeight, indexPlane, &outTexture); + + if (result == kCVReturnSuccess) { + lumaTexture = CVMetalTextureGetTexture(outTexture); + } + + // Same as CFRelease except it can be passed NULL without crashing. + CVBufferRelease(outTexture); + outTexture = nullptr; + + // Chroma (CrCb) texture. + indexPlane = 1; + result = CVMetalTextureCacheCreateTextureFromImage( + kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2, + lumaHeight / 2, indexPlane, &outTexture); + if (result == kCVReturnSuccess) { + chromaTexture = CVMetalTextureGetTexture(outTexture); + } + CVBufferRelease(outTexture); + + if (lumaTexture != nil && chromaTexture != nil) { + _yTexture = lumaTexture; + _CrCbTexture = chromaTexture; + return YES; + } + return NO; +} + +- (void)uploadTexturesToRenderEncoder:(id)renderEncoder { + [renderEncoder setFragmentTexture:_yTexture atIndex:0]; + [renderEncoder setFragmentTexture:_CrCbTexture atIndex:1]; +} + +- (void)dealloc { + if (_textureCache) { + CFRelease(_textureCache); + } +} +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h new file mode 100644 index 0000000000..9db422cd22 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h @@ -0,0 +1,22 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#import "RTCMTLRenderer.h" + +/** @abstract RGB/BGR renderer. + * @discussion This renderer handles both kCVPixelFormatType_32BGRA and + * kCVPixelFormatType_32ARGB. 
+ */ +NS_AVAILABLE(10_11, 9_0) +@interface RTCMTLRGBRenderer : RTCMTLRenderer + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm new file mode 100644 index 0000000000..e5dc4ef80a --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm @@ -0,0 +1,164 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCMTLRGBRenderer.h" + +#import +#import + +#import "RTCMTLRenderer+Private.h" +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" +#import "base/RTCVideoFrameBuffer.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +#include "rtc_base/checks.h" + +static NSString *const shaderSource = MTL_STRINGIFY( + using namespace metal; + + typedef struct { + packed_float2 position; + packed_float2 texcoord; + } Vertex; + + typedef struct { + float4 position[[position]]; + float2 texcoord; + } VertexIO; + + vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], + uint vid[[vertex_id]]) { + VertexIO out; + constant Vertex &v = verticies[vid]; + out.position = float4(float2(v.position), 0.0, 1.0); + out.texcoord = v.texcoord; + return out; + } + + fragment half4 fragmentColorConversion(VertexIO in[[stage_in]], + texture2d texture[[texture(0)]], + constant bool &isARGB[[buffer(0)]]) { + constexpr sampler s(address::clamp_to_edge, filter::linear); + + half4 out = texture.sample(s, in.texcoord); + if (isARGB) { + out = half4(out.g, out.b, out.a, out.r); + } + + return out; + }); + +@implementation RTCMTLRGBRenderer { + // Textures. + CVMetalTextureCacheRef _textureCache; + id _texture; + + // Uniforms. + id _uniformsBuffer; +} + +- (BOOL)addRenderingDestination:(__kindof MTKView *)view { + if ([super addRenderingDestination:view]) { + return [self initializeTextureCache]; + } + return NO; +} + +- (BOOL)initializeTextureCache { + CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice], + nil, &_textureCache); + if (status != kCVReturnSuccess) { + RTCLogError(@"Metal: Failed to initialize metal texture cache. 
Return status is %d", status); + return NO; + } + + return YES; +} + +- (NSString *)shaderSource { + return shaderSource; +} + +- (void)getWidth:(nonnull int *)width + height:(nonnull int *)height + cropWidth:(nonnull int *)cropWidth + cropHeight:(nonnull int *)cropHeight + cropX:(nonnull int *)cropX + cropY:(nonnull int *)cropY + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer); + *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer); + *cropWidth = pixelBuffer.cropWidth; + *cropHeight = pixelBuffer.cropHeight; + *cropX = pixelBuffer.cropX; + *cropY = pixelBuffer.cropY; +} + +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]); + if (![super setupTexturesForFrame:frame]) { + return NO; + } + CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer; + + id gpuTexture = nil; + CVMetalTextureRef textureOut = nullptr; + bool isARGB; + + int width = CVPixelBufferGetWidth(pixelBuffer); + int height = CVPixelBufferGetHeight(pixelBuffer); + OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); + + MTLPixelFormat mtlPixelFormat; + if (pixelFormat == kCVPixelFormatType_32BGRA) { + mtlPixelFormat = MTLPixelFormatBGRA8Unorm; + isARGB = false; + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + mtlPixelFormat = MTLPixelFormatRGBA8Unorm; + isARGB = true; + } else { + RTC_DCHECK_NOTREACHED(); + return NO; + } + + CVReturn result = CVMetalTextureCacheCreateTextureFromImage( + kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat, + width, height, 0, &textureOut); + if (result == kCVReturnSuccess) { + gpuTexture = CVMetalTextureGetTexture(textureOut); + } + CVBufferRelease(textureOut); + + if (gpuTexture != nil) { + _texture = gpuTexture; + _uniformsBuffer = + [[self currentMetalDevice] newBufferWithBytes:&isARGB + length:sizeof(isARGB) + options:MTLResourceCPUCacheModeDefaultCache]; + return YES; + } + + return NO; +} + +- (void)uploadTexturesToRenderEncoder:(id)renderEncoder { + [renderEncoder setFragmentTexture:_texture atIndex:0]; + [renderEncoder setFragmentBuffer:_uniformsBuffer offset:0 atIndex:0]; +} + +- (void)dealloc { + if (_textureCache) { + CFRelease(_textureCache); + } +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h new file mode 100644 index 0000000000..916d4d4430 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h @@ -0,0 +1,33 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import + +#import "RTCMTLRenderer.h" + +#define MTL_STRINGIFY(s) @ #s + +NS_ASSUME_NONNULL_BEGIN + +@interface RTCMTLRenderer (Private) +- (nullable id)currentMetalDevice; +- (NSString *)shaderSource; +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; +- (void)uploadTexturesToRenderEncoder:(id)renderEncoder; +- (void)getWidth:(nonnull int *)width + height:(nonnull int *)height + cropWidth:(nonnull int *)cropWidth + cropHeight:(nonnull int *)cropHeight + cropX:(nonnull int *)cropX + cropY:(nonnull int *)cropY + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; +@end + +NS_ASSUME_NONNULL_END diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h new file mode 100644 index 0000000000..aa31545973 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -0,0 +1,61 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import +#if TARGET_OS_IPHONE +#import +#else +#import +#endif + +#import "base/RTCVideoFrame.h" + +NS_ASSUME_NONNULL_BEGIN +/** + * Protocol defining ability to render RTCVideoFrame in Metal enabled views. + */ +@protocol RTCMTLRenderer + +/** + * Method to be implemented to perform actual rendering of the provided frame. + * + * @param frame The frame to be rendered. + */ +- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +/** + * Sets the provided view as rendering destination if possible. + * + * If not possible method returns NO and callers of the method are responisble for performing + * cleanups. + */ + +#if TARGET_OS_IOS +- (BOOL)addRenderingDestination:(__kindof UIView *)view; +#else +- (BOOL)addRenderingDestination:(__kindof NSView *)view; +#endif + +@end + +/** + * Implementation of RTCMTLRenderer protocol. + */ +NS_AVAILABLE(10_11, 9_0) +@interface RTCMTLRenderer : NSObject + +/** @abstract A wrapped RTCVideoRotation, or nil. + @discussion When not nil, the rotation of the actual frame is ignored when rendering. + */ +@property(atomic, nullable) NSValue *rotationOverride; + +@end + +NS_ASSUME_NONNULL_END diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm new file mode 100644 index 0000000000..410590a7b1 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -0,0 +1,328 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCMTLRenderer+Private.h" + +#import +#import + +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" +#import "base/RTCVideoFrameBuffer.h" + +#include "api/video/video_rotation.h" +#include "rtc_base/checks.h" + +// As defined in shaderSource. 
+static NSString *const vertexFunctionName = @"vertexPassthrough"; +static NSString *const fragmentFunctionName = @"fragmentColorConversion"; + +static NSString *const pipelineDescriptorLabel = @"RTCPipeline"; +static NSString *const commandBufferLabel = @"RTCCommandBuffer"; +static NSString *const renderEncoderLabel = @"RTCEncoder"; +static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame"; + +// Computes the texture coordinates given rotation and cropping. +static inline void getCubeVertexData(int cropX, + int cropY, + int cropWidth, + int cropHeight, + size_t frameWidth, + size_t frameHeight, + RTCVideoRotation rotation, + float *buffer) { + // The computed values are the adjusted texture coordinates, in [0..1]. + // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the + // left/top edge. + // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the + // right/bottom edge (i.e. render up to 80% of the width/height). + float cropLeft = cropX / (float)frameWidth; + float cropRight = (cropX + cropWidth) / (float)frameWidth; + float cropTop = cropY / (float)frameHeight; + float cropBottom = (cropY + cropHeight) / (float)frameHeight; + + // These arrays map the view coordinates to texture coordinates, taking cropping and rotation + // into account. The first two columns are view coordinates, the last two are texture coordinates. + switch (rotation) { + case RTCVideoRotation_0: { + float values[16] = {-1.0, -1.0, cropLeft, cropBottom, + 1.0, -1.0, cropRight, cropBottom, + -1.0, 1.0, cropLeft, cropTop, + 1.0, 1.0, cropRight, cropTop}; + memcpy(buffer, &values, sizeof(values)); + } break; + case RTCVideoRotation_90: { + float values[16] = {-1.0, -1.0, cropRight, cropBottom, + 1.0, -1.0, cropRight, cropTop, + -1.0, 1.0, cropLeft, cropBottom, + 1.0, 1.0, cropLeft, cropTop}; + memcpy(buffer, &values, sizeof(values)); + } break; + case RTCVideoRotation_180: { + float values[16] = {-1.0, -1.0, cropRight, cropTop, + 1.0, -1.0, cropLeft, cropTop, + -1.0, 1.0, cropRight, cropBottom, + 1.0, 1.0, cropLeft, cropBottom}; + memcpy(buffer, &values, sizeof(values)); + } break; + case RTCVideoRotation_270: { + float values[16] = {-1.0, -1.0, cropLeft, cropTop, + 1.0, -1.0, cropLeft, cropBottom, + -1.0, 1.0, cropRight, cropTop, + 1.0, 1.0, cropRight, cropBottom}; + memcpy(buffer, &values, sizeof(values)); + } break; + } +} + +// The max number of command buffers in flight (submitted to GPU). +// For now setting it up to 1. +// In future we might use triple buffering method if it improves performance. +static const NSInteger kMaxInflightBuffers = 1; + +@implementation RTCMTLRenderer { + __kindof MTKView *_view; + + // Controller. + dispatch_semaphore_t _inflight_semaphore; + + // Renderer. + id _device; + id _commandQueue; + id _defaultLibrary; + id _pipelineState; + + // Buffers. + id _vertexBuffer; + + // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation. 
+ int _oldFrameWidth; + int _oldFrameHeight; + int _oldCropWidth; + int _oldCropHeight; + int _oldCropX; + int _oldCropY; + RTCVideoRotation _oldRotation; +} + +@synthesize rotationOverride = _rotationOverride; + +- (instancetype)init { + if (self = [super init]) { + _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers); + } + + return self; +} + +- (BOOL)addRenderingDestination:(__kindof MTKView *)view { + return [self setupWithView:view]; +} + +#pragma mark - Private + +- (BOOL)setupWithView:(__kindof MTKView *)view { + BOOL success = NO; + if ([self setupMetal]) { + _view = view; + view.device = _device; + view.preferredFramesPerSecond = 30; + view.autoResizeDrawable = NO; + + [self loadAssets]; + + float vertexBufferArray[16] = {0}; + _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray + length:sizeof(vertexBufferArray) + options:MTLResourceCPUCacheModeWriteCombined]; + success = YES; + } + return success; +} +#pragma mark - Inheritance + +- (id)currentMetalDevice { + return _device; +} + +- (NSString *)shaderSource { + RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass."; + return nil; +} + +- (void)uploadTexturesToRenderEncoder:(id)renderEncoder { + RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass."; +} + +- (void)getWidth:(int *)width + height:(int *)height + cropWidth:(int *)cropWidth + cropHeight:(int *)cropHeight + cropX:(int *)cropX + cropY:(int *)cropY + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass."; +} + +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + // Apply rotation override if set. + RTCVideoRotation rotation; + NSValue *rotationOverride = self.rotationOverride; + if (rotationOverride) { +#if defined(__IPHONE_11_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \ + (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0) + if (@available(iOS 11, *)) { + [rotationOverride getValue:&rotation size:sizeof(rotation)]; + } else +#endif + { + [rotationOverride getValue:&rotation]; + } + } else { + rotation = frame.rotation; + } + + int frameWidth, frameHeight, cropWidth, cropHeight, cropX, cropY; + [self getWidth:&frameWidth + height:&frameHeight + cropWidth:&cropWidth + cropHeight:&cropHeight + cropX:&cropX + cropY:&cropY + ofFrame:frame]; + + // Recompute the texture cropping and recreate vertexBuffer if necessary. + if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth || + cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth || + frameHeight != _oldFrameHeight) { + getCubeVertexData(cropX, + cropY, + cropWidth, + cropHeight, + frameWidth, + frameHeight, + rotation, + (float *)_vertexBuffer.contents); + _oldCropX = cropX; + _oldCropY = cropY; + _oldCropWidth = cropWidth; + _oldCropHeight = cropHeight; + _oldRotation = rotation; + _oldFrameWidth = frameWidth; + _oldFrameHeight = frameHeight; + } + + return YES; +} + +#pragma mark - GPU methods + +- (BOOL)setupMetal { + // Set the view to use the default device. + _device = MTLCreateSystemDefaultDevice(); + if (!_device) { + return NO; + } + + // Create a new command queue. + _commandQueue = [_device newCommandQueue]; + + // Load metal library from source. 
+ NSError *libraryError = nil; + NSString *shaderSource = [self shaderSource]; + + id sourceLibrary = + [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError]; + + if (libraryError) { + RTCLogError(@"Metal: Library with source failed\n%@", libraryError); + return NO; + } + + if (!sourceLibrary) { + RTCLogError(@"Metal: Failed to load library. %@", libraryError); + return NO; + } + _defaultLibrary = sourceLibrary; + + return YES; +} + +- (void)loadAssets { + id vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName]; + id fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName]; + + MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init]; + pipelineDescriptor.label = pipelineDescriptorLabel; + pipelineDescriptor.vertexFunction = vertexFunction; + pipelineDescriptor.fragmentFunction = fragmentFunction; + pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat; + pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid; + NSError *error = nil; + _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error]; + + if (!_pipelineState) { + RTCLogError(@"Metal: Failed to create pipeline state. %@", error); + } +} + +- (void)render { + id commandBuffer = [_commandQueue commandBuffer]; + commandBuffer.label = commandBufferLabel; + + __block dispatch_semaphore_t block_semaphore = _inflight_semaphore; + [commandBuffer addCompletedHandler:^(id _Nonnull) { + // GPU work completed. + dispatch_semaphore_signal(block_semaphore); + }]; + + MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor; + if (renderPassDescriptor) { // Valid drawable. + id renderEncoder = + [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; + renderEncoder.label = renderEncoderLabel; + + // Set context state. + [renderEncoder pushDebugGroup:renderEncoderDebugGroup]; + [renderEncoder setRenderPipelineState:_pipelineState]; + [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0]; + [self uploadTexturesToRenderEncoder:renderEncoder]; + + [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip + vertexStart:0 + vertexCount:4 + instanceCount:1]; + [renderEncoder popDebugGroup]; + [renderEncoder endEncoding]; + + [commandBuffer presentDrawable:_view.currentDrawable]; + } + + // CPU work is completed, GPU work can be started. + [commandBuffer commit]; +} + +#pragma mark - RTCMTLRenderer + +- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + @autoreleasepool { + // Wait until the inflight (curently sent to GPU) command buffer + // has completed the GPU work. + dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER); + + if ([self setupTexturesForFrame:frame]) { + [self render]; + } else { + dispatch_semaphore_signal(_inflight_semaphore); + } + } +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h new file mode 100644 index 0000000000..3320d12076 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h @@ -0,0 +1,44 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#import "RTCMacros.h" +#import "RTCVideoFrame.h" +#import "RTCVideoRenderer.h" + +NS_ASSUME_NONNULL_BEGIN + +/** + * RTCMTLVideoView is thin wrapper around MTKView. + * + * It has id property that renders video frames in the view's + * bounds using Metal. + */ +NS_CLASS_AVAILABLE_IOS(9) + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView + +@property(nonatomic, weak) id delegate; + +@property(nonatomic) UIViewContentMode videoContentMode; + +/** @abstract Enables/disables rendering. + */ +@property(nonatomic, getter=isEnabled) BOOL enabled; + +/** @abstract Wrapped RTCVideoRotation, or nil. + */ +@property(nonatomic, nullable) NSValue* rotationOverride; + +@end + +NS_ASSUME_NONNULL_END diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m new file mode 100644 index 0000000000..c5d9e4385f --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -0,0 +1,265 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCMTLVideoView.h" + +#import +#import + +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" +#import "base/RTCVideoFrameBuffer.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +#import "RTCMTLI420Renderer.h" +#import "RTCMTLNV12Renderer.h" +#import "RTCMTLRGBRenderer.h" + +// To avoid unreconized symbol linker errors, we're taking advantage of the objc runtime. +// Linking errors occur when compiling for architectures that don't support Metal. 
+#define MTKViewClass NSClassFromString(@"MTKView") +#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer") +#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") +#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") + +@interface RTC_OBJC_TYPE (RTCMTLVideoView) +() @property(nonatomic) RTCMTLI420Renderer *rendererI420; +@property(nonatomic) RTCMTLNV12Renderer *rendererNV12; +@property(nonatomic) RTCMTLRGBRenderer *rendererRGB; +@property(nonatomic) MTKView *metalView; +@property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; +@property(nonatomic) CGSize videoFrameSize; +@property(nonatomic) int64_t lastFrameTimeNs; +@end + +@implementation RTC_OBJC_TYPE (RTCMTLVideoView) + +@synthesize delegate = _delegate; +@synthesize rendererI420 = _rendererI420; +@synthesize rendererNV12 = _rendererNV12; +@synthesize rendererRGB = _rendererRGB; +@synthesize metalView = _metalView; +@synthesize videoFrame = _videoFrame; +@synthesize videoFrameSize = _videoFrameSize; +@synthesize lastFrameTimeNs = _lastFrameTimeNs; +@synthesize rotationOverride = _rotationOverride; + +- (instancetype)initWithFrame:(CGRect)frameRect { + self = [super initWithFrame:frameRect]; + if (self) { + [self configure]; + } + return self; +} + +- (instancetype)initWithCoder:(NSCoder *)aCoder { + self = [super initWithCoder:aCoder]; + if (self) { + [self configure]; + } + return self; +} + +- (BOOL)isEnabled { + return !self.metalView.paused; +} + +- (void)setEnabled:(BOOL)enabled { + self.metalView.paused = !enabled; +} + +- (UIViewContentMode)videoContentMode { + return self.metalView.contentMode; +} + +- (void)setVideoContentMode:(UIViewContentMode)mode { + self.metalView.contentMode = mode; +} + +#pragma mark - Private + ++ (BOOL)isMetalAvailable { + return MTLCreateSystemDefaultDevice() != nil; +} + ++ (MTKView *)createMetalView:(CGRect)frame { + return [[MTKViewClass alloc] initWithFrame:frame]; +} + ++ (RTCMTLNV12Renderer *)createNV12Renderer { + return [[RTCMTLNV12RendererClass alloc] init]; +} + ++ (RTCMTLI420Renderer *)createI420Renderer { + return [[RTCMTLI420RendererClass alloc] init]; +} + ++ (RTCMTLRGBRenderer *)createRGBRenderer { + return [[RTCMTLRGBRenderer alloc] init]; +} + +- (void)configure { + NSAssert([RTC_OBJC_TYPE(RTCMTLVideoView) isMetalAvailable], + @"Metal not availiable on this device"); + + self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds]; + self.metalView.delegate = self; + self.metalView.contentMode = UIViewContentModeScaleAspectFill; + [self addSubview:self.metalView]; + self.videoFrameSize = CGSizeZero; +} + +- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { + [super setMultipleTouchEnabled:multipleTouchEnabled]; + self.metalView.multipleTouchEnabled = multipleTouchEnabled; +} + +- (void)layoutSubviews { + [super layoutSubviews]; + + CGRect bounds = self.bounds; + self.metalView.frame = bounds; + if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) { + self.metalView.drawableSize = [self drawableSize]; + } else { + self.metalView.drawableSize = bounds.size; + } +} + +#pragma mark - MTKViewDelegate methods + +- (void)drawInMTKView:(nonnull MTKView *)view { + NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance."); + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame; + // Skip rendering if we've already rendered this frame. 
+ if (!videoFrame || videoFrame.width <= 0 || videoFrame.height <= 0 || + videoFrame.timeStampNs == self.lastFrameTimeNs) { + return; + } + + if (CGRectIsEmpty(view.bounds)) { + return; + } + + RTCMTLRenderer *renderer; + if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer; + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); + if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) { + if (!self.rendererRGB) { + self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer]; + if (![self.rendererRGB addRenderingDestination:self.metalView]) { + self.rendererRGB = nil; + RTCLogError(@"Failed to create RGB renderer"); + return; + } + } + renderer = self.rendererRGB; + } else { + if (!self.rendererNV12) { + self.rendererNV12 = [RTC_OBJC_TYPE(RTCMTLVideoView) createNV12Renderer]; + if (![self.rendererNV12 addRenderingDestination:self.metalView]) { + self.rendererNV12 = nil; + RTCLogError(@"Failed to create NV12 renderer"); + return; + } + } + renderer = self.rendererNV12; + } + } else { + if (!self.rendererI420) { + self.rendererI420 = [RTC_OBJC_TYPE(RTCMTLVideoView) createI420Renderer]; + if (![self.rendererI420 addRenderingDestination:self.metalView]) { + self.rendererI420 = nil; + RTCLogError(@"Failed to create I420 renderer"); + return; + } + } + renderer = self.rendererI420; + } + + renderer.rotationOverride = self.rotationOverride; + + [renderer drawFrame:videoFrame]; + self.lastFrameTimeNs = videoFrame.timeStampNs; +} + +- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { +} + +#pragma mark - + +- (void)setRotationOverride:(NSValue *)rotationOverride { + _rotationOverride = rotationOverride; + + self.metalView.drawableSize = [self drawableSize]; + [self setNeedsLayout]; +} + +- (RTCVideoRotation)frameRotation { + if (self.rotationOverride) { + RTCVideoRotation rotation; + if (@available(iOS 11, *)) { + [self.rotationOverride getValue:&rotation size:sizeof(rotation)]; + } else { + [self.rotationOverride getValue:&rotation]; + } + return rotation; + } + + return self.videoFrame.rotation; +} + +- (CGSize)drawableSize { + // Flip width/height if the rotations are not the same. + CGSize videoFrameSize = self.videoFrameSize; + RTCVideoRotation frameRotation = [self frameRotation]; + + BOOL useLandscape = + (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180); + BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) || + (self.videoFrame.rotation == RTCVideoRotation_180); + + if (useLandscape == sizeIsLandscape) { + return videoFrameSize; + } else { + return CGSizeMake(videoFrameSize.height, videoFrameSize.width); + } +} + +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) + +- (void)setSize:(CGSize)size { + __weak RTC_OBJC_TYPE(RTCMTLVideoView) *weakSelf = self; + dispatch_async(dispatch_get_main_queue(), ^{ + RTC_OBJC_TYPE(RTCMTLVideoView) *strongSelf = weakSelf; + + strongSelf.videoFrameSize = size; + CGSize drawableSize = [strongSelf drawableSize]; + + strongSelf.metalView.drawableSize = drawableSize; + [strongSelf setNeedsLayout]; + [strongSelf.delegate videoView:self didChangeVideoSize:size]; + }); +} + +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + if (!self.isEnabled) { + return; + } + + if (frame == nil) { + RTCLogInfo(@"Incoming frame is nil. 
Exiting render callback."); + return; + } + self.videoFrame = frame; +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h new file mode 100644 index 0000000000..71a073ab21 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h @@ -0,0 +1,23 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCVideoViewShading.h" + +NS_ASSUME_NONNULL_BEGIN + +/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView + * and RTCEAGLVideoView if no external shader is specified. This shader will render + * the video in a rectangle without any color or geometric transformations. + */ +@interface RTCDefaultShader : NSObject + +@end + +NS_ASSUME_NONNULL_END diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm new file mode 100644 index 0000000000..9d686f625c --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm @@ -0,0 +1,201 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCDefaultShader.h" + +#import + +#import "RTCOpenGLDefines.h" +#import "RTCShader.h" +#import "base/RTCLogging.h" + +#include "absl/types/optional.h" + +static const int kYTextureUnit = 0; +static const int kUTextureUnit = 1; +static const int kVTextureUnit = 2; +static const int kUvTextureUnit = 1; + +// Fragment shader converts YUV values from input textures into a final RGB +// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php. 
+static const char kI420FragmentShaderSource[] = + SHADER_VERSION + "precision highp float;" + FRAGMENT_SHADER_IN " vec2 v_texcoord;\n" + "uniform lowp sampler2D s_textureY;\n" + "uniform lowp sampler2D s_textureU;\n" + "uniform lowp sampler2D s_textureV;\n" + FRAGMENT_SHADER_OUT + "void main() {\n" + " float y, u, v, r, g, b;\n" + " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n" + " u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n" + " v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n" + " u = u - 0.5;\n" + " v = v - 0.5;\n" + " r = y + 1.403 * v;\n" + " g = y - 0.344 * u - 0.714 * v;\n" + " b = y + 1.770 * u;\n" + " " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n" + " }\n"; + +static const char kNV12FragmentShaderSource[] = + SHADER_VERSION + "precision mediump float;" + FRAGMENT_SHADER_IN " vec2 v_texcoord;\n" + "uniform lowp sampler2D s_textureY;\n" + "uniform lowp sampler2D s_textureUV;\n" + FRAGMENT_SHADER_OUT + "void main() {\n" + " mediump float y;\n" + " mediump vec2 uv;\n" + " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n" + " uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n" + " vec2(0.5, 0.5);\n" + " " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n" + " y - 0.344 * uv.x - 0.714 * uv.y,\n" + " y + 1.770 * uv.x,\n" + " 1.0);\n" + " }\n"; + +@implementation RTCDefaultShader { + GLuint _vertexBuffer; + GLuint _vertexArray; + // Store current rotation and only upload new vertex data when rotation changes. + absl::optional _currentRotation; + + GLuint _i420Program; + GLuint _nv12Program; +} + +- (void)dealloc { + glDeleteProgram(_i420Program); + glDeleteProgram(_nv12Program); + glDeleteBuffers(1, &_vertexBuffer); + glDeleteVertexArrays(1, &_vertexArray); +} + +- (BOOL)createAndSetupI420Program { + NSAssert(!_i420Program, @"I420 program already created"); + _i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource); + if (!_i420Program) { + return NO; + } + GLint ySampler = glGetUniformLocation(_i420Program, "s_textureY"); + GLint uSampler = glGetUniformLocation(_i420Program, "s_textureU"); + GLint vSampler = glGetUniformLocation(_i420Program, "s_textureV"); + + if (ySampler < 0 || uSampler < 0 || vSampler < 0) { + RTCLog(@"Failed to get uniform variable locations in I420 shader"); + glDeleteProgram(_i420Program); + _i420Program = 0; + return NO; + } + + glUseProgram(_i420Program); + glUniform1i(ySampler, kYTextureUnit); + glUniform1i(uSampler, kUTextureUnit); + glUniform1i(vSampler, kVTextureUnit); + + return YES; +} + +- (BOOL)createAndSetupNV12Program { + NSAssert(!_nv12Program, @"NV12 program already created"); + _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource); + if (!_nv12Program) { + return NO; + } + GLint ySampler = glGetUniformLocation(_nv12Program, "s_textureY"); + GLint uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV"); + + if (ySampler < 0 || uvSampler < 0) { + RTCLog(@"Failed to get uniform variable locations in NV12 shader"); + glDeleteProgram(_nv12Program); + _nv12Program = 0; + return NO; + } + + glUseProgram(_nv12Program); + glUniform1i(ySampler, kYTextureUnit); + glUniform1i(uvSampler, kUvTextureUnit); + + return YES; +} + +- (BOOL)prepareVertexBufferWithRotation:(RTCVideoRotation)rotation { + if (!_vertexBuffer && !RTCCreateVertexBuffer(&_vertexBuffer, &_vertexArray)) { + RTCLog(@"Failed to setup vertex buffer"); + return NO; + } + + glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); + if (!_currentRotation || rotation != *_currentRotation) 
{ + _currentRotation = absl::optional(rotation); + RTCSetVertexData(*_currentRotation); + } + return YES; +} + +- (void)applyShadingForFrameWithWidth:(int)width + height:(int)height + rotation:(RTCVideoRotation)rotation + yPlane:(GLuint)yPlane + uPlane:(GLuint)uPlane + vPlane:(GLuint)vPlane { + if (![self prepareVertexBufferWithRotation:rotation]) { + return; + } + + if (!_i420Program && ![self createAndSetupI420Program]) { + RTCLog(@"Failed to setup I420 program"); + return; + } + + glUseProgram(_i420Program); + + glActiveTexture(static_cast(GL_TEXTURE0 + kYTextureUnit)); + glBindTexture(GL_TEXTURE_2D, yPlane); + + glActiveTexture(static_cast(GL_TEXTURE0 + kUTextureUnit)); + glBindTexture(GL_TEXTURE_2D, uPlane); + + glActiveTexture(static_cast(GL_TEXTURE0 + kVTextureUnit)); + glBindTexture(GL_TEXTURE_2D, vPlane); + + glDrawArrays(GL_TRIANGLE_FAN, 0, 4); +} + +- (void)applyShadingForFrameWithWidth:(int)width + height:(int)height + rotation:(RTCVideoRotation)rotation + yPlane:(GLuint)yPlane + uvPlane:(GLuint)uvPlane { + if (![self prepareVertexBufferWithRotation:rotation]) { + return; + } + + if (!_nv12Program && ![self createAndSetupNV12Program]) { + RTCLog(@"Failed to setup NV12 shader"); + return; + } + + glUseProgram(_nv12Program); + + glActiveTexture(static_cast(GL_TEXTURE0 + kYTextureUnit)); + glBindTexture(GL_TEXTURE_2D, yPlane); + + glActiveTexture(static_cast(GL_TEXTURE0 + kUvTextureUnit)); + glBindTexture(GL_TEXTURE_2D, uvPlane); + + glDrawArrays(GL_TRIANGLE_FAN, 0, 4); +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h new file mode 100644 index 0000000000..b78501e9e6 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h @@ -0,0 +1,24 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen +// refreshes, which should be 30fps. We wrap the display link in order to avoid +// a retain cycle since CADisplayLink takes a strong reference onto its target. +// The timer is paused by default. +@interface RTCDisplayLinkTimer : NSObject + +@property(nonatomic) BOOL isPaused; + +- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler; +- (void)invalidate; + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m new file mode 100644 index 0000000000..906bb898d6 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m @@ -0,0 +1,59 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "RTCDisplayLinkTimer.h" + +#import + +@implementation RTCDisplayLinkTimer { + CADisplayLink *_displayLink; + void (^_timerHandler)(void); +} + +- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler { + NSParameterAssert(timerHandler); + if (self = [super init]) { + _timerHandler = timerHandler; + _displayLink = + [CADisplayLink displayLinkWithTarget:self + selector:@selector(displayLinkDidFire:)]; + _displayLink.paused = YES; +#if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0 + _displayLink.preferredFramesPerSecond = 30; +#else + [_displayLink setFrameInterval:2]; +#endif + [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] + forMode:NSRunLoopCommonModes]; + } + return self; +} + +- (void)dealloc { + [self invalidate]; +} + +- (BOOL)isPaused { + return _displayLink.paused; +} + +- (void)setIsPaused:(BOOL)isPaused { + _displayLink.paused = isPaused; +} + +- (void)invalidate { + [_displayLink invalidate]; +} + +- (void)displayLinkDidFire:(CADisplayLink *)displayLink { + _timerHandler(); +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h new file mode 100644 index 0000000000..24b26cd602 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h @@ -0,0 +1,45 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCVideoRenderer.h" +#import "RTCVideoViewShading.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCEAGLVideoView); + +/** + * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames + * in its bounds using OpenGLES 2.0 or OpenGLES 3.0. + */ +RTC_OBJC_EXPORT +NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.") +@interface RTC_OBJC_TYPE (RTCEAGLVideoView) : UIView + +@property(nonatomic, weak) id delegate; + +- (instancetype)initWithFrame:(CGRect)frame + shader:(id)shader + NS_DESIGNATED_INITIALIZER; + +- (instancetype)initWithCoder:(NSCoder *)aDecoder + shader:(id)shader + NS_DESIGNATED_INITIALIZER; + +/** @abstract Wrapped RTCVideoRotation, or nil. + */ +@property(nonatomic, nullable) NSValue *rotationOverride; +@end + +NS_ASSUME_NONNULL_END diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m new file mode 100644 index 0000000000..89e62d2ce7 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m @@ -0,0 +1,295 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "RTCEAGLVideoView.h" + +#import + +#import "RTCDefaultShader.h" +#import "RTCDisplayLinkTimer.h" +#import "RTCI420TextureCache.h" +#import "RTCNV12TextureCache.h" +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" +#import "base/RTCVideoFrameBuffer.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +// RTC_OBJC_TYPE(RTCEAGLVideoView) wraps a GLKView which is setup with +// enableSetNeedsDisplay = NO for the purpose of gaining control of +// exactly when to call -[GLKView display]. This need for extra +// control is required to avoid triggering method calls on GLKView +// that results in attempting to bind the underlying render buffer +// when the drawable size would be empty which would result in the +// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is +// the method that will trigger the binding of the render +// buffer. Because the standard behaviour of -[UIView setNeedsDisplay] +// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains +// its own `isDirty` flag. + +@interface RTC_OBJC_TYPE (RTCEAGLVideoView) +() + // `videoFrame` is set when we receive a frame from a worker thread and is read + // from the display link callback so atomicity is required. + @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; +@property(nonatomic, readonly) GLKView *glkView; +@end + +@implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { + RTCDisplayLinkTimer *_timer; + EAGLContext *_glContext; + // This flag should only be set and read on the main thread (e.g. by + // setNeedsDisplay) + BOOL _isDirty; + id _shader; + RTCNV12TextureCache *_nv12TextureCache; + RTCI420TextureCache *_i420TextureCache; + // As timestamps should be unique between frames, will store last + // drawn frame timestamp instead of the whole frame to reduce memory usage. + int64_t _lastDrawnFrameTimeStampNs; +} + +@synthesize delegate = _delegate; +@synthesize videoFrame = _videoFrame; +@synthesize glkView = _glkView; +@synthesize rotationOverride = _rotationOverride; + +- (instancetype)initWithFrame:(CGRect)frame { + return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]]; +} + +- (instancetype)initWithCoder:(NSCoder *)aDecoder { + return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]]; +} + +- (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { + if (self = [super initWithFrame:frame]) { + _shader = shader; + if (![self configure]) { + return nil; + } + } + return self; +} + +- (instancetype)initWithCoder:(NSCoder *)aDecoder + shader:(id)shader { + if (self = [super initWithCoder:aDecoder]) { + _shader = shader; + if (![self configure]) { + return nil; + } + } + return self; +} + +- (BOOL)configure { + EAGLContext *glContext = + [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; + if (!glContext) { + glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; + } + if (!glContext) { + RTCLogError(@"Failed to create EAGLContext"); + return NO; + } + _glContext = glContext; + + // GLKView manages a framebuffer for us. 
+ _glkView = [[GLKView alloc] initWithFrame:CGRectZero + context:_glContext]; + _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888; + _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone; + _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone; + _glkView.drawableMultisample = GLKViewDrawableMultisampleNone; + _glkView.delegate = self; + _glkView.layer.masksToBounds = YES; + _glkView.enableSetNeedsDisplay = NO; + [self addSubview:_glkView]; + + // Listen to application state in order to clean up OpenGL before app goes + // away. + NSNotificationCenter *notificationCenter = + [NSNotificationCenter defaultCenter]; + [notificationCenter addObserver:self + selector:@selector(willResignActive) + name:UIApplicationWillResignActiveNotification + object:nil]; + [notificationCenter addObserver:self + selector:@selector(didBecomeActive) + name:UIApplicationDidBecomeActiveNotification + object:nil]; + + // Frames are received on a separate thread, so we poll for current frame + // using a refresh rate proportional to screen refresh frequency. This + // occurs on the main thread. + __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; + _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{ + RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; + [strongSelf displayLinkTimerDidFire]; + }]; + if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) { + [self setupGL]; + } + return YES; +} + +- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { + [super setMultipleTouchEnabled:multipleTouchEnabled]; + _glkView.multipleTouchEnabled = multipleTouchEnabled; +} + +- (void)dealloc { + [[NSNotificationCenter defaultCenter] removeObserver:self]; + UIApplicationState appState = + [UIApplication sharedApplication].applicationState; + if (appState == UIApplicationStateActive) { + [self teardownGL]; + } + [_timer invalidate]; + [self ensureGLContext]; + _shader = nil; + if (_glContext && [EAGLContext currentContext] == _glContext) { + [EAGLContext setCurrentContext:nil]; + } +} + +#pragma mark - UIView + +- (void)setNeedsDisplay { + [super setNeedsDisplay]; + _isDirty = YES; +} + +- (void)setNeedsDisplayInRect:(CGRect)rect { + [super setNeedsDisplayInRect:rect]; + _isDirty = YES; +} + +- (void)layoutSubviews { + [super layoutSubviews]; + _glkView.frame = self.bounds; +} + +#pragma mark - GLKViewDelegate + +// This method is called when the GLKView's content is dirty and needs to be +// redrawn. This occurs on main thread. +- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { + // The renderer will draw the frame to the framebuffer corresponding to the + // one used by `view`. 
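+ // Redraws are skipped when the timestamp matches the last drawn frame, since
+ // the display link fires at a fixed rate regardless of whether a new frame
+ // has arrived.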
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame; + if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) { + return; + } + RTCVideoRotation rotation = frame.rotation; + if(_rotationOverride != nil) { + [_rotationOverride getValue: &rotation]; + } + [self ensureGLContext]; + glClear(GL_COLOR_BUFFER_BIT); + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + if (!_nv12TextureCache) { + _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext]; + } + if (_nv12TextureCache) { + [_nv12TextureCache uploadFrameToTextures:frame]; + [_shader applyShadingForFrameWithWidth:frame.width + height:frame.height + rotation:rotation + yPlane:_nv12TextureCache.yTexture + uvPlane:_nv12TextureCache.uvTexture]; + [_nv12TextureCache releaseTextures]; + + _lastDrawnFrameTimeStampNs = self.videoFrame.timeStampNs; + } + } else { + if (!_i420TextureCache) { + _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext]; + } + [_i420TextureCache uploadFrameToTextures:frame]; + [_shader applyShadingForFrameWithWidth:frame.width + height:frame.height + rotation:rotation + yPlane:_i420TextureCache.yTexture + uPlane:_i420TextureCache.uTexture + vPlane:_i420TextureCache.vTexture]; + + _lastDrawnFrameTimeStampNs = self.videoFrame.timeStampNs; + } +} + +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) + +// These methods may be called on non-main thread. +- (void)setSize:(CGSize)size { + __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; + dispatch_async(dispatch_get_main_queue(), ^{ + RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; + [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size]; + }); +} + +- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + self.videoFrame = frame; +} + +#pragma mark - Private + +- (void)displayLinkTimerDidFire { + // Don't render unless video frame have changed or the view content + // has explicitly been marked dirty. + if (!_isDirty && _lastDrawnFrameTimeStampNs == self.videoFrame.timeStampNs) { + return; + } + + // Always reset isDirty at this point, even if -[GLKView display] + // won't be called in the case the drawable size is empty. + _isDirty = NO; + + // Only call -[GLKView display] if the drawable size is + // non-empty. Calling display will make the GLKView setup its + // render buffer if necessary, but that will fail with error + // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty. + if (self.bounds.size.width > 0 && self.bounds.size.height > 0) { + [_glkView display]; + } +} + +- (void)setupGL { + [self ensureGLContext]; + glDisable(GL_DITHER); + _timer.isPaused = NO; +} + +- (void)teardownGL { + self.videoFrame = nil; + _timer.isPaused = YES; + [_glkView deleteDrawable]; + [self ensureGLContext]; + _nv12TextureCache = nil; + _i420TextureCache = nil; +} + +- (void)didBecomeActive { + [self setupGL]; +} + +- (void)willResignActive { + [self teardownGL]; +} + +- (void)ensureGLContext { + NSAssert(_glContext, @"context shouldn't be nil"); + if ([EAGLContext currentContext] != _glContext) { + [EAGLContext setCurrentContext:_glContext]; + } +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h new file mode 100644 index 0000000000..9fdcc5a695 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h @@ -0,0 +1,25 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCOpenGLDefines.h" +#import "base/RTCVideoFrame.h" + +@interface RTCI420TextureCache : NSObject + +@property(nonatomic, readonly) GLuint yTexture; +@property(nonatomic, readonly) GLuint uTexture; +@property(nonatomic, readonly) GLuint vTexture; + +- (instancetype)init NS_UNAVAILABLE; +- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER; + +- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm new file mode 100644 index 0000000000..a91e927cb4 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -0,0 +1,149 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCI420TextureCache.h" + +#import + +#import "base/RTCI420Buffer.h" +#import "base/RTCVideoFrameBuffer.h" + +#include + +// Two sets of 3 textures are used here, one for each of the Y, U and V planes. Having two sets +// alleviates CPU blockage in the event that the GPU is asked to render to a texture that is already +// in use. +static const GLsizei kNumTextureSets = 2; +static const GLsizei kNumTexturesPerSet = 3; +static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; + +@implementation RTCI420TextureCache { + BOOL _hasUnpackRowLength; + GLint _currentTextureSet; + // Handles for OpenGL constructs. + GLuint _textures[kNumTextures]; + // Used to create a non-padded plane for GPU upload when we receive padded frames. + std::vector _planeBuffer; +} + +- (GLuint)yTexture { + return _textures[_currentTextureSet * kNumTexturesPerSet]; +} + +- (GLuint)uTexture { + return _textures[_currentTextureSet * kNumTexturesPerSet + 1]; +} + +- (GLuint)vTexture { + return _textures[_currentTextureSet * kNumTexturesPerSet + 2]; +} + +- (instancetype)initWithContext:(GlContextType *)context { + if (self = [super init]) { + _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3); + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + + [self setupTextures]; + } + return self; +} + +- (void)dealloc { + glDeleteTextures(kNumTextures, _textures); +} + +- (void)setupTextures { + glGenTextures(kNumTextures, _textures); + // Set parameters for each of the textures we created. 
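+ // GL_LINEAR filtering and GL_CLAMP_TO_EDGE below avoid sampling artifacts
+ // when the video is scaled or when texture coordinates land on the plane
+ // borders.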
+ for (GLsizei i = 0; i < kNumTextures; i++) { + glBindTexture(GL_TEXTURE_2D, _textures[i]); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + } +} + +- (void)uploadPlane:(const uint8_t *)plane + texture:(GLuint)texture + width:(size_t)width + height:(size_t)height + stride:(int32_t)stride { + glBindTexture(GL_TEXTURE_2D, texture); + + const uint8_t *uploadPlane = plane; + if ((size_t)stride != width) { + if (_hasUnpackRowLength) { + // GLES3 allows us to specify stride. + glPixelStorei(GL_UNPACK_ROW_LENGTH, stride); + glTexImage2D(GL_TEXTURE_2D, + 0, + RTC_PIXEL_FORMAT, + static_cast(width), + static_cast(height), + 0, + RTC_PIXEL_FORMAT, + GL_UNSIGNED_BYTE, + uploadPlane); + glPixelStorei(GL_UNPACK_ROW_LENGTH, 0); + return; + } else { + // Make an unpadded copy and upload that instead. Quick profiling showed + // that this is faster than uploading row by row using glTexSubImage2D. + uint8_t *unpaddedPlane = _planeBuffer.data(); + for (size_t y = 0; y < height; ++y) { + memcpy(unpaddedPlane + y * width, plane + y * stride, width); + } + uploadPlane = unpaddedPlane; + } + } + glTexImage2D(GL_TEXTURE_2D, + 0, + RTC_PIXEL_FORMAT, + static_cast(width), + static_cast(height), + 0, + RTC_PIXEL_FORMAT, + GL_UNSIGNED_BYTE, + uploadPlane); +} + +- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; + + id buffer = [frame.buffer toI420]; + + const int chromaWidth = buffer.chromaWidth; + const int chromaHeight = buffer.chromaHeight; + if (buffer.strideY != frame.width || buffer.strideU != chromaWidth || + buffer.strideV != chromaWidth) { + _planeBuffer.resize(buffer.width * buffer.height); + } + + [self uploadPlane:buffer.dataY + texture:self.yTexture + width:buffer.width + height:buffer.height + stride:buffer.strideY]; + + [self uploadPlane:buffer.dataU + texture:self.uTexture + width:chromaWidth + height:chromaHeight + stride:buffer.strideU]; + + [self uploadPlane:buffer.dataV + texture:self.vTexture + width:chromaWidth + height:chromaHeight + stride:buffer.strideV]; +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h new file mode 100644 index 0000000000..f202b836b5 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h @@ -0,0 +1,33 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import + +#import "base/RTCMacros.h" + +@class RTC_OBJC_TYPE(RTCVideoFrame); + +NS_ASSUME_NONNULL_BEGIN + +@interface RTCNV12TextureCache : NSObject + +@property(nonatomic, readonly) GLuint yTexture; +@property(nonatomic, readonly) GLuint uvTexture; + +- (instancetype)init NS_UNAVAILABLE; +- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER; + +- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +- (void)releaseTextures; + +@end + +NS_ASSUME_NONNULL_END diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m new file mode 100644 index 0000000000..a520ac45b4 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m @@ -0,0 +1,113 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCNV12TextureCache.h" + +#import "base/RTCVideoFrame.h" +#import "base/RTCVideoFrameBuffer.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +@implementation RTCNV12TextureCache { + CVOpenGLESTextureCacheRef _textureCache; + CVOpenGLESTextureRef _yTextureRef; + CVOpenGLESTextureRef _uvTextureRef; +} + +- (GLuint)yTexture { + return CVOpenGLESTextureGetName(_yTextureRef); +} + +- (GLuint)uvTexture { + return CVOpenGLESTextureGetName(_uvTextureRef); +} + +- (instancetype)initWithContext:(EAGLContext *)context { + if (self = [super init]) { + CVReturn ret = CVOpenGLESTextureCacheCreate( + kCFAllocatorDefault, NULL, +#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API + context, +#else + (__bridge void *)context, +#endif + NULL, &_textureCache); + if (ret != kCVReturnSuccess) { + self = nil; + } + } + return self; +} + +- (BOOL)loadTexture:(CVOpenGLESTextureRef *)textureOut + pixelBuffer:(CVPixelBufferRef)pixelBuffer + planeIndex:(int)planeIndex + pixelFormat:(GLenum)pixelFormat { + const int width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex); + const int height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex); + + if (*textureOut) { + CFRelease(*textureOut); + *textureOut = nil; + } + CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage( + kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, pixelFormat, width, + height, pixelFormat, GL_UNSIGNED_BYTE, planeIndex, textureOut); + if (ret != kCVReturnSuccess) { + if (*textureOut) { + CFRelease(*textureOut); + *textureOut = nil; + } + return NO; + } + NSAssert(CVOpenGLESTextureGetTarget(*textureOut) == GL_TEXTURE_2D, + @"Unexpected GLES texture target"); + glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(*textureOut)); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + return YES; +} + +- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]], + @"frame must be CVPixelBuffer backed"); + 
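+ // Plane 0 (Y) is uploaded below as GL_LUMINANCE and plane 1 (interleaved UV)
+ // as GL_LUMINANCE_ALPHA, which is what lets the NV12 fragment shader read the
+ // chroma pair back out of the .r and .a channels.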
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer; + return [self loadTexture:&_yTextureRef + pixelBuffer:pixelBuffer + planeIndex:0 + pixelFormat:GL_LUMINANCE] && + [self loadTexture:&_uvTextureRef + pixelBuffer:pixelBuffer + planeIndex:1 + pixelFormat:GL_LUMINANCE_ALPHA]; +} + +- (void)releaseTextures { + if (_uvTextureRef) { + CFRelease(_uvTextureRef); + _uvTextureRef = nil; + } + if (_yTextureRef) { + CFRelease(_yTextureRef); + _yTextureRef = nil; + } +} + +- (void)dealloc { + [self releaseTextures]; + if (_textureCache) { + CFRelease(_textureCache); + _textureCache = nil; + } +} + +@end diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h new file mode 100644 index 0000000000..d84d992278 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h @@ -0,0 +1,23 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#define RTC_PIXEL_FORMAT GL_LUMINANCE +#define SHADER_VERSION +#define VERTEX_SHADER_IN "attribute" +#define VERTEX_SHADER_OUT "varying" +#define FRAGMENT_SHADER_IN "varying" +#define FRAGMENT_SHADER_OUT +#define FRAGMENT_SHADER_COLOR "gl_FragColor" +#define FRAGMENT_SHADER_TEXTURE "texture2D" + +@class EAGLContext; +typedef EAGLContext GlContextType; diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h new file mode 100644 index 0000000000..d1b91fb643 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h @@ -0,0 +1,21 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "base/RTCVideoFrame.h" + +RTC_EXTERN const char kRTCVertexShaderSource[]; + +RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar* source); +RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader); +RTC_EXTERN GLuint +RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]); +RTC_EXTERN BOOL RTCCreateVertexBuffer(GLuint* vertexBuffer, + GLuint* vertexArray); +RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation); diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm new file mode 100644 index 0000000000..25f6eee34e --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm @@ -0,0 +1,178 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCShader.h" + +#import + +#include +#include +#include + +#import "RTCOpenGLDefines.h" + +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +// Vertex shader doesn't do anything except pass coordinates through. +const char kRTCVertexShaderSource[] = + SHADER_VERSION + VERTEX_SHADER_IN " vec2 position;\n" + VERTEX_SHADER_IN " vec2 texcoord;\n" + VERTEX_SHADER_OUT " vec2 v_texcoord;\n" + "void main() {\n" + " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n" + " v_texcoord = texcoord;\n" + "}\n"; + +// Compiles a shader of the given `type` with GLSL source `source` and returns +// the shader handle or 0 on error. +GLuint RTCCreateShader(GLenum type, const GLchar *source) { + GLuint shader = glCreateShader(type); + if (!shader) { + return 0; + } + glShaderSource(shader, 1, &source, NULL); + glCompileShader(shader); + GLint compileStatus = GL_FALSE; + glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus); + if (compileStatus == GL_FALSE) { + GLint logLength = 0; + // The null termination character is included in the returned log length. + glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength); + if (logLength > 0) { + std::unique_ptr compileLog(new char[logLength]); + // The returned string is null terminated. + glGetShaderInfoLog(shader, logLength, NULL, compileLog.get()); + RTC_LOG(LS_ERROR) << "Shader compile error: " << compileLog.get(); + } + glDeleteShader(shader); + shader = 0; + } + return shader; +} + +// Links a shader program with the given vertex and fragment shaders and +// returns the program handle or 0 on error. +GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) { + if (vertexShader == 0 || fragmentShader == 0) { + return 0; + } + GLuint program = glCreateProgram(); + if (!program) { + return 0; + } + glAttachShader(program, vertexShader); + glAttachShader(program, fragmentShader); + glLinkProgram(program); + GLint linkStatus = GL_FALSE; + glGetProgramiv(program, GL_LINK_STATUS, &linkStatus); + if (linkStatus == GL_FALSE) { + glDeleteProgram(program); + program = 0; + } + return program; +} + +// Creates and links a shader program with the given fragment shader source and +// a plain vertex shader. Returns the program handle or 0 on error. +GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) { + GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource); + RTC_CHECK(vertexShader) << "failed to create vertex shader"; + GLuint fragmentShader = + RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource); + RTC_CHECK(fragmentShader) << "failed to create fragment shader"; + GLuint program = RTCCreateProgram(vertexShader, fragmentShader); + // Shaders are created only to generate program. + if (vertexShader) { + glDeleteShader(vertexShader); + } + if (fragmentShader) { + glDeleteShader(fragmentShader); + } + + // Set vertex shader variables 'position' and 'texcoord' in program. + GLint position = glGetAttribLocation(program, "position"); + GLint texcoord = glGetAttribLocation(program, "texcoord"); + if (position < 0 || texcoord < 0) { + glDeleteProgram(program); + return 0; + } + + // Read position attribute with size of 2 and stride of 4 beginning at the start of the array. The + // last argument indicates offset of data within the vertex buffer. 
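+ // Each vertex is packed as [x, y, u, v] (see RTCSetVertexData below), so both
+ // attributes use a stride of four floats and the texcoord data starts two
+ // floats into each vertex.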
+ glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0); + glEnableVertexAttribArray(position); + + // Read texcoord attribute with size of 2 and stride of 4 beginning at the first texcoord in the + // array. The last argument indicates offset of data within the vertex buffer. + glVertexAttribPointer( + texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)(2 * sizeof(GLfloat))); + glEnableVertexAttribArray(texcoord); + + return program; +} + +BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) { + glGenBuffers(1, vertexBuffer); + if (*vertexBuffer == 0) { + glDeleteVertexArrays(1, vertexArray); + return NO; + } + glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer); + glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW); + return YES; +} + +// Set vertex data to the currently bound vertex buffer. +void RTCSetVertexData(RTCVideoRotation rotation) { + // When modelview and projection matrices are identity (default) the world is + // contained in the square around origin with unit size 2. Drawing to these + // coordinates is equivalent to drawing to the entire screen. The texture is + // stretched over that square using texture coordinates (u, v) that range + // from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically + // here because the incoming frame has origin in upper left hand corner but + // OpenGL expects origin in bottom left corner. + std::array<std::array<GLfloat, 2>, 4> UVCoords = {{ + {{0, 1}}, // Lower left. + {{1, 1}}, // Lower right. + {{1, 0}}, // Upper right. + {{0, 0}}, // Upper left. + }}; + + // Rotate the UV coordinates. + int rotation_offset; + switch (rotation) { + case RTCVideoRotation_0: + rotation_offset = 0; + break; + case RTCVideoRotation_90: + rotation_offset = 1; + break; + case RTCVideoRotation_180: + rotation_offset = 2; + break; + case RTCVideoRotation_270: + rotation_offset = 3; + break; + } + std::rotate(UVCoords.begin(), UVCoords.begin() + rotation_offset, + UVCoords.end()); + + const GLfloat gVertices[] = { + // X, Y, U, V. + -1, -1, UVCoords[0][0], UVCoords[0][1], + 1, -1, UVCoords[1][0], UVCoords[1][1], + 1, 1, UVCoords[2][0], UVCoords[2][1], + -1, 1, UVCoords[3][0], UVCoords[3][1], + }; + + glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(gVertices), gVertices); +} diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h new file mode 100644 index 0000000000..9df30a8fa0 --- /dev/null +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h @@ -0,0 +1,39 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#import "RTCVideoFrame.h" + +NS_ASSUME_NONNULL_BEGIN + +/** + * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders + * used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView. + */ +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCVideoViewShading) + + /** Callback for I420 frames. Each plane is given as a texture.
*/ +- (void)applyShadingForFrameWithWidth:(int)width height:(int)height + rotation:(RTCVideoRotation)rotation yPlane:(GLuint)yPlane uPlane:(GLuint)uPlane + vPlane:(GLuint)vPlane; + +/** Callback for NV12 frames. Each plane is given as a texture. */ +- (void)applyShadingForFrameWithWidth:(int)width + height:(int)height + rotation:(RTCVideoRotation)rotation + yPlane:(GLuint)yPlane + uvPlane:(GLuint)uvPlane; + +@end + +NS_ASSUME_NONNULL_END -- cgit v1.2.3
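The fixed coefficients in kI420FragmentShaderSource and kNV12FragmentShaderSource above (1.403, 0.344, 0.714, 1.770) implement a BT.601-style YUV-to-RGB conversion with the chroma channels re-centered around zero. The stand-alone C sketch below mirrors that math on the CPU; it is illustrative only (the function name and sample values are not part of the patch) and can be used to sanity-check shader output against reference pixels.

#include <stdio.h>

/* Same math as the fragment shaders in RTCDefaultShader.mm:
 *   R = Y + 1.403 * V
 *   G = Y - 0.344 * U - 0.714 * V
 *   B = Y + 1.770 * U
 * where U and V are shifted from [0, 1] to [-0.5, 0.5]. */
static void YuvToRgb(float y, float u, float v, float *r, float *g, float *b) {
  u -= 0.5f;
  v -= 0.5f;
  *r = y + 1.403f * v;
  *g = y - 0.344f * u - 0.714f * v;
  *b = y + 1.770f * u;
}

int main(void) {
  float r, g, b;
  /* Mid gray: neutral chroma leaves every channel at the luma value. */
  YuvToRgb(0.5f, 0.5f, 0.5f, &r, &g, &b);
  printf("r=%.3f g=%.3f b=%.3f\n", r, g, b); /* Expect 0.500 0.500 0.500. */
  return 0;
}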