From a4888f01a4fb532a1fbc25cd6bb5a42299dabb76 Mon Sep 17 00:00:00 2001
From: JT Teh
Date: Wed, 30 May 2018 16:45:36 +0000
Subject: [PATCH] Revert "Metal rendering should account for cropping."
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This reverts commit fc4a9c933326cac2eb048eb507e63021c75e705e.

Reason for revert: Remote video is not showing in a video call.

Original change's description:
> Metal rendering should account for cropping.
>
> Also:
> - added a rotation override to allow ignoring frame rotation
> - fixed a couple of minor issues
> - made it possible to run the MTKView without the DisplayLink
>
> Bug: webrtc:9301
> Change-Id: Ia83c152d9b6d45d56ceb80d287b5d3eacfaebddd
> Reviewed-on: https://webrtc-review.googlesource.com/78282
> Reviewed-by: Kári Helgason
> Reviewed-by: Anders Carlsson
> Commit-Queue: Peter Hanspers
> Cr-Commit-Position: refs/heads/master@{#23452}

TBR=andersc@webrtc.org,kthelgason@webrtc.org,peterhanspers@webrtc.org

Change-Id: Iddf7793368531d2d7268c1ec138bb3a9874a4ab7
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:9301
Reviewed-on: https://webrtc-review.googlesource.com/80020
Reviewed-by: JT Teh
Commit-Queue: JT Teh
Cr-Commit-Position: refs/heads/master@{#23455}
---
 .../objc/AppRTCMobile/ios/ARDVideoCallView.m  |   4 +-
 .../Classes/Metal/RTCMTLI420Renderer.mm       |   4 +-
 .../Classes/Metal/RTCMTLNV12Renderer.mm       |   4 +-
 .../Classes/Metal/RTCMTLRGBRenderer.mm        |  11 +-
 .../Framework/Classes/Metal/RTCMTLRenderer.h  |   8 +-
 .../Framework/Classes/Metal/RTCMTLRenderer.mm | 159 +++------
 .../Framework/Classes/Metal/RTCMTLVideoView.m | 161 +++-------
 .../Headers/WebRTC/RTCMTLVideoView.h          |  17 +-
 8 files changed, 99 insertions(+), 269 deletions(-)

diff --git a/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m b/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m
index be1d3de638..2241930d47 100644
--- a/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m
+++ b/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m
@@ -43,9 +43,7 @@ static CGFloat const kStatusBarHeight = 20;
   if (self = [super initWithFrame:frame]) {
 
 #if defined(RTC_SUPPORTS_METAL)
-    RTCMTLVideoView *metalView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
-    metalView.useDisplayLink = NO;
-    _remoteVideoView = metalView;
+    _remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
 #else
     RTCEAGLVideoView *remoteView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
     remoteView.delegate = self;
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
index 03a381fc51..ae5525889e 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
@@ -90,9 +90,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 }
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
-  if (![super setupTexturesForFrame:frame]) {
-    return NO;
-  }
+  [super setupTexturesForFrame:frame];
 
   id<MTLDevice> device = [self currentMetalDevice];
   if (!device) {
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
index 7ba3393d50..d8dd7e71fc 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
@@ -89,9 +89,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
   RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
-  if ([super setupTexturesForFrame:frame] == NO) {
-    return NO;
-  }
+  [super setupTexturesForFrame:frame];
   CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
 
   id<MTLTexture> lumaTexture = nil;
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm
index b5190f77a7..e7358793b9 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm
@@ -88,9 +88,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
   RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
-  if ([super setupTexturesForFrame:frame] == NO) {
-    return NO;
-  }
+  [super setupTexturesForFrame:frame];
   CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
 
   id<MTLTexture> gpuTexture = nil;
@@ -123,9 +121,10 @@ static NSString *const shaderSource = MTL_STRINGIFY(
 
   if (gpuTexture != nil) {
     _texture = gpuTexture;
-    _uniformsBuffer = [[self currentMetalDevice] newBufferWithBytes:&isARGB
-                                                             length:sizeof(isARGB)
-                                                            options:MTLResourceStorageModePrivate];
+    _uniformsBuffer =
+        [[self currentMetalDevice] newBufferWithBytes:&isARGB
+                                               length:sizeof(isARGB)
+                                              options:MTLResourceCPUCacheModeDefaultCache];
     return YES;
   }
 
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
index 244736d26c..e279b098b5 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
@@ -46,16 +46,10 @@ NS_ASSUME_NONNULL_BEGIN
 @end
 
 /**
- * Implementation of RTCMTLRenderer protocol.
+ * Implementation of RTCMTLRenderer protocol for rendering native nv12 video frames.
  */
 NS_AVAILABLE(10_11, 9_0)
 @interface RTCMTLRenderer : NSObject <RTCMTLRenderer>
-
-/** @abstract A wrapped RTCVideoRotation, or nil.
-    @discussion When not nil, the frame rotation is ignored when rendering.
- */
-@property(atomic, nullable) NSValue *rotationOverride;
-
 @end
 
 NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
index 44b63de0c7..68486dcfd0 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
@@ -15,7 +15,6 @@
 
 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #include "api/video/video_rotation.h"
 #include "rtc_base/checks.h"
@@ -29,57 +28,31 @@ static NSString *const commandBufferLabel = @"RTCCommandBuffer";
 static NSString *const renderEncoderLabel = @"RTCEncoder";
 static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
 
-// Computes the texture coordinates given rotation and cropping.
-static inline void getCubeVertexData(int cropX,
-                                     int cropY,
-                                     int cropWidth,
-                                     int cropHeight,
-                                     size_t frameWidth,
-                                     size_t frameHeight,
-                                     RTCVideoRotation rotation,
-                                     float *buffer) {
-  // The computed values are the adjusted texture coordinates, in [0..1].
-  // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the
-  // left/top edge.
-  // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the
-  // right/bottom edge (i.e. keeping 80%).
-  float cropLeft = cropX / (float)frameWidth;
-  float cropRight = (cropX + cropWidth) / (float)frameWidth;
-  float cropTop = cropY / (float)frameHeight;
-  float cropBottom = (cropY + cropHeight) / (float)frameHeight;
+static const float cubeVertexData[64] = {
+    -1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0,
 
-  // These arrays map the view coordinates to texture coordinates, taking cropping and rotation
-  // into account. The first two columns are view coordinates, the last two are texture coordinates.
+    // rotation = 90, offset = 16.
+    -1.0, -1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0,
+
+    // rotation = 180, offset = 32.
+    -1.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, 0.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0,
+
+    // rotation = 270, offset = 48.
+    -1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 1.0, -1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0,
+};
+
+static inline int offsetForRotation(RTCVideoRotation rotation) {
   switch (rotation) {
-    case RTCVideoRotation_0: {
-      float values[16] = {-1.0, -1.0, cropLeft, cropBottom,
-                          1.0, -1.0, cropRight, cropBottom,
-                          -1.0, 1.0, cropLeft, cropTop,
-                          1.0, 1.0, cropRight, cropTop};
-      memcpy(buffer, &values, sizeof(values));
-    } break;
-    case RTCVideoRotation_90: {
-      float values[16] = {-1.0, -1.0, cropRight, cropBottom,
-                          1.0, -1.0, cropRight, cropTop,
-                          -1.0, 1.0, cropLeft, cropBottom,
-                          1.0, 1.0, cropLeft, cropTop};
-      memcpy(buffer, &values, sizeof(values));
-    } break;
-    case RTCVideoRotation_180: {
-      float values[16] = {-1.0, -1.0, cropRight, cropTop,
-                          1.0, -1.0, cropLeft, cropTop,
-                          -1.0, 1.0, cropRight, cropBottom,
-                          1.0, 1.0, cropLeft, cropBottom};
-      memcpy(buffer, &values, sizeof(values));
-    } break;
-    case RTCVideoRotation_270: {
-      float values[16] = {-1.0, -1.0, cropLeft, cropTop,
-                          1.0, -1.0, cropLeft, cropBottom,
-                          -1.0, 1.0, cropRight, cropTop,
-                          1.0, 1.0, cropRight, cropBottom};
-      memcpy(buffer, &values, sizeof(values));
-    } break;
+    case RTCVideoRotation_0:
+      return 0;
+    case RTCVideoRotation_90:
+      return 16;
+    case RTCVideoRotation_180:
+      return 32;
+    case RTCVideoRotation_270:
+      return 48;
   }
+  return 0;
 }
 
 // The max number of command buffers in flight (submitted to GPU).
@@ -102,20 +75,14 @@ static const NSInteger kMaxInflightBuffers = 1;
   // Buffers.
   id<MTLBuffer> _vertexBuffer;
 
-  // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
-  size_t _oldFrameWidth;
-  size_t _oldFrameHeight;
-  int _oldCropWidth;
-  int _oldCropHeight;
-  int _oldCropX;
-  int _oldCropY;
-  RTCVideoRotation _oldRotation;
+  // RTC Frame parameters.
+  int _offset;
 }
 
-@synthesize rotationOverride = _rotationOverride;
-
 - (instancetype)init {
   if (self = [super init]) {
+    // _offset of 0 is equal to rotation of 0.
+    _offset = 0;
     _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
   }
 
@@ -131,22 +98,13 @@
 - (BOOL)setupWithView:(__kindof MTKView *)view {
   BOOL success = NO;
   if ([self setupMetal]) {
-    _view = view;
-    view.device = _device;
-    view.preferredFramesPerSecond = 30;
-    view.autoResizeDrawable = NO;
-
-    float vertexBufferArray[16] = {0};
-    _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
-                                         length:sizeof(vertexBufferArray)
-                                        options:MTLResourceCPUCacheModeWriteCombined];
-
+    [self setupView:view];
     [self loadAssets];
+    [self setupBuffers];
     success = YES;
   }
   return success;
 }
-
 #pragma mark - Inheritance
 
 - (id<MTLDevice>)currentMetalDevice {
@@ -163,47 +121,7 @@ static const NSInteger kMaxInflightBuffers = 1;
 }
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
-  // Apply rotation override if set.
-  RTCVideoRotation rotation;
-  NSValue *rotationOverride = self.rotationOverride;
-  if (rotationOverride) {
-#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
-    if (@available(iOS 11, *)) {
-      [rotationOverride getValue:&rotation size:sizeof(rotation)];
-    } else
-#endif
-    {
-      [rotationOverride getValue:&rotation];
-    }
-  } else {
-    rotation = frame.rotation;
-  }
-
-  RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
-  size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
-  size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
-
-  // Recompute the texture cropping and recreate vertexBuffer if necessary.
-  if (pixelBuffer.cropX != _oldCropX || pixelBuffer.cropY != _oldCropY ||
-      pixelBuffer.cropWidth != _oldCropWidth || pixelBuffer.cropHeight != _oldCropHeight ||
-      rotation != _oldRotation || frameWidth != _oldFrameWidth || frameHeight != _oldFrameHeight) {
-    getCubeVertexData(pixelBuffer.cropX,
-                      pixelBuffer.cropY,
-                      pixelBuffer.cropWidth,
-                      pixelBuffer.cropHeight,
-                      frameWidth,
-                      frameHeight,
-                      rotation,
-                      (float *)_vertexBuffer.contents);
-    _oldCropX = pixelBuffer.cropX;
-    _oldCropY = pixelBuffer.cropY;
-    _oldCropWidth = pixelBuffer.cropWidth;
-    _oldCropHeight = pixelBuffer.cropHeight;
-    _oldRotation = rotation;
-    _oldFrameWidth = frameWidth;
-    _oldFrameHeight = frameHeight;
-  }
-
+  _offset = offsetForRotation(frame.rotation);
   return YES;
 }
 
@@ -240,6 +158,16 @@
   return YES;
 }
 
+- (void)setupView:(__kindof MTKView *)view {
+  view.device = _device;
+
+  view.preferredFramesPerSecond = 30;
+  view.autoResizeDrawable = NO;
+
+  // We need to keep reference to the view as it's needed down the rendering pipeline.
+  _view = view;
+}
+
 - (void)loadAssets {
   id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
   id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
@@ -258,6 +186,12 @@
   }
 }
 
+- (void)setupBuffers {
+  _vertexBuffer = [_device newBufferWithBytes:cubeVertexData
+                                       length:sizeof(cubeVertexData)
+                                      options:MTLResourceOptionCPUCacheModeDefault];
+}
+
 - (void)render {
   // Wait until the inflight (curently sent to GPU) command buffer
   // has completed the GPU work.
@@ -281,8 +215,7 @@
     // Set context state.
     [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
     [renderEncoder setRenderPipelineState:_pipelineState];
-
-    [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
+    [renderEncoder setVertexBuffer:_vertexBuffer offset:_offset * sizeof(float) atIndex:0];
     [self uploadTexturesToRenderEncoder:renderEncoder];
 
     [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m b/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
index 100b2f077c..08594c8d90 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
@@ -29,16 +29,17 @@
 #define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
 
 @interface RTCMTLVideoView () <MTKViewDelegate>
-@property(nonatomic) RTCMTLI420Renderer *rendererI420;
-@property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
-@property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
-@property(nonatomic) MTKView *metalView;
-@property(atomic) RTCVideoFrame *videoFrame;
-@property(nonatomic) CGSize videoFrameSize;
-@property(nonatomic) int64_t lastFrameTimeNs;
+@property(nonatomic, strong) RTCMTLI420Renderer *rendererI420;
+@property(nonatomic, strong) RTCMTLNV12Renderer *rendererNV12;
+@property(nonatomic, strong) RTCMTLRGBRenderer *rendererRGB;
+@property(nonatomic, strong) MTKView *metalView;
+@property(atomic, strong) RTCVideoFrame *videoFrame;
 @end
 
-@implementation RTCMTLVideoView
+@implementation RTCMTLVideoView {
+  int64_t _lastFrameTimeNs;
+  CGSize _videoFrameSize;
+}
 
 @synthesize delegate = _delegate;
 @synthesize rendererI420 = _rendererI420;
@@ -46,11 +47,6 @@
 @synthesize rendererRGB = _rendererRGB;
 @synthesize metalView = _metalView;
 @synthesize videoFrame = _videoFrame;
-@synthesize useDisplayLink = _useDisplayLink;
-@synthesize videoFrameSize = _videoFrameSize;
-@synthesize lastFrameTimeNs = _lastFrameTimeNs;
-@synthesize enabled = _enabled;
-@synthesize rotationOverride = _rotationOverride;
 
 - (instancetype)initWithFrame:(CGRect)frameRect {
   self = [super initWithFrame:frameRect];
@@ -68,36 +64,8 @@
   return self;
 }
 
-- (void)setUseDisplayLink:(BOOL)useDisplayLink {
-  _useDisplayLink = useDisplayLink;
-  [self updateRunningState];
-}
-
-- (void)setEnabled:(BOOL)enabled {
-  _enabled = enabled;
-  [self updateRunningState];
-}
-
-- (UIViewContentMode)videoContentMode {
-  return self.metalView.contentMode;
-}
-
-- (void)setVideoContentMode:(UIViewContentMode)mode {
-  self.metalView.contentMode = mode;
-}
-
 #pragma mark - Private
 
-- (void)updateRunningState {
-  if (self.useDisplayLink) {
-    self.metalView.paused = !self.enabled;
-    self.metalView.enableSetNeedsDisplay = YES;
-  } else {
-    self.metalView.paused = YES;
-    self.metalView.enableSetNeedsDisplay = NO;
-  }
-}
-
 + (BOOL)isMetalAvailable {
 #if defined(RTC_SUPPORTS_METAL)
   return MTLCreateSystemDefaultDevice() != nil;
@@ -106,6 +74,11 @@
 #endif
 }
 
++ (MTKView *)createMetalView:(CGRect)frame {
+  MTKView *view = [[MTKViewClass alloc] initWithFrame:frame];
+  return view;
+}
+
 + (RTCMTLNV12Renderer *)createNV12Renderer {
   return [[RTCMTLNV12RendererClass alloc] init];
 }
@@ -121,28 +94,33 @@
 
 - (void)configure {
   NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not availiable on this device");
 
-  _enabled = YES;
-  _useDisplayLink = YES;
-  [self updateRunningState];
-
-  self.metalView = [[MTKViewClass alloc] initWithFrame:self.bounds];
-  self.metalView.delegate = self;
-  self.metalView.paused = YES;
-  self.metalView.enableSetNeedsDisplay = NO;
-  self.metalView.contentMode = UIViewContentModeScaleAspectFill;
-  [self addSubview:self.metalView];
-  self.videoFrameSize = CGSizeZero;
+  _metalView = [RTCMTLVideoView createMetalView:self.bounds];
+  [self configureMetalView];
 }
 
+- (void)configureMetalView {
+  if (_metalView) {
+    _metalView.delegate = self;
+    [self addSubview:_metalView];
+    _metalView.contentMode = UIViewContentModeScaleAspectFit;
+    _videoFrameSize = CGSizeZero;
+  }
+}
+
+- (void)setVideoContentMode:(UIViewContentMode)mode {
+  _metalView.contentMode = mode;
+}
+
+#pragma mark - Private
+
 - (void)layoutSubviews {
   [super layoutSubviews];
-
   CGRect bounds = self.bounds;
-  self.metalView.frame = bounds;
-  if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) {
-    self.metalView.drawableSize = [self drawableSize];
+  _metalView.frame = bounds;
+  if (!CGSizeEqualToSize(_videoFrameSize, CGSizeZero)) {
+    _metalView.drawableSize = _videoFrameSize;
   } else {
-    self.metalView.drawableSize = bounds.size;
+    _metalView.drawableSize = bounds.size;
   }
 }
@@ -152,11 +130,10 @@
   NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
   RTCVideoFrame *videoFrame = self.videoFrame;
   // Skip rendering if we've already rendered this frame.
-  if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) {
+  if (!videoFrame || videoFrame.timeStampNs == _lastFrameTimeNs) {
     return;
   }
 
-  RTCMTLRenderer *renderer;
   if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
     RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
     const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
@@ -169,7 +146,7 @@
           return;
         }
       }
-      renderer = self.rendererRGB;
+      [self.rendererRGB drawFrame:videoFrame];
     } else {
       if (!self.rendererNV12) {
         self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
@@ -179,7 +156,7 @@
           return;
         }
       }
-      renderer = self.rendererNV12;
+      [self.rendererNV12 drawFrame:videoFrame];
     }
   } else {
     if (!self.rendererI420) {
@@ -190,82 +167,30 @@
         return;
       }
     }
-    renderer = self.rendererI420;
+    [self.rendererI420 drawFrame:videoFrame];
   }
-
-  renderer.rotationOverride = self.rotationOverride;
-
-  [renderer drawFrame:videoFrame];
-  self.lastFrameTimeNs = videoFrame.timeStampNs;
+  _lastFrameTimeNs = videoFrame.timeStampNs;
 }
 
 - (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
 }
 
-- (RTCVideoRotation)frameRotation {
-  if (self.rotationOverride) {
-    RTCVideoRotation rotation;
-#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
-    if (@available(iOS 11, *)) {
-      [self.rotationOverride getValue:&rotation size:sizeof(rotation)];
-    } else
-#endif
-    {
-      [self.rotationOverride getValue:&rotation];
-    }
-    return rotation;
-  }
-
-  return self.videoFrame.rotation;
-}
-
-- (CGSize)drawableSize {
-  // Flip width/height if the rotations are not the same.
-  CGSize videoFrameSize = self.videoFrameSize;
-
-  BOOL useLandscape = ([self frameRotation] == RTCVideoRotation_0) ||
-      ([self frameRotation] == RTCVideoRotation_180);
-  BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
-      (self.videoFrame.rotation == RTCVideoRotation_180);
-
-  if (useLandscape == sizeIsLandscape) {
-    return videoFrameSize;
-  } else {
-    return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
-  }
-}
-
 #pragma mark - RTCVideoRenderer
 
 - (void)setSize:(CGSize)size {
-  __weak RTCMTLVideoView *weakSelf = self;
+  self.metalView.drawableSize = size;
   dispatch_async(dispatch_get_main_queue(), ^{
-    RTCMTLVideoView *strongSelf = weakSelf;
-
-    strongSelf.videoFrameSize = size;
-    CGSize drawableSize = [strongSelf drawableSize];
-
-    strongSelf.metalView.drawableSize = drawableSize;
-    [strongSelf setNeedsLayout];
-    [strongSelf.delegate videoView:self didChangeVideoSize:size];
+    _videoFrameSize = size;
+    [self.delegate videoView:self didChangeVideoSize:size];
   });
 }
 
 - (void)renderFrame:(nullable RTCVideoFrame *)frame {
-  if (!self.isEnabled) {
-    return;
-  }
-
   if (frame == nil) {
     RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
     return;
   }
-
   self.videoFrame = frame;
-
-  if (!self.useDisplayLink) {
-    [self.metalView draw];
-  }
 }
 
 @end
diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMTLVideoView.h b/sdk/objc/Framework/Headers/WebRTC/RTCMTLVideoView.h
index 4cf09c0bf6..266f2c223f 100644
--- a/sdk/objc/Framework/Headers/WebRTC/RTCMTLVideoView.h
+++ b/sdk/objc/Framework/Headers/WebRTC/RTCMTLVideoView.h
@@ -10,7 +10,6 @@
 
 #import <Foundation/Foundation.h>
 
-#import "WebRTC/RTCVideoFrame.h"
 #import "WebRTC/RTCVideoRenderer.h"
 
 // Check if metal is supported in WebRTC.
@@ -36,21 +35,7 @@ RTC_EXPORT
 
 @property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
 
-@property(nonatomic) UIViewContentMode videoContentMode;
-
-/** @abstract Enables/disables rendering.
- */
-@property(nonatomic, getter=isEnabled) BOOL enabled;
-
-/** @abstract If YES, the backing MTKView will use a display link to issue
-    draw calls.
-    @discussion Default is YES.
- */
-@property(nonatomic) BOOL useDisplayLink;
-
-/** @abstract Wrapped RTCVideoRotation, or nil.
- */
-@property(nullable) NSValue* rotationOverride;
+- (void)setVideoContentMode:(UIViewContentMode)mode;
 
 @end