Mirror of https://github.com/mollyim/webrtc.git (synced 2025-05-13 22:00:47 +01:00)
ObjC: Add interface for injecting custom shaders in video views
This CL adds a way for external clients to inject their own OpenGL(ES) shaders into RTCEAGLVideoView/RTCNSGLVideoView. The shader interface takes textures as arguments rather than RTCVideoFrame, so that implementations only have to deal with the actual OpenGL rendering and not with converting frames into textures.

This CL also moves the internal shader code around a bit. The RTCShader protocol and its implementations RTCI420Shader and RTCNativeNV12Shader are removed. RTCEAGLVideoView and RTCNSGLVideoView are instead responsible for uploading frames to textures, using the helper classes RTCI420TextureCache and RTCNV12TextureCache, and then call the shader implementation with those textures. The rendering code that used to live in RTCI420Shader and RTCNativeNV12Shader has been merged into a single RTCDefaultShader class.

BUG=webrtc:7473
Review-Url: https://codereview.webrtc.org/2869143002
Cr-Commit-Position: refs/heads/master@{#18326}
This commit is contained in: parent 2cc70facaf, commit 13941912b1
16 changed files with 388 additions and 344 deletions
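For reference, a minimal sketch of how an external client could use the new hook, assuming only the interfaces added in this CL (the RTCVideoViewShading protocol and the shader: initializers) and the standard WebRTC framework import paths; MyCustomShader and CreateViewWithCustomShader are hypothetical names, and the GL program setup is elided:

// Usage sketch (hypothetical example, not part of this CL).
#import <UIKit/UIKit.h>
#import <OpenGLES/ES3/gl.h>  // for GLuint
#import <WebRTC/RTCEAGLVideoView.h>
#import <WebRTC/RTCVideoViewShading.h>

@interface MyCustomShader : NSObject <RTCVideoViewShading>
@end

@implementation MyCustomShader

// Called by the view for I420 frames; the planes have already been uploaded to
// textures by RTCI420TextureCache, so only the GL drawing happens here.
- (void)applyShadingForFrameWithRotation:(RTCVideoRotation)rotation
                                  yPlane:(GLuint)yPlane
                                  uPlane:(GLuint)uPlane
                                  vPlane:(GLuint)vPlane {
  // Bind yPlane/uPlane/vPlane to texture units and draw with a custom
  // fragment shader (program creation and caching omitted in this sketch).
}

// Called for native NV12 frames on iOS, with textures from RTCNV12TextureCache.
- (void)applyShadingForFrameWithRotation:(RTCVideoRotation)rotation
                                  yPlane:(GLuint)yPlane
                                 uvPlane:(GLuint)uvPlane {
  // Same idea: only the actual GL rendering of the two planes is done here.
}

@end

static RTCEAGLVideoView *CreateViewWithCustomShader(CGRect frame) {
  // Plain -initWithFrame: keeps the old behavior by forwarding to
  // -initWithFrame:shader: with an RTCDefaultShader instance.
  return [[RTCEAGLVideoView alloc] initWithFrame:frame
                                          shader:[[MyCustomShader alloc] init]];
}

On macOS, RTCNSGLVideoView gets the equivalent initWithFrame:pixelFormat:shader: initializer.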
|
@@ -83,11 +83,11 @@ if (is_ios || is_mac) {
|
|||
sources = [
|
||||
"objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.h",
|
||||
"objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm",
|
||||
"objc/Framework/Classes/Video/RTCI420Shader.mm",
|
||||
"objc/Framework/Classes/Video/RTCDefaultShader.h",
|
||||
"objc/Framework/Classes/Video/RTCDefaultShader.mm",
|
||||
"objc/Framework/Classes/Video/RTCI420TextureCache.h",
|
||||
"objc/Framework/Classes/Video/RTCI420TextureCache.mm",
|
||||
"objc/Framework/Classes/Video/RTCOpenGLDefines.h",
|
||||
"objc/Framework/Classes/Video/RTCShader+Private.h",
|
||||
"objc/Framework/Classes/Video/RTCShader.h",
|
||||
"objc/Framework/Classes/Video/RTCShader.mm",
|
||||
"objc/Framework/Classes/Video/avfoundationformatmapper.h",
|
||||
|
@@ -102,7 +102,6 @@ if (is_ios || is_mac) {
|
|||
sources += [
|
||||
"objc/Framework/Classes/Video/RTCNV12TextureCache.h",
|
||||
"objc/Framework/Classes/Video/RTCNV12TextureCache.m",
|
||||
"objc/Framework/Classes/Video/RTCNativeNV12Shader.mm",
|
||||
]
|
||||
libs += [
|
||||
"GLKit.framework",
|
||||
|
@@ -289,6 +288,7 @@ if (is_ios || is_mac) {
|
|||
"objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
|
||||
"objc/Framework/Headers/WebRTC/RTCVideoSource.h",
|
||||
"objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
|
||||
"objc/Framework/Headers/WebRTC/RTCVideoViewShading.h",
|
||||
]
|
||||
|
||||
libs = [ "AVFoundation.framework" ]
|
||||
|
@@ -421,6 +421,7 @@ if (is_ios || is_mac) {
|
|||
"objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
|
||||
"objc/Framework/Headers/WebRTC/RTCVideoSource.h",
|
||||
"objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
|
||||
"objc/Framework/Headers/WebRTC/RTCVideoViewShading.h",
|
||||
"objc/Framework/Headers/WebRTC/UIDevice+RTCDevice.h",
|
||||
"objc/Framework/Headers/WebRTC/WebRTC.h",
|
||||
]
|
||||
|
|
|
@@ -12,7 +12,9 @@
|
|||
|
||||
#import <GLKit/GLKit.h>
|
||||
|
||||
#import "RTCShader+Private.h"
|
||||
#import "RTCDefaultShader.h"
|
||||
#import "RTCI420TextureCache.h"
|
||||
#import "RTCNV12TextureCache.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
|
@@ -97,8 +99,9 @@
|
|||
// This flag should only be set and read on the main thread (e.g. by
|
||||
// setNeedsDisplay)
|
||||
BOOL _isDirty;
|
||||
id<RTCShader> _i420Shader;
|
||||
id<RTCShader> _nv12Shader;
|
||||
id<RTCVideoViewShading> _shader;
|
||||
RTCNV12TextureCache *_nv12TextureCache;
|
||||
RTCI420TextureCache *_i420TextureCache;
|
||||
RTCVideoFrame *_lastDrawnFrame;
|
||||
}
|
||||
|
||||
|
@@ -107,14 +110,24 @@
|
|||
@synthesize glkView = _glkView;
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frame {
|
||||
return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]];
|
||||
}
|
||||
|
||||
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
|
||||
return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
|
||||
}
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTCVideoViewShading>)shader {
|
||||
if (self = [super initWithFrame:frame]) {
|
||||
_shader = shader;
|
||||
[self configure];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
|
||||
- (instancetype)initWithCoder:(NSCoder *)aDecoder shader:(id<RTCVideoViewShading>)shader {
|
||||
if (self = [super initWithCoder:aDecoder]) {
|
||||
_shader = shader;
|
||||
[self configure];
|
||||
}
|
||||
return self;
|
||||
|
@@ -207,22 +220,26 @@
|
|||
}
|
||||
[self ensureGLContext];
|
||||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
id<RTCShader> shader = nil;
|
||||
if (frame.nativeHandle) {
|
||||
if (!_nv12Shader) {
|
||||
_nv12Shader = [[RTCNativeNV12Shader alloc] initWithContext:_glContext];
|
||||
if (!_nv12TextureCache) {
|
||||
_nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
|
||||
}
|
||||
shader = _nv12Shader;
|
||||
} else {
|
||||
if (!_i420Shader) {
|
||||
_i420Shader = [[RTCI420Shader alloc] initWithContext:_glContext];
|
||||
if (_nv12TextureCache) {
|
||||
[_nv12TextureCache uploadFrameToTextures:frame];
|
||||
[_shader applyShadingForFrameWithRotation:frame.rotation
|
||||
yPlane:_nv12TextureCache.yTexture
|
||||
uvPlane:_nv12TextureCache.uvTexture];
|
||||
[_nv12TextureCache releaseTextures];
|
||||
}
|
||||
shader = _i420Shader;
|
||||
}
|
||||
if (shader && [shader drawFrame:frame]) {
|
||||
_lastDrawnFrame = frame;
|
||||
} else {
|
||||
RTCLog(@"Failed to draw frame.");
|
||||
if (!_i420TextureCache) {
|
||||
_i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext];
|
||||
}
|
||||
[_i420TextureCache uploadFrameToTextures:frame];
|
||||
[_shader applyShadingForFrameWithRotation:frame.rotation
|
||||
yPlane:_i420TextureCache.yTexture
|
||||
uPlane:_i420TextureCache.uTexture
|
||||
vPlane:_i420TextureCache.vTexture];
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -275,8 +292,8 @@
|
|||
_timer.isPaused = YES;
|
||||
[_glkView deleteDrawable];
|
||||
[self ensureGLContext];
|
||||
_i420Shader = nil;
|
||||
_nv12Shader = nil;
|
||||
_nv12TextureCache = nil;
|
||||
_i420TextureCache = nil;
|
||||
}
|
||||
|
||||
- (void)didBecomeActive {
|
||||
|
|
|
@@ -18,7 +18,8 @@
|
|||
#import <CoreVideo/CVDisplayLink.h>
|
||||
#import <OpenGL/gl3.h>
|
||||
|
||||
#import "RTCShader+Private.h"
|
||||
#import "RTCDefaultShader.h"
|
||||
#import "RTCI420TextureCache.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
|
@@ -26,7 +27,7 @@
|
|||
// |videoFrame| is set when we receive a frame from a worker thread and is read
|
||||
// from the display link callback so atomicity is required.
|
||||
@property(atomic, strong) RTCVideoFrame *videoFrame;
|
||||
@property(atomic, strong) id<RTCShader> i420Shader;
|
||||
@property(atomic, strong) RTCI420TextureCache *i420TextureCache;
|
||||
|
||||
- (void)drawFrame;
|
||||
@end
|
||||
|
@@ -45,11 +46,25 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
|
|||
@implementation RTCNSGLVideoView {
|
||||
CVDisplayLinkRef _displayLink;
|
||||
RTCVideoFrame *_lastDrawnFrame;
|
||||
id<RTCVideoViewShading> _shader;
|
||||
}
|
||||
|
||||
@synthesize delegate = _delegate;
|
||||
@synthesize videoFrame = _videoFrame;
|
||||
@synthesize i420Shader = _i420Shader;
|
||||
@synthesize i420TextureCache = _i420TextureCache;
|
||||
|
||||
- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format {
|
||||
return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]];
|
||||
}
|
||||
|
||||
- (instancetype)initWithFrame:(NSRect)frame
|
||||
pixelFormat:(NSOpenGLPixelFormat *)format
|
||||
shader:(id<RTCVideoViewShading>)shader {
|
||||
if (self = [super initWithFrame:frame pixelFormat:format]) {
|
||||
_shader = shader;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
[self teardownDisplayLink];
|
||||
|
@@ -85,7 +100,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
|
|||
|
||||
- (void)clearGLContext {
|
||||
[self ensureGLContext];
|
||||
self.i420Shader = nil;
|
||||
self.i420TextureCache = nil;
|
||||
[super clearGLContext];
|
||||
}
|
||||
|
||||
|
@@ -118,15 +133,20 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
|
|||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
// Rendering native CVPixelBuffer is not supported on OS X.
|
||||
// TODO(magjed): Add support for NV12 texture cache on OS X.
|
||||
frame = [frame newI420VideoFrame];
|
||||
if (!self.i420Shader) {
|
||||
self.i420Shader = [[RTCI420Shader alloc] initWithContext:context];
|
||||
if (!self.i420TextureCache) {
|
||||
self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context];
|
||||
}
|
||||
if (self.i420Shader && [self.i420Shader drawFrame:frame]) {
|
||||
RTCI420TextureCache *i420TextureCache = self.i420TextureCache;
|
||||
if (i420TextureCache) {
|
||||
[i420TextureCache uploadFrameToTextures:frame];
|
||||
[_shader applyShadingForFrameWithRotation:frame.rotation
|
||||
yPlane:i420TextureCache.yTexture
|
||||
uPlane:i420TextureCache.uTexture
|
||||
vPlane:i420TextureCache.vTexture];
|
||||
[context flushBuffer];
|
||||
_lastDrawnFrame = frame;
|
||||
} else {
|
||||
RTCLog(@"Failed to draw frame.");
|
||||
}
|
||||
CGLUnlockContext([context CGLContextObj]);
|
||||
}
|
||||
|
|
webrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h (new file, 23 lines)
@@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "WebRTC/RTCVideoViewShading.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView and
|
||||
* RTCEAGLVideoView if no external shader is specified. This shader will render
|
||||
* the video in a rectangle without any color or geometric transformations.
|
||||
*/
|
||||
@interface RTCDefaultShader : NSObject<RTCVideoViewShading>
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
webrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm (new file, 203 lines)
@@ -0,0 +1,203 @@
|
|||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCDefaultShader.h"
|
||||
|
||||
#if TARGET_OS_IPHONE
|
||||
#import <OpenGLES/ES3/gl.h>
|
||||
#else
|
||||
#import <OpenGL/gl3.h>
|
||||
#endif
|
||||
|
||||
#import "RTCOpenGLDefines.h"
|
||||
#import "RTCShader.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
|
||||
#include "webrtc/base/optional.h"
|
||||
|
||||
static const int kYTextureUnit = 0;
|
||||
static const int kUTextureUnit = 1;
|
||||
static const int kVTextureUnit = 2;
|
||||
static const int kUvTextureUnit = 1;
|
||||
|
||||
// Fragment shader converts YUV values from input textures into a final RGB
|
||||
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
|
||||
static const char kI420FragmentShaderSource[] =
|
||||
SHADER_VERSION
|
||||
"precision highp float;"
|
||||
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
|
||||
"uniform lowp sampler2D s_textureY;\n"
|
||||
"uniform lowp sampler2D s_textureU;\n"
|
||||
"uniform lowp sampler2D s_textureV;\n"
|
||||
FRAGMENT_SHADER_OUT
|
||||
"void main() {\n"
|
||||
" float y, u, v, r, g, b;\n"
|
||||
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
|
||||
" u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
|
||||
" v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
|
||||
" u = u - 0.5;\n"
|
||||
" v = v - 0.5;\n"
|
||||
" r = y + 1.403 * v;\n"
|
||||
" g = y - 0.344 * u - 0.714 * v;\n"
|
||||
" b = y + 1.770 * u;\n"
|
||||
" " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
|
||||
" }\n";
|
||||
|
||||
static const char kNV12FragmentShaderSource[] =
|
||||
SHADER_VERSION
|
||||
"precision mediump float;"
|
||||
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
|
||||
"uniform lowp sampler2D s_textureY;\n"
|
||||
"uniform lowp sampler2D s_textureUV;\n"
|
||||
FRAGMENT_SHADER_OUT
|
||||
"void main() {\n"
|
||||
" mediump float y;\n"
|
||||
" mediump vec2 uv;\n"
|
||||
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
|
||||
" uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
|
||||
" vec2(0.5, 0.5);\n"
|
||||
" " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
|
||||
" y - 0.344 * uv.x - 0.714 * uv.y,\n"
|
||||
" y + 1.770 * uv.x,\n"
|
||||
" 1.0);\n"
|
||||
" }\n";
|
||||
|
||||
@implementation RTCDefaultShader {
|
||||
GLuint _vertexBuffer;
|
||||
GLuint _vertexArray;
|
||||
// Store current rotation and only upload new vertex data when rotation changes.
|
||||
rtc::Optional<RTCVideoRotation> _currentRotation;
|
||||
|
||||
GLuint _i420Program;
|
||||
GLuint _nv12Program;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
glDeleteProgram(_i420Program);
|
||||
glDeleteProgram(_nv12Program);
|
||||
glDeleteBuffers(1, &_vertexBuffer);
|
||||
glDeleteVertexArrays(1, &_vertexArray);
|
||||
}
|
||||
|
||||
- (BOOL)createAndSetupI420Program {
|
||||
NSAssert(!_i420Program, @"I420 program already created");
|
||||
_i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource);
|
||||
if (!_i420Program) {
|
||||
return NO;
|
||||
}
|
||||
GLint ySampler = glGetUniformLocation(_i420Program, "s_textureY");
|
||||
GLint uSampler = glGetUniformLocation(_i420Program, "s_textureU");
|
||||
GLint vSampler = glGetUniformLocation(_i420Program, "s_textureV");
|
||||
|
||||
if (ySampler < 0 || uSampler < 0 || vSampler < 0) {
|
||||
RTCLog(@"Failed to get uniform variable locations in I420 shader");
|
||||
glDeleteProgram(_i420Program);
|
||||
_i420Program = 0;
|
||||
return NO;
|
||||
}
|
||||
|
||||
glUseProgram(_i420Program);
|
||||
glUniform1i(ySampler, kYTextureUnit);
|
||||
glUniform1i(uSampler, kUTextureUnit);
|
||||
glUniform1i(vSampler, kVTextureUnit);
|
||||
|
||||
return YES;
|
||||
}
|
||||
|
||||
- (BOOL)createAndSetupNV12Program {
|
||||
NSAssert(!_nv12Program, @"NV12 program already created");
|
||||
_nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource);
|
||||
if (!_nv12Program) {
|
||||
return NO;
|
||||
}
|
||||
GLint ySampler = glGetUniformLocation(_nv12Program, "s_textureY");
|
||||
GLint uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV");
|
||||
|
||||
if (ySampler < 0 || uvSampler < 0) {
|
||||
RTCLog(@"Failed to get uniform variable locations in NV12 shader");
|
||||
glDeleteProgram(_nv12Program);
|
||||
_nv12Program = 0;
|
||||
return NO;
|
||||
}
|
||||
|
||||
glUseProgram(_nv12Program);
|
||||
glUniform1i(ySampler, kYTextureUnit);
|
||||
glUniform1i(uvSampler, kUvTextureUnit);
|
||||
|
||||
return YES;
|
||||
}
|
||||
|
||||
- (BOOL)prepareVertexBufferWithRotation:(RTCVideoRotation)rotation {
|
||||
if (!_vertexBuffer && !RTCCreateVertexBuffer(&_vertexBuffer, &_vertexArray)) {
|
||||
RTCLog(@"Failed to setup vertex buffer");
|
||||
return NO;
|
||||
}
|
||||
#if !TARGET_OS_IPHONE
|
||||
glBindVertexArray(_vertexArray);
|
||||
#endif
|
||||
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
|
||||
if (!_currentRotation || rotation != *_currentRotation) {
|
||||
_currentRotation = rtc::Optional<RTCVideoRotation>(rotation);
|
||||
RTCSetVertexData(*_currentRotation);
|
||||
}
|
||||
return YES;
|
||||
}
|
||||
|
||||
- (void)applyShadingForFrameWithRotation:(RTCVideoRotation)rotation
|
||||
yPlane:(GLuint)yPlane
|
||||
uPlane:(GLuint)uPlane
|
||||
vPlane:(GLuint)vPlane {
|
||||
if (![self prepareVertexBufferWithRotation:rotation]) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!_i420Program && ![self createAndSetupI420Program]) {
|
||||
RTCLog(@"Failed to setup I420 program");
|
||||
return;
|
||||
}
|
||||
|
||||
glUseProgram(_i420Program);
|
||||
|
||||
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
|
||||
glBindTexture(GL_TEXTURE_2D, yPlane);
|
||||
|
||||
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUTextureUnit));
|
||||
glBindTexture(GL_TEXTURE_2D, uPlane);
|
||||
|
||||
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kVTextureUnit));
|
||||
glBindTexture(GL_TEXTURE_2D, vPlane);
|
||||
|
||||
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
|
||||
}
|
||||
|
||||
- (void)applyShadingForFrameWithRotation:(RTCVideoRotation)rotation
|
||||
yPlane:(GLuint)yPlane
|
||||
uvPlane:(GLuint)uvPlane {
|
||||
if (![self prepareVertexBufferWithRotation:rotation]) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!_nv12Program && ![self createAndSetupNV12Program]) {
|
||||
RTCLog(@"Failed to setup NV12 shader");
|
||||
return;
|
||||
}
|
||||
|
||||
glUseProgram(_nv12Program);
|
||||
|
||||
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
|
||||
glBindTexture(GL_TEXTURE_2D, yPlane);
|
||||
|
||||
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUvTextureUnit));
|
||||
glBindTexture(GL_TEXTURE_2D, uvPlane);
|
||||
|
||||
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
|
||||
}
|
||||
|
||||
@end
|
|
@@ -1,119 +0,0 @@
|
|||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCShader.h"
|
||||
|
||||
#import "RTCI420TextureCache.h"
|
||||
#import "RTCShader+Private.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
#include "webrtc/base/optional.h"
|
||||
|
||||
// Fragment shader converts YUV values from input textures into a final RGB
|
||||
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
|
||||
static const char kI420FragmentShaderSource[] =
|
||||
SHADER_VERSION
|
||||
"precision highp float;"
|
||||
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
|
||||
"uniform lowp sampler2D s_textureY;\n"
|
||||
"uniform lowp sampler2D s_textureU;\n"
|
||||
"uniform lowp sampler2D s_textureV;\n"
|
||||
FRAGMENT_SHADER_OUT
|
||||
"void main() {\n"
|
||||
" float y, u, v, r, g, b;\n"
|
||||
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
|
||||
" u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
|
||||
" v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
|
||||
" u = u - 0.5;\n"
|
||||
" v = v - 0.5;\n"
|
||||
" r = y + 1.403 * v;\n"
|
||||
" g = y - 0.344 * u - 0.714 * v;\n"
|
||||
" b = y + 1.770 * u;\n"
|
||||
" " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
|
||||
" }\n";
|
||||
|
||||
@implementation RTCI420Shader {
|
||||
RTCI420TextureCache* textureCache;
|
||||
// Handles for OpenGL constructs.
|
||||
GLuint _i420Program;
|
||||
GLuint _vertexArray;
|
||||
GLuint _vertexBuffer;
|
||||
GLint _ySampler;
|
||||
GLint _uSampler;
|
||||
GLint _vSampler;
|
||||
// Store current rotation and only upload new vertex data when rotation
|
||||
// changes.
|
||||
rtc::Optional<RTCVideoRotation> _currentRotation;
|
||||
}
|
||||
|
||||
- (instancetype)initWithContext:(GlContextType *)context {
|
||||
if (self = [super init]) {
|
||||
textureCache = [[RTCI420TextureCache alloc] initWithContext:context];
|
||||
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
|
||||
if (![self setupI420Program] ||
|
||||
!RTCSetupVerticesForProgram(_i420Program, &_vertexBuffer, &_vertexArray)) {
|
||||
RTCLog(@"Failed to initialize RTCI420Shader.");
|
||||
self = nil;
|
||||
}
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
glDeleteProgram(_i420Program);
|
||||
glDeleteBuffers(1, &_vertexBuffer);
|
||||
glDeleteVertexArrays(1, &_vertexArray);
|
||||
}
|
||||
|
||||
- (BOOL)setupI420Program {
|
||||
_i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource);
|
||||
if (!_i420Program) {
|
||||
return NO;
|
||||
}
|
||||
_ySampler = glGetUniformLocation(_i420Program, "s_textureY");
|
||||
_uSampler = glGetUniformLocation(_i420Program, "s_textureU");
|
||||
_vSampler = glGetUniformLocation(_i420Program, "s_textureV");
|
||||
|
||||
return (_ySampler >= 0 && _uSampler >= 0 && _vSampler >= 0);
|
||||
}
|
||||
|
||||
- (BOOL)drawFrame:(RTCVideoFrame*)frame {
|
||||
glUseProgram(_i420Program);
|
||||
|
||||
[textureCache uploadFrameToTextures:frame];
|
||||
|
||||
#if !TARGET_OS_IPHONE
|
||||
glBindVertexArray(_vertexArray);
|
||||
#endif
|
||||
|
||||
glActiveTexture(GL_TEXTURE0);
|
||||
glBindTexture(GL_TEXTURE_2D, textureCache.yTexture);
|
||||
glUniform1i(_ySampler, 0);
|
||||
|
||||
glActiveTexture(GL_TEXTURE1);
|
||||
glBindTexture(GL_TEXTURE_2D, textureCache.uTexture);
|
||||
glUniform1i(_uSampler, 1);
|
||||
|
||||
glActiveTexture(GL_TEXTURE2);
|
||||
glBindTexture(GL_TEXTURE_2D, textureCache.vTexture);
|
||||
glUniform1i(_vSampler, 2);
|
||||
|
||||
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
|
||||
if (!_currentRotation || frame.rotation != *_currentRotation) {
|
||||
_currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation);
|
||||
RTCSetVertexData(*_currentRotation);
|
||||
}
|
||||
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
|
||||
|
||||
return YES;
|
||||
}
|
||||
|
||||
@end
|
|
@@ -17,7 +17,8 @@
|
|||
@property(nonatomic, readonly) GLuint uTexture;
|
||||
@property(nonatomic, readonly) GLuint vTexture;
|
||||
|
||||
- (instancetype)initWithContext:(GlContextType *)context;
|
||||
- (instancetype)init NS_UNAVAILABLE;
|
||||
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
- (void)uploadFrameToTextures:(RTCVideoFrame *)frame;
|
||||
|
||||
|
|
|
@@ -10,7 +10,11 @@
|
|||
|
||||
#import "RTCI420TextureCache.h"
|
||||
|
||||
#import "RTCShader+Private.h"
|
||||
#if TARGET_OS_IPHONE
|
||||
#import <OpenGLES/ES3/gl.h>
|
||||
#else
|
||||
#import <OpenGL/gl3.h>
|
||||
#endif
|
||||
|
||||
#include <vector>
|
||||
|
||||
|
|
|
@@ -19,7 +19,8 @@ NS_ASSUME_NONNULL_BEGIN
|
|||
@property(nonatomic, readonly) GLuint yTexture;
|
||||
@property(nonatomic, readonly) GLuint uvTexture;
|
||||
|
||||
- (nullable instancetype)initWithContext:(EAGLContext *)context;
|
||||
- (instancetype)init NS_UNAVAILABLE;
|
||||
- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;
|
||||
|
||||
|
|
|
@@ -1,107 +0,0 @@
|
|||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCShader.h"
|
||||
|
||||
#import "RTCNV12TextureCache.h"
|
||||
#import "RTCShader+Private.h"
|
||||
#import "WebRTC/RTCLogging.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
#include "webrtc/base/checks.h"
|
||||
#include "webrtc/base/optional.h"
|
||||
|
||||
static const char kNV12FragmentShaderSource[] =
|
||||
SHADER_VERSION
|
||||
"precision mediump float;"
|
||||
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
|
||||
"uniform lowp sampler2D s_textureY;\n"
|
||||
"uniform lowp sampler2D s_textureUV;\n"
|
||||
FRAGMENT_SHADER_OUT
|
||||
"void main() {\n"
|
||||
" mediump float y;\n"
|
||||
" mediump vec2 uv;\n"
|
||||
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
|
||||
" uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
|
||||
" vec2(0.5, 0.5);\n"
|
||||
" " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
|
||||
" y - 0.344 * uv.x - 0.714 * uv.y,\n"
|
||||
" y + 1.770 * uv.x,\n"
|
||||
" 1.0);\n"
|
||||
" }\n";
|
||||
|
||||
@implementation RTCNativeNV12Shader {
|
||||
GLuint _vertexBuffer;
|
||||
GLuint _nv12Program;
|
||||
GLint _ySampler;
|
||||
GLint _uvSampler;
|
||||
RTCNV12TextureCache *_textureCache;
|
||||
// Store current rotation and only upload new vertex data when rotation
|
||||
// changes.
|
||||
rtc::Optional<RTCVideoRotation> _currentRotation;
|
||||
}
|
||||
|
||||
- (instancetype)initWithContext:(GlContextType *)context {
|
||||
if (self = [super init]) {
|
||||
_textureCache = [[RTCNV12TextureCache alloc] initWithContext:context];
|
||||
if (!_textureCache || ![self setupNV12Program] ||
|
||||
!RTCSetupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) {
|
||||
RTCLog(@"Failed to initialize RTCNativeNV12Shader.");
|
||||
self = nil;
|
||||
}
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
glDeleteProgram(_nv12Program);
|
||||
glDeleteBuffers(1, &_vertexBuffer);
|
||||
}
|
||||
|
||||
- (BOOL)setupNV12Program {
|
||||
_nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource);
|
||||
if (!_nv12Program) {
|
||||
return NO;
|
||||
}
|
||||
_ySampler = glGetUniformLocation(_nv12Program, "s_textureY");
|
||||
_uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV");
|
||||
|
||||
return (_ySampler >= 0 && _uvSampler >= 0);
|
||||
}
|
||||
|
||||
- (BOOL)drawFrame:(RTCVideoFrame *)frame {
|
||||
glUseProgram(_nv12Program);
|
||||
if (![_textureCache uploadFrameToTextures:frame]) {
|
||||
return NO;
|
||||
}
|
||||
|
||||
// Y-plane.
|
||||
glActiveTexture(GL_TEXTURE0);
|
||||
glUniform1i(_ySampler, 0);
|
||||
glBindTexture(GL_TEXTURE_2D, _textureCache.yTexture);
|
||||
|
||||
// UV-plane.
|
||||
glActiveTexture(GL_TEXTURE1);
|
||||
glUniform1i(_uvSampler, 1);
|
||||
glBindTexture(GL_TEXTURE_2D, _textureCache.uvTexture);
|
||||
|
||||
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
|
||||
if (!_currentRotation || frame.rotation != *_currentRotation) {
|
||||
_currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation);
|
||||
RTCSetVertexData(*_currentRotation);
|
||||
}
|
||||
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
|
||||
|
||||
[_textureCache releaseTextures];
|
||||
|
||||
return YES;
|
||||
}
|
||||
|
||||
@end
|
|
@@ -1,29 +0,0 @@
|
|||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import "RTCShader.h"
|
||||
|
||||
#import "WebRTC/RTCMacros.h"
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
#if TARGET_OS_IPHONE
|
||||
#import <OpenGLES/ES3/gl.h>
|
||||
#else
|
||||
#import <OpenGL/gl3.h>
|
||||
#endif
|
||||
|
||||
RTC_EXTERN const char kRTCVertexShaderSource[];
|
||||
|
||||
RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar *source);
|
||||
RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
|
||||
RTC_EXTERN GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
|
||||
RTC_EXTERN BOOL RTCSetupVerticesForProgram(
|
||||
GLuint program, GLuint* vertexBuffer, GLuint* vertexArray);
|
||||
RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation);
|
|
@@ -8,38 +8,13 @@
|
|||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import "WebRTC/RTCVideoFrame.h"
|
||||
|
||||
#import "RTCOpenGLDefines.h"
|
||||
RTC_EXTERN const char kRTCVertexShaderSource[];
|
||||
|
||||
@class RTCVideoFrame;
|
||||
|
||||
@protocol RTCShader <NSObject>
|
||||
|
||||
- (BOOL)drawFrame:(RTCVideoFrame *)frame;
|
||||
|
||||
@end
|
||||
|
||||
// Shader for non-native I420 frames.
|
||||
@interface RTCI420Shader : NSObject <RTCShader>
|
||||
|
||||
- (instancetype)init NS_UNAVAILABLE;
|
||||
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
|
||||
- (BOOL)drawFrame:(RTCVideoFrame *)frame;
|
||||
|
||||
@end
|
||||
|
||||
// Native CVPixelBufferRef rendering is only supported on iPhone because it
|
||||
// depends on CVOpenGLESTextureCacheCreate.
|
||||
#if TARGET_OS_IPHONE
|
||||
|
||||
// Shader for native NV12 frames.
|
||||
@interface RTCNativeNV12Shader : NSObject <RTCShader>
|
||||
|
||||
- (instancetype)init NS_UNAVAILABLE;
|
||||
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
|
||||
- (BOOL)drawFrame:(RTCVideoFrame *)frame;
|
||||
|
||||
@end
|
||||
|
||||
#endif // TARGET_OS_IPHONE
|
||||
RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar *source);
|
||||
RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
|
||||
RTC_EXTERN GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
|
||||
RTC_EXTERN BOOL RTCCreateVertexBuffer(GLuint* vertexBuffer,
|
||||
GLuint* vertexArray);
|
||||
RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation);
|
||||
|
|
|
@@ -10,11 +10,17 @@
|
|||
|
||||
#import "RTCShader.h"
|
||||
|
||||
#if TARGET_OS_IPHONE
|
||||
#import <OpenGLES/ES3/gl.h>
|
||||
#else
|
||||
#import <OpenGL/gl3.h>
|
||||
#endif
|
||||
|
||||
#include <algorithm>
|
||||
#include <array>
|
||||
#include <memory>
|
||||
|
||||
#import "RTCShader+Private.h"
|
||||
#import "RTCOpenGLDefines.h"
|
||||
|
||||
#include "webrtc/base/checks.h"
|
||||
#include "webrtc/base/logging.h"
|
||||
|
@@ -95,17 +101,30 @@ GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
|
|||
if (fragmentShader) {
|
||||
glDeleteShader(fragmentShader);
|
||||
}
|
||||
return program;
|
||||
}
|
||||
|
||||
// Set vertex shader variables 'position' and 'texcoord' in |program| to use
|
||||
// |vertexBuffer| and |vertexArray| to store the vertex data.
|
||||
BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* vertexArray) {
|
||||
// Set vertex shader variables 'position' and 'texcoord' in program.
|
||||
GLint position = glGetAttribLocation(program, "position");
|
||||
GLint texcoord = glGetAttribLocation(program, "texcoord");
|
||||
if (position < 0 || texcoord < 0) {
|
||||
return NO;
|
||||
glDeleteProgram(program);
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Read position attribute with size of 2 and stride of 4 beginning at the start of the array. The
|
||||
// last argument indicates offset of data within the vertex buffer.
|
||||
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
|
||||
glEnableVertexAttribArray(position);
|
||||
|
||||
// Read texcoord attribute with size of 2 and stride of 4 beginning at the first texcoord in the
|
||||
// array. The last argument indicates offset of data within the vertex buffer.
|
||||
glVertexAttribPointer(
|
||||
texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)(2 * sizeof(GLfloat)));
|
||||
glEnableVertexAttribArray(texcoord);
|
||||
|
||||
return program;
|
||||
}
|
||||
|
||||
BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) {
|
||||
#if !TARGET_OS_IPHONE
|
||||
glGenVertexArrays(1, vertexArray);
|
||||
if (*vertexArray == 0) {
|
||||
|
@@ -115,25 +134,11 @@ BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* ve
|
|||
#endif
|
||||
glGenBuffers(1, vertexBuffer);
|
||||
if (*vertexBuffer == 0) {
|
||||
glDeleteVertexArrays(1, vertexArray);
|
||||
return NO;
|
||||
}
|
||||
glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer);
|
||||
glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW);
|
||||
|
||||
// Read position attribute with size of 2 and stride of 4 beginning at the
|
||||
// start of the array. The last argument indicates offset of data within the
|
||||
// vertex buffer.
|
||||
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
|
||||
(void *)0);
|
||||
glEnableVertexAttribArray(position);
|
||||
|
||||
// Read texcoord attribute from |gVertices| with size of 2 and stride of 4
|
||||
// beginning at the first texcoord in the array. The last argument indicates
|
||||
// offset of data within |gVertices| as supplied to the vertex buffer.
|
||||
glVertexAttribPointer(texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
|
||||
(void *)(2 * sizeof(GLfloat)));
|
||||
glEnableVertexAttribArray(texcoord);
|
||||
|
||||
return YES;
|
||||
}
|
||||
|
||||
|
|
|
@@ -13,6 +13,7 @@
|
|||
|
||||
#import <WebRTC/RTCMacros.h>
|
||||
#import <WebRTC/RTCVideoRenderer.h>
|
||||
#import <WebRTC/RTCVideoViewShading.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
|
@@ -26,13 +27,19 @@ RTC_EXPORT
|
|||
|
||||
/**
|
||||
* RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its
|
||||
* bounds using OpenGLES 2.0.
|
||||
* bounds using OpenGLES 2.0 or OpenGLES 3.0.
|
||||
*/
|
||||
RTC_EXPORT
|
||||
@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
|
||||
|
||||
@property(nonatomic, weak) id<RTCEAGLVideoViewDelegate> delegate;
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frame
|
||||
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
- (instancetype)initWithCoder:(NSCoder *)aDecoder
|
||||
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
|
|
|
@@ -15,6 +15,7 @@
|
|||
#import <AppKit/NSOpenGLView.h>
|
||||
|
||||
#import <WebRTC/RTCVideoRenderer.h>
|
||||
#import <WebRTC/RTCVideoViewShading.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
|
@@ -29,6 +30,10 @@ NS_ASSUME_NONNULL_BEGIN
|
|||
|
||||
@property(nonatomic, weak) id<RTCNSGLVideoViewDelegate> delegate;
|
||||
|
||||
- (instancetype)initWithFrame:(NSRect)frameRect
|
||||
pixelFormat:(NSOpenGLPixelFormat *)format
|
||||
shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
|
|
|
@@ -0,0 +1,37 @@
|
|||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
#import <WebRTC/RTCVideoFrame.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/**
|
||||
* RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders used in
|
||||
* rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
|
||||
*/
|
||||
RTC_EXPORT
|
||||
@protocol RTCVideoViewShading <NSObject>
|
||||
|
||||
/** Callback for I420 frames. Each plane is given as a texture. */
|
||||
- (void)applyShadingForFrameWithRotation:(RTCVideoRotation)rotation
|
||||
yPlane:(GLuint)yPlane
|
||||
uPlane:(GLuint)uPlane
|
||||
vPlane:(GLuint)vPlane;
|
||||
|
||||
/** Callback for NV12 frames. Each plane is given as a texture. */
|
||||
- (void)applyShadingForFrameWithRotation:(RTCVideoRotation)rotation
|
||||
yPlane:(GLuint)yPlane
|
||||
uvPlane:(GLuint)uvPlane;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|