iOS Live Streaming Series, Part 2: Beauty Filter Rendering

CIFilter

// Get the array of filter names for a given category
NSArray<NSString *> *categoryArr = [CIFilter filterNamesInCategory:kCICategoryDistortionEffect];
NSLog(@"==== %@", categoryArr);

Filter categories

/* Categories */
CORE_IMAGE_EXPORT NSString * const kCICategoryDistortionEffect;
CORE_IMAGE_EXPORT NSString * const kCICategoryGeometryAdjustment;
CORE_IMAGE_EXPORT NSString * const kCICategoryCompositeOperation;
CORE_IMAGE_EXPORT NSString * const kCICategoryHalftoneEffect;
CORE_IMAGE_EXPORT NSString * const kCICategoryColorAdjustment;
CORE_IMAGE_EXPORT NSString * const kCICategoryColorEffect;
CORE_IMAGE_EXPORT NSString * const kCICategoryTransition;
CORE_IMAGE_EXPORT NSString * const kCICategoryTileEffect;
CORE_IMAGE_EXPORT NSString * const kCICategoryGenerator;
CORE_IMAGE_EXPORT NSString * const kCICategoryReduction NS_AVAILABLE(10_5, 5_0);
CORE_IMAGE_EXPORT NSString * const kCICategoryGradient;
CORE_IMAGE_EXPORT NSString * const kCICategoryStylize;
CORE_IMAGE_EXPORT NSString * const kCICategorySharpen;
CORE_IMAGE_EXPORT NSString * const kCICategoryBlur;
CORE_IMAGE_EXPORT NSString * const kCICategoryVideo;
CORE_IMAGE_EXPORT NSString * const kCICategoryStillImage;
CORE_IMAGE_EXPORT NSString * const kCICategoryInterlaced;
CORE_IMAGE_EXPORT NSString * const kCICategoryNonSquarePixels;
CORE_IMAGE_EXPORT NSString * const kCICategoryHighDynamicRange;
CORE_IMAGE_EXPORT NSString * const kCICategoryBuiltIn;
CORE_IMAGE_EXPORT NSString * const kCICategoryFilterGenerator NS_AVAILABLE(10_5, 9_0);

When we don't know what effect a filter produces, we can look it up in Apple's official Core Image Filter Reference documentation.

Take the CIComicEffect filter as an example: the documentation page shows a sample image rendered through the filter, and the area highlighted in red above it lists the parameters the filter requires. Parameters are set on the filter via KVC. Let's try it out.

(Screenshot: the CIComicEffect page in the Core Image Filter Reference, showing the filter's parameters and sample output)
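Besides the documentation, a filter can also describe itself at runtime. The snippet below is a small sketch using the standard CIFilter introspection APIs (inputKeys and attributes) to print which parameters CIComicEffect accepts and what their defaults and ranges are; the variable name comicFilter is just for illustration.

#import <CoreImage/CoreImage.h>

// Inspect a filter's parameters at runtime
CIFilter *comicFilter = [CIFilter filterWithName:@"CIComicEffect"];

// inputKeys lists every input parameter the filter accepts (CIComicEffect only takes inputImage)
NSLog(@"inputKeys: %@", comicFilter.inputKeys);

// attributes describes each parameter's class, default value and valid range
NSLog(@"attributes: %@", comicFilter.attributes);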



Continuing from the previous article, iOS Live Streaming Series, Part 1: Video Capture, we apply a CIFilter inside the sample-buffer delegate to render the filter.

// Get the image buffer for this frame
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer];

// Apply the filter
CIFilter *filter = [CIFilter filterWithName:@"CIComicEffect"];
[filter setValue:ciImage forKey:@"inputImage"];
ciImage = filter.outputImage;

// Convert to UIImage
UIImage *image = [UIImage imageWithCIImage:ciImage];

// Display on the main queue
dispatch_async(dispatch_get_main_queue(), ^{
    self.imageView.image = image;
});


(Screenshot: live camera preview rendered with CIComicEffect)
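+imageWithCIImage: leaves the actual Core Image rendering to UIKit, which can be fragile when done once per video frame. The sketch below shows the same delegate method rendering the frame explicitly through a CIContext first; the _ciContext ivar (a CIContext created once and reused) and self.imageView are assumptions for illustration.

- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Wrap the pixel buffer in a CIImage and run it through the filter
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer];

    CIFilter *filter = [CIFilter filterWithName:@"CIComicEffect"];
    [filter setValue:ciImage forKey:kCIInputImageKey];
    CIImage *outputImage = filter.outputImage;

    // Render into a CGImage with a reused CIContext (creating a CIContext per frame is expensive)
    CGImageRef cgImage = [_ciContext createCGImage:outputImage fromRect:outputImage.extent];
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    dispatch_async(dispatch_get_main_queue(), ^{
        self.imageView.image = image;
    });
}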



GPUImage beauty filters

Integrate and import the GPUImage framework:

pod 'GPUImage'

#import <GPUImage/GPUImage.h>
// 1. Create the source
GPUImageVideoCamera *camera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionFront];
_camera = camera;
camera.outputImageOrientation = UIInterfaceOrientationPortrait;
camera.horizontallyMirrorFrontFacingCamera = YES;

// 2. Create the filters
// Skin smoothing: GPUImageBilateralFilter
GPUImageBilateralFilter *bilateralFilter = [[GPUImageBilateralFilter alloc] init];
// Smoothing strength (0~100; the smaller the value, the stronger the smoothing)
bilateralFilter.distanceNormalizationFactor = 10;

// Brightening: GPUImageBrightnessFilter
GPUImageBrightnessFilter *brightnessFilter = [[GPUImageBrightnessFilter alloc] init];
// Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level
brightnessFilter.brightness = 0.3;

// 3. Create the outputs (1. on-screen display, 2. push streaming)
GPUImageView *imageView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:imageView];

// 4. Build the processing chain (each element adds the next one as its target)
[camera addTarget:bilateralFilter];
[bilateralFilter addTarget:brightnessFilter];
[brightnessFilter addTarget:imageView];

// 5. Start capturing
[camera startCameraCapture];
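Step 3 above mentions two kinds of output: on-screen display and push streaming. Only the display path (GPUImageView) is shown; for streaming, one option is to also attach a GPUImageRawDataOutput at the end of the chain and hand its BGRA pixels to an encoder. A rough sketch follows (the 720x1280 size assumes the portrait 1280x720 preset above; the encoder hookup itself is omitted):

// Attach a raw-data output after the last filter to read back processed frames
GPUImageRawDataOutput *rawOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(720, 1280) resultsInBGRAFormat:YES];
[brightnessFilter addTarget:rawOutput];

__weak GPUImageRawDataOutput *weakOutput = rawOutput;
[rawOutput setNewFrameAvailableBlock:^{
    __strong GPUImageRawDataOutput *strongOutput = weakOutput;
    [strongOutput lockFramebufferForReading];
    GLubyte *bytes = [strongOutput rawBytesForImage];
    NSUInteger bytesPerRow = [strongOutput bytesPerRowInOutput];
    // Hand the BGRA buffer (bytes / bytesPerRow) to the encoder or streaming SDK here
    [strongOutput unlockFramebufferAfterReading];
}];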

Alternatively, you can use the ready-made GPUImageBeautifyFilter for beautification:

GPUImageBeautifyFilter *beautifyFilter = [[GPUImageBeautifyFilter alloc] init];

[camera addTarget:beautifyFilter];
[beautifyFilter addTarget:imageView];
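In a live-streaming UI you usually want to toggle beautification on and off. One possible sketch, assuming camera, beautifyFilter and gpuImageView are held as properties, is to rebuild the chain with removeAllTargets (a standard GPUImageOutput method):

- (void)setBeautyEnabled:(BOOL)enabled {
    // Tear down the current chain, then rebuild it with or without the beauty filter
    [self.camera removeAllTargets];
    [self.beautifyFilter removeAllTargets];
    if (enabled) {
        [self.camera addTarget:self.beautifyFilter];
        [self.beautifyFilter addTarget:self.gpuImageView];
    } else {
        [self.camera addTarget:self.gpuImageView];
    }
}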

GPUImageBeautifyFilter source code

GPUImageBeautifyFilter.h

/*
 GPUImageBeautifyFilter is a real-time beauty filter built on GPUImage. It combines
 GPUImageBilateralFilter, GPUImageCannyEdgeDetectionFilter, GPUImageCombinationFilter,
 and GPUImageHSBFilter.
*/

#import "GPUImage.h"

@class GPUImageCombinationFilter;

@interface GPUImageBeautifyFilter : GPUImageFilterGroup {
    GPUImageBilateralFilter *bilateralFilter;
    GPUImageCannyEdgeDetectionFilter *cannyEdgeFilter;
    GPUImageCombinationFilter *combinationFilter;
    GPUImageHSBFilter *hsbFilter;
}

@end

GPUImageBeautifyFilter.m

#import "GPUImageBeautifyFilter.h"

// Internal CombinationFilter(It should not be used outside)
@interface GPUImageCombinationFilter : GPUImageThreeInputFilter
{
    GLint smoothDegreeUniform;
}

@property (nonatomic, assign) CGFloat intensity;

@end

NSString *const kGPUImageBeautifyFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
varying highp vec2 textureCoordinate2;
varying highp vec2 textureCoordinate3;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform sampler2D inputImageTexture3;
uniform mediump float smoothDegree;

 void main()
 {
     highp vec4 bilateral = texture2D(inputImageTexture, textureCoordinate);
     highp vec4 canny = texture2D(inputImageTexture2, textureCoordinate2);
     highp vec4 origin = texture2D(inputImageTexture3, textureCoordinate3);
     highp vec4 smooth;
     lowp float r = origin.r;
     lowp float g = origin.g;
     lowp float b = origin.b;
     // Smooth only pixels that are not edges (canny.r < 0.2) and fall inside a rough skin-tone range;
     // smoothDegree blends between the original pixel and the bilateral-blurred one.
     if (canny.r < 0.2 && r > 0.3725 && g > 0.1568 && b > 0.0784 && r > b && (max(max(r, g), b) - min(min(r, g), b)) > 0.0588 && abs(r-g) > 0.0588) {
         smooth = (1.0 - smoothDegree) * (origin - bilateral) + bilateral;
     }
     else {
         smooth = origin;
     }
     // Slightly brighten the result with a logarithmic curve
     smooth.r = log(1.0 + 0.2 * smooth.r) / log(1.2);
     smooth.g = log(1.0 + 0.2 * smooth.g) / log(1.2);
     smooth.b = log(1.0 + 0.2 * smooth.b) / log(1.2);
     gl_FragColor = smooth;
 }
);

@implementation GPUImageCombinationFilter

- (id)init {
    if (self = [super initWithFragmentShaderFromString:kGPUImageBeautifyFragmentShaderString]) {
        smoothDegreeUniform = [filterProgram uniformIndex:@"smoothDegree"];
    }
    self.intensity = 0.5;
    return self;
}

- (void)setIntensity:(CGFloat)intensity {
    _intensity = intensity;
    [self setFloat:intensity forUniform:smoothDegreeUniform program:filterProgram];
}

@end

@implementation GPUImageBeautifyFilter

- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    // First pass: face smoothing filter
    bilateralFilter = [[GPUImageBilateralFilter alloc] init];
    bilateralFilter.distanceNormalizationFactor = 4.0;
    [self addFilter:bilateralFilter];

    // Second pass: edge detection
    cannyEdgeFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];
    [self addFilter:cannyEdgeFilter];

    // Third pass: combination bilateral, edge detection and origin
    combinationFilter = [[GPUImageCombinationFilter alloc] init];
    [self addFilter:combinationFilter];

    // Adjust HSB
    hsbFilter = [[GPUImageHSBFilter alloc] init];
    [hsbFilter adjustBrightness:1.1];
    [hsbFilter adjustSaturation:1.1];

    [bilateralFilter addTarget:combinationFilter];
    [cannyEdgeFilter addTarget:combinationFilter];

    [combinationFilter addTarget:hsbFilter];

    self.initialFilters = [NSArray arrayWithObjects:bilateralFilter,cannyEdgeFilter,combinationFilter,nil];
    self.terminalFilter = hsbFilter;

    return self;
}


#pragma mark Drawing the first texture

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in self.initialFilters)
    {
        if (currentFilter != self.inputFilterToIgnoreForUpdates)
        {
            if (currentFilter == combinationFilter) {
                // The camera frame goes into the combination filter as its third input (texture index 2);
                // inputs 0 and 1 come from the bilateral and Canny edge filters.
                textureIndex = 2;
            }
            [currentFilter newFrameReadyAtTime:frameTime atIndex:textureIndex];
        }
    }
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in self.initialFilters)
    {
        if (currentFilter == combinationFilter) {
            textureIndex = 2;
        }
        [currentFilter setInputFramebuffer:newInputFramebuffer atIndex:textureIndex];
    }
}

@end

For reference, the OpenGL ES version supported by each iOS release:

iOS version      OpenGL ES version
2.x              1.x
3.0 ~ 6.x        2.x
7.0              3.x
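As a runtime companion to the table above: if you need to check whether a device actually supports OpenGL ES 3.0, one simple sketch is to try creating an ES3 EAGLContext and fall back to ES2 when it fails.

#import <OpenGLES/EAGL.h>

EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
if (context == nil) {
    // OpenGL ES 3.0 is unavailable on this device/OS; fall back to 2.0
    context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
}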