Basic Usage of GPUImage
Applying a Filter to an Image
- Create an input source: GPUImagePicture
- Create a filter: GPUImageGrayscaleFilter
- Create an output for display: GPUImageView
- Chain the targets together with addTarget:
- Process the image
UIImage *inputImage = [UIImage imageNamed:@"WID-small.jpg"];
// Create an input source
self.sourcePicture = [[GPUImagePicture alloc] initWithImage:inputImage smoothlyScaleOutput:YES];
// Create a filter (the property is named sepiaFilter, but a grayscale filter is used here)
self.sepiaFilter = [[GPUImageGrayscaleFilter alloc] init];
// Create an output for display
GPUImageView *imageView = (GPUImageView *)self.view;
// Constrain processing to the view's pixel size
[self.sepiaFilter forceProcessingAtSize:imageView.sizeInPixels];
// Chain the targets in order
[self.sourcePicture addTarget:self.sepiaFilter];
[self.sepiaFilter addTarget:imageView];
// Process the image
[self.sourcePicture processImage];
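If you want the filtered result back as a UIImage instead of only displaying it, GPUImageOutput provides useNextFrameForImageCapture and imageFromCurrentFramebuffer. A minimal sketch, reusing the objects above:
// Capture the filtered output as a UIImage.
// useNextFrameForImageCapture must be called before processImage, otherwise the
// framebuffer may be returned to the cache before it can be read back.
[self.sepiaFilter useNextFrameForImageCapture];
[self.sourcePicture processImage];
UIImage *filteredImage = [self.sepiaFilter imageFromCurrentFramebuffer];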
Applying a Filter to the Camera
- Use the device camera as the input source: GPUImageVideoCamera
- Create a filter: GPUImageSepiaFilter
- Create an output for display: GPUImageView
- Create a second output that writes to disk: GPUImageMovieWriter
- Start camera capture
// Use the device camera as the input source
self.videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
self.videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
self.videoCamera.horizontallyMirrorFrontFacingCamera = NO;
self.videoCamera.horizontallyMirrorRearFacingCamera = NO;
// Create a filter
self.filter = [[GPUImageSepiaFilter alloc] init];
[self.videoCamera addTarget:self.filter];
// Create an output for display
GPUImageView *filterView = (GPUImageView *)self.view;
NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
unlink([pathToMovie UTF8String]); // remove any leftover file at this path
NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
// Create a second output that writes to disk
self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
self.movieWriter.encodingLiveVideo = YES;
// Give the filter two targets
[self.filter addTarget:self.movieWriter];
[self.filter addTarget:filterView];
// Start camera capture
[self.videoCamera startCameraCapture];
double delayToStartRecording = 0.5;
dispatch_time_t startTime = dispatch_time(DISPATCH_TIME_NOW, delayToStartRecording * NSEC_PER_SEC);
dispatch_after(startTime, dispatch_get_main_queue(), ^(void){
    NSLog(@"Start recording");
    // Begin writing to disk
    self.videoCamera.audioEncodingTarget = self.movieWriter;
    [self.movieWriter startRecording];
    double delayInSeconds = 10.0;
    dispatch_time_t stopTime = dispatch_time(DISPATCH_TIME_NOW, delayInSeconds * NSEC_PER_SEC);
    dispatch_after(stopTime, dispatch_get_main_queue(), ^(void){
        // Detach the writer and stop recording
        [self.filter removeTarget:self.movieWriter];
        self.videoCamera.audioEncodingTarget = nil;
        [self.movieWriter finishRecording];
    });
});
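If you need to know exactly when the movie file has been fully written, GPUImageMovieWriter also offers finishRecordingWithCompletionHandler:. A sketch of the stop step using it instead:
// Detach the writer and stop recording, with a callback once the file is complete
[self.filter removeTarget:self.movieWriter];
self.videoCamera.audioEncodingTarget = nil;
[self.movieWriter finishRecordingWithCompletionHandler:^{
    NSLog(@"Recording finished, file ready at %@", movieURL);
}];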
GPUImage Source Code Analysis
Project Structure
The GPUImage codebase breaks down into roughly four parts:
- The OpenGL ES environment
- Input sources
- Filters (including filter groups)
- Outputs
Before digging in, here is a quick walkthrough of a complete OpenGL ES render pass:
- Set up the context
- Load the shaders
- Set up the framebuffer
- Buffer the vertex data
- Upload the texture
- Render
- Destroy the framebuffer
Setting up the context
- (void)setupContext {
    EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    [EAGLContext setCurrentContext:context];
    self.context = context;
    CAEAGLLayer *layer = (CAEAGLLayer *)self.layer;
    self.glLayer = layer;
    [self setContentScaleFactor:[[UIScreen mainScreen] scale]];
    self.glLayer.opaque = YES;
    self.glLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                       [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking,
                                       kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
                                       nil];
}
Loading the shaders
- (void)setupShader {
    NSString *fragmentShader = nil; // assign the fragment shader source here
    NSString *vertexShader = kPanomaVertexShaderString;
    self.program = [[DFProgram alloc] initWithVertexShaderString:vertexShader fragmentShaderString:fragmentShader];
    if (!self.program.initialized) {
        if (![self.program link]) {
            NSLog(@"Shader link failed: %@", self.program.fragmentShaderLog);
            return;
        }
        [self.program use];
    }
}
Setting up the framebuffer
- (void)setupFrameBuffer {
    glGenFramebuffers(1, &_frameBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
    glGenRenderbuffers(1, &_renderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
    // Allocate renderbuffer storage from the layer, then attach it as the color buffer
    [self.context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self.glLayer];
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderBuffer);
}
Buffering the vertex data
- (void)setupVBO {
    float *vertices = NULL;   // vertex positions
    float *texCoord = NULL;   // texture coordinates
    uint16_t *indices = NULL; // vertex indices
    int numVertices = 0;
    float sphereFov = 360;
    self.numIndices = initSphere(200, 1.0f, sphereFov, &vertices, &texCoord, &indices, &numVertices);
    GLuint positionBuffer;
    glGenBuffers(1, &positionBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, positionBuffer);
    glBufferData(GL_ARRAY_BUFFER, numVertices * 3 * sizeof(GLfloat), vertices, GL_STATIC_DRAW);
    GLuint position = [self.program attributeIndex:@"position"];
    glEnableVertexAttribArray(position);
    glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 3, NULL);
    GLuint texcoordBuffer;
    glGenBuffers(1, &texcoordBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, texcoordBuffer);
    glBufferData(GL_ARRAY_BUFFER, numVertices * 2 * sizeof(GLfloat), texCoord, GL_STATIC_DRAW);
    GLuint textCoor = [self.program attributeIndex:@"inputTextureCoordinate"];
    glEnableVertexAttribArray(textCoor);
    glVertexAttribPointer(textCoor, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 2, NULL);
    // Load the vertex index data
    GLuint vertexIndicesBuffer;
    glGenBuffers(1, &vertexIndicesBuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vertexIndicesBuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, _numIndices * sizeof(GLushort), indices, GL_STATIC_DRAW);
    // glBufferData copies into GPU-side storage, so the CPU copies can be freed
    free(vertices);
    free(texCoord);
    free(indices);
}
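As an aside, on iOS the GL_OES_vertex_array_object extension can record all of these bindings so they can be restored with a single call per frame. A sketch (the VAO calls come from <OpenGLES/ES2/glext.h>; the elided middle is exactly the buffer and attribute setup from setupVBO above):
#import <OpenGLES/ES2/glext.h>

GLuint vao;
glGenVertexArraysOES(1, &vao);
glBindVertexArrayOES(vao);
// ... the glBindBuffer / glEnableVertexAttribArray / glVertexAttribPointer calls
//     from setupVBO go here; the bound VAO captures that state ...
glBindVertexArrayOES(0);

// Per frame: one bind restores everything
glBindVertexArrayOES(vao);
glDrawElements(GL_TRIANGLES, _numIndices, GL_UNSIGNED_SHORT, NULL);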
Uploading the texture
- (void)setUpTextureWithImage:(UIImage *)image {
    [EAGLContext setCurrentContext:self.context];
    CGImageRef spriteImage = image.CGImage;
    if (!spriteImage) {
        NSLog(@"Failed to load image");
        exit(1);
    }
    size_t width = CGImageGetWidth(spriteImage);
    size_t height = CGImageGetHeight(spriteImage);
    // Draw the CGImage into a byte buffer in RGBA format
    GLubyte *spriteData = (GLubyte *)calloc(width * height * 4, sizeof(GLubyte));
    CGContextRef spriteContext = CGBitmapContextCreate(spriteData, width, height, 8, width * 4,
                                                       CGImageGetColorSpace(spriteImage), kCGImageAlphaPremultipliedLast);
    CGContextDrawImage(spriteContext, CGRectMake(0, 0, width, height), spriteImage);
    CGContextRelease(spriteContext);
    glActiveTexture(GL_TEXTURE0);
    glGenTextures(1, &_texture0);
    glBindTexture(GL_TEXTURE_2D, _texture0);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Upload the pixel buffer to the bound texture
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (GLsizei)width, (GLsizei)height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
    free(spriteData);
}
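If the project already links GLKit, GLKTextureLoader can collapse the whole CGImage-to-texture step into one call; a sketch of the equivalent:
#import <GLKit/GLKit.h>

NSError *error = nil;
GLKTextureInfo *info = [GLKTextureLoader textureWithCGImage:image.CGImage
                                                    options:@{GLKTextureLoaderOriginBottomLeft : @YES}
                                                      error:&error];
if (info) {
    _texture0 = info.name; // the GL texture handle, already uploaded and configured
} else {
    NSLog(@"Texture load failed: %@", error);
}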
Rendering
- (void)render {
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    // The indices live in the bound GL_ELEMENT_ARRAY_BUFFER, so the last argument is an offset, not a pointer
    glDrawElements(GL_TRIANGLES, _numIndices, GL_UNSIGNED_SHORT, NULL);
    [self.context presentRenderbuffer:GL_RENDERBUFFER];
}
Destroying the framebuffer
- (void)destroyBuffer {
    glDeleteFramebuffers(1, &_frameBuffer);
    _frameBuffer = 0;
    glDeleteRenderbuffers(1, &_renderBuffer);
    _renderBuffer = 0;
}
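Putting the pieces together, a UIView subclass might drive these methods like this (a sketch; layerClass must return CAEAGLLayer for the cast in setupContext to work, and the image name is a placeholder):
+ (Class)layerClass {
    return [CAEAGLLayer class]; // so self.layer is a CAEAGLLayer
}

- (void)didMoveToWindow {
    [super didMoveToWindow];
    [self setupContext];
    [self setupShader];
    [self setupFrameBuffer];
    [self setupVBO];
    [self setUpTextureWithImage:[UIImage imageNamed:@"panorama.jpg"]]; // placeholder asset
    [self render];
}

- (void)dealloc {
    [self destroyBuffer];
}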
Now let's come back to GPUImage and trace how data flows from input to output:
GPUImagePicture -> GPUImageGrayscaleFilter -> GPUImageView
This is essentially a response chain, and additional filters can be inserted anywhere along it, as the sketch below shows.
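For example, inserting a second stock filter is just one more addTarget: call (a sketch reusing the objects from the image example above):
// Picture -> Grayscale -> Pixellate -> View
GPUImagePixellateFilter *pixellateFilter = [[GPUImagePixellateFilter alloc] init];
[self.sourcePicture addTarget:self.sepiaFilter];
[self.sepiaFilter addTarget:pixellateFilter];
[pixellateFilter addTarget:imageView];
[self.sourcePicture processImage];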
I. First we create a GPUImagePicture object. Constructing it does the following:
1. Create a texture from the UIImage
imageData = (GLubyte *) calloc(1, (int)pixelSizeToUseForTexture.width * (int)pixelSizeToUseForTexture.height * 4);
CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();
CGContextRef imageContext = CGBitmapContextCreate(imageData, (size_t)pixelSizeToUseForTexture.width, (size_t)pixelSizeToUseForTexture.height, 8, (size_t)pixelSizeToUseForTexture.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html
CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, pixelSizeToUseForTexture.width, pixelSizeToUseForTexture.height), newImageSource);
CGContextRelease(imageContext);
CGColorSpaceRelease(genericRGBColorspace);
2. Set up the context
GPUImageContext is a singleton; its main jobs are initializing the OpenGL ES context and loading shaders.
// Set up the context
[GPUImageContext useImageProcessingContext];
3. Create the framebuffer
GPUImageFramebuffer manages a framebuffer; the image data it holds can be fetched from the framebuffer and passed to the next link in the chain.
GPUImageFramebufferCache caches GPUImageFramebuffer objects so they can be reused instead of being recreated for every frame.
// Create the framebuffer
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:pixelSizeToUseForTexture onlyTexture:YES];
[outputFramebuffer disableReferenceCounting];
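To see why the cache matters, here is a deliberately simplified sketch of the fetch/reuse idea (SimpleFramebufferCache is hypothetical; GPUImage's real cache also keys on texture options and uses reference counting):
// Hypothetical size-keyed framebuffer cache.
@interface SimpleFramebufferCache : NSObject
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)size;
- (void)returnFramebuffer:(GPUImageFramebuffer *)framebuffer forSize:(CGSize)size;
@end

@implementation SimpleFramebufferCache {
    NSMutableDictionary *_pool; // @"WxH" -> NSMutableArray of idle framebuffers
}

- (instancetype)init {
    if ((self = [super init])) {
        _pool = [NSMutableDictionary dictionary];
    }
    return self;
}

- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)size {
    NSString *key = [NSString stringWithFormat:@"%.0fx%.0f", size.width, size.height];
    NSMutableArray *idle = _pool[key];
    GPUImageFramebuffer *framebuffer = [idle lastObject];
    if (framebuffer) {
        [idle removeLastObject]; // cache hit: no new GL framebuffer or texture is created
        return framebuffer;
    }
    return [[GPUImageFramebuffer alloc] initWithSize:size]; // cache miss: create one
}

- (void)returnFramebuffer:(GPUImageFramebuffer *)framebuffer forSize:(CGSize)size {
    NSString *key = [NSString stringWithFormat:@"%.0fx%.0f", size.width, size.height];
    NSMutableArray *idle = _pool[key];
    if (!idle) {
        idle = [NSMutableArray array];
        _pool[key] = idle;
    }
    [idle addObject:framebuffer]; // back into the pool for the next fetch
}
@end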
4. Upload the texture
// Upload the image data to the framebuffer's texture
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
if (self.shouldSmoothlyScaleOutput)
{
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
}
// no need to use self.outputTextureOptions here since pictures need this texture format and type
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)pixelSizeToUseForTexture.width, (int)pixelSizeToUseForTexture.height, 0, format, GL_UNSIGNED_BYTE, imageData);
if (self.shouldSmoothlyScaleOutput)
{
    glGenerateMipmap(GL_TEXTURE_2D);
}
glBindTexture(GL_TEXTURE_2D, 0);
II. Create the filter, GPUImageGrayscaleFilter (the second link in the chain)
Filters generally inherit from GPUImageFilter; the main code in GPUImageGrayscaleFilter simply creates the filter's own shader, as sketched below.
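As an illustration, a custom filter only needs to subclass GPUImageFilter and hand its fragment shader to initWithFragmentShaderFromString: (a sketch; the luminance weights are the Rec. 709 coefficients GPUImage itself uses, but treat the exact shader as illustrative):
// MyGrayscaleFilter: a minimal custom filter that only supplies a fragment shader.
// SHADER_STRING is the macro GPUImage uses to embed GLSL in Objective-C source.
NSString *const kMyGrayscaleFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 uniform sampler2D inputImageTexture;

 void main()
 {
     lowp vec4 color = texture2D(inputImageTexture, textureCoordinate);
     highp float luminance = dot(color.rgb, vec3(0.2125, 0.7154, 0.0721));
     gl_FragColor = vec4(vec3(luminance), color.a);
 }
);

@interface MyGrayscaleFilter : GPUImageFilter
@end

@implementation MyGrayscaleFilter
- (instancetype)init
{
    // GPUImageFilter supplies the default pass-through vertex shader
    return [super initWithFragmentShaderFromString:kMyGrayscaleFragmentShaderString];
}
@end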
Because GPUImageFilter can act both as an input and as an output, it inherits from GPUImageOutput and also conforms to the GPUImageInput protocol.
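The relevant part of that protocol (abbreviated from the framework headers) shows how frames travel down the chain:
// Abbreviated GPUImageInput protocol: a target receives the upstream framebuffer,
// is told the input size, then is notified that a new frame is ready.
@protocol GPUImageInput <NSObject>
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
// ... plus rotation, texture-index, and end-of-processing methods ...
@end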
A GPUImageFilter essentially runs one complete OpenGL ES pass, minus the final render to the screen:
- Set up the context
- Load the shaders
/// Set up the context
[GPUImageContext useImageProcessingContext];
/// Load the shaders
filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];
if (!filterProgram.initialized)
{
    [self initializeAttributes];
    if (![filterProgram link])
    {
        NSString *progLog = [filterProgram programLog];
        NSLog(@"Program link log: %@", progLog);
        NSString *fragLog = [filterProgram fragmentShaderLog];
        NSLog(@"Fragment shader compile log: %@", fragLog);
        NSString *vertLog = [filterProgram vertexShaderLog];
        NSLog(@"Vertex shader compile log: %@", vertLog);
        filterProgram = nil;
        NSAssert(NO, @"Filter shader link failed");
    }
}
filterPositionAttribute = [filterProgram attributeIndex:@"position"];
filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
[GPUImageContext setActiveShaderProgram:filterProgram];
glEnableVertexAttribArray(filterPositionAttribute);
glEnableVertexAttribArray(filterTextureCoordinateAttribute);
- Create the framebuffer
- Bind the texture
- Buffer the vertex data
- Render
/// Make this filter's program the active shader
[GPUImageContext setActiveShaderProgram:filterProgram];
/// Create the framebuffer
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
if (usingNextFrameForImageCapture)
{
    [outputFramebuffer lock];
}
[self setUniformsForProgramAtIndex:0];
/// Bind the input texture
glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
glClear(GL_COLOR_BUFFER_BIT);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform, 2);
/// Supply the vertex data
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
/// Render
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
III. Display on screen with GPUImageView (the output of the chain)
GPUImageView runs one complete OpenGL ES pass of its own.
Its code is very similar to GPUImageFilter's, with one addition: it renders the result to the screen.
Creating the CAEAGLLayer
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
Creating the display framebuffer and renderbuffer
- (void)createDisplayFramebuffer;
{
    [GPUImageContext useImageProcessingContext];

    glGenFramebuffers(1, &displayFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);

    glGenRenderbuffers(1, &displayRenderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);

    [[[GPUImageContext sharedImageProcessingContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];

    GLint backingWidth, backingHeight;
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);

    if ( (backingWidth == 0) || (backingHeight == 0) )
    {
        [self destroyDisplayFramebuffer];
        return;
    }

    _sizeInPixels.width = (CGFloat)backingWidth;
    _sizeInPixels.height = (CGFloat)backingHeight;

    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);

    __unused GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation for display of size: %f, %f", self.bounds.size.width, self.bounds.size.height);

    boundsSizeAtFrameBufferEpoch = self.bounds.size;

    [self recalculateViewGeometry];
}
Presenting to the screen
- (void)presentFramebuffer;
{
    glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
    [[GPUImageContext sharedImageProcessingContext] presentBufferForDisplay]; // wraps [self.context presentRenderbuffer:GL_RENDERBUFFER]
}