FFmpeg Learning: Building a Mac Player (4): Playing YUV Data with MetalKit (GPU)

In the previous article the decoded YUV data was played directly, but converting YUV into a displayable image was done on the CPU. In this article that work is moved onto the GPU by rendering with MetalKit.

//PlayESView.h  import MetalKit and declare PlayESView, a subclass of MTKView
#import <MetalKit/MetalKit.h>
#import <CoreVideo/CoreVideo.h>
@interface PlayESView : MTKView
- (void)renderWithPixelBuffer:(CVPixelBufferRef)buffer;  //render the pixel buffer holding the YUV data
@end
//PlayESView.m
@implementation PlayESView {
    id<MTLComputePipelineState> _pipelineState;
    id<MTLCommandQueue> _commandQueue;
    CVMetalTextureCacheRef _textCache;
}

- (instancetype)initWithCoder:(NSCoder *)coder {
    self = [super initWithCoder:coder];
    if (self) {
        id<MTLDevice> device = MTLCreateSystemDefaultDevice();
        _commandQueue = [device newCommandQueue];
        id<MTLLibrary> library = [device newDefaultLibrary];
        /*A compute pipeline is used here, which requires a kernel function named yuvToRGB in the .metal file.
        Alternatively a render pipeline could be used via [device newRenderPipelineStateWithDescriptor:error:]:
        create an MTLRenderPipelineDescriptor and assign its vertexFunction and fragmentFunction, which correspond
        to a vertex shader and a fragment shader in the .metal file.
        */
        id<MTLFunction> function = [library newFunctionWithName:@"yuvToRGB"];
        NSError * error = NULL;
        _pipelineState = [device newComputePipelineStateWithFunction:function error:&error];
        CVReturn ret = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &_textCache);
        if (ret != kCVReturnSuccess) {
            NSLog(@"Unable to allocate texture cache");
            return nil;
        }

        self.device = device;
        self.framebufferOnly = NO;
        self.autoResizeDrawable = NO;
    }
    return self;
}
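For reference, here is a minimal sketch of the render-pipeline alternative mentioned in the comment above, reusing the device and library created in initWithCoder:. The shader names vertexShader and fragmentShader are assumptions for illustration, not functions from the original project.

// Hypothetical sketch of the render-pipeline route (not used by this player).
// "vertexShader" and "fragmentShader" are assumed names in the .metal file.
MTLRenderPipelineDescriptor *descriptor = [[MTLRenderPipelineDescriptor alloc] init];
descriptor.vertexFunction = [library newFunctionWithName:@"vertexShader"];
descriptor.fragmentFunction = [library newFunctionWithName:@"fragmentShader"];
descriptor.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm;  // should match the view's colorPixelFormat
NSError *pipelineError = nil;
id<MTLRenderPipelineState> renderPipeline = [device newRenderPipelineStateWithDescriptor:descriptor
                                                                                    error:&pipelineError];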
- (void)renderWithPixelBuffer:(CVPixelBufferRef)buffer {
    if (buffer == NULL) return;
    CVMetalTextureRef y_texture = NULL;
    //Get the width and height of the Y plane in the pixel buffer, then create a Metal texture containing the Y data; note the pixel format is MTLPixelFormatR8Unorm
    size_t y_width = CVPixelBufferGetWidthOfPlane(buffer, 0);
    size_t y_height = CVPixelBufferGetHeightOfPlane(buffer, 0);
    CVReturn ret = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textCache, buffer, nil, MTLPixelFormatR8Unorm, y_width, y_height, 0, &y_texture);
    if (ret != kCVReturnSuccess) {
        NSLog(@"failed to create Y texture");
    }

    id<MTLTexture> y_inputTexture = CVMetalTextureGetTexture(y_texture);
    if (y_inputTexture == nil) {
        NSLog(@"failed to create Y Metal texture");
    }

    CVMetalTextureRef uv_texture = NULL;
    //Get the width and height of the UV plane in the pixel buffer, then create a Metal texture containing the interleaved UV data; note the pixel format is MTLPixelFormatRG8Unorm
    size_t uv_width = CVPixelBufferGetWidthOfPlane(buffer, 1);
    size_t uv_height = CVPixelBufferGetHeightOfPlane(buffer, 1);
    ret = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textCache, buffer, nil, MTLPixelFormatRG8Unorm, uv_width, uv_height, 1, &uv_texture);
    if (ret != kCVReturnSuccess) {
        NSLog(@"failed to create UV texture");
    }
    id<MTLTexture> uv_inputTexture = CVMetalTextureGetTexture(uv_texture);
    if (uv_inputTexture == nil) {
        NSLog(@"failed to create UV Metal texture");
    }

    CAMetalLayer * metalLayer = (CAMetalLayer *)self.layer;
    id<CAMetalDrawable> drawable = metalLayer.nextDrawable;
    if (drawable == nil) {  //no drawable available this frame; release the textures and skip rendering
        CVBufferRelease(y_texture);
        CVBufferRelease(uv_texture);
        return;
    }
    id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
    id<MTLComputeCommandEncoder> computeCommandEncoder = [commandBuffer computeCommandEncoder];
    [computeCommandEncoder setComputePipelineState:_pipelineState];
    //Pass in the Y texture, the UV texture, and the drawable's texture that receives the rendered image
    [computeCommandEncoder setTexture:y_inputTexture atIndex:0];
    [computeCommandEncoder setTexture:uv_inputTexture atIndex:1];
    [computeCommandEncoder setTexture:drawable.texture atIndex:2];
    MTLSize threadgroupSize = MTLSizeMake(16, 16, 1);
    MTLSize threadgroupCount = MTLSizeMake((y_width + threadgroupSize.width - 1) / threadgroupSize.width, (y_height + threadgroupSize.height - 1) / threadgroupSize.height, 1);
    [computeCommandEncoder dispatchThreadgroups:threadgroupCount threadsPerThreadgroup:threadgroupSize];
    [computeCommandEncoder endEncoding];
    [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull cmdBuffer) {
        CVBufferRelease(y_texture);   //release the textures to avoid a memory leak
        CVBufferRelease(uv_texture);
    }];
    [commandBuffer presentDrawable:drawable];
    [commandBuffer commit];
}
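One detail worth noting: because autoResizeDrawable is NO, the drawable no longer follows the view's bounds, so its size should be set to the video dimensions once before rendering starts; otherwise the kernel's writes and the drawable's pixels will not line up. A minimal caller-side sketch, assuming the decoder context is already open (where exactly it runs is up to the caller):

// Hypothetical setup code (e.g. run once after the decoder is opened):
// size the drawable so the compute kernel writes exactly one pixel per Y sample.
PlayESView *esView = (PlayESView *)self.view;
esView.drawableSize = CGSizeMake(self->pCodecCtx->width, self->pCodecCtx->height);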
//Metal.metal
kernel void yuvToRGB(texture2d<float, access::read> y_inTexture [[ texture(0) ]],
                     texture2d<float, access::read> uv_inTexture [[ texture(1) ]],
                     texture2d<float, access::write> outTexture [[ texture(2) ]],
                     uint2 gid [[ thread_position_in_grid ]]) {
    float4 yFloat4 = y_inTexture.read(gid);
    float4 uvFloat4 = uv_inTexture.read(gid/2); //the data is YUV 4:2:0, so the UV plane is half the size of the Y plane in each dimension
    float y = yFloat4.x;
    float u = uvFloat4.x - 0.5;
    float v = uvFloat4.y - 0.5;

    float r = y + 1.403 * v;
    r = (r < 0.0) ? 0.0 : ((r > 1.0) ? 1.0 : r);
    r = 1 - r;
    float g = y - 0.343 * u - 0.714 * v;
    g = (g < 0.0) ? 0.0 : ((g > 1.0) ? 1.0 : g);
    g = 1 - g;
    float b = y + 1.770 * u;
    b = (b < 0.0) ? 0.0 : ((b > 1.0) ? 1.0 : b);
    b = 1 - b;
    outTexture.write(float4(r, g, b, 1.0), gid);
}
The modified decoding code; the ViewController's view is set to a PlayESView:
- (void)decodeVideo {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{  //decode on a global queue
        AVPacket * packet = av_packet_alloc();
        if (av_read_frame(self->pFormatCtx, packet) >= 0) {
            if (packet->stream_index == self->videoIndex) {  //decode the video stream
                //Since FFmpeg 3.x, avcodec_send_packet and avcodec_receive_frame are used as a pair for decoding both audio and video; avcodec_decode_video2 and avcodec_decode_audio4 are deprecated
                NSInteger ret = avcodec_send_packet(self->pCodecCtx, packet);
                if (ret < 0) {
                    NSLog(@"send packet error");
                    av_packet_free(&packet);
                    return;
                }
                AVFrame * frame = av_frame_alloc();
                ret = avcodec_receive_frame(self->pCodecCtx, frame);
                if (ret < 0) {
                    NSLog(@"receive frame error");
                    av_frame_free(&frame);
                    av_packet_free(&packet);
                    return;
                }
                //frame->data holds the decoded YUV planes: data[0] is Y, data[1] is U, data[2] is V, and linesize gives the corresponding bytes per row
                float time = packet->pts * av_q2d(self->pFormatCtx->streams[self->videoIndex]->time_base);  //timestamp of the current frame
                av_packet_free(&packet);

                CVReturn theError;
                if (!self->pixelBufferPool){  //create a pixel buffer pool so pixel buffers can be reused
                    NSMutableDictionary* attributes = [NSMutableDictionary dictionary];
                    [attributes setObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
                    [attributes setObject:[NSNumber numberWithInt:frame->width] forKey: (NSString*)kCVPixelBufferWidthKey];
                    [attributes setObject:[NSNumber numberWithInt:frame->height] forKey: (NSString*)kCVPixelBufferHeightKey];
                    [attributes setObject:@(frame->linesize[0]) forKey:(NSString*)kCVPixelBufferBytesPerRowAlignmentKey];
                    [attributes setObject:[NSDictionary dictionary] forKey:(NSString*)kCVPixelBufferIOSurfacePropertiesKey];
                    theError = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef) attributes, &self->pixelBufferPool);
                    if (theError != kCVReturnSuccess){
                        NSLog(@"CVPixelBufferPoolCreate Failed");
                    }
                }

                CVPixelBufferRef pixelBuffer = nil;
                theError = CVPixelBufferPoolCreatePixelBuffer(NULL, self->pixelBufferPool, &pixelBuffer);
                if(theError != kCVReturnSuccess){
                    NSLog(@"CVPixelBufferPoolCreatePixelBuffer Failed");
                }

                theError = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
                if (theError != kCVReturnSuccess) {
                    NSLog(@"lock error");
                }
                /*
                 In the pixel buffer, the Y data lives in plane 0 and the interleaved UV data in plane 1:
                 frame->data[0]  .........   YYYYYYYYY
                 frame->data[1]  .........   UUUUUUUU
                 frame->data[2]  .........   VVVVVVVVV
                 PixelBuffer->Plane0 .......  YYYYYYYY
                 PixelBuffer->Plane1 .......  UVUVUVUVUV
                 So the Y data is copied into plane 0, and the U and V data are interleaved into plane 1.
                 */
                size_t bytePerRowY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
                size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
                //Get the base address of plane 0 and copy the Y data into it
                void* base = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
                memcpy(base, frame->data[0], bytePerRowY * frame->height);
                //Get the base address of plane 1
                base = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
                uint32_t size = frame->linesize[1] * frame->height / 2;
                //Interleave the U and V data into dstData, then copy it into plane 1
                uint8_t* dstData = new uint8_t[2 * size];
                uint8_t* firstData = new uint8_t[size];
                memcpy(firstData, frame->data[1], size);
                uint8_t* secondData = new uint8_t[size];
                memcpy(secondData, frame->data[2], size);
                for (int i = 0; i < 2 * size; i++){
                    if (i % 2 == 0){
                        dstData[i] = firstData[i/2];
                    }else {
                        dstData[i] = secondData[i/2];
                    }
                }
                memcpy(base, dstData, bytesPerRowUV * frame->height/2);
                CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
                av_frame_free(&frame);
                delete[] dstData;   //allocated with new[], so release with delete[] rather than free()
                delete[] firstData;
                delete[] secondData;

                
//                CIImage *coreImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
//                CGImageRef videoImage = [self->context createCGImage:coreImage
//                                                                   fromRect:CGRectMake(0, 0, self->pCodecCtx->width, self->pCodecCtx->height)];
//                NSImage * image = [[NSImage alloc] initWithCGImage:videoImage size:NSSizeFromCGSize(CGSizeMake(self->pCodecCtx->width, self->pCodecCtx->height))];
//                CVPixelBufferRelease(pixelBuffer);
//                CGImageRelease(videoImage);

                dispatch_async(dispatch_get_main_queue(), ^{
                    self.label.stringValue = [NSString stringWithFormat:@"%.2d:%.2d", (int)time/60, (int)time%60];
//                    self.imageView.image = image;
                    PlayESView * esView = (PlayESView *)self.view;
                    [esView renderWithPixelBuffer:pixelBuffer];
                    CVPixelBufferRelease(pixelBuffer);  //the pool-created buffer is retained; the Metal textures keep their own reference, so release it here to avoid a leak
                    self.slider.floatValue = time / (float)self->videoDuration;
                });
            } else {
                av_packet_free(&packet);  //not the video stream; free the packet to avoid a leak
            }
        } else {
            av_packet_free(&packet);
            avcodec_free_context(&self->pCodecCtx);
            avformat_close_input(&self->pFormatCtx);
            avformat_free_context(self->pFormatCtx);
            [self->timer invalidate];
        }
    });
}
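Note that avcodec_receive_frame returning AVERROR(EAGAIN) only means the decoder needs more input, so treating every negative return value as an error (as above) can drop frames. A minimal sketch of the canonical send/receive pattern, written here as a standalone helper for illustration (the helper name and the handleFrame callback are assumptions, not part of the original project):

// Hypothetical helper illustrating the canonical FFmpeg decode loop:
// send one packet, then drain every frame the decoder can produce.
static int decodePacket(AVCodecContext *codecCtx, AVPacket *packet, void (^handleFrame)(AVFrame *frame)) {
    int ret = avcodec_send_packet(codecCtx, packet);
    if (ret < 0) return ret;  // AVERROR(EAGAIN) here means frames must be drained first; other values are real errors

    AVFrame *frame = av_frame_alloc();
    while (ret >= 0) {
        ret = avcodec_receive_frame(codecCtx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            break;                       // need more packets, or the stream has ended: not an error
        } else if (ret < 0) {
            av_frame_free(&frame);
            return ret;                  // genuine decoding error
        }
        handleFrame(frame);              // hand the decoded YUV frame to the caller
        av_frame_unref(frame);
    }
    av_frame_free(&frame);
    return 0;
}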
[Figure 1: CPU usage when rendering with CoreImage]
[Figure 2: CPU usage when rendering with MetalKit]

Demo link
