[FFMPEG-MACOS] Converting AVFrame to NSImage


The conversion pipeline is:
AVFrame -> CGImageRef -> NSImage

  • Code
- (NSImage *)currentImage {
    AVFrame *now_frame = vs_obj->v_frame;
    AVCodecContext *nowCodCtx = vs_obj->rtspVideoCodecContext;
    if (!now_frame->data[0]) {
        return nil;
    }

    // Convert the decoded frame (typically YUV) to packed RGB24 in `picture`.
    sws_scale(img_convert_context, (const uint8_t *const *)now_frame->data, now_frame->linesize,
              0, nowCodCtx->height, picture.data, picture.linesize);

    // Wrap the RGB buffer without copying; kCFAllocatorNull stops CFData from freeing it.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
    CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault,
                                                 picture.data[0],
                                                 picture.linesize[0] * nowCodCtx->height,
                                                 kCFAllocatorNull);

    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Build the output image: 8 bits per component, 24 bits per pixel (RGB24),
    // one row = picture.linesize[0] bytes. This step is the important one.
    CGImageRef cgImage = CGImageCreate(nowCodCtx->width, nowCodCtx->height, 8, 24,
                                       picture.linesize[0], colorSpace, bitmapInfo,
                                       provider, NULL, NO, kCGRenderingIntentDefault);

    CGColorSpaceRelease(colorSpace);
    NSImage *image = [image_lib imageFromCGImageRef:cgImage];

    CGImageRelease(cgImage);
    CGDataProviderRelease(provider);
    CFRelease(data);
    return image;
}
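
The method relies on several members that the post does not show. Below is a minimal sketch of what the containing class might declare; vs_obj, picture, img_convert_context, and image_lib are taken from the snippet (image_lib appears to be the class that declares imageFromCGImageRef:, shown further down), while the struct layout of vs_obj and the class name FrameConverter are assumptions:

// Sketch only: assumed declarations for the snippets in this post.
#import <Cocoa/Cocoa.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>

typedef struct {
    AVFrame *v_frame;                        // most recently decoded frame
    AVCodecContext *rtspVideoCodecContext;   // decoder context for the RTSP video stream
} VideoState;                                // hypothetical layout of vs_obj

@interface FrameConverter : NSObject {       // hypothetical class name
    VideoState *vs_obj;                      // decoder state filled in elsewhere
    AVPicture picture;                       // RGB24 destination buffer
    struct SwsContext *img_convert_context;  // YUV -> RGB scaler
}
- (NSImage *)currentImage;
- (void)setupScaler;
@end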

-(void)setupScaler {
    AVCodecContext *nowCodCtx = vs_obj->rtspVideoCodecContext;

    // Release the old picture and scaler (safe on first call: both are zero-initialized).
    avpicture_free(&picture);
    sws_freeContext(img_convert_context);

    // Allocate the RGB24 destination picture.
    avpicture_alloc(&picture, AV_PIX_FMT_RGB24, nowCodCtx->width, nowCodCtx->height);

    // Set up the software scaler from the decoder's pixel format to RGB24 at the same size.
    static int sws_flags = SWS_FAST_BILINEAR;
    img_convert_context = sws_getContext(nowCodCtx->width,
                                         nowCodCtx->height,
                                         nowCodCtx->pix_fmt,
                                         nowCodCtx->width,
                                         nowCodCtx->height,
                                         AV_PIX_FMT_RGB24,
                                         sws_flags, NULL, NULL, NULL);
}
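
Note that AVPicture and avpicture_alloc/avpicture_free are deprecated in newer FFmpeg releases. If you build against a current FFmpeg, the equivalent RGB buffer can be allocated with av_image_alloc from libavutil/imgutils.h. A minimal sketch (rgb_data and rgb_linesize are hypothetical names standing in for picture.data and picture.linesize; error handling is trimmed):

#include <libavutil/imgutils.h>

uint8_t *rgb_data[4];
int rgb_linesize[4];

// Allocate a packed RGB24 buffer, replacing the avpicture_alloc call above.
int ret = av_image_alloc(rgb_data, rgb_linesize,
                         nowCodCtx->width, nowCodCtx->height,
                         AV_PIX_FMT_RGB24, 1);
if (ret < 0) {
    // handle allocation failure
}
// ... use rgb_data/rgb_linesize wherever picture.data/picture.linesize were used ...
// Free later with: av_freep(&rgb_data[0]);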

// Convert a CGImageRef to an NSImage *
+ (NSImage *)imageFromCGImageRef:(CGImageRef)image {
    // Use the CGImage's pixel dimensions as the NSImage's point size.
    NSSize size = NSMakeSize(CGImageGetWidth(image), CGImageGetHeight(image));

    // Wrap the Quartz image directly; no lockFocus/draw pass is needed.
    NSImage *newImage = [[NSImage alloc] initWithCGImage:image size:size];
    return newImage;
}
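
For completeness, a typical call sequence might look like the following (converter and imageView are hypothetical names; the decode loop itself is outside the scope of this post):

// Configure the scaler once the codec context is known.
[converter setupScaler];

// Inside the decode loop, after a new frame has been decoded:
NSImage *frameImage = [converter currentImage];
if (frameImage) {
    dispatch_async(dispatch_get_main_queue(), ^{
        imageView.image = frameImage;   // imageView is an NSImageView
    });
}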
