Decoding video with FFmpeg generally produces an AVFrame, which then has to be converted to RGB or YUV. For a long time I was unclear about how the two map onto each other. It is actually quite simple:
AVFrame to RGB:
AVFrame ---> RGB
data[0] ---- RGB pixel data
linesize[0] ---- bytes per row (width * pixel_size for RGB)
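For example, reading pixels back out of an RGBA frame is just row arithmetic. A minimal C sketch; frame, width and height are assumed to describe an AVFrame already converted to PIX_FMT_RGBA:

// Minimal sketch: walking the rows of an RGBA AVFrame.
int bytesPerPixel = 4; // R, G, B, A
for (int y = 0; y < height; y++) {
    // linesize[0] may exceed width * bytesPerPixel because of row padding.
    uint8_t *row = frame->data[0] + y * frame->linesize[0];
    for (int x = 0; x < width; x++) {
        uint8_t r = row[x * bytesPerPixel + 0];
        uint8_t g = row[x * bytesPerPixel + 1];
        uint8_t b = row[x * bytesPerPixel + 2];
        // ... use r/g/b ...
    }
}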
AVFrame to YUV:
AVFrame ---> YUV
data[0] ---- Y plane
data[1] ---- U plane
data[2] ---- V plane
linesize[0] ---- stride (bytes per row) of Y
linesize[1], linesize[2] (each linesize[0]/2 for YUV420P) ---- stride of U or V
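Correspondingly, for a YUV420P frame the three planes live in data[0..2], each with its own stride. A minimal C sketch; frame is an assumed, already-decoded AVFrame:

// Minimal sketch: locating the three planes of a YUV420P AVFrame.
uint8_t *yPlane = frame->data[0]; // width   x height,   stride linesize[0]
uint8_t *uPlane = frame->data[1]; // width/2 x height/2, stride linesize[1]
uint8_t *vPlane = frame->data[2]; // width/2 x height/2, stride linesize[2]
// Pixel (x, y): Y at yPlane[y * linesize[0] + x],
// U at uPlane[(y / 2) * linesize[1] + x / 2], V likewise in vPlane.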
Code:
- (UIImage *)convertFrameToRGB:(float)width Height:(float)height {
    /* float width = pCodecCtx->width;
       float height = pCodecCtx->height; */
    if (pFrame->data[0]) {
        struct SwsContext *scxt = sws_getContext(width, height, PIX_FMT_YUV420P,
                                                 width, height, PIX_FMT_RGBA,
                                                 SWS_POINT, NULL, NULL, NULL);
        if (scxt == NULL) {
            return nil;
        }
        // picture is an AVPicture ivar; free it with avpicture_free once the
        // returned image is no longer in use, since the CGImage below
        // references its buffer without copying.
        avpicture_alloc(&picture, PIX_FMT_RGBA, width, height);
        sws_scale(scxt, (const uint8_t **)pFrame->data, pFrame->linesize,
                  0, height, picture.data, picture.linesize);
        sws_freeContext(scxt);
        // RGBA is 4 bytes per pixel: 32 bits per pixel, alpha in the last byte.
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
        CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault,
                                                     picture.data[0],
                                                     picture.linesize[0] * height,
                                                     kCFAllocatorNull);
        CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGImageRef cgImage = CGImageCreate(width, height,
                                           8, 32, picture.linesize[0],
                                           colorSpace, bitmapInfo, provider,
                                           NULL, NO, kCGRenderingIntentDefault);
        CGColorSpaceRelease(colorSpace);
        UIImage *image = [UIImage imageWithCGImage:cgImage];
        CGImageRelease(cgImage);
        CGDataProviderRelease(provider);
        CFRelease(data);
        return image;
    }
    return nil;
}
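For context, here is a hypothetical call site: a decode loop in the same (older) FFmpeg API generation that the method above uses. pFormatCtx, pCodecCtx, videoStreamIndex and pFrame are assumed to be set up elsewhere:

// Hypothetical decode loop feeding convertFrameToRGB.
AVPacket packet;
while (av_read_frame(pFormatCtx, &packet) >= 0) {
    if (packet.stream_index == videoStreamIndex) {
        int gotFrame = 0;
        avcodec_decode_video2(pCodecCtx, pFrame, &gotFrame, &packet);
        if (gotFrame) {
            UIImage *image = [self convertFrameToRGB:pCodecCtx->width
                                              Height:pCodecCtx->height];
            // display image, e.g. in a UIImageView
        }
    }
    av_free_packet(&packet);
}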
- (NSData *)frameDataToYUVData:(UInt8 *)data Size:(int)linesize Width:(int)width Height:(int)height {
    if (linesize < width) {
        width = linesize;
    }
    NSMutableData *tempData = [NSMutableData dataWithLength:width * height];
    Byte *dstData = tempData.mutableBytes;
    // Copy row by row so the stride padding (linesize - width) is dropped.
    for (NSUInteger i = 0; i < height; ++i) {
        memcpy(dstData, data, width);
        dstData += width;    // packed destination advances by the visible width
        data += linesize;    // source advances by the full stride
    }
    return tempData;
}
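Why copy row by row: FFmpeg usually pads each row for alignment, so linesize can exceed the visible width. With hypothetical numbers, a 100x50 luma plane padded to a 128-byte stride:

// Hypothetical numbers: a 100-pixel-wide plane stored with a 128-byte stride.
// The call copies 100 bytes from each of the 50 rows and skips the 28
// padding bytes, so packedY ends up exactly 100 * 50 bytes long.
NSData *packedY = [self frameDataToYUVData:pFrame->data[0] Size:128 Width:100 Height:50];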
- (void)convertFrameToYUV:(float)width Height:(float)height {
    // Pack each plane; the chroma planes of YUV420P are half-size in
    // both dimensions, hence width/2 and height/2.
    NSData *luma = [self frameDataToYUVData:pFrame->data[0]
                                       Size:pFrame->linesize[0]
                                      Width:width
                                     Height:height];
    NSData *chroma1 = [self frameDataToYUVData:pFrame->data[1]
                                          Size:pFrame->linesize[1]
                                         Width:width / 2
                                        Height:height / 2];
    NSData *chroma2 = [self frameDataToYUVData:pFrame->data[2]
                                          Size:pFrame->linesize[2]
                                         Width:width / 2
                                        Height:height / 2];
    // luma/chroma1/chroma2 now hold tightly packed Y/U/V planes,
    // ready to hand off to a renderer (see the sketch below).
}
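The three packed planes are typically handed off together, for example as one contiguous I420 buffer (Y, then U, then V). A minimal sketch that could sit at the end of convertFrameToYUV, reusing the luma/chroma1/chroma2 objects built above:

// Minimal sketch: concatenate the packed planes into a single I420 buffer.
NSMutableData *i420 = [NSMutableData dataWithCapacity:
    luma.length + chroma1.length + chroma2.length];
[i420 appendData:luma];
[i420 appendData:chroma1];
[i420 appendData:chroma2];
// i420 now holds width * height * 3 / 2 bytes of tightly packed YUV420P.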