AVFrame的data和linesize:
YUV420P: linesize[0] = 对齐后的行宽（≥ width，填充大小取决于对齐要求，常见为 16 或 32 字节对齐），且 linesize[1] = linesize[2] = linesize[0]/2
data[0]、data[1]、data[2] 分别指向 Y、U、V 三个平面（planar 布局）
RGB: linesize[0] = width * pixel_size（pixel_size 为每像素字节数，RGB24 为 3）
data[0] 指向打包（packed）的 RGB 数据，RGB 分量交错存放在同一平面
AVFrame* pFrame; // Frame AVCodecContext* pContext; // Codec Context int nUsedBytes = avcodec_decode_video(pContext, pFrame, &nFrame, pSrcBuffer, nSrcLength); if(nUsedBytes > 0) { AVFrame out_pic; SwsContext* img_convert_ctx = sws_getContext(pContext->width, pContext->height , pContext->pix_fmt, nDestW, nDestH,(PixelFormat)PIX_FMT_BGR24,SWS_BICUBIC, NULL, NULL, NULL); if(img_convert_ctx != NULL) { if(avpicture_alloc((AVPicture *)&out_pic, PIX_FMT_RGB24, nDestW, nDestH)>=0) { pFrame->data[0] = pFrame->data[0]+pFrame->linesize[0]*(pContext->height-1); pFrame->data[1] = pFrame->data[1]+pFrame->linesize[0]*pContext->height/4-1; pFrame->data[2] = pFrame->data[2]+pFrame->linesize[0]*pContext->height/4-1; pFrame->linesize[0] *= -1; pFrame->linesize[1] *= -1; pFrame->linesize[2] *= -1; sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pContext->height , out_pic.data, out_pic.linesize); avpicture_free((AVPicture *)&out_pic); } sws_freeContext(img_convert_ctx); } }