RGB24 to YUV420P conversion (two approaches: a hand-written BT.601 routine, and FFmpeg's libswscale)

// Convert a packed 24-bit RGB image to planar YUV420P (I420).
//
// Destination layout inside YuvBuffer (size must be >= Width*Height*3/2):
//   [0,          W*H)      Y plane, full resolution
//   [W*H,        W*H*5/4)  U plane, 2x2 subsampled
//   [W*H*5/4,    W*H*3/2)  V plane, 2x2 subsampled
//
// Width/Height: image dimensions in pixels (should both be even for 4:2:0).
// RgbBuffer:    source pixels, 3 bytes per pixel, Width*Height*3 bytes.
//               NOTE(review): assumes r,g,b byte order and top-down rows --
//               confirm against the capture source (Windows DIBs are
//               typically bottom-up BGR; the FFmpeg path below uses BGR24).
// YuvBuffer:    destination buffer supplied by the caller.
void CVideoEncoder::RGB24ToYUV420(int Width, int Height, uint8_t* RgbBuffer, uint8_t* YuvBuffer)
{
	// Validate inputs (the old code checked a misspelled "pRGBBuffer").
	if (RgbBuffer == NULL || YuvBuffer == NULL || Width <= 0 || Height <= 0)
	{
		return;
	}

	const int nWidth  = Width;
	const int nHeight = Height;

	// Plane write cursors inside the destination buffer.
	uint8_t* bufY = YuvBuffer;
	uint8_t* bufU = bufY + nWidth * nHeight;
	uint8_t* bufV = bufU + (nWidth * nHeight) / 4;

	// Integer BT.601 approximation:
	//   Y = (( 66R + 129G +  25B + 128) >> 8) + 16
	//   U = ((-38R -  74G + 112B + 128) >> 8) + 128
	//   V = ((112R -  94G -  18B + 128) >> 8) + 128
	for (int j = 0; j < nHeight; j++)
	{
		const uint8_t* bufRGB = RgbBuffer + j * nWidth * 3;
		for (int i = 0; i < nWidth; i++)
		{
			const int r = *bufRGB++;
			const int g = *bufRGB++;
			const int b = *bufRGB++;

			// Compute in int and clamp BEFORE narrowing to a byte: the old
			// code compared an unsigned char against >255 / <0, which is
			// always false and therefore never clamped anything.
			int y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
			if (y < 0)   y = 0;
			if (y > 255) y = 255;
			*bufY++ = (uint8_t)y;

			// 4:2:0 chroma subsampling: one U and one V per 2x2 block.
			// Keep the original scheme: sample even columns only, taking
			// U from even rows and V from odd rows.
			if (i % 2 == 0)
			{
				if (j % 2 == 0)
				{
					int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
					if (u < 0)   u = 0;
					if (u > 255) u = 255;
					*bufU++ = (uint8_t)u;
				}
				else
				{
					int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
					if (v < 0)   v = 0;
					if (v > 255) v = 255;
					*bufV++ = (uint8_t)v;
				}
			}
		}
	}
}
// Map the packed YUV420P buffer onto the AVFrame's three plane pointers.
// NOTE(review): y_size is presumably Width*Height -- confirm at the caller.
// U starts right after the Y plane; V starts after the quarter-size U plane
// (offset y_size*5/4), matching the I420 layout produced above.
			pVideoEncoder->m_pYUVFrame->data[0]=pVideoEncoder->yuv_buff;
			pVideoEncoder->m_pYUVFrame->data[1]=pVideoEncoder->yuv_buff+y_size;
			pVideoEncoder->m_pYUVFrame->data[2]=pVideoEncoder->yuv_buff+(y_size*5)/4; 


 
// Second approach: let FFmpeg's libswscale do the conversion.
// NOTE(review): avpicture_alloc/AVPicture are deprecated in modern FFmpeg --
// prefer av_image_alloc / av_frame_get_buffer. Also remember to release
// yuvContext with sws_freeContext and the picture buffer when done; this
// snippet leaks both as written.
uint8_t *rgb_src[3]= {rgb, NULL, NULL};
int rgb_stride[3]={3*biWidth, 0, 0};
AVFrame picture;
avpicture_alloc((AVPicture*)(&picture), AV_PIX_FMT_YUV420P,biWidth,biHeight);
SwsContext *yuvContext=sws_getContext(biWidth,biHeight,AV_PIX_FMT_BGR24,biWidth,
biHeight,AV_PIX_FMT_YUV420P,SWS_BICUBIC, NULL, NULL, NULL);
sws_scale(yuvContext,rgb_src, rgb_stride, 0, biHeight, picture.data, picture.linesize);

// Note: if the converted image comes out upside down (e.g. a bottom-up
// Windows DIB source), flip it by pointing each plane at its last row and
// negating the stride, as below.
// NOTE(review): the usual idiom applies this negative-stride trick to the
// SOURCE before sws_scale; applying it to the destination after conversion
// only works if the consumer honors negative linesize -- verify downstream.

picture.data[0] += picture.linesize[0] * (biHeight - 1);
picture.linesize[0] *= -1;                      
picture.data[1] += picture.linesize[1] * (biHeight / 2 - 1);
picture.linesize[1] *= -1;
picture.data[2] += picture.linesize[2] * (biHeight/ 2 - 1);
picture.linesize[2] *= -1; 



Tags: vc++, C++, FFMPEG