摘自:http://www.rosoo.net/a/201102/10948.html
写了一个程序可以把openCV的BGR图像格式转换成YUV4:2:0,然后通过FFmpeg的API把YUV4:2:0的图像编码压缩,再利用live555把压缩后的buffer打包成RTP包,最后用Unicast将它们发送出去。简单的说就是 BGR—>YUV4:2:0—>encode to buffer—>RTP—>Unicast 这样的过程。
首先用到opencv的函数来打开这个设备获取图像
// Grab frames from the default camera via OpenCV's (1.x C API) capture interface.
CvCapture* m_pCapture;
IplImage* m_pFrameImage;
m_pCapture = cvCaptureFromCAM(0);          // device index 0 = first camera
m_pFrameImage = cvQueryFrame(m_pCapture);  // buffer owned by the capture — do not release it
// Dump the converted YUV420 frames into a file; RawPlayer.exe can then be used
// to verify that the format conversion succeeded.
int fd;
// BUG FIX: when O_CREAT is given, open() requires a third (mode) argument;
// omitting it leaves the new file's permission bits unspecified.
// NOTE(review): O_APPEND accumulates frames across runs — use O_TRUNC if a
// single fresh dump is wanted; confirm intent.
fd = open("file420.yuv", O_WRONLY | O_CREAT | O_APPEND, 0644);
这样获得的图像是640*480的bgr格式;
要转成320*240(宽320、高240)的yuv420格式
// Downscale the captured 640x480 frame to 320x240 before the YUV conversion.
// BUG FIX: CvSize members are lowercase 'width' / 'height' — the original
// 'Width' / 'heigh' would not compile.
img_cvsize.width = 320;
img_cvsize.height = 240;
IplImage* dst = cvCreateImage(img_cvsize,
                              m_pVideoInfor->m_pFrameImage->depth,
                              m_pVideoInfor->m_pFrameImage->nChannels);
cvResize(m_pVideoInfor->m_pFrameImage, dst, CV_INTER_LINEAR);
开始转换yuv420格式
uchar *yuv;
yuv = (unsigned char *)malloc(srcwidth*srcheight*3/2);
调用函数转换
iplimage_to_yuv420(dst,yuv);
转换成功后写入文件中。
write(fd,yuv,srcwidth*srcheight*3/2);
转换函数如下:
// Convert a BGR IplImage into planar YUV 4:2:0 (I420 layout: full-resolution
// Y plane, then quarter-resolution U plane, then quarter-resolution V plane).
//
// @param dst  source BGR image; width and height must both be even
// @param yuv  output buffer of at least dst->width * dst->height * 3 / 2 bytes
void MainWindow::iplimage_to_yuv420(IplImage* dst, uchar * yuv)
{
    const int w = dst->width;
    const int h = dst->height;
    // Plane offsets derived from the actual image size (the original
    // hard-coded 320*240, breaking any other resolution).
    uchar *out_y = yuv;
    uchar *out_u = yuv + w * h;          // U plane follows the Y plane
    uchar *out_v = out_u + (w * h) / 4;  // V plane follows the U plane

    // Scratch image sized/typed after the source itself (the original pulled
    // dimensions and depth from an unrelated member image).
    IplImage* tmp = cvCreateImage(cvSize(w, h), dst->depth, dst->nChannels);

    // BUG FIX: OpenCV images are stored BGR, so the correct conversion code is
    // CV_BGR2YCrCb; the original CV_RGB2YCrCb swapped the R and B contributions.
    cvCvtColor(dst, tmp, CV_BGR2YCrCb);

    for (int j = 0; j < h; ++j) {
        // widthStep may include row padding, so index rows through it rather
        // than assuming rows are exactly w*3 bytes (the original looped over
        // widthStep in steps of 12 and read padding bytes as pixel data).
        const uchar* row = (const uchar*)tmp->imageData + j * tmp->widthStep;
        for (int i = 0; i < w; ++i) {
            const uchar* px = row + i * 3;  // YCrCb pixel: [Y, Cr, Cb]
            *out_y++ = px[0];
            // 4:2:0 chroma subsampling: one U and one V sample per 2x2 block,
            // taken at every even row and even column.
            // BUG FIX: channel +1 is Cr (= V) and channel +2 is Cb (= U);
            // the original wrote Cr into the U plane and Cb into the V plane.
            if ((j & 1) == 0 && (i & 1) == 0) {
                *out_u++ = px[2];  // Cb -> U plane
                *out_v++ = px[1];  // Cr -> V plane
            }
        }
    }
    cvReleaseImage(&tmp);
}