The first part below is my own code and the second is someone else's; the two are much the same. Mine is only a partial snippet, but it has passed testing.
My code
PointF[] srcQuad = new PointF[4];
PointF[] desQuad = new PointF[4];
// Corner order must match between the two arrays: LT, RT, LB, RB
srcQuad[0] = intersectionPointLT;
srcQuad[1] = intersectionPointRT;
srcQuad[2] = intersectionPointLB;
srcQuad[3] = intersectionPointRB;
desQuad[0] = intersectionTransformPointLT;
desQuad[1] = intersectionTransformPointRT;
desQuad[2] = intersectionTransformPointLB;
desQuad[3] = intersectionTransformPointRB;
perspective_after_ptr_show = IntPtr2IntPtr(original_ptr);
// Build the 3x3 perspective transform matrix (the legacy cvInvoke calls work on the matrix pointer)
Matrix<double> map_matrix = new Matrix<double>(3, 3);
//Emgu.CV.CameraCalibration.GetPerspectiveTransform();
CvInvoke.cvGetPerspectiveTransform(srcQuad, desQuad, map_matrix.Ptr);
// Apply the perspective transform; the flags argument controls how pixels outside the source
// quadrilateral are handled - here they are filled with black (the MCvScalar(0) fill value).
CvInvoke.cvWarpPerspective(original_ptr, perspective_after_ptr_show, map_matrix.Ptr, (int)Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
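For reference, a minimal sketch of the same two steps against the newer Emgu CV 3.x+ CvInvoke API, which drops the IntPtr-based wrappers; RectifyQuad is a hypothetical helper name and the Mat-based signatures are an assumption about your Emgu CV version:

using System.Drawing;
using Emgu.CV;

static Mat RectifyQuad(Mat original, PointF[] srcQuad, PointF[] desQuad)
{
    // 3x3 homography mapping the distorted quadrilateral onto the target corners
    Mat map = CvInvoke.GetPerspectiveTransform(srcQuad, desQuad);

    // Pixels with no source fall back to the constant border value (black by default)
    Mat warped = new Mat();
    CvInvoke.WarpPerspective(original, warped, map, original.Size);
    return warped;
}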
Someone else's code
To rectify a rectangular region, the usual approach is to take the four vertices of the distorted quadrilateral and the four vertices of the target rectangle, determine a 3x3 mapping matrix from those correspondences, and then apply that matrix to carry out the rectification.
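In matrix form this mapping is a homography H: a source point (x, y) maps, in homogeneous coordinates, via

\[ \begin{pmatrix} u \\ v \\ w \end{pmatrix} = H \begin{pmatrix} x \\ y \\ 1 \end{pmatrix}, \qquad (x', y') = \left( \tfrac{u}{w}, \tfrac{v}{w} \right). \]

H is 3x3 but has only eight degrees of freedom (the overall scale is arbitrary), so the four corner correspondences determine it exactly; this is what GetPerspectiveTransform solves for.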
The EmguCV code:
MCvMat warp_matrix = new MCvMat();   // declared but never used below
// Grab a frame, convert it to grayscale, and prepare a blank image of the same size for the result
Image<Bgr, Byte> frame = _capture.QueryFrame();
Image<Gray, byte> gray = frame.Convert<Gray, byte>();
// Image<Gray,double> gray1 = new Image<Gray, double>(640, 480);
Image<Gray, byte> gray1 = gray.CopyBlank();
frame.Draw(myorirect, new Bgr(0, 255, 0), 2);   // draw the region of interest on the colour frame
PointF[] srcquad = new PointF[4];
PointF[] dstquad = new PointF[4];
// Source quad: the four corners of the full image
srcquad[0] = new PointF(0, 0);
srcquad[1] = new PointF(gray.Width - 1, 0);
srcquad[2] = new PointF(0, gray.Height - 1);
srcquad[3] = new PointF(gray.Width - 1, gray.Height - 1);
// Destination quad: an arbitrary inner quadrilateral (simulates the distortion to be corrected)
dstquad[0] = new PointF((float)(gray.Width * 0.05), (float)(gray.Height * 0.03));
dstquad[1] = new PointF((float)(gray.Width * 0.9), (float)(gray.Height * 0.25));
dstquad[2] = new PointF((float)(gray.Width * 0.2), (float)(gray.Height * 0.7));
dstquad[3] = new PointF((float)(gray.Width * 0.8), (float)(gray.Height * 0.8));
// Forward homography: full-image rectangle -> distorted quadrilateral
HomographyMatrix mywarpmat = Emgu.CV.CameraCalibration.GetPerspectiveTransform(srcquad, dstquad);
Matrix<double> mymat = (Matrix<double>)mywarpmat;
gray1 = gray.WarpPerspective<double>(mymat, Emgu.CV.CvEnum.INTER.CV_INTER_NN, Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS, new Gray(255));
// Inverse homography: map the distorted quadrilateral back onto the full-image rectangle
mywarpmat = Emgu.CV.CameraCalibration.GetPerspectiveTransform(dstquad, srcquad);
mymat = (Matrix<double>)mywarpmat;
gray = gray1.WarpPerspective<double>(mymat, Emgu.CV.CvEnum.INTER.CV_INTER_NN, Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS, new Gray(255));
imageBox1.Image = gray1;   // warped (distorted) image
imageBox2.Image = gray;    // round-tripped image, rectified back to the original layout
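A small design note: the second call to GetPerspectiveTransform (with dstquad and srcquad swapped) is not strictly needed. The legacy API also accepts an inverse-map flag, so the same forward matrix can be reused for the trip back; a minimal sketch, assuming the Emgu CV 2.x WARP enum includes CV_WARP_INVERSE_MAP and that mymat still holds the forward (srcquad -> dstquad) matrix:

// CV_WARP_INVERSE_MAP tells OpenCV that mymat maps destination coordinates back to
// source coordinates, so the forward matrix undoes the warp without being recomputed.
gray = gray1.WarpPerspective<double>(
    mymat,
    Emgu.CV.CvEnum.INTER.CV_INTER_NN,
    Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS | Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP,
    new Gray(255));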