There is plenty of material on camera calibration; the task is to determine the camera's intrinsics fx, fy, x0, y0 and the distortion coefficients k1, k2, k3, p1, p2. Approaches fall into online and offline calibration: the former is more common in computer vision, the latter in photogrammetry. Photogrammetric calibration is further divided into planar-target and 3D-target calibration.
Planar-target calibration mostly uses Zhang Zhengyou's method, while 3D-target calibration is essentially a 2D-3D transformation.
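For the planar case, OpenCV's calibrateCamera implements exactly Zhang's scheme. The camera_params routine used later in this post lives in camera_calibrate.h, whose body is not shown, so the following is only a guess at its shape, based on the call camera_params(filename, 9, 6, Cam, dis):

// Hypothetical sketch of a Zhang-style planar calibration for a board with
// 9x6 inner corners; camera_params_sketch is a made-up name, not the real
// camera_params from camera_calibrate.h.
void camera_params_sketch(const vector<cv::String> &files, int cols, int rows,
                          Mat &cam, Mat &dis)
{
    Size board(cols, rows);
    vector<vector<Point3f>> obj_pts; // 3D board corners, on the plane Z = 0
    vector<vector<Point2f>> img_pts; // detected 2D corners per image
    vector<Point3f> grid;
    for (int r = 0; r < rows; ++r)
        for (int c = 0; c < cols; ++c)
            grid.push_back(Point3f((float)c, (float)r, 0.f)); // unit square size
    Size im_size;
    for (const auto &f : files)
    {
        Mat im = imread(f, 0);
        im_size = im.size();
        vector<Point2f> corners;
        if (findChessboardCorners(im, board, corners))
        {
            cornerSubPix(im, corners, Size(11, 11), Size(-1, -1),
                         TermCriteria(TermCriteria::EPS + TermCriteria::COUNT, 30, 0.01));
            img_pts.push_back(corners);
            obj_pts.push_back(grid);
        }
    }
    vector<Mat> rvecs, tvecs; // per-view extrinsics, unused here
    calibrateCamera(obj_pts, img_pts, im_size, cam, dis, rvecs, tvecs);
}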
The purpose of epipolar rectification is to give corresponding points in a stereo pair identical y coordinates. There are two cases:
1. rectification of calibrated cameras;
2. rectification of uncalibrated cameras, also known as Hartley's method.
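The program in this post implements the calibrated case; for completeness, here is a minimal sketch of the uncalibrated (Hartley) variant using OpenCV's stereoRectifyUncalibrated, given matched points p1/p2 such as those produced by SURF_points_pairs below (hartley_rectify is a name I made up):

// Sketch of Hartley-style rectification for an uncalibrated image pair.
void hartley_rectify(const Mat &im1, const Mat &im2,
                     vector<Point2f> &p1, vector<Point2f> &p2,
                     Mat &rectL, Mat &rectR)
{
    Mat F = findFundamentalMat(p1, p2, CV_FM_RANSAC); // fundamental matrix from the matches
    Mat H1, H2;
    stereoRectifyUncalibrated(p1, p2, F, im1.size(), H1, H2); // rectifying homographies
    warpPerspective(im1, rectL, H1, im1.size());
    warpPerspective(im2, rectR, H2, im2.size());
}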
The stereo matchers that ship with OpenCV are BM, SGBM, and GC; GC gives the best results but is far too slow, and all three only work well on images of modest resolution, degrading once the resolution gets large. In 2010, ELAS ("Efficient Large-Scale Stereo Matching", released as libelas) appeared and made disparity computation on large images practical. The idea: first match a set of robust correspondences called support points, triangulate them into a mesh that serves as a prior, and then obtain each pixel's disparity candidate by MAP estimation.
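Schematically (my paraphrase of the paper, with simplified notation), ELAS estimates each pixel's disparity d as a MAP problem

    d* = argmax_d p(d | o, S) ∝ p(d | S) · p(o | d),

where o is the pixel's feature observation and S the set of support points; the prior p(d | S) concentrates around the disparity interpolated from the support-point triangulation, and the likelihood p(o | d) scores how well the pixel's descriptor matches the right-image descriptor at offset d.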
About this program:
1. It runs calibration, epipolar rectification, and disparity computation end to end. The calibration images live in folder 1 and the images to rectify sit next to the program. Because the calibration images were downloaded from the internet, two of the checkerboard shots were reused to compute the disparity; since the scene is planar, the result is not good. I also tried photographing a calibration board and then other objects with a phone, but the iPhone's distortion turned out to be huge, so I gave that up.
2. Program pipeline:
#include"camera_calibrate.h"
#include
#include
#include "elas/elas.h"
#include "elas/image.h"
void LK_points_pairs(const Mat im1, const Mat im2, vector<Point2f> &p1, vector<Point2f> &p2)
{
    // Detect up to 100 Shi-Tomasi corners in the left image, then track them
    // into the right image with pyramidal Lucas-Kanade optical flow.
    goodFeaturesToTrack(im1, p1, 100, 0.01, 30);
    // vector<cv::KeyPoint> kps;
    // cv::Ptr<cv::FastFeatureDetector> detector = cv::FastFeatureDetector::create();
    // detector->detect(im1, kps);
    // for (auto kp : kps)
    //     p1.push_back(kp.pt);
    vector<uchar> status;
    vector<float> err;
    calcOpticalFlowPyrLK(im1, im2, p1, p2, status, err);
    // Drop the pairs whose tracking failed. Erasing through iterators keeps the
    // two vectors in lockstep (erasing by index inside the loop is very slow).
    auto prev_it = p1.begin();
    auto curr_it = p2.begin();
    for (size_t k = 0; k < status.size(); k++)
    {
        if (status[k])
        {
            prev_it++;
            curr_it++;
        }
        else
        {
            prev_it = p1.erase(prev_it);
            curr_it = p2.erase(curr_it);
        }
    }
    // Visualize the matches: concatenate the pair side by side and connect
    // each correspondence with a line.
    Mat con_im;
    cv::hconcat(im1, im2, con_im);
    for (size_t i = 0; i < p1.size(); ++i)
    {
        cv::line(con_im, cv::Point(p1[i].x, p1[i].y), cv::Point(p2[i].x + im1.cols, p2[i].y), cv::Scalar(0, 0, 255), 1);
    }
    // for (size_t i = 0; i < p1.size(); ++i)
    // {
    //     cv::circle(im1, p1[i], 3, Scalar(0, 255, 0), 2);
    //     cv::circle(im2, p2[i], 3, Scalar(0, 255, 0), 2);
    // }
    // imwrite("im1.jpg", im1);
    imwrite("result.jpg", con_im);
}
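As an aside, the same status-based filtering can be done in one pass without repeated erase calls, which matters when many points are rejected (a sketch; filter_by_status is a name I made up):

// O(n) filtering by compaction: copy survivors forward, truncate once.
// Equivalent to the erase loop above, but without repeated vector::erase,
// which is O(n^2) in the worst case.
void filter_by_status(const vector<uchar> &status, vector<Point2f> &p1, vector<Point2f> &p2)
{
    size_t keep = 0;
    for (size_t k = 0; k < status.size(); ++k)
    {
        if (status[k])
        {
            p1[keep] = p1[k];
            p2[keep] = p2[k];
            ++keep;
        }
    }
    p1.resize(keep);
    p2.resize(keep);
}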
void SURF_points_pairs(const Mat im1, const Mat im2, vector<Point2f> &p1, vector<Point2f> &p2)
{
    // Detect SURF keypoints and descriptors in both images.
    Ptr<cv::xfeatures2d::SURF> surf = cv::xfeatures2d::SURF::create();
    vector<KeyPoint> kp1, kp2;
    Mat des1, des2;
    surf->detectAndCompute(im1, Mat(), kp1, des1);
    surf->detectAndCompute(im2, Mat(), kp2, des2);
    FlannBasedMatcher matcher;
    vector<DMatch> mps;
    vector<vector<DMatch>> knn_matches;
    // matcher.match(des1, des2, mps, Mat());
    matcher.knnMatch(des1, des2, knn_matches, 2); // Lowe's NNDR ratio test
    float distanceRatio = 0.6;
    for (size_t i = 0; i < knn_matches.size(); ++i)
    {
        if (knn_matches[i][0].distance < knn_matches[i][1].distance * distanceRatio)
        {
            mps.push_back(knn_matches[i][0]);
        }
    }
    if (mps.size() < 8)
    {
        throw runtime_error("not enough matches to estimate the F matrix");
    }
    // Keep at most 30 of the surviving matches.
    for (size_t i = 0; i < min<size_t>(30, mps.size()); ++i)
    {
        p1.push_back(kp1[mps[i].queryIdx].pt);
        p2.push_back(kp2[mps[i].trainIdx].pt);
    }
    Mat F;
    vector<uchar> status;
    F = cv::findFundamentalMat(p1, p2, status, CV_FM_RANSAC); // RANSAC flags the outliers
    auto prev_it = p1.begin();
    auto curr_it = p2.begin();
    for (size_t k = 0; k < status.size(); k++)
    {
        if (status[k])
        {
            prev_it++;
            curr_it++;
        }
        else
        {
            prev_it = p1.erase(prev_it);
            curr_it = p2.erase(curr_it);
        }
    }
    Mat con_im;
    cv::hconcat(im1, im2, con_im);
    for (size_t i = 0; i < p1.size(); ++i)
    {
        cv::line(con_im, cv::Point(p1[i].x, p1[i].y), cv::Point(p2[i].x + im1.cols, p2[i].y), cv::Scalar(0, 0, 255), 1);
    }
    imwrite("result.jpg", con_im);
    // for (size_t i = 0; i < p1.size(); ++i)
    // {
    //     cv::circle(im1, p1[i], 3, Scalar(0, 255, 0), 2);
    //     cv::circle(im2, p2[i], 3, Scalar(0, 255, 0), 2);
    // }
    // imwrite("im1.jpg", im1);
    // imwrite("im2.jpg", im2);
}
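Since SURF_points_pairs already estimates F, the epipolar geometry can be inspected visually; a small sketch using cv::computeCorrespondEpilines (draw_epilines is a made-up helper, and F would have to be handed out of the function above):

// Hypothetical helper: draw, in the right image, the epipolar lines of the
// left-image points. Each line comes back as (a, b, c) with a*x + b*y + c = 0.
void draw_epilines(const Mat &im2, const vector<Point2f> &p1, const Mat &F)
{
    vector<Vec3f> lines2;
    computeCorrespondEpilines(p1, 1, F, lines2); // 1 = points belong to the first image
    Mat vis;
    cvtColor(im2, vis, CV_GRAY2BGR); // so the lines can be drawn in color
    for (const Vec3f &l : lines2)
    {
        // Intersect each line with the left and right image borders.
        Point a(0, cvRound(-l[2] / l[1]));
        Point b(vis.cols, cvRound(-(l[2] + l[0] * vis.cols) / l[1]));
        line(vis, a, b, Scalar(0, 255, 0), 1);
    }
    imwrite("epilines.jpg", vis);
}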
// Recover the relative pose from the essential matrix. The returned R, t map
// points from the left camera frame into the right one, i.e. the left camera
// is taken as the origin of the world frame; t comes back with unit norm, so
// the translation is only defined up to scale.
void getR_T(vector<Point2f> p1, vector<Point2f> p2, Mat cam, Mat &R, Mat &t)
{
    // Mat R1, R2;
    Mat E = findEssentialMat(p1, p2, cam);
    recoverPose(E, p1, p2, cam, R, t); // picks, among the four decompositions of E, the one with points in front of both cameras
    // decomposeEssentialMat(E, R1, R2, t);
    // R = R1.clone();
    // t = -t.clone(); // R*t is the translation between the two camera frames
}
void rectified_image(const Mat im1, const Mat im2, Mat cam, Mat dis, Mat R, Mat t, Mat &rectified_L, Mat &rectified_R)
{
    Mat R1, R2, P1, P2, Q;
    stereoRectify(cam, dis, cam, dis, im1.size(), R, -R * t, R1, R2, P1, P2, Q); // -R*t: the translation between the two camera frames
    Mat mapx1, mapy1, mapx2, mapy2;
    // Build the undistort+rectify maps: the first argument is the original
    // camera matrix, the new (rectified) intrinsics are the left 3x3 of P1/P2.
    // Note that im1/im2 are already undistorted in main, so strictly a zero
    // distortion vector would be more correct than dis here.
    initUndistortRectifyMap(cam, dis, R1, P1(Rect(0, 0, 3, 3)), im1.size(), CV_32FC1, mapx1, mapy1);
    initUndistortRectifyMap(cam, dis, R2, P2(Rect(0, 0, 3, 3)), im2.size(), CV_32FC1, mapx2, mapy2);
    remap(im1, rectified_L, mapx1, mapy1, CV_INTER_LINEAR);
    remap(im2, rectified_R, mapx2, mapy2, CV_INTER_LINEAR);
    imwrite("rectified_left_img.pgm", rectified_L);
    imwrite("rectified_right_img.pgm", rectified_R);
}
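A quick way to sanity-check the result, not part of the original program: after a correct rectification corresponding points share a row, so horizontal lines drawn across the concatenated pair should pass through matching features (check_rectification is a name I made up):

// Hypothetical helper: draw evenly spaced horizontal lines across the
// side-by-side rectified pair; matching features should sit on the same line.
void check_rectification(const Mat &L, const Mat &R)
{
    Mat pair;
    hconcat(L, R, pair);
    if (pair.channels() == 1)
        cvtColor(pair, pair, CV_GRAY2BGR); // so the lines can be drawn in color
    for (int y = 0; y < pair.rows; y += 40)
        line(pair, Point(0, y), Point(pair.cols - 1, y), Scalar(0, 255, 0), 1);
    imwrite("rectify_check.jpg", pair);
}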
Mat generateDisparityMap(Mat &left, Mat &right)
{
    Mat lb, rb;
    if (left.empty() || right.empty())
        return left;
    // convert images to grayscale (ELAS wants 8-bit single-channel input)
    if (left.channels() == 3 || right.channels() == 3)
    {
        cvtColor(left, lb, CV_BGR2GRAY);
        cvtColor(right, rb, CV_BGR2GRAY);
    }
    else
    {
        lb = left.clone();
        rb = right.clone();
    }
    const Size imsize = lb.size();
    // ELAS takes the image dimensions as {width, height, stride}.
    const int32_t dims[3] = {imsize.width, imsize.height, imsize.width};
    Mat leftdpf = Mat::zeros(imsize, CV_32F);
    Mat rightdpf = Mat::zeros(imsize, CV_32F);
    Elas::parameters param;
    param.postprocess_only_left = true;
    Elas elas(param);
    elas.process(lb.data, rb.data, leftdpf.ptr<float>(0), rightdpf.ptr<float>(0), dims);
    // normalize disparity values between 0 and 255
    double min_disp, max_disp;
    minMaxLoc(leftdpf, &min_disp, &max_disp);
    if (max_disp <= 0)
        max_disp = 1; // avoid dividing by zero when nothing matched
    Mat show;
    leftdpf.convertTo(show, CV_8U, 255.0 / max_disp);
    return show;
}
int main()
{
    Mat Cam;
    Mat dis;
    vector<cv::String> filename;
    glob("1/*.jpg", filename);
    // Calibrate from the checkerboard images in folder 1 (9x6 inner corners).
    camera_params(filename, 9, 6, Cam, dis);
    Mat im1 = imread("1.jpg", 0);
    Mat im2 = imread("2.jpg", 0);
    Mat im1U, im2U;
    undistort(im1, im1U, Cam, dis); // undistort both views
    undistort(im2, im2U, Cam, dis);
    vector<Point2f> p1;
    vector<Point2f> p2;
    // LK_points_pairs(im1U, im2U, p1, p2); // if this step yields many wrong matches, E comes out wrong
    SURF_points_pairs(im1U, im2U, p1, p2);
    Mat R, t;
    getR_T(p1, p2, Cam, R, t);
    Mat rectified_left_img, rectified_right_img;
    rectified_image(im1U, im2U, Cam, dis, R, t, rectified_left_img, rectified_right_img);
    // SURF_points_pairs(rectified_left_img, rectified_right_img, p1, p2);
    // Compute the disparity map with ELAS.
    Mat img_disp = generateDisparityMap(rectified_left_img, rectified_right_img);
    imwrite("im_disp.pgm", img_disp);
    return 0;
}
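A natural next step, not done in this program: the Q matrix that stereoRectify already computes can turn the disparity map into a 3D point cloud. A sketch, assuming rectified_image were extended to pass Q out (e.g. via an extra Mat& parameter) and that the raw CV_32F disparities from ELAS were kept around instead of only the 8-bit display image:

// Hypothetical follow-up to main(): reproject disparities to 3D with Q.
// disp32 must hold the raw float disparities, not the scaled display image.
Mat xyz;
reprojectImageTo3D(disp32, xyz, Q, true); // xyz is CV_32FC3; true masks invalid disparities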
Reference: http://www.cvlibs.net/projects.php