/* Study notes (epipolar geometry, 2D-2D pose estimation):
寻找关键点,计算描述子,寻找匹配点
利用对极几何计算本质矩阵,基础矩阵,单应矩阵
8点法求解本质矩阵(基础矩阵)
大于8点时可采用最小二乘法、随机采样一致性(RANSAC)
存在误匹配时倾向使用RANSAC
利用SVD(奇异值分解)求得旋转矩阵和平移矩阵
数值法、分析法分解单应矩阵求得R、t
验证本质矩阵,对极约束
相机内参数矩阵、焦距、光心已知
因为对极约束的尺度特性,真实的地图通常与计算出的相差一个非0常数倍
因为本质矩阵为平移矩阵叉乘旋转矩阵求得,当相机没有做平移运动时,对极几何无法应用
分析对极约束结果可知,算法精度不高
*/
//
// Created by wcm on 2020/6/6.
//
#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/calib3d/calib3d.hpp>
using namespace std;
using namespace cv;
/*********************************
*本程序演示了如何使用2D-2D的特征匹配估计相机运动
***********************************/
void find_feature_matches(
const Mat &img_1,const Mat &img_2,
std::vector<KeyPoint> &keypoints_1,
std::vector<KeyPoint> &keypoints_2,
std::vector<DMatch> &matches
);
void pose_estimation_2d2d(
std::vector<KeyPoint> keypoints_1,
std::vector<KeyPoint> keypoints_2,
std::vector<DMatch> matches,
Mat &R, Mat &t
);
//像素坐标转化为相机归一化坐标
Point2d pixel2cam(const Point2d &p, const Mat &K);
int main( ){
//--读取图像
Mat img_1=imread("/home/automobile/wcm/slambook2/ch7/1.png", CV_LOAD_IMAGE_COLOR);
Mat img_2=imread("/home/automobile/wcm/slambook2/ch7/2.png", CV_LOAD_IMAGE_COLOR);
assert(img_1.data && img_2.data && "Cannot load images");
//--寻找关键点,计算描述子,匹配关键点
vector<KeyPoint> keypoints_1, keypoints_2;
vector<DMatch> matches;
find_feature_matches(img_1, img_2, keypoints_1, keypoints_2, matches);
cout << " 一共找到了 " << matches.size() << " 组匹配点 " << endl;
//--估计两张图像间运动
//R=R21
Mat R, t;
pose_estimation_2d2d(keypoints_1, keypoints_2, matches, R, t);
//--验证E=t^R*scale
//首先声明一个3*3的空矩阵,然后根据t求取t的反对称矩阵t^
//at(i,j)函数表示像素点的位置坐标
Mat t_x=(
Mat_<double> (3,3) << 0, -t.at<double>(2, 0), t.at<double>(1, 0),
t.at<double>(2,0), 0, -t.at<double>(0,0),
-t.at<double>(1,0), t.at<double>(0,0), 0
);
cout << " t^R = " << endl << t_x * R << endl;
//--验证对极约束
//首先给出相机内参数矩阵K,然后将像素坐标转换为归一化坐标
//将坐标转换为矩阵,验证极限约束表达式是否为0
//.t()表示转秩
Mat K = (Mat_<double>(3,3) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1);//给相机内参数矩阵赋值
for(DMatch m:matches){
Point2d pt1 = pixel2cam(keypoints_1[m.queryIdx].pt, K);
Mat y1 = (Mat_<double>(3,1) << pt1.x, pt1.y, 1);
Point2d pt2 = pixel2cam(keypoints_2[m.trainIdx].pt, K);
Mat y2 = (Mat_<double>(3, 1) << pt2.x, pt2.y, 1);
Mat d = y2.t() * t_x * R *y1;
cout << " epipolar constraint = " << d <<endl;
}
return 0;
}
void find_feature_matches(const Mat &img_1, const Mat &img_2,
std::vector<KeyPoint> &keypoints_1,
std::vector<KeyPoint> &keypoints_2,
std::vector<DMatch> &matches){
//--初始化
Mat descriptors_1, descriptors_2;
//used in OpenCV3
Ptr<FeatureDetector> detector = ORB::create();
Ptr<DescriptorExtractor> descriptor = ORB::create();
Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
//--第一步:检测Oriented FAST角点位置
detector->detect(img_1, keypoints_1);
detector->detect(img_2, keypoints_2);
//--第二步:根据角点位置计算BRIEF描述子
descriptor->compute(img_1, keypoints_1, descriptors_1);
descriptor->compute(img_2, keypoints_2, descriptors_2);
//--第三步:对两幅图像中的BRIEF描述子进行匹配,使用Hamming距离
vector<DMatch> match;
//BFMatcher matcher(NORM_HAMMING);
matcher->match(descriptors_1, descriptors_2, match);
//--第四步:匹配点筛选
double min_dist = 10000, max_dist = 0;
//找出所有匹配之间的最小距离和最大距离,既是最相似和最不相似的两组点之间的距离
for (int i = 0; i < descriptors_1.rows; i++){
double dist = match[i].distance;
if (dist < min_dist) min_dist = dist;
if (dist > max_dist) max_dist = dist;
}
printf(" --Max dist : %f \n", max_dist);
printf(" --Min dist : %f \n", min_dist);
//当描述子之间距离大于两倍最小距离时,即认为匹配有误。但有时候最小距离会非常小,设置一个经验值30作为下限
for (int i = 0; i < descriptors_1.rows; i++){
if (match[i].distance <= max(2 * min_dist, 30.0)){
matches.push_back(match[i]);
}
}
}
//像素坐标与相机坐标转换
Point2d pixel2cam(const Point2d &p, const Mat &K) {
return Point2d(
(p.x - K.at<double>(0, 2)) / K.at<double>(0, 0),
(p.y - K.at<double>(1, 2)) / K.at<double>(1, 1)
);
}
void pose_estimation_2d2d(
std::vector<KeyPoint> keypoints_1,
std::vector<KeyPoint> keypoints_2,
std::vector<DMatch> matches,
Mat &R, Mat &t
){
//相机内参。TUM Freiburg2
Mat K = (Mat_<double>(3, 3) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1);
//--把匹配点转换成vector形式
vector<Point2f> points1;
vector<Point2f> points2;
for (int i = 0; i < (int) matches.size(); i++){
points1.push_back(keypoints_1[matches[i].queryIdx].pt);
points2.push_back(keypoints_2[matches[i].queryIdx].pt);
}
//--计算基础矩阵
//使用像素坐标计算基础矩阵
//CV_FM_8POINT采用8点法求解基础矩阵
Mat fundamental_matrix;
fundamental_matrix = findFundamentalMat(points1, points2, CV_FM_8POINT);
cout << " fundamental_matrix is " << endl << fundamental_matrix <<endl;
//--计算本质矩阵
//使用归一化坐标计算本质矩阵,需要知道相机的光心,焦距
Point2d principal_point(325.1, 249.7); //相机光心,TUM dataset标定值
double focal_length = 521; //相机焦距,TUM dataset标定值
Mat essential_matrix;
essential_matrix = findEssentialMat(points1, points2, focal_length, principal_point);
cout << "essential_Matrix is " << endl <<essential_matrix <<endl;
//--计算单应矩阵
//--但是本例中场景不是平面,单应矩阵意义不大
Mat homography_matrix;
homography_matrix = findHomography(points1, points2, RANSAC, 3);
cout << "homography_matrix is " << endl << homography_matrix << endl;
//--从本质矩阵中恢复旋转和平移信息
//SVD奇异值分解
//此函数仅在OpenCV3中提供
recoverPose(essential_matrix, points1, points2, R, t, focal_length, principal_point);
cout << "R is " << endl << R << endl;
cout << "t is " << endl << t <<endl;
}
/* Sample output from one run (kept for reference, commented out so the file
   compiles). Note: the residuals are large (~1e-1) because of the queryIdx
   mis-indexing bug in pose_estimation_2d2d; with the fix they drop to ~1e-3.
[ INFO:0] Initialize OpenCL runtime...
--Max dist : 95.000000
--Min dist : 7.000000
一共找到了 81 组匹配点
fundamental_matrix is
[1.455900569774558e-07, 4.172375256092383e-07, -0.000247367342610671;
1.984383403826e-06, 1.639671857682763e-05, -0.004365075631543996;
-0.0004620281656059453, -0.003641645418601539, 1]
essential_Matrix is
[0.3016723863865509, 0.6285032399186232, 0.02435146277233041;
-0.6010017169873993, 0.3163461564700642, -0.1572219925828741;
-0.1542190765679919, -0.05131484904355308, -0.03116100395886274]
homography_matrix is
[-0.7483500806577918, -0.06258427032975751, 325.3185890263245;
-0.238998269067723, -0.01160828848589558, 102.8107936795173;
-0.002274322034400319, -0.0002520331084630441, 1]
R is
[-0.8986890561148634, 0.433848611244593, -0.06429123533199321;
-0.4137934068328298, -0.8873149959911232, -0.2035856437741956;
-0.1453719260416433, -0.1563569007444517, 0.9769439711204183]
t is
[-0.1636120714646943;
0.1673516687283813;
-0.9722265728958749]
t^R =
[-0.4266291802213185, -0.8888378058883485, -0.03443816891625309;
0.8499447791730966, -0.4473810248845647, 0.2223454742140221;
0.2180987096591085, 0.07257015546852119, 0.04406831441578762]
epipolar constraint = [-0.0753274369265802]
epipolar constraint = [-0.0197752973799656]
epipolar constraint = [-0.1046929147455941]
epipolar constraint = [0.0005126175133886024]
epipolar constraint = [-0.1213941807408835]
epipolar constraint = [0.02403999634887131]
epipolar constraint = [-0.1671191278662195]
epipolar constraint = [-0.1654927648396349]
epipolar constraint = [0.08008318133752024]
epipolar constraint = [0.07987401303791081]
epipolar constraint = [0.02004886716519435]
epipolar constraint = [-0.1295686192381137]
epipolar constraint = [0.04728527035355962]
epipolar constraint = [0.07956578239334794]
epipolar constraint = [-0.02010893812539163]
epipolar constraint = [0.02535571662279874]
epipolar constraint = [-0.0725455187807351]
epipolar constraint = [-0.0648802293330764]
epipolar constraint = [-0.1432906102530886]
epipolar constraint = [0.0795074765672304]
epipolar constraint = [0.04698029361071185]
epipolar constraint = [0.02043948672883346]
epipolar constraint = [0.07887714352210105]
epipolar constraint = [0.02409017705086632]
epipolar constraint = [-0.118665570392088]
epipolar constraint = [4.975159681777641e-05]
epipolar constraint = [-0.02070309538864171]
epipolar constraint = [0.09482041447460186]
epipolar constraint = [0.08182358399842843]
epipolar constraint = [0.02426878878025789]
epipolar constraint = [0.02338380562908568]
epipolar constraint = [0.04598268064876031]
epipolar constraint = [0.02001780253126184]
epipolar constraint = [-0.0003069505996259048]
epipolar constraint = [-0.1220772359154428]
epipolar constraint = [0.08325145780153809]
epipolar constraint = [0.08236049675707369]
epipolar constraint = [0.07869747038600788]
epipolar constraint = [-0.07990720858727506]
epipolar constraint = [0.04564596841157867]
epipolar constraint = [0.02513498140701478]
epipolar constraint = [0.02337542451177792]
epipolar constraint = [-0.07295260521777472]
epipolar constraint = [-0.1466903373879551]
epipolar constraint = [-0.1395273996603885]
epipolar constraint = [-0.01947703069502896]
epipolar constraint = [-0.001141843294882525]
epipolar constraint = [-0.002456622453787444]
epipolar constraint = [-0.0732793545158362]
epipolar constraint = [0.01973318595401235]
epipolar constraint = [-0.08083851314227503]
epipolar constraint = [0.02354058695529868]
epipolar constraint = [-0.01980839500064362]
epipolar constraint = [0.0456828129048461]
epipolar constraint = [-0.06631671784511953]
epipolar constraint = [-0.001834739444027178]
epipolar constraint = [-0.07437755472915204]
epipolar constraint = [-0.0001807371694217824]
epipolar constraint = [0.04540427905859452]
epipolar constraint = [-0.02005764036946135]
epipolar constraint = [-0.08144013822751789]
epipolar constraint = [0.02050452810374639]
epipolar constraint = [0.1232878056105139]
epipolar constraint = [-0.06540835801465628]
epipolar constraint = [0.04340646585659767]
epipolar constraint = [-0.01932535269732853]
epipolar constraint = [-0.05191972230379729]
epipolar constraint = [0.1275598467816627]
epipolar constraint = [0.02231074084289995]
epipolar constraint = [0.129278607563628]
epipolar constraint = [0.1266038441333597]
epipolar constraint = [-0.02094436566004827]
epipolar constraint = [0.0418862919457927]
epipolar constraint = [0.02177178826535921]
epipolar constraint = [-0.08173698020641162]
epipolar constraint = [0.04301362181850519]
epipolar constraint = [-0.06698270016010799]
epipolar constraint = [0.02743731289131052]
epipolar constraint = [0.000436628349686885]
epipolar constraint = [0.04659252356669603]
epipolar constraint = [0.1120717034333355]
Process finished with exit code 0
*/