// Tracks features from the previous frame into `image` with pyramidal
// Lucas-Kanade optical flow, rejects bad matches (forward/backward status
// check + RANSAC on the fundamental matrix), then tops the feature set up
// with new good-features-to-track corners.
//
// Outputs (output containers are cleared first):
//   tracked_pts / tracked_pt_ids : surviving and newly created features.
//   lost_pt_ids (optional)       : ids of features that failed tracking.
//   new_pt_ids  (optional)       : ids of the freshly created features.
void KLTFeatureTracker::TrackImage(const cv::Mat& image,
                                   std::vector<Eigen::Vector2d>* tracked_pts,
                                   std::vector<long long>* tracked_pt_ids,
                                   std::vector<long long>* lost_pt_ids,
                                   std::set<long long>* new_pt_ids) {
  tracked_pts->clear();
  tracked_pt_ids->clear();
  if (lost_pt_ids != nullptr) { lost_pt_ids->clear(); }
  if (new_pt_ids != nullptr) { new_pt_ids->clear(); }
  // 1. Track features from the last frame.
  if (!last_image_.empty() && !last_pt_ids_.empty()) {
    std::vector<cv::Point2f> track_cv_pts;
    std::vector<uchar> track_status;
    std::vector<float> error;
    // Forward track: pyramidal LK from last_image_ to image.
    // 51x51 search window, 5 pyramid levels, stop after 30 iterations or
    // when the update falls below 0.01.
    cv::calcOpticalFlowPyrLK(last_image_, image, last_cv_pts_, track_cv_pts, track_status, error,
                             cv::Size(51, 51), 5,
                             cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.01));
    // Backward track (image -> last_image_), seeded with the original points
    // so OPTFLOW_USE_INITIAL_FLOW applies. NOTE: only the backward status
    // flags are used below; the back-tracked positions themselves are not
    // compared against last_cv_pts_.
    std::vector<cv::Point2f> back_track_cv_pts = last_cv_pts_;
    std::vector<uchar> back_track_status;
    std::vector<float> back_error;
    cv::calcOpticalFlowPyrLK(image, last_image_, track_cv_pts, back_track_cv_pts, back_track_status, back_error,
                             cv::Size(51, 51), 5,
                             cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.01),
                             cv::OPTFLOW_USE_INITIAL_FLOW);
    std::vector<cv::Point2f> prev_points;  // Surviving points in the last frame.
    std::vector<cv::Point2f> cur_points;   // Surviving points in the current frame.
    std::vector<long long> cur_ids;
    for (size_t i = 0; i < track_cv_pts.size(); ++i) {
      const auto& pt_id = last_pt_ids_[i];
      // status == 1 means the feature was found; drop the feature if either
      // the forward or the backward pass failed. (The original comment
      // claimed status == 0 means "found", which is wrong.)
      if (track_status[i] == 0 || back_track_status[i] == 0) {
        if (lost_pt_ids != nullptr) { lost_pt_ids->push_back(pt_id); }
        continue;
      }
      prev_points.push_back(last_cv_pts_[i]);
      cur_points.push_back(track_cv_pts[i]);
      cur_ids.push_back(pt_id);
    }
    if (!cur_points.empty()) {
      // Reject remaining outliers with a RANSAC fundamental-matrix fit.
      std::vector<uchar> f_status;
      const double f_thresh = 0.5;  // Max point-to-epipolar-line distance [px].
      cv::findFundamentalMat(cur_points, prev_points, cv::FM_RANSAC, f_thresh, 0.999, f_status);
      for (size_t i = 0; i < cur_points.size(); ++i) {
        if (!f_status[i]) {
          continue;
        }
        tracked_pts->emplace_back(cur_points[i].x, cur_points[i].y);
        tracked_pt_ids->push_back(cur_ids[i]);
      }
    }
  }
  // 2. Create new features to keep the corner budget full.
  // Cast to int first so the subtraction cannot wrap in unsigned arithmetic.
  const int num_new_pts = config_.max_num_corners - static_cast<int>(tracked_pt_ids->size());
  if (num_new_pts > 0) {
    cv::Mat mask;
    CreateMask(image.cols, image.rows, *tracked_pts, &mask);
    std::vector<cv::Point2f> new_pts;
    // Shi-Tomasi (GFTT) corners, masked away from existing features.
    cv::goodFeaturesToTrack(image, new_pts, num_new_pts, config_.quality_level, config_.min_dist, mask);
    for (const auto& pt : new_pts) {
      const auto id = corner_id_;
      ++corner_id_;
      if (new_pt_ids != nullptr) { new_pt_ids->insert(id); }
      tracked_pts->emplace_back(pt.x, pt.y);
      tracked_pt_ids->push_back(id);
    }
  }
  // 3. Cache this frame's data for the next call.
  last_image_ = image;
  last_pt_ids_ = *tracked_pt_ids;
  last_cv_pts_.clear();
  last_cv_pts_.reserve(tracked_pts->size());
  for (size_t i = 0; i < tracked_pts->size(); ++i) {
    last_cv_pts_.emplace_back(tracked_pts->at(i)[0], tracked_pts->at(i)[1]);
  }
}
void KLTFeatureTracker::CreateMask(const int width, const int heigth,
const std::vector& points, cv::Mat* mask) {
*mask = cv::Mat(heigth, width, CV_8UC1, cv::Scalar(1));
for (const auto& pt : points) {
cv::circle(*mask, cv::Point2i(pt[0], pt[1]), config_.min_dist, 0, -1);//0是啥颜色?
}
}
/******************************更新视觉状态*************************************/
/* 输入:图像;是否边缘化最老的相机帧;跟踪到的特征点;跟踪特征点的 id;丢失特征点的 id;新特征点的 id。
   输出:更新后的状态;用于可视化的跟踪特征点与新特征点;三角化得到的地图点(landmark)。
*/
// MSCKF-style visual EKF update.
//
// Inserts the current tracks into the newest sliding-window camera frame,
// selects features to "use up" (lost features, plus everything observed by
// the oldest frame when it is being marginalized), triangulates them,
// builds the stacked residual/Jacobian, compresses the measurement and runs
// the EKF update. Finally the used features are removed from the window.
//
// Params:
//   image          : current image. NOTE: not used in this function body;
//                    kept for interface compatibility.
//   marg_oldest    : if true, also consume all features seen by the oldest
//                    camera frame (it is about to be marginalized).
//   tracked_pts / tracked_pt_ids / lost_pt_ids / new_pt_ids : tracker output.
// Outputs:
//   state            : filter state, updated in place.
//   tracked_features : copy of tracked_pts (for visualization).
//   new_features     : subset of tracked_pts whose ids are in new_pt_ids.
//   map_points       : successfully triangulated landmark positions.
void VisualUpdater::UpdateState(const cv::Mat& image, const bool marg_oldest,
                                const std::vector<Eigen::Vector2d>& tracked_pts,
                                const std::vector<long long>& tracked_pt_ids,
                                const std::vector<long long>& lost_pt_ids,
                                const std::set<long long>& new_pt_ids,
                                State* state,
                                std::vector<Eigen::Vector2d>* tracked_features,
                                std::vector<Eigen::Vector2d>* new_features,
                                std::vector<Eigen::Vector3d>* map_points) {
  // Collect features for visualization.
  *tracked_features = tracked_pts;
  // BUGFIX: clear the output first; the original accumulated across calls.
  new_features->clear();
  for (size_t i = 0; i < tracked_pt_ids.size(); ++i) {
    if (new_pt_ids.count(tracked_pt_ids[i]) > 0) {
      new_features->push_back(tracked_pts[i]);
    }
  }
  // Assumes a new clone has already been inserted into the sliding window.
  CameraFramePtr new_cam_frame = state->camera_frames.back();
  // Register this frame's observations in the newest camera frame.
  for (size_t i = 0; i < tracked_pt_ids.size(); ++i) {
    new_cam_frame->id_pt_map[tracked_pt_ids[i]] = tracked_pts[i];
  }
  /// Collect features to use for the update.
  // 1. Features lost by the tracker.
  std::unordered_set<long long> lost_ft_ids_set(lost_pt_ids.begin(), lost_pt_ids.end());
  // 2. When marginalizing the oldest clone, every feature it observes must
  //    be consumed now, before the frame is dropped.
  if (marg_oldest) {
    const CameraFramePtr marg_cam = state->camera_frames.front();
    for (const auto& id_pt : marg_cam->id_pt_map) {
      lost_ft_ids_set.insert(id_pt.first);
    }
  }
  // Collect (image point, camera frame) observation pairs per feature.
  std::vector<std::vector<std::pair<Eigen::Vector2d, CameraFramePtr>>> features_obs;
  features_obs.reserve(lost_ft_ids_set.size());
  for (const long long id : lost_ft_ids_set) {
    std::vector<std::pair<Eigen::Vector2d, CameraFramePtr>> one_feature;
    // Take by const-ref: avoids one shared_ptr refcount bump per iteration.
    for (const auto& cam_fm : state->camera_frames) {
      const auto iter = cam_fm->id_pt_map.find(id);
      if (iter == cam_fm->id_pt_map.end()) { continue; }
      one_feature.emplace_back(iter->second, cam_fm);
    }
    // Require at least 3 observations for a well-constrained triangulation.
    if (one_feature.size() < 3) { continue; }
    features_obs.push_back(one_feature);
  }
  const int state_size = state->covariance.rows();
  /// Triangulate points.
  map_points->clear();
  map_points->reserve(features_obs.size());
  std::vector<FeatureObservation> features_full_obs;
  features_full_obs.reserve(features_obs.size());
  for (const auto& one_ft_obs : features_obs) {
    FeatureObservation one_feature;
    one_feature.im_pts.reserve(one_ft_obs.size());
    one_feature.camera_frame.reserve(one_ft_obs.size());
    std::vector<Eigen::Matrix3d> G_R_Cs;
    std::vector<Eigen::Vector3d> G_p_Cs;
    for (const auto& one_obs : one_ft_obs) {
      G_R_Cs.push_back(one_obs.second->G_R_C);
      G_p_Cs.push_back(one_obs.second->G_p_C);
      one_feature.im_pts.push_back(one_obs.first);
      one_feature.camera_frame.push_back(one_obs.second);
    }
    // Require enough baseline between the first and last observing camera,
    // otherwise triangulation is ill-conditioned.
    if ((one_feature.camera_frame.front()->G_p_C - one_feature.camera_frame.back()->G_p_C).norm() <
        config_.min_cam_dist_to_triangulate) {
      continue;
    }
    if (!triangulator_->Triangulate(G_R_Cs, G_p_Cs, one_feature.im_pts, &one_feature.G_p)) { continue; }
    features_full_obs.push_back(one_feature);
    map_points->push_back(one_feature.G_p);
  }
  // Stacked residual and Jacobian over all usable features.
  Eigen::MatrixXd H;
  Eigen::VectorXd r;
  ComputeVisualResidualJacobian(features_full_obs, state_size, &r, &H);
  // Length of the sliding window trajectory (first to last clone).
  const double window_length = (state->camera_frames.front()->G_p_C - state->camera_frames.back()->G_p_C).norm();
  if (H.rows() > config_.min_res_size && window_length > config_.min_window_length) {
    // Compress the measurement (QR) to at most state_size rows.
    Eigen::MatrixXd H_cmp;
    Eigen::VectorXd r_cmp;
    CompressMeasurement(H, r, &H_cmp, &r_cmp);
    /// EKF update with isotropic measurement noise V = visual_noise * I.
    Eigen::MatrixXd V(H_cmp.rows(), H_cmp.rows());
    V.setIdentity();
    V *= config_.visual_noise;
    EKFUpdate(H_cmp, r_cmp, V, state);
  }
  // Remove the consumed features from all frames in the window.
  RemoveUsedFeatures(lost_ft_ids_set, state);
}
三角化
说明:记号 G_R_C / G_p_C 中的 G 表示全局(世界)坐标系(Global),C 表示相机坐标系(Camera);
G_R_C 即把相机系下的向量旋转到全局系的旋转矩阵,并非 GPS 坐标系。
bool Triangulator::Triangulate(const std::vector& G_R_Cs,
const std::vector& G_p_Cs,
const std::vector& im_pts,
Eigen::Vector3d* G_p) {
// Convert camera pose. 转换成相机位姿
std::vector C_R_Gs;
std::vector C_p_Gs;
C_R_Gs.reserve(G_R_Cs.size());
C_p_Gs.reserve(G_p_Cs.size());
for (size_t i = 0; i < G_R_Cs.size(); ++i) {
const Eigen::Matrix3d& G_R_C = G_R_Cs[i];
const Eigen::Vector3d& G_p_C = G_p_Cs[i];
const Eigen::Matrix3d C_R_G = G_R_C.transpose();//旋转矩阵中A^T=A
const Eigen::Vector3d C_p_G = -C_R_G * G_p_C;
C_R_Gs.push_back(C_R_G);
C_p_Gs.push_back(C_p_G);
}
// Project image points to NSP. 投影到图像的点到NSP
std::vector NSP_pts;
NSP_pts.reserve(im_pts.size());
for (size_t i = 0; i < im_pts.size(); ++i) {
const Eigen::Vector2d& im_pt = im_pts[i];
const Eigen::Vector3d NSP_pt = camera_->ImageToCamera(im_pt, 1.);
NSP_pts.push_back(NSP_pt.head<2>());
}
// Linear triangulation. 线性三角化
if (!TriangulateDLT(C_R_Gs, C_p_Gs, NSP_pts, G_p)) {
LOG(WARNING) << "[Triangulate]: Failed to triangulate point using the DLT method.";
return false;
}
// Non-linear refinement. 非线性提纯
if (!RefineGlobalPoint(camera_, C_R_Gs, C_p_Gs, im_pts, G_p)) {
return false;
}
// Reject if the projection error is too large.
//如果重投影误差太大则拒绝
for (size_t i = 0; i < C_R_Gs.size(); ++i) {
const Eigen::Matrix3d& C_R_G = C_R_Gs[i];
const Eigen::Vector3d& C_p_G = C_p_Gs[i];
const Eigen::Vector3d C_p = C_R_G * *G_p + C_p_G;
if (C_p[2] < config_.min_dist || C_p[2] > config_.max_dist) {
return false;
}
Eigen::Vector2d exp_I_p;
//相机坐标->像素坐标
if (!camera_->CameraToImage(C_p, &exp_I_p)) {
LOG(WARNING) << "[Triangulate]: Failed to project refined triangulated point to image.";
return false;
}
//计算重投影误差
const double error = (im_pts[i] - exp_I_p).norm();
if (error > config_.max_proj_res) {
LOG(WARNING) << "[Triangulate]: Too large projection error: " << std::fixed << error;
return false;
}
}
return true;
}
(图示缺失:像素坐标与相机坐标转换示意图,原图为本地截图,外链已失效)
图像坐标 -> 相机坐标
// Back-projects a pixel to a 3D point in the camera frame at depth z:
// pixel -> distorted normalized coords -> undistorted normalized coords
// -> scaled to depth z.
Eigen::Vector3d PinholeRadTanCamera::ImageToCamera(const Eigen::Vector2d& I_p, const double z) const {
  return NormalizedToCamera(DistortionToNormalized(ImageToDistortion(I_p)), z);
}
// Converts a pixel coordinate to the (still distorted) normalized image
// plane by removing the intrinsics: x = (u - cx) / fx, y = (v - cy) / fy.
Eigen::Vector2d PinholeRadTanCamera::ImageToDistortion(const Eigen::Vector2d& I_p) const {
  return Eigen::Vector2d((I_p[0] - cx_) / fx_,
                         (I_p[1] - cy_) / fy_);
}
// Undistorts a point on the normalized plane (inverse of the rad-tan model)
// by fixed-point iteration, seeded with the distorted coordinates. Each
// iteration's convergence is checked by re-distorting the current estimate.
// Stops after at most 10 iterations or when the error drops below 1e-12.
Eigen::Vector2d PinholeRadTanCamera::DistortionToNormalized(const Eigen::Vector2d& D_p) const {
  const int max_niter = 10;
  const double epsilon = 1e-12;
  const double x_dist = D_p[0];
  const double y_dist = D_p[1];
  double x = x_dist;
  double y = y_dist;
  int cnt = 0;
  // BUGFIX: restore the lost template argument <double>.
  double error = std::numeric_limits<double>::max();
  while ((cnt++) < max_niter && error > epsilon) {
    // One fixed-point step of the inverse distortion.
    const double r2 = x * x + y * y;
    const double icdist = 1. / (1. + ((k3_ * r2 + k2_) * r2 + k1_) * r2);
    const double delta_x = 2. * p1_ * x * y + p2_ * (r2 + 2. * x * x);
    const double delta_y = p1_ * (r2 + 2. * y * y) + 2. * p2_ * x * y;
    x = (x_dist - delta_x) * icdist;
    y = (y_dist - delta_y) * icdist;
    // Measure convergence by re-distorting the current estimate.
    const Eigen::Vector2d D_p_predict = NormalizedToDistortion(Eigen::Vector2d(x, y));
    error = (D_p - D_p_predict).norm();
  }
  return Eigen::Vector2d(x, y);
}
(图示缺失:径向-切向畸变模型公式截图,外链已失效)
归一化坐标 -> 畸变坐标(正向畸变模型,主要用于迭代校验)
// Applies the radial-tangential distortion model to a point on the
// normalized plane. If J_dist_wrt_norm is non-null, also fills the 2x2
// Jacobian of the distorted coordinates w.r.t. the normalized ones
// (used e.g. when propagating uncertainty through the distortion).
Eigen::Vector2d PinholeRadTanCamera::NormalizedToDistortion(const Eigen::Vector2d& N_p,
                                                            Eigen::Matrix2d* J_dist_wrt_norm) const {
  const double x = N_p[0];
  const double y = N_p[1];
  const double x2 = x * x;
  const double y2 = y * y;
  const double r2 = x2 + y2;
  const double r4 = r2 * r2;
  const double r6 = r2 * r4;
  const double two_xy = 2. * x * y;
  const double rx = r2 + 2. * x2;
  const double ry = r2 + 2. * y2;
  // Radial scaling: 1 + k1*r^2 + k2*r^4 + k3*r^6.
  const double radial_factor = 1. + k1_ * r2 + k2_ * r4 + k3_ * r6;
  // Radial part plus tangential correction.
  const double x_dist = radial_factor * x + p1_ * two_xy + p2_ * rx;
  const double y_dist = radial_factor * y + p1_ * ry + p2_ * two_xy;
  if (J_dist_wrt_norm) {
    // Shared chain-rule factor: d(radial_factor)/d(r2) * 2.
    const double J_factor = 2. * k1_ + 4 * k2_ * r2 + 6. * k3_ * r4;
    const double dxd_dxn = radial_factor + x2 * J_factor + 2. * p1_ * y + 6. * p2_ * x;
    const double dxd_dyn = x * y * J_factor + 2. * p1_ * x + 2. * p2_ * y;
    const double dyd_dyn = radial_factor + y2 * J_factor + 6. * p1_ * y + 2. * p2_ * x;
    // The Jacobian is symmetric in its off-diagonal terms.
    *J_dist_wrt_norm << dxd_dxn, dxd_dyn,
                        dxd_dyn, dyd_dyn;
  }
  return Eigen::Vector2d(x_dist, y_dist);
}
归一化坐标到相机坐标
// Lifts a normalized-plane point (z = 1) to a 3D camera-frame point at the
// given depth z.
Eigen::Vector3d PinholeRadTanCamera::NormalizedToCamera(const Eigen::Vector2d& N_p, const double z) const {
  return Eigen::Vector3d(N_p[0] * z, N_p[1] * z, z);
}
三角化的方法(DLT 变换实现)(输入:归一化平面坐标与相机位姿,输出:全局坐标系下的 3D 点)
bool TriangulateDLT(const std::vector& C_R_Gs,
const std::vector& C_p_Gs,
const std::vector& NSP_points,//归一化坐标
Eigen::Vector3d* G_p) {
const size_t n_obs = C_R_Gs.size();
Eigen::MatrixXd A(2 * n_obs, 4);
Eigen::Matrix P1, P2, P3;
size_t idx = 0;
for (size_t i = 0; i < n_obs; ++i) {
const auto& pt = NSP_points[i];
const double x = pt[0];
const double y = pt[1];
const auto& R = C_R_Gs[i];
const auto& p = C_p_Gs[i];
P1 << R.block<1, 3>(0, 0), p(0);
P2 << R.block<1, 3>(1, 0), p(1);
P3 << R.block<1, 3>(2, 0), p(2);
A.block<1, 4>(idx, 0) = x * P3 - P1;
++idx;
A.block<1, 4>(idx, 0) = y * P3 - P2;
++idx;
}
// Solve Ax = 0.
const Eigen::Matrix4d H = A.transpose() * A;
//求特征值
const Eigen::SelfAdjointEigenSolver eigen_solver(H);
if (eigen_solver.info() != Eigen::Success) {
LOG(WARNING) << "[TriangulateDLT]: Failed to compute eigenvector of H.";
return false;
}
const Eigen::Vector4d X = eigen_solver.eigenvectors().leftCols<1>();
if (std::abs(X[3]) < 1e-12) {
LOG(WARNING) << "[TriangulateDLT]: X[3] is too small!";
return false;
}
//相机坐标
*G_p = X.topRows<3>() / X[3];
return true;
}
(图示缺失:DLT 三角化公式推导截图,外链已失效)
通过调用 Ceres 库进行非线性优化,得到优化后的全局(世界)坐标系下的 3D 点
// Non-linear refinement of a triangulated global point: minimizes the robust
// (Huber) reprojection error over all observing views with Ceres.
// G_p is used as the initial guess and refined in place.
// Returns false only if the solver reports outright failure.
bool RefineGlobalPoint(const Camera::CameraPtr camera,
                       const std::vector<Eigen::Matrix3d>& C_R_Gs,
                       const std::vector<Eigen::Vector3d>& C_p_Gs,
                       const std::vector<Eigen::Vector2d>& im_points,
                       Eigen::Vector3d* G_p) {
  // Build the problem: one reprojection residual per observation.
  ceres::Problem problem;
  for (size_t i = 0; i < C_R_Gs.size(); ++i) {
    ceres::CostFunction* cost_func = new ProjectionCostFunc(camera, C_R_Gs[i], C_p_Gs[i], im_points[i]);
    // Huber loss at sqrt(5.99): 5.99 is the 95% chi-square bound for 2 DoF,
    // limiting the influence of outlier observations.
    problem.AddResidualBlock(cost_func, new ceres::HuberLoss(std::sqrt(5.99)), G_p->data());
  }
  // Tiny dense problem (3 unknowns): dense normal Cholesky + LM is ideal.
  ceres::Solver::Options options;
  options.linear_solver_type = ceres::DENSE_NORMAL_CHOLESKY;
  options.trust_region_strategy_type = ceres::LEVENBERG_MARQUARDT;
  options.max_num_iterations = 10;
  options.minimizer_progress_to_stdout = false;
  ceres::Solver::Summary summary;
  ceres::Solve(options, &problem, &summary);
  return summary.termination_type != ceres::FAILURE;
}
计算视觉残差的雅可比矩阵
void VisualUpdater::ComputeVisualResidualJacobian(const std::vector& features_full_obs,
const int state_size,
Eigen::VectorXd* res,
Eigen::MatrixXd* Jacobian) {
std::vector r_bars;
std::vector H_bars;
r_bars.reserve(features_full_obs.size());
H_bars.reserve(features_full_obs.size());
int num_rows = 0;
//C++新标准
for (const FeatureObservation& one_feature_obs : features_full_obs) {
Eigen::VectorXd res;
Eigen::MatrixXd Hx;
Eigen::Matrix Hf;
ComputeOnePointResidualJacobian(one_feature_obs, state_size, &res, &Hx, &Hf);
if (res.size() < 4) { continue; }
// Left projection. 左投影
Eigen::MatrixXd H_bar;
Eigen::VectorXd r_bar;
LeftNullspaceProjection(Hx, Hf, res, &H_bar, &r_bar);
H_bars.push_back(H_bar);
r_bars.push_back(r_bar);
num_rows += H_bar.rows();
}
// Stack to big H and r matrices.
//存储大H和r
res->resize(num_rows);
Jacobian->resize(num_rows, state_size);
int row_idx = 0;
for (size_t i = 0; i < H_bars.size(); ++i) {
const int one_num_rows = H_bars[i].rows();
//残差
res->segment(row_idx, one_num_rows) = r_bars[i];
//雅可比
Jacobian->block(row_idx, 0, one_num_rows, state_size) = H_bars[i];
row_idx += one_num_rows;
}
}
计算一个点的残差雅可比矩阵
void VisualUpdater::ComputeOnePointResidualJacobian(const FeatureObservation& one_feature_obs,
const int state_size,
Eigen::VectorXd* residual,
Eigen::MatrixXd* Hx,
Eigen::Matrix* Hf) {
const Eigen::Vector3d& G_p = one_feature_obs.G_p;
const std::vector& camera_frame = one_feature_obs.camera_frame;
const std::vector& im_pts = one_feature_obs.im_pts;
std::vector residuals;//残差
std::vector> Js_wrt_state;//??????
std::vector> Js_wrt_feature;//???????
for (size_t i = 0; i < camera_frame.size(); ++i) {
int col_idx = camera_frame[i]->state_idx;
const Eigen::Matrix3d& G_R_C = camera_frame[i]->G_R_C;
const Eigen::Vector3d& G_p_C = camera_frame[i]->G_p_C;
const Eigen::Vector2d& im_pt = im_pts[i];
//Bundle Ajustment
Eigen::Vector2d res;
Eigen::Matrix J_wrt_cam_pose; //误差对于相机位姿的雅可比矩阵
Eigen::Matrix J_wrt_Gp;//误差对于空间点的雅可比矩阵
if (!ComputeProjectionResidualJacobian(G_R_C, G_p_C, im_pt, G_p, &res, &J_wrt_cam_pose, &J_wrt_Gp)) {
continue;
}
residuals.push_back(res);//残差
Eigen::Matrix one_J_wrt_state(2, state_size);
one_J_wrt_state.setZero();
one_J_wrt_state.block<2, 6>(0, col_idx) = J_wrt_cam_pose;
Js_wrt_state.push_back(one_J_wrt_state);//误差对相机位姿的雅可比
Js_wrt_feature.push_back(J_wrt_Gp);//误差对路标点的雅可比
}
const int num_rows = Js_wrt_state.size() * 2;
residual->resize(num_rows);
Hx->resize(num_rows, state_size);
Hf->resize(num_rows, 3);
for (size_t i = 0; i < Js_wrt_state.size(); ++i) {
const int row_idx = 2 * i;
residual->segment<2>(row_idx) = residuals[i];
Hx->block(row_idx, 0, 2, state_size) = Js_wrt_state[i];
Hf->block(row_idx, 0, 2, 3) = Js_wrt_feature[i];
}
}
计算重投影残差的雅可比矩阵
// Computes the 2D reprojection residual of one landmark in one camera, plus
// its Jacobians w.r.t. the camera pose (2x6: rotation then position) and
// the landmark's global position (2x3).
// Returns false if the point cannot be projected into the image.
bool VisualUpdater::ComputeProjectionResidualJacobian(const Eigen::Matrix3d& G_R_C,
                                                      const Eigen::Vector3d& G_p_C,
                                                      const Eigen::Vector2d& im_pt,
                                                      const Eigen::Vector3d& G_p,
                                                      Eigen::Vector2d* res,
                                                      Eigen::Matrix<double, 2, 6>* J_wrt_cam_pose,
                                                      Eigen::Matrix<double, 2, 3>* J_wrt_Gp) {
  // Transform the landmark into the camera frame.
  const Eigen::Vector3d C_p = G_R_C.transpose() * (G_p - G_p_C);
  Eigen::Vector2d exp_im_pt;
  Eigen::Matrix<double, 2, 3> J_wrt_Cp;
  // Camera frame -> pixel; also yields d(pixel)/d(C_p).
  if (!camera_->CameraToImage(C_p, &exp_im_pt, &J_wrt_Cp)) {
    return false;
  }
  // Residual: measured minus predicted pixel.
  *res = im_pt - exp_im_pt;
  // Chain rule. Skew(v) is the 3x3 skew-symmetric (cross-product) matrix;
  // d(C_p)/d(pose) = [Skew(C_p), -G_R_C^T] (rotation block, position block).
  Eigen::Matrix<double, 3, 6> J_Cp_wrt_cam_pose;
  J_Cp_wrt_cam_pose << TGK::Util::Skew(C_p), -G_R_C.transpose();
  const Eigen::Matrix3d J_Cp_wrt_Gp = G_R_C.transpose();
  *J_wrt_cam_pose = J_wrt_Cp * J_Cp_wrt_cam_pose;
  *J_wrt_Gp = J_wrt_Cp * J_Cp_wrt_Gp;
  return true;
}
// Left-nullspace projection: eliminates the landmark-position Jacobian Hf
// from the linearized measurement. With the QR factorization
// Hf = [Q1 Q2] [R; 0], the columns of Q2 span the left nullspace of Hf
// (Q2^T Hf = 0), so the projected system is H = Q2^T Hx, r = Q2^T res.
// Precondition: Hf is n x 3 with n > 3 (checked by the caller).
void LeftNullspaceProjection(const Eigen::MatrixXd& Hx,
                             const Eigen::MatrixXd& Hf,
                             const Eigen::VectorXd& res,
                             Eigen::MatrixXd* H,
                             Eigen::VectorXd* r) {
  const Eigen::Index rows = Hf.rows();
  // BUGFIX: restore the lost template argument on HouseholderQR.
  const Eigen::HouseholderQR<Eigen::MatrixXd> qr(Hf);
  const Eigen::MatrixXd Q = qr.householderQ();
  // Q2 = last (rows - 3) columns of Q.
  const Eigen::MatrixXd Q2_trans = Q.rightCols(rows - 3).transpose();
  *H = Q2_trans * Hx;
  *r = Q2_trans * res;
}
// QR-based measurement compression (MSCKF): when H is tall, replace (H, r)
// with (R, Q1^T r) where H = Q1 R, reducing the row count to H.cols()
// without losing information for the EKF update.
void CompressMeasurement(const Eigen::MatrixXd& H,
                         const Eigen::VectorXd& r,
                         Eigen::MatrixXd* H_cmp,
                         Eigen::VectorXd* r_cmp) {
  // Already square or wide: nothing to compress.
  if (H.rows() <= H.cols()) {
    *H_cmp = H;
    *r_cmp = r;
    return;
  }
  // BUGFIX: restore the lost template arguments.
  Eigen::HouseholderQR<Eigen::MatrixXd> qr(H);
  Eigen::MatrixXd Q = qr.householderQ();
  Eigen::MatrixXd Q1_trans = Q.leftCols(H.cols()).transpose();
  // The upper-triangular factor R is the compressed Jacobian.
  *H_cmp = qr.matrixQR().topRows(H.cols()).triangularView<Eigen::Upper>();
  *r_cmp = Q1_trans * r;
}
EKF更新
// Standard EKF measurement update for the model r = H * dx + n, n ~ N(0, V).
// Uses the Joseph form for the covariance update, which preserves symmetry
// and positive semi-definiteness better than (I - KH) * P.
void EKFUpdate(const Eigen::MatrixXd& H,
               const Eigen::VectorXd& r,
               const Eigen::MatrixXd& V,
               State* state) {
  const Eigen::MatrixXd P_minus = state->covariance;
  const Eigen::MatrixXd H_trans = H.transpose();
  // Innovation covariance S = H P H^T + V; invert via Cholesky solve.
  const Eigen::MatrixXd S = H * P_minus * H_trans + V;
  const Eigen::MatrixXd S_inv = S.llt().solve(Eigen::MatrixXd::Identity(S.rows(), S.cols()));
  // Kalman gain.
  const Eigen::MatrixXd K = P_minus * H_trans * S_inv;
  // State correction delta_x = K * r.
  const Eigen::VectorXd delta_x = K * r;
  state->Update(delta_x);
  // Joseph-form covariance update: P = (I - KH) P (I - KH)^T + K V K^T.
  const Eigen::MatrixXd I_KH = Eigen::MatrixXd::Identity(state->covariance.rows(), state->covariance.rows()) - K * H;
  state->covariance = I_KH * P_minus * I_KH.transpose() + K * V * K.transpose();
  // Force exact symmetry; .eval() breaks aliasing in the self-assignment.
  state->covariance = state->covariance.eval().selfadjointView<Eigen::Upper>();
  // Keep diagonal entries from collapsing to zero/negative due to round-off.
  LimitMinDiagValue(1e-12, &state->covariance);
}
知识点小结:
- `std::unordered_set`:基于哈希表的无序集合,平均 O(1) 的插入与查找,适合做特征 id 的去重集合。
- `vector::reserve(n)`:预分配容量,避免循环中反复扩容搬移。
- `const auto&`:范围 for 中按常量引用遍历,避免对元素(如 `shared_ptr`、`FeatureObservation`)的多余拷贝。
- `for (const FeatureObservation& one_feature_obs : features_full_obs)`:C++11 范围 for 循环写法。
- `.eval()`:强制表达式先求值为临时矩阵,解决 Eigen 中表达式自赋值时的别名(aliasing)问题。