The annotated source code has been published; feedback and discussion are welcome.
class LocalMapping
{
public:
LocalMapping(Map* pMap, const float bMonocular);
void SetLoopCloser(LoopClosing* pLoopCloser);
void SetTracker(Tracking* pTracker);
// Main function
void Run();
void InsertKeyFrame(KeyFrame* pKF);
// Thread Synch
// When a loop closure is detected, request Local Mapping to stop so that InsertKeyFrame cannot add new keyframes in the Local Mapping thread
void RequestStop();
void RequestReset();
bool Stop();
void Release();
bool isStopped();
bool stopRequested();
// Returns mbAcceptKeyFrames, i.e. queries whether the local mapper is currently busy
bool AcceptKeyFrames();
// Sets mbAcceptKeyFrames; false tells Tracking that
// LocalMapping is busy and cannot accept new keyframes
void SetAcceptKeyFrames(bool flag);
bool SetNotStop(bool flag);
void InterruptBA();
void RequestFinish();
bool isFinished();
int KeyframesInQueue(){
unique_lock<std::mutex> lock(mMutexNewKFs);
return mlNewKeyFrames.size();
}
protected:
// Returns whether mlNewKeyFrames is non-empty, i.e. whether there are keyframes waiting to be processed
bool CheckNewKeyFrames();
/**
 * @brief Process the keyframes in the queue
 *
 * - Compute the BoW representation, which speeds up triangulation of new MapPoints
 * - Associate the current keyframe with its MapPoints and update each MapPoint's mean viewing direction and observation distance range
 * - Insert the keyframe into the map and update the covisibility graph, essential graph and spanning tree
 */
void ProcessNewKeyFrame();
/**
 * 1. Find the keyframes with the highest covisibility with the current keyframe (top 20 for monocular, top 10 for stereo/RGB-D)
 * 2. Compute the fundamental matrix between each of these keyframes and the current keyframe
 * 3. Match feature points between the keyframes under the epipolar constraint, using BoW to speed up matching
 */
void CreateNewMapPoints();
// Cull the low-quality MapPoints that ProcessNewKeyFrame and CreateNewMapPoints added to mlpRecentAddedMapPoints
void MapPointCulling();
// Check for and fuse duplicated MapPoints between the current keyframe and its highly covisible keyframes
void SearchInNeighbors();
// Detect and remove redundant keyframes among the neighbors of the current keyframe
// A keyframe is considered redundant if 90% of its MapPoints are observed by at least 3 other keyframes
void KeyFrameCulling();
cv::Mat ComputeF12(KeyFrame* &pKF1, KeyFrame* &pKF2);
cv::Mat SkewSymmetricMatrix(const cv::Mat &v);
bool mbMonocular;
void ResetIfRequested();
bool mbResetRequested;
std::mutex mMutexReset;
bool CheckFinish();
void SetFinish();
bool mbFinishRequested;
bool mbFinished;
std::mutex mMutexFinish;
Map* mpMap;
LoopClosing* mpLoopCloser;
Tracking* mpTracker;
std::list<KeyFrame*> mlNewKeyFrames;
KeyFrame* mpCurrentKeyFrame;
std::list<MapPoint*> mlpRecentAddedMapPoints;
std::mutex mMutexNewKFs;
bool mbAbortBA;
bool mbStopped;
bool mbStopRequested;
bool mbNotStop;
std::mutex mMutexStop;
// Flag indicating whether new keyframes can currently be accepted
bool mbAcceptKeyFrames;
std::mutex mMutexAccept;
};
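For reference, the hand-off from Tracking is the InsertKeyFrame call declared above. Its body is not part of this excerpt; a minimal sketch consistent with the ORB-SLAM2 implementation is shown below: the keyframe is simply queued under mMutexNewKFs and any running local BA is interrupted.
void LocalMapping::InsertKeyFrame(KeyFrame* pKF)
{
    // Called from the Tracking thread: queue the keyframe for the Run() loop to process
    unique_lock<mutex> lock(mMutexNewKFs);
    mlNewKeyFrames.push_back(pKF);
    // Interrupt a local BA in progress so the new keyframe is handled promptly
    mbAbortBA = true;
}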
void LocalMapping::Run()
{
mbFinished = false;
while(1)
{
// Tracking will see that Local Mapping is busy
// Tell Tracking that LocalMapping is busy
// The keyframes processed by the LocalMapping thread all come from the Tracking thread
// Tracking should avoid sending keyframes too quickly while LocalMapping has not finished processing the previous ones
SetAcceptKeyFrames(false);
// Check if there are keyframes in the queue
// CheckNewKeyFrames() returns true when mlNewKeyFrames is non-empty, i.e. when there are keyframes waiting to be processed
if(CheckNewKeyFrames())
{
// BoW conversion and insertion in Map
// Compute the BoW representation of the keyframe's features and insert the keyframe into the map
ProcessNewKeyFrame();
// Check recent MapPoints
// Cull the unqualified MapPoints introduced in ProcessNewKeyFrame
MapPointCulling();
// Triangulate new MapPoints
/**
 * 1. Find the keyframes with the highest covisibility with the current keyframe (top 20 for monocular, top 10 for stereo/RGB-D)
 * 2. Compute the fundamental matrix between each of these keyframes and the current keyframe
 * 3. Match feature points between the keyframes under the epipolar constraint, using BoW to speed up matching
 */
CreateNewMapPoints();
// If the last keyframe in the queue has been processed
if(!CheckNewKeyFrames())
{
// Find more matches in neighbor keyframes and fuse point duplications
// Check for and fuse MapPoints duplicated between the current keyframe and its neighbors (first- and second-level neighbors)
SearchInNeighbors();
}
mbAbortBA = false;
// The last keyframe in the queue has been processed and loop closing has not requested LocalMapping to stop
// Run a local BA whenever there is spare time
if(!CheckNewKeyFrames() && !stopRequested())
{
// Local BA
if(mpMap->KeyFramesInMap()>2)
Optimizer::LocalBundleAdjustment(mpCurrentKeyFrame,&mbAbortBA, mpMap);
// Check redundant local Keyframes
// Detect and remove redundant keyframes among the neighbors of the current keyframe
// A keyframe is considered redundant if 90% of its MapPoints are observed by at least 3 other keyframes
// Trick!
// Tracking first hands keyframes over to the LocalMapping thread,
// and since the insertion criteria in Tracking are fairly loose, the keyframes it passes on are relatively dense
// The redundant ones are removed here
KeyFrameCulling();
}
// Add the current keyframe to the loop-closing keyframe queue
mpLoopCloser->InsertKeyFrame(mpCurrentKeyFrame);
}
else if(Stop())
{
// Safe area to stop
while(isStopped() && !CheckFinish())
{
usleep(3000);
}
if(CheckFinish())
break;
}
ResetIfRequested();
// Tracking will see that Local Mapping is not busy
// Signal that new keyframes can now be accepted
SetAcceptKeyFrames(true);
if(CheckFinish())
break;
usleep(3000);
}
SetFinish();
}
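The stop handshake used in the loop above (RequestStop from the loop-closing side, Stop and isStopped inside Run) relies on the flags guarded by mMutexStop. The bodies are not reproduced in this excerpt; a sketch consistent with the ORB-SLAM2 implementation:
void LocalMapping::RequestStop()
{
    unique_lock<mutex> lock(mMutexStop);
    mbStopRequested = true;            // ask Run() to pause at the next safe point
    unique_lock<mutex> lock2(mMutexNewKFs);
    mbAbortBA = true;                  // also abort any local BA in progress
}

bool LocalMapping::Stop()
{
    unique_lock<mutex> lock(mMutexStop);
    // Honour the request unless SetNotStop(true) currently forbids stopping
    if(mbStopRequested && !mbNotStop)
    {
        mbStopped = true;
        return true;
    }
    return false;
}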
/**
 * @brief Process the keyframes in the queue
 *
 * - Compute the BoW representation, which speeds up triangulation of new MapPoints
 * - Associate the current keyframe with its MapPoints and update each MapPoint's mean viewing direction and observation distance range
 * - Insert the keyframe into the map and update the covisibility graph, essential graph and spanning tree
 */
void LocalMapping::ProcessNewKeyFrame()
{
// Take one keyframe from the buffer queue
// Keyframes inserted into LocalMapping by the Tracking thread are stored in this queue
{
unique_lock<mutex> lock(mMutexNewKFs);
mpCurrentKeyFrame = mlNewKeyFrames.front();
mlNewKeyFrames.pop_front();
}
// Compute Bags of Words structures
// Compute this keyframe's mBowVec and mFeatVec
mpCurrentKeyFrame->ComputeBoW();
// Associate MapPoints to the new keyframe and update normal and descriptor
// Iterate over the MapPoints matched to this keyframe
const vector<MapPoint*> vpMapPointMatches = mpCurrentKeyFrame->GetMapPointMatches();
for(size_t i=0; i<vpMapPointMatches.size(); i++)
{
MapPoint* pMP = vpMapPointMatches[i];
if(pMP)
{
if(!pMP->isBad())
{
// If pMP is not yet registered as observed in mpCurrentKeyFrame, add the observation and update the MapPoint's attributes
if(!pMP->IsInKeyFrame(mpCurrentKeyFrame))
{
// Let the MapPoint know which keyframes observe it
pMP->AddObservation(mpCurrentKeyFrame, i);
// Update the MapPoint's mean viewing direction (from the observing camera centers) and its observation distance range
pMP->UpdateNormalAndDepth();
// Among all features observing this MapPoint, pick the descriptor that best represents it
pMP->ComputeDistinctiveDescriptors();
}
else // this can only happen for new stereo points inserted by the Tracking
{
// MapPoints created by the current frame
// Put MapPoints newly inserted during stereo/RGB-D tracking into mlpRecentAddedMapPoints, to be checked by MapPointCulling
// CreateNewMapPoints also generates MapPoints by triangulation
// All of these MapPoints go through the MapPointCulling check
mlpRecentAddedMapPoints.push_back(pMP);
}
}
}
}
// Update links in the Covisibility Graph
// Update the covisibility graph and spanning tree
mpCurrentKeyFrame->UpdateConnections();
// Insert Keyframe in Map
// Insert this keyframe into the map
mpMap->AddKeyFrame(mpCurrentKeyFrame);
}
// Cull the low-quality MapPoints that ProcessNewKeyFrame and CreateNewMapPoints added to mlpRecentAddedMapPoints
void LocalMapping::MapPointCulling()
{
// Check Recent Added MapPoints
list<MapPoint*>::iterator lit = mlpRecentAddedMapPoints.begin();
const unsigned long int nCurrentKFid = mpCurrentKeyFrame->mnId;
int nThObs;
if(mbMonocular)
nThObs = 2;
else
nThObs = 3;
const int cnThObs = nThObs;
// Iterate over mlpRecentAddedMapPoints
while(lit!=mlpRecentAddedMapPoints.end())
{
MapPoint* pMP = *lit;
// MapPoints already flagged as bad are simply removed from the check list
if(pMP->isBad())
{
lit = mlpRecentAddedMapPoints.erase(lit);
}
// The ratio of frames that actually tracked this MapPoint to frames that should have been able to observe it must exceed 25%
// i.e. it is culled if IncreaseFound / IncreaseVisible < 25%; note these are frames, not necessarily keyframes
else if(pMP->GetFoundRatio()<0.25f )
{
pMP->SetBadFlag();
lit = mlpRecentAddedMapPoints.erase(lit);
}
// VI-B condition 2: at least 2 keyframes have passed since this point was created,
// yet the number of keyframes observing it does not exceed cnThObs, so the point fails the check
else if(((int)nCurrentKFid-(int)pMP->mnFirstKFid)>=2 && pMP->Observations()<=cnThObs)
{
pMP->SetBadFlag();
lit = mlpRecentAddedMapPoints.erase(lit);
}
// Step 4: if 3 keyframes have passed since the point was created and it has not been culled, it is considered a high-quality point
// So no SetBadFlag(); it is only removed from the queue and no longer monitored
else if(((int)nCurrentKFid-(int)pMP->mnFirstKFid)>=3)
lit = mlpRecentAddedMapPoints.erase(lit);
else
lit++;
}
}
/**
 * 1. Find the keyframes with the highest covisibility with the current keyframe (top 20 for monocular, top 10 for stereo/RGB-D)
 * 2. Compute the fundamental matrix between each of these keyframes and the current keyframe
 * 3. Match feature points between the keyframes under the epipolar constraint, using BoW to speed up matching
 */
*/
void LocalMapping::CreateNewMapPoints()
{
// Retrieve neighbor keyframes in covisibility graph
int nn = 10;
if(mbMonocular)
nn=20;
// Find the nn neighbor keyframes with the highest covisibility in the current keyframe's covisibility graph and store them in vpNeighKFs
const vector<KeyFrame*> vpNeighKFs = mpCurrentKeyFrame->GetBestCovisibilityKeyFrames(nn);
ORBmatcher matcher(0.6,false);
// Pose of the current keyframe in the world coordinate frame
cv::Mat Rcw1 = mpCurrentKeyFrame->GetRotation();
cv::Mat Rwc1 = Rcw1.t();
cv::Mat tcw1 = mpCurrentKeyFrame->GetTranslation();
cv::Mat Tcw1(3,4,CV_32F);
Rcw1.copyTo(Tcw1.colRange(0,3));
tcw1.copyTo(Tcw1.col(3));
// Camera center of the current keyframe in world coordinates
cv::Mat Ow1 = mpCurrentKeyFrame->GetCameraCenter();
const float &fx1 = mpCurrentKeyFrame->fx;
const float &fy1 = mpCurrentKeyFrame->fy;
const float &cx1 = mpCurrentKeyFrame->cx;
const float &cy1 = mpCurrentKeyFrame->cy;
const float &invfx1 = mpCurrentKeyFrame->invfx;
const float &invfy1 = mpCurrentKeyFrame->invfy;
const float ratioFactor = 1.5f*mpCurrentKeyFrame->mfScaleFactor;
int nnew=0;
// Search matches with epipolar restriction and triangulate
// Iterate over the neighbor keyframes in vpNeighKFs
for(size_t i=0; i<vpNeighKFs.size(); i++)
{
if(i>0 && CheckNewKeyFrames())
return;
KeyFrame* pKF2 = vpNeighKFs[i];
// Check first that baseline is not too short
// Camera center of the neighbor keyframe in world coordinates
cv::Mat Ow2 = pKF2->GetCameraCenter();
// Baseline vector, i.e. the displacement between the two keyframes' camera centers
cv::Mat vBaseline = Ow2-Ow1;
// Baseline length
const float baseline = cv::norm(vBaseline);
// Check that the baseline of the camera motion is long enough
if(!mbMonocular)
{
// For stereo cameras, do not create 3D points if the keyframes are too close together
if(baseline<pKF2->mb)
continue;
}
else
{
// Median scene depth of the neighbor keyframe
const float medianDepthKF2 = pKF2->ComputeSceneMedianDepth(2);
// Ratio of baseline to scene depth
const float ratioBaselineDepth = baseline/medianDepthKF2;
// If the scene is very far away (the ratio is very small), skip this neighbor keyframe and create no 3D points
if(ratioBaselineDepth<0.01)
continue;
}
// Compute Fundamental Matrix
// Compute the fundamental matrix between the two keyframes from their poses
cv::Mat F12 = ComputeF12(mpCurrentKeyFrame,pKF2);
// Search matches that fulfil the epipolar constraint
// Match feature points, restricting the search region with the epipolar constraint
vector<pair<size_t,size_t> > vMatchedIndices;
// Match still-unmatched features between the current keyframe and pKF2, accelerated with BoW, and verify the epipolar constraint; vMatchedIndices stores the indices of each matched pair in its respective keyframe
matcher.SearchForTriangulation(mpCurrentKeyFrame,pKF2,F12,vMatchedIndices,false);
cv::Mat Rcw2 = pKF2->GetRotation();
cv::Mat Rwc2 = Rcw2.t();
cv::Mat tcw2 = pKF2->GetTranslation();
cv::Mat Tcw2(3,4,CV_32F);
Rcw2.copyTo(Tcw2.colRange(0,3));
tcw2.copyTo(Tcw2.col(3));
const float &fx2 = pKF2->fx;
const float &fy2 = pKF2->fy;
const float &cx2 = pKF2->cx;
const float &cy2 = pKF2->cy;
const float &invfx2 = pKF2->invfx;
const float &invfy2 = pKF2->invfy;
// Triangulate each match
// Triangulate a 3D point from each matched pair
const int nmatches = vMatchedIndices.size();
// Iterate over vMatchedIndices
for(int ikp=0; ikp<nmatches; ikp++)
{
const int &idx1 = vMatchedIndices[ikp].first;
const int &idx2 = vMatchedIndices[ikp].second;
const cv::KeyPoint &kp1 = mpCurrentKeyFrame->mvKeysUn[idx1];
const float kp1_ur=mpCurrentKeyFrame->mvuRight[idx1];
bool bStereo1 = kp1_ur>=0;
const cv::KeyPoint &kp2 = pKF2->mvKeysUn[idx2];
const float kp2_ur = pKF2->mvuRight[idx2];
bool bStereo2 = kp2_ur>=0;
// Check parallax between rays
// Convert pixel coordinates to normalized image plane coordinates
cv::Mat xn1 = (cv::Mat_<float>(3,1) << (kp1.pt.x-cx1)*invfx1, (kp1.pt.y-cy1)*invfy1, 1.0);
cv::Mat xn2 = (cv::Mat_<float>(3,1) << (kp2.pt.x-cx2)*invfx2, (kp2.pt.y-cy2)*invfy2, 1.0);
cv::Mat ray1 = Rwc1*xn1;
cv::Mat ray2 = Rwc2*xn2;
const float cosParallaxRays = ray1.dot(ray2)/(cv::norm(ray1)*cv::norm(ray2));
float cosParallaxStereo = cosParallaxRays+1;
float cosParallaxStereo1 = cosParallaxStereo;
float cosParallaxStereo2 = cosParallaxStereo;
if(bStereo1)
cosParallaxStereo1 = cos(2*atan2(mpCurrentKeyFrame->mb/2,mpCurrentKeyFrame->mvDepth[idx1]));
else if(bStereo2)
cosParallaxStereo2 = cos(2*atan2(pKF2->mb/2,pKF2->mvDepth[idx2]));
cosParallaxStereo = min(cosParallaxStereo1,cosParallaxStereo2);
cv::Mat x3D;
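// Use linear (DLT) triangulation when the ray parallax is better (smaller cosine) than the stereo parallax;
// for pure monocular matches the parallax angle must additionally exceed about 1.1 degrees (cosParallaxRays < 0.9998)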
if(cosParallaxRays<cosParallaxStereo && cosParallaxRays>0 && (bStereo1 || bStereo2 || cosParallaxRays<0.9998))
{
// Linear Triangulation Method
cv::Mat A(4,4,CV_32F);
A.row(0) = xn1.at<float>(0)*Tcw1.row(2)-Tcw1.row(0);
A.row(1) = xn1.at<float>(1)*Tcw1.row(2)-Tcw1.row(1);
A.row(2) = xn2.at<float>(0)*Tcw2.row(2)-Tcw2.row(0);
A.row(3) = xn2.at<float>(1)*Tcw2.row(2)-Tcw2.row(1);
cv::Mat w,u,vt;
// SVD decomposition A = u*w*vt
// What is actually being solved is Ax = 0
cv::SVD::compute(A,w,u,vt,cv::SVD::MODIFY_A| cv::SVD::FULL_UV);
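// Each view contributes two rows to A: writing the projection as x ~ T*X with x in normalized
// coordinates, the constraint x cross (T*X) = 0 yields the rows u*T.row(2)-T.row(0) and v*T.row(2)-T.row(1).
// The homogeneous solution of A*X = 0 is the right singular vector with the smallest singular value,
// i.e. the last row of vt (taken below and then de-homogenized).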
x3D = vt.row(3).t();
if(x3D.at<float>(3)==0)
continue;
// Euclidean coordinates
// Divide by the homogeneous coordinate to get Euclidean coordinates
x3D = x3D.rowRange(0,3)/x3D.at<float>(3);
}
else if(bStereo1 && cosParallaxStereo1<cosParallaxStereo2)
{
x3D = mpCurrentKeyFrame->UnprojectStereo(idx1);
}
else if(bStereo2 && cosParallaxStereo2<cosParallaxStereo1)
{
x3D = pKF2->UnprojectStereo(idx2);
}
else
continue; //No stereo and very low parallax
cv::Mat x3Dt = x3D.t();
//Check triangulation in front of cameras
// Check that the triangulated point has positive depth in both keyframes
float z1 = Rcw1.row(2).dot(x3Dt)+tcw1.at<float>(2);
if(z1<=0)
continue;
float z2 = Rcw2.row(2).dot(x3Dt)+tcw2.at<float>(2);
if(z2<=0)
continue;
//Check reprojection error in first keyframe
const float &sigmaSquare1 = mpCurrentKeyFrame->mvLevelSigma2[kp1.octave];
const float x1 = Rcw1.row(0).dot(x3Dt)+tcw1.at<float>(0);
const float y1 = Rcw1.row(1).dot(x3Dt)+tcw1.at<float>(1);
const float invz1 = 1.0/z1;
if(!bStereo1)
{
float u1 = fx1*x1*invz1+cx1;
float v1 = fy1*y1*invz1+cy1;
float errX1 = u1 - kp1.pt.x;
float errY1 = v1 - kp1.pt.y;
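// 5.991 is the 95% chi-square threshold with 2 degrees of freedom; the squared error is scaled by the octave's sigma^2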
if((errX1*errX1+errY1*errY1)>5.991*sigmaSquare1)
continue;
}
else
{
float u1 = fx1*x1*invz1+cx1;
float u1_r = u1 - mpCurrentKeyFrame->mbf*invz1;
float v1 = fy1*y1*invz1+cy1;
float errX1 = u1 - kp1.pt.x;
float errY1 = v1 - kp1.pt.y;
float errX1_r = u1_r - kp1_ur;
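// 7.8 approximates the 95% chi-square threshold with 3 degrees of freedom (7.815), since the stereo check adds the right-image u coordinate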
if((errX1*errX1+errY1*errY1+errX1_r*errX1_r)>7.8*sigmaSquare1)
continue;
}
//Check reprojection error in second keyframe
// Check the reprojection error of the 3D point in the second keyframe
const float sigmaSquare2 = pKF2->mvLevelSigma2[kp2.octave];
const float x2 = Rcw2.row(0).dot(x3Dt)+tcw2.at<float>(0);
const float y2 = Rcw2.row(1).dot(x3Dt)+tcw2.at<float>(1);
const float invz2 = 1.0/z2;
if(!bStereo2)
{
float u2 = fx2*x2*invz2+cx2;
float v2 = fy2*y2*invz2+cy2;
float errX2 = u2 - kp2.pt.x;
float errY2 = v2 - kp2.pt.y;
if((errX2*errX2+errY2*errY2)>5.991*sigmaSquare2)
continue;
}
else
{
float u2 = fx2*x2*invz2+cx2;
float u2_r = u2 - mpCurrentKeyFrame->mbf*invz2;
float v2 = fy2*y2*invz2+cy2;
float errX2 = u2 - kp2.pt.x;
float errY2 = v2 - kp2.pt.y;
float errX2_r = u2_r - kp2_ur;
if((errX2*errX2+errY2*errY2+errX2_r*errX2_r)>7.8*sigmaSquare2)
continue;
}
//Check scale consistency
// The distance ratio between the two views must be consistent with the octave (scale) ratio of the two observations
cv::Mat normal1 = x3D-Ow1;
float dist1 = cv::norm(normal1);
cv::Mat normal2 = x3D-Ow2;
float dist2 = cv::norm(normal2);
if(dist1==0 || dist2==0)
continue;
const float ratioDist = dist2/dist1;
const float ratioOctave = mpCurrentKeyFrame->mvScaleFactors[kp1.octave]/pKF2->mvScaleFactors[kp2.octave];
/*if(fabs(ratioDist-ratioOctave)>ratioFactor)
continue;*/
if(ratioDist*ratioFactor<ratioOctave || ratioDist>ratioOctave*ratioFactor)
continue;
// Triangulation is successful
// Create the new MapPoint and register its observations
MapPoint* pMP = new MapPoint(x3D,mpCurrentKeyFrame,mpMap);
pMP->AddObservation(mpCurrentKeyFrame,idx1);
pMP->AddObservation(pKF2,idx2);
mpCurrentKeyFrame->AddMapPoint(pMP,idx1);
pKF2->AddMapPoint(pMP,idx2);
pMP->ComputeDistinctiveDescriptors();
pMP->UpdateNormalAndDepth();
mpMap->AddMapPoint(pMP);
// Newly created points are added here so that MapPointCulling() can check them later
mlpRecentAddedMapPoints.push_back(pMP);
nnew++;
}
}
}
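ComputeF12, used above to set up the epipolar search, builds the fundamental matrix from the two keyframe poses. Its body is not included in this excerpt; a sketch consistent with the ORB-SLAM2 source, following the standard relation F12 = K1^{-T} [t12]_x R12 K2^{-1}:
cv::Mat LocalMapping::ComputeF12(KeyFrame* &pKF1, KeyFrame* &pKF2)
{
    // Relative pose of keyframe 2 expressed in keyframe 1: R12, t12
    cv::Mat R1w = pKF1->GetRotation();
    cv::Mat t1w = pKF1->GetTranslation();
    cv::Mat R2w = pKF2->GetRotation();
    cv::Mat t2w = pKF2->GetTranslation();

    cv::Mat R12 = R1w*R2w.t();
    cv::Mat t12 = -R1w*R2w.t()*t2w + t1w;

    // [t12]_x, the skew-symmetric matrix of the relative translation
    cv::Mat t12x = SkewSymmetricMatrix(t12);

    const cv::Mat &K1 = pKF1->mK;
    const cv::Mat &K2 = pKF2->mK;

    // F12 = K1^{-T} [t12]_x R12 K2^{-1}
    return K1.t().inv()*t12x*R12*K2.inv();
}

cv::Mat LocalMapping::SkewSymmetricMatrix(const cv::Mat &v)
{
    return (cv::Mat_<float>(3,3) <<             0, -v.at<float>(2),  v.at<float>(1),
                                   v.at<float>(2),              0, -v.at<float>(0),
                                  -v.at<float>(1),  v.at<float>(0),              0);
}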
// Check for and fuse duplicated MapPoints between the current keyframe and its highly covisible keyframes
void LocalMapping::SearchInNeighbors()
{
// Retrieve neighbor keyframes
int nn = 10;
if(mbMonocular)
nn=20;
// Get the keyframes with the highest covisibility with the current keyframe and store them in vpNeighKFs
const vector<KeyFrame*> vpNeighKFs = mpCurrentKeyFrame->GetBestCovisibilityKeyFrames(nn);
vector<KeyFrame*> vpTargetKFs;
// Iterate over vpNeighKFs, i.e. the top-nn neighbor keyframes by weight in the current keyframe's covisibility graph
for(vector<KeyFrame*>::const_iterator vit=vpNeighKFs.begin(), vend=vpNeighKFs.end(); vit!=vend; vit++)
{
KeyFrame* pKFi = *vit;
// Skip this keyframe if it is bad or has already been marked by the current keyframe
if(pKFi->isBad() || pKFi->mnFuseTargetForKF == mpCurrentKeyFrame->mnId)
continue;
vpTargetKFs.push_back(pKFi);
pKFi->mnFuseTargetForKF = mpCurrentKeyFrame->mnId;
// Extend to some second neighbors
// Extend the target set with keyframes that have high covisibility with pKFi (second-level neighbors)
const vector<KeyFrame*> vpSecondNeighKFs = pKFi->GetBestCovisibilityKeyFrames(5);
for(vector<KeyFrame*>::const_iterator vit2=vpSecondNeighKFs.begin(), vend2=vpSecondNeighKFs.end(); vit2!=vend2; vit2++)
{
KeyFrame* pKFi2 = *vit2;
if(pKFi2->isBad() || pKFi2->mnFuseTargetForKF==mpCurrentKeyFrame->mnId || pKFi2->mnId==mpCurrentKeyFrame->mnId)
continue;
vpTargetKFs.push_back(pKFi2);
}
}
// Search matches by projection from current KF in target KFs
ORBmatcher matcher;
// Take the current keyframe's MapPoints and fuse them with the MapPoints of each target keyframe
vector<MapPoint*> vpMapPointMatches = mpCurrentKeyFrame->GetMapPointMatches();
// Iterate over the keyframes in vpTargetKFs
for(vector<KeyFrame*>::iterator vit=vpTargetKFs.begin(), vend=vpTargetKFs.end(); vit!=vend; vit++)
{
KeyFrame* pKFi = *vit;
// Match the MapPoints in vpMapPointMatches against the features of pKFi; if a matched feature already has a MapPoint associated with it,
// keep one of the two MapPoints for that feature and discard the other. This is what fusing MapPoints means
matcher.Fuse(pKFi,vpMapPointMatches);
}
// Search matches by projection from target KFs in current KF
// Collect all good MapPoints observed by the keyframes in vpTargetKFs
vector<MapPoint*> vpFuseCandidates;
vpFuseCandidates.reserve(vpTargetKFs.size()*vpMapPointMatches.size());
for(vector<KeyFrame*>::iterator vitKF=vpTargetKFs.begin(), vendKF=vpTargetKFs.end(); vitKF!=vendKF; vitKF++)
{
KeyFrame* pKFi = *vitKF;
vector<MapPoint*> vpMapPointsKFi = pKFi->GetMapPointMatches();
for(vector<MapPoint*>::iterator vitMP=vpMapPointsKFi.begin(), vendMP=vpMapPointsKFi.end(); vitMP!=vendMP; vitMP++)
{
MapPoint* pMP = *vitMP;
if(!pMP)
continue;
if(pMP->isBad() || pMP->mnFuseCandidateForKF == mpCurrentKeyFrame->mnId)
continue;
pMP->mnFuseCandidateForKF = mpCurrentKeyFrame->mnId;
vpFuseCandidates.push_back(pMP);
}
}
// Fuse the candidate MapPoints into the current keyframe
matcher.Fuse(mpCurrentKeyFrame,vpFuseCandidates);
// Update points
// Update the MapPoint attributes
vpMapPointMatches = mpCurrentKeyFrame->GetMapPointMatches();
for(size_t i=0, iend=vpMapPointMatches.size(); i<iend; i++)
{
MapPoint* pMP=vpMapPointMatches[i];
if(pMP)
{
if(!pMP->isBad())
{
// Among all features observing this MapPoint, pick the descriptor that best represents it
pMP->ComputeDistinctiveDescriptors();
// Update the mean viewing direction and observation distance
pMP->UpdateNormalAndDepth();
}
}
}
// Update connections in covisibility graph
// Update the covisibility graph, essential graph and spanning tree
mpCurrentKeyFrame->UpdateConnections();
}
// Detect and remove redundant keyframes among the neighbors of the current keyframe
// A keyframe is considered redundant if 90% of its MapPoints are observed by at least 3 other keyframes
void LocalMapping::KeyFrameCulling()
{
// Check redundant keyframes (only local keyframes)
// A keyframe is considered redundant if 90% of the MapPoints it sees are seen
// by at least 3 other keyframes (at the same or finer scale)
// We only consider close stereo points
// Get the keyframes connected to the current keyframe in the covisibility graph
vector<KeyFrame*> vpLocalKeyFrames = mpCurrentKeyFrame->GetVectorCovisibleKeyFrames();
// Iterate over vpLocalKeyFrames
for(vector<KeyFrame*>::iterator vit=vpLocalKeyFrames.begin(), vend=vpLocalKeyFrames.end(); vit!=vend; vit++)
{
KeyFrame* pKF = *vit;
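// Never cull the first keyframe; it anchors the map origin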
if(pKF->mnId==0)
continue;
// Get the MapPoints of each covisible keyframe
const vector<MapPoint*> vpMapPoints = pKF->GetMapPointMatches();
int nObs = 3;
const int thObs=nObs;
int nRedundantObservations=0;
int nMPs=0;
// Iterate over the MapPoints of this local keyframe pKF and check whether more than 90% of them are observed by at least 3 other keyframes
for(size_t i=0, iend=vpMapPoints.size(); i<iend; i++)
{
MapPoint* pMP = vpMapPoints[i];
if(pMP)
{
if(!pMP->isBad())
{
if(!mbMonocular)
{
if(pKF->mvDepth[i]>pKF->mThDepth || pKF->mvDepth[i]<0)
continue;
}
nMPs++;
// If the number of keyframes observing pMP is greater than thObs
if(pMP->Observations()>thObs)
{
const int &scaleLevel = pKF->mvKeysUn[i].octave;
const map<KeyFrame*, size_t> observations = pMP->GetObservations();
int nObs=0;
for(map<KeyFrame*, size_t>::const_iterator mit=observations.begin(), mend=observations.end(); mit!=mend; mit++)
{
KeyFrame* pKFi = mit->first;
if(pKFi==pKF)
continue;
const int &scaleLeveli = pKFi->mvKeysUn[mit->second].octave;
if(scaleLeveli<=scaleLevel+1)
{
nObs++;
if(nObs>=thObs)
break;
}
}
if(nObs>=thObs)
{
nRedundantObservations++;
}
}
}
}
}
if(nRedundantObservations>0.9*nMPs)
pKF->SetBadFlag();
}
}