Contents
4.1 Overview of System.cc
4.2 The System constructor
4.3 Stereo entry point (TrackStereo)
4.4 RGB-D entry point (TrackRGBD)
4.5 Monocular entry point (TrackMonocular)
4.6 Other functions
The entry point of the whole pipeline is in system.cc, whose code is largely identical to ORB-SLAM2's.
The constructor takes four parameters relevant to the pipeline:
System::System(const string &strVocFile,      // path to the vocabulary file
               const string &strSettingsFile, // path to the settings file
               const eSensor sensor,          // sensor type
               const bool bUseViewer):        // whether to launch the viewer
    mSensor(sensor),                          // initialize the sensor type
    mpViewer(static_cast<Viewer*>(NULL)),     // viewer handle, initialized to NULL
    mbReset(false),                           // reset flag, initialized to false
    mbActivateLocalizationMode(false),        // request flag for localization-only (no mapping) mode
    mbDeactivateLocalizationMode(false)       // request flag for leaving localization-only mode
{
The constructor initializes the following (summarized in the numbered list after the code):
{
// Output welcome message
cout << endl <<
"DynaSLAM Copyright (C) 2018 Berta Bescos, University of Zaragoza." << endl << endl;
// Print the current sensor type
cout << "Input sensor was set to: ";
if(mSensor==MONOCULAR)
cout << "Monocular" << endl;
else if(mSensor==STEREO)
cout << "Stereo" << endl;
else if(mSensor==RGBD)
cout << "RGB-D" << endl;
// Check the settings file
cv::FileStorage fsSettings(strSettingsFile.c_str(), // settings file name as a C string
cv::FileStorage::READ);                             // open read-only
// If it cannot be opened, print an error and exit
if(!fsSettings.isOpened())
{
cerr << "Failed to open settings file at: " << strSettingsFile << endl;
exit(-1);
}
//Load ORB Vocabulary
cout << endl << "Loading ORB Vocabulary. This could take a while..." << endl;
// Create a new ORB vocabulary
mpVocabulary = new ORBVocabulary();
// Load it from the text file and record whether loading succeeded
bool bVocLoad = mpVocabulary->loadFromTextFile(strVocFile);
// If loading failed, print an error and exit
if(!bVocLoad)
{
cerr << "Wrong path to vocabulary. " << endl;
cerr << "Falied to open at: " << strVocFile << endl;
exit(-1);
}
cout << "Vocabulary loaded!" << endl << endl;
// Create the keyframe database
mpKeyFrameDatabase = new KeyFrameDatabase(*mpVocabulary);
// Create the map
mpMap = new Map();
//Create Drawers. These are used by the Viewer
// The frame drawer and map drawer are used by the Viewer
mpFrameDrawer = new FrameDrawer(mpMap);
mpMapDrawer = new MapDrawer(mpMap, strSettingsFile);
//Initialize the Tracking thread
//(it will live in the main thread of execution, the one that called this constructor)
// Initialize the Tracking thread
mpTracker = new Tracking(this, mpVocabulary, mpFrameDrawer, mpMapDrawer,
mpMap, mpKeyFrameDatabase, strSettingsFile, mSensor);
//Initialize the Local Mapping thread and launch
// Initialize the Local Mapping thread and launch it
mpLocalMapper = new LocalMapping(mpMap, mSensor==MONOCULAR);
mptLocalMapping = new thread(&ORB_SLAM2::LocalMapping::Run,mpLocalMapper);
//Initialize the Loop Closing thread and launch
// Initialize the Loop Closing thread and launch it
mpLoopCloser = new LoopClosing(mpMap, mpKeyFrameDatabase, mpVocabulary, mSensor!=MONOCULAR);
mptLoopClosing = new thread(&ORB_SLAM2::LoopClosing::Run, mpLoopCloser);
//Initialize the Viewer thread and launch
// If bUseViewer==true, initialize and launch the Viewer thread
if(bUseViewer)
{
mpViewer = new Viewer(this, mpFrameDrawer,mpMapDrawer,mpTracker,strSettingsFile);
mptViewer = new thread(&Viewer::Run, mpViewer);
mpTracker->SetViewer(mpViewer);
}
//Set pointers between threads
// Set pointers between threads
mpTracker->SetLocalMapper(mpLocalMapper);
mpTracker->SetLoopClosing(mpLoopCloser);
mpLocalMapper->SetTracker(mpTracker);
mpLocalMapper->SetLoopCloser(mpLoopCloser);
mpLoopCloser->SetTracker(mpTracker);
mpLoopCloser->SetLocalMapper(mpLocalMapper);
}
1) Initialize the bag-of-words vocabulary: it is used for retrieval during loop closing and relocalization, finding the historical keyframe most similar to the current frame so that a pose match can be attempted.
2) Initialize the keyframe database: if this module merely stored data it would be uninteresting; as noted above, the data serves loop closing and relocalization, so besides storing keyframes the module also implements the loop-closure and relocalization queries.
3) Initialize the map, which mainly stores keyframes and map points.
4) Initialize the viewer and launch its thread.
5) Initialize local mapping and launch its thread.
6) Initialize loop closing and launch its thread.
7) Wire up the pointers between the threads.
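To make the constructor's role concrete, here is a minimal driver sketch in the spirit of the programs shipped under Examples/ (the vocabulary and settings paths are hypothetical):

#include "System.h"

int main()
{
    // Constructing System spawns the LocalMapping, LoopClosing and (optionally)
    // Viewer threads; Tracking runs in this, the caller's, thread.
    ORB_SLAM2::System SLAM("Vocabulary/ORBvoc.txt",     // vocabulary path (hypothetical)
                           "Examples/RGB-D/TUM1.yaml",  // settings path (hypothetical)
                           ORB_SLAM2::System::RGBD,     // sensor type
                           true);                       // launch the viewer
    // ... feed frames through TrackStereo / TrackRGBD / TrackMonocular ...
    SLAM.Shutdown();
    return 0;
}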
When the input sensor is a stereo camera, the system is driven through the following function.
// Tracking entry point for stereo input
cv::Mat System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const cv::Mat &maskLeft, const cv::Mat &maskRight, const double &timestamp)
{
// Verify the sensor type matches; exit otherwise
if(mSensor!=STEREO)
{
cerr << "ERROR: you called TrackStereo but input sensor was not set to STEREO." << endl;
exit(-1);
}
// Check mode change
// Check for a mode-change request
{
// Lock so that other threads cannot modify the flags meanwhile
unique_lock<mutex> lock(mMutexMode);
// If localization-only mode was requested
if(mbActivateLocalizationMode)
{
mpLocalMapper->RequestStop();
// Wait until Local Mapping has effectively stopped
while(!mpLocalMapper->isStopped())
{
usleep(1000);
}
mpTracker->InformOnlyTracking(true);
mbActivateLocalizationMode = false;
}
// If leaving localization-only mode was requested
if(mbDeactivateLocalizationMode)
{
mpTracker->InformOnlyTracking(false);
mpLocalMapper->Release();
mbDeactivateLocalizationMode = false;
}
}
// Check reset
// Check for a reset request
{
unique_lock<mutex> lock(mMutexReset);
if(mbReset)
{
mpTracker->Reset();
mbReset = false;
}
}
// Tcw holds the estimated camera pose
cv::Mat Tcw = mpTracker->GrabImageStereo(imLeft,imRight, maskLeft, maskRight, timestamp);
unique_lock<mutex> lock2(mMutexState);
// Record the tracking state
mTrackingState = mpTracker->mState;
// Record the map points tracked in the current frame
mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
// Record the undistorted keypoints of the current frame
mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
// Return the estimated camera pose
return Tcw;
}
As the code shows, each time a frame arrives the following steps are executed:
1) Check that the system is in stereo mode; exit otherwise.
2) Check whether pure-localization mode must be entered or left, and apply the change if requested.
3) Check whether tracking must be reset, and reset it if requested.
4) Run the stereo tracker. GrabImageStereo takes five parameters: the left image, the right image, the left and right masks, and the timestamp.
5) Finally, record the tracking state and return the estimated camera motion.
Compared with ORB-SLAM2: because DynaSLAM performs dynamic-object detection, both the left and the right image are accompanied by an object-detection mask.
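As a usage sketch, one iteration of a stereo driver loop could look as follows; the file paths, and the assumption that the masks were precomputed by the segmentation network, are mine:

#include <opencv2/opencv.hpp>
#include "System.h"

// Feed one stereo frame plus its per-eye dynamic-object masks to the tracker.
cv::Mat TrackOneStereoFrame(ORB_SLAM2::System &SLAM, double tframe)
{
    cv::Mat imLeft    = cv::imread("left/000000.png",  cv::IMREAD_UNCHANGED); // hypothetical paths
    cv::Mat imRight   = cv::imread("right/000000.png", cv::IMREAD_UNCHANGED);
    cv::Mat maskLeft  = cv::imread("maskL/000000.png", cv::IMREAD_GRAYSCALE); // precomputed masks
    cv::Mat maskRight = cv::imread("maskR/000000.png", cv::IMREAD_GRAYSCALE);
    // Returns the estimated pose Tcw, or an empty matrix if tracking failed.
    return SLAM.TrackStereo(imLeft, imRight, maskLeft, maskRight, tframe);
}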
When the input sensor is an RGB-D camera, the system is driven through the following function.
// Tracking entry point for RGB-D input (DynaSLAM overload with output images)
cv::Mat System::TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, cv::Mat &mask,
const double &timestamp, cv::Mat &imRGBOut,
cv::Mat &imDOut, cv::Mat &maskOut)
{
// Verify the sensor type matches
if(mSensor!=RGBD)
{
cerr << "ERROR: you called TrackRGBD but input sensor was not set to RGBD." << endl;
exit(-1);
}
// Check mode change
// Check for a mode-change request
{
unique_lock<mutex> lock(mMutexMode);
if(mbActivateLocalizationMode)
{
mpLocalMapper->RequestStop();
// Wait until Local Mapping has effectively stopped
while(!mpLocalMapper->isStopped())
{
usleep(1000);
}
mpTracker->InformOnlyTracking(true);
mbActivateLocalizationMode = false;
}
if(mbDeactivateLocalizationMode)
{
mpTracker->InformOnlyTracking(false);
mpLocalMapper->Release();
mbDeactivateLocalizationMode = false;
}
}
// Check reset
// Check for a reset request
{
unique_lock<mutex> lock(mMutexReset);
if(mbReset)
{
mpTracker->Reset();
mbReset = false;
}
}
cv::Mat Tcw = mpTracker->GrabImageRGBD(im,depthmap,mask,timestamp,imRGBOut,imDOut,maskOut);
unique_lock<mutex> lock2(mMutexState);
mTrackingState = mpTracker->mState;
mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
return Tcw;
}
// The other RGB-D tracking entry point; identical except that it does not output the RGB image, depth map, or object-detection mask
cv::Mat System::TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, cv::Mat &mask,
const double &timestamp)
{
if(mSensor!=RGBD)
{
cerr << "ERROR: you called TrackRGBD but input sensor was not set to RGBD." << endl;
exit(-1);
}
// Check mode change
{
unique_lock<mutex> lock(mMutexMode);
if(mbActivateLocalizationMode)
{
mpLocalMapper->RequestStop();
// Wait until Local Mapping has effectively stopped
while(!mpLocalMapper->isStopped())
{
usleep(1000);
}
mpTracker->InformOnlyTracking(true);
mbActivateLocalizationMode = false;
}
if(mbDeactivateLocalizationMode)
{
mpTracker->InformOnlyTracking(false);
mpLocalMapper->Release();
mbDeactivateLocalizationMode = false;
}
}
// Check reset
{
unique_lock<mutex> lock(mMutexReset);
if(mbReset)
{
mpTracker->Reset();
mbReset = false;
}
}
cv::Mat Tcw = mpTracker->GrabImageRGBD(im,depthmap,mask,timestamp);
unique_lock<mutex> lock2(mMutexState);
mTrackingState = mpTracker->mState;
mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
return Tcw;
}
Compared with stereo: the overall RGB-D flow is identical to the stereo one; the only difference is that the RGB-D entry point is called instead, whose basic overload takes four parameters: the color image, the depth map, the object-detection mask, and the timestamp.
Compared with ORB-SLAM2: there are two System::TrackRGBD overloads. One mirrors ORB-SLAM2's interface apart from the added input mask; the other, because DynaSLAM detects dynamic objects and inpaints the background, additionally outputs an RGB image, a depth map, and an object-detection mask.
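A sketch of calling the DynaSLAM-specific overload; the variable names are mine, and the interpretation of the outputs in the comments is an assumption based on the inpainting stage described in the paper:

#include <opencv2/opencv.hpp>
#include "System.h"

cv::Mat TrackOneRGBDFrame(ORB_SLAM2::System &SLAM, const cv::Mat &imRGB,
                          const cv::Mat &imD, cv::Mat &mask, double tframe)
{
    cv::Mat imRGBOut, imDOut, maskOut; // filled in by the tracker
    cv::Mat Tcw = SLAM.TrackRGBD(imRGB, imD, mask, tframe, imRGBOut, imDOut, maskOut);
    // imRGBOut: frame with dynamic content removed/inpainted (assumption);
    // imDOut:   the corresponding depth map; maskOut: the refined mask.
    return Tcw;
}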
// Tracking entry point for monocular input
cv::Mat System::TrackMonocular(const cv::Mat &im, const cv::Mat &mask, const double &timestamp)
{
if(mSensor!=MONOCULAR)
{
cerr << "ERROR: you called TrackMonocular but input sensor was not set to Monocular." << endl;
exit(-1);
}
// Check mode change
{
unique_lock<mutex> lock(mMutexMode);
if(mbActivateLocalizationMode)
{
mpLocalMapper->RequestStop();
// Wait until Local Mapping has effectively stopped
while(!mpLocalMapper->isStopped())
{
usleep(1000);
}
mpTracker->InformOnlyTracking(true);
mbActivateLocalizationMode = false;
}
if(mbDeactivateLocalizationMode)
{
mpTracker->InformOnlyTracking(false);
mpLocalMapper->Release();
mbDeactivateLocalizationMode = false;
}
}
// Check reset
{
unique_lock<mutex> lock(mMutexReset);
if(mbReset)
{
mpTracker->Reset();
mbReset = false;
}
}
cv::Mat Tcw = mpTracker->GrabImageMonocular(im,mask,timestamp);
unique_lock<mutex> lock2(mMutexState);
mTrackingState = mpTracker->mState;
mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
return Tcw;
}
The overall flow again matches the stereo case; the only difference is that the monocular entry point is used.
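A minimal monocular driver loop; vstrImageFilenames, vstrMaskFilenames and vTimestamps are hypothetical containers filled by a dataset loader:

for(size_t ni = 0; ni < vstrImageFilenames.size(); ni++)
{
    cv::Mat im   = cv::imread(vstrImageFilenames[ni], cv::IMREAD_UNCHANGED);
    cv::Mat mask = cv::imread(vstrMaskFilenames[ni],  cv::IMREAD_GRAYSCALE);
    cv::Mat Tcw  = SLAM.TrackMonocular(im, mask, vTimestamps[ni]);
    if(Tcw.empty())
        cerr << "tracking lost at frame " << ni << endl; // no pose for this frame
}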
The remaining functions comprise the helpers called by the four entry points above, plus the save and shutdown functions; they are explained in the code below.
// Request localization-only mode
void System::ActivateLocalizationMode()
{
unique_lock<mutex> lock(mMutexMode);
mbActivateLocalizationMode = true;
}
// Request leaving localization-only mode
void System::DeactivateLocalizationMode()
{
unique_lock<mutex> lock(mMutexMode);
mbDeactivateLocalizationMode = true;
}
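Both functions only latch a request flag under mMutexMode; the actual switch happens inside the next Track* call, in the "Check mode change" block shown earlier. A sketch of driving them from a control thread (the interval is arbitrary):

#include <thread>
#include <chrono>
#include "System.h"

// Temporarily switch to pure localization while the tracking loop keeps feeding frames.
void ToggleLocalization(ORB_SLAM2::System &SLAM)
{
    SLAM.ActivateLocalizationMode();                       // consumed by the next tracked frame
    std::this_thread::sleep_for(std::chrono::seconds(10)); // arbitrary interval (assumption)
    SLAM.DeactivateLocalizationMode();                     // LocalMapping is released again
}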
// Check whether the map has changed significantly since the last query
bool System::MapChanged()
{
static int n=0;
int curn = mpMap->GetLastBigChangeIdx();
if(n<curn)
{
n=curn;
return true;
}
else
return false;
}
// Request a system reset; the flag is consumed by the next Track* call
void System::Reset()
{
unique_lock<mutex> lock(mMutexReset);
mbReset = true;
}
// System shutdown function
void System::Shutdown()
{
// Request the Local Mapping and Loop Closing threads to finish
mpLocalMapper->RequestFinish();
mpLoopCloser->RequestFinish();
// If the viewer is running, ask it to finish and wait until it does
if(mpViewer)
{
mpViewer->RequestFinish();
while(!mpViewer->isFinished())
usleep(5000);
}
// Wait until all thread have effectively stopped
while(!mpLocalMapper->isFinished() || !mpLoopCloser->isFinished() || mpLoopCloser->isRunningGBA())
{
usleep(5000);
}
if(mpViewer)
pangolin::BindToContext("ORB-SLAM2: Map Viewer");
}
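Typical call order at the end of a run, mirroring the bundled example programs: stop all threads first, then write the trajectories (the output filenames are illustrative):

SLAM.Shutdown();                                          // join LocalMapping/LoopClosing/Viewer
SLAM.SaveTrajectoryTUM("CameraTrajectory.txt");           // per-frame trajectory (stereo/RGB-D only)
SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt"); // keyframe-only trajectory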
// Save the camera trajectory in TUM format to the given file
void System::SaveTrajectoryTUM(const string &filename)
{
cout << endl << "Saving camera trajectory to " << filename << " ..." << endl;
// Not available for monocular input; only runs when the sensor is stereo or RGB-D
if(mSensor==MONOCULAR)
{
cerr << "ERROR: SaveTrajectoryTUM cannot be used for monocular." << endl;
return;
}
// Get all keyframes from the map
vector<KeyFrame*> vpKFs = mpMap->GetAllKeyFrames();
// Sort the keyframes by creation order (id)
sort(vpKFs.begin(),vpKFs.end(),KeyFrame::lId);
// Transform all keyframes so that the first keyframe is at the origin.
// After a loop closure the first keyframe might not be at the origin.
// Get the transform that puts the first keyframe at the origin
cv::Mat Two = vpKFs[0]->GetPoseInverse();
// Prepare the output file
ofstream f;
f.open(filename.c_str());
f << fixed;
// Frame pose is stored relative to its reference keyframe (which is optimized by BA and pose graph).
// We need to get first the keyframe pose and then concatenate the relative transformation.
// Frames not localized (tracking failure) are not saved.
// For each frame we have a reference keyframe (lRit), the timestamp (lT) and a flag
// which is true when tracking failed (lbL).
// Frame poses were stored relative to their reference keyframes; recover the absolute poses here
// List of reference keyframes
list<ORB_SLAM2::KeyFrame*>::iterator lRit = mpTracker->mlpReferences.begin();
// List of timestamps, one per frame
list<double>::iterator lT = mpTracker->mlFrameTimes.begin();
// List of per-frame tracking-lost flags
list<bool>::iterator lbL = mpTracker->mlbLost.begin();
// For every relative frame pose lit in mlRelativeFramePoses
for(list<cv::Mat>::iterator lit=mpTracker->mlRelativeFramePoses.begin(),
lend=mpTracker->mlRelativeFramePoses.end();lit!=lend;lit++, lRit++, lT++, lbL++)
{
if(*lbL)
continue;
KeyFrame* pKF = *lRit;
cv::Mat Trw = cv::Mat::eye(4,4,CV_32F);
// If the reference keyframe was culled, traverse the spanning tree to get a suitable keyframe.
while(pKF->isBad())
{
Trw = Trw*pKF->mTcp;
pKF = pKF->GetParent();
}
Trw = Trw*pKF->GetPose()*Two;
cv::Mat Tcw = (*lit)*Trw;
cv::Mat Rwc = Tcw.rowRange(0,3).colRange(0,3).t();
cv::Mat twc = -Rwc*Tcw.rowRange(0,3).col(3);
vector<float> q = Converter::toQuaternion(Rwc);
f << setprecision(6) << *lT << " " << setprecision(9) << twc.at<float>(0) << " " << twc.at<float>(1) << " " << twc.at<float>(2) << " " << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << endl;
}
// Done: close the file and report
f.close();
cout << endl << "trajectory saved!" << endl;
}
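The pose bookkeeping in the loop above can be summarized as follows (my notation: T_{ab} maps points from frame b to frame a; o is the first keyframe, k the first non-culled reference keyframe, and the product accumulates the child-to-parent transforms mTcp along the culled chain):

T_{rw} = \Big(\prod_i T_{c_i p_i}\Big)\, T_{kw}\, T_{wo}, \qquad
T_{cw} = T_{cr}\, T_{rw}, \qquad
R_{wc} = R_{cw}^{\top}, \qquad
\mathbf{t}_{wc} = -R_{cw}^{\top}\,\mathbf{t}_{cw}

Each output line then follows the TUM format `timestamp tx ty tz qx qy qz qw`, with the quaternion converted from R_{wc}.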
// Save the keyframe trajectory in TUM format
void System::SaveKeyFrameTrajectoryTUM(const string &filename)
{
cout << endl << "Saving keyframe trajectory to " << filename << " ..." << endl;
vector<KeyFrame*> vpKFs = mpMap->GetAllKeyFrames();
sort(vpKFs.begin(),vpKFs.end(),KeyFrame::lId);
// Transform all keyframes so that the first keyframe is at the origin.
// After a loop closure the first keyframe might not be at the origin.
//cv::Mat Two = vpKFs[0]->GetPoseInverse();
ofstream f;
f.open(filename.c_str());
f << fixed;
for(size_t i=0; i<vpKFs.size(); i++)
{
KeyFrame* pKF = vpKFs[i];
// pKF->SetPose(pKF->GetPose()*Two);
if(pKF->isBad())
continue;
cv::Mat R = pKF->GetRotation().t();
vector<float> q = Converter::toQuaternion(R);
cv::Mat t = pKF->GetCameraCenter();
f << setprecision(6) << pKF->mTimeStamp << setprecision(7) << " " << t.at<float>(0) << " " << t.at<float>(1) << " " << t.at<float>(2)
<< " " << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << endl;
}
f.close();
cout << endl << "trajectory saved!" << endl;
}
/*
void System::SaveDetectedLoops(const string &filename)
{
cout << endl << "Saving detected loops to " << filename << " ..." << endl;
ofstream f;
f.open(filename.c_str());
f << fixed;
vector<pair<KeyFrame*,KeyFrame*> > mvpMatchedLoop = mpLoopCloser->mvpMatchedLoop;
for(size_t i=0; i<mvpMatchedLoop.size(); i++)
{
KeyFrame* pCurrentKF = mvpMatchedLoop[i].first;
KeyFrame* pMatchedKF = mvpMatchedLoop[i].second;
if(pCurrentKF->isBad() || pMatchedKF->isBad())
continue;
f << setprecision(6) << pCurrentKF->mTimeStamp << setprecision(6) << " " << pMatchedKF->mTimeStamp << endl;
}
f.close();
cout << endl << "detected loops saved!" << endl;
}
*/
// Save the camera trajectory in KITTI format to the given file
void System::SaveTrajectoryKITTI(const string &filename)
{
cout << endl << "Saving camera trajectory to " << filename << " ..." << endl;
if(mSensor==MONOCULAR)
{
cerr << "ERROR: SaveTrajectoryKITTI cannot be used for monocular." << endl;
return;
}
vector<KeyFrame*> vpKFs = mpMap->GetAllKeyFrames();
sort(vpKFs.begin(),vpKFs.end(),KeyFrame::lId);
// Transform all keyframes so that the first keyframe is at the origin.
// After a loop closure the first keyframe might not be at the origin.
cv::Mat Two = vpKFs[0]->GetPoseInverse();
ofstream f;
f.open(filename.c_str());
f << fixed;
// Frame pose is stored relative to its reference keyframe (which is optimized by BA and pose graph).
// We need to get first the keyframe pose and then concatenate the relative transformation.
// Frames not localized (tracking failure) are not saved.
// For each frame we have a reference keyframe (lRit), the timestamp (lT) and a flag
// which is true when tracking failed (lbL).
list<ORB_SLAM2::KeyFrame*>::iterator lRit = mpTracker->mlpReferences.begin();
list<double>::iterator lT = mpTracker->mlFrameTimes.begin();
for(list<cv::Mat>::iterator lit=mpTracker->mlRelativeFramePoses.begin(), lend=mpTracker->mlRelativeFramePoses.end();lit!=lend;lit++, lRit++, lT++)
{
ORB_SLAM2::KeyFrame* pKF = *lRit;
cv::Mat Trw = cv::Mat::eye(4,4,CV_32F);
while(pKF->isBad())
{
// cout << "bad parent" << endl;
Trw = Trw*pKF->mTcp;
pKF = pKF->GetParent();
}
Trw = Trw*pKF->GetPose()*Two;
cv::Mat Tcw = (*lit)*Trw;
cv::Mat Rwc = Tcw.rowRange(0,3).colRange(0,3).t();
cv::Mat twc = -Rwc*Tcw.rowRange(0,3).col(3);
f << setprecision(9) << Rwc.at<float>(0,0) << " " << Rwc.at<float>(0,1) << " " << Rwc.at<float>(0,2) << " " << twc.at<float>(0) << " " <<
Rwc.at<float>(1,0) << " " << Rwc.at<float>(1,1) << " " << Rwc.at<float>(1,2) << " " << twc.at<float>(1) << " " <<
Rwc.at<float>(2,0) << " " << Rwc.at<float>(2,1) << " " << Rwc.at<float>(2,2) << " " << twc.at<float>(2) << endl;
}
f.close();
cout << endl << "trajectory saved!" << endl;
}
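Unlike the TUM format, each KITTI line flattens the camera-to-world transform [R_{wc} | t_{wc}] row by row into twelve values, which is exactly what the three chained stream insertions above produce:

\begin{pmatrix} r_{11} & r_{12} & r_{13} & t_x \\ r_{21} & r_{22} & r_{23} & t_y \\ r_{31} & r_{32} & r_{33} & t_z \end{pmatrix}
\;\longrightarrow\;
r_{11}\; r_{12}\; r_{13}\; t_x\; r_{21}\; r_{22}\; r_{23}\; t_y\; r_{31}\; r_{32}\; r_{33}\; t_z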
// Get the tracking state
int System::GetTrackingState()
{
unique_lock<mutex> lock(mMutexState);
return mTrackingState;
}
// Get the tracked map points (in fact a vector of pointers)
vector<MapPoint*> System::GetTrackedMapPoints()
{
unique_lock<mutex> lock(mMutexState);
return mTrackedMapPoints;
}
// Get the undistorted keypoints tracked in the current frame
vector<cv::KeyPoint> System::GetTrackedKeyPointsUn()
{
unique_lock<mutex> lock(mMutexState);
return mTrackedKeyPointsUn;
}
} //namespace ORB_SLAM