Let's start with the famous ORB-SLAM system diagram; with this figure in mind the whole pipeline is easy to string together.
ORB-SLAM runs three threads: Tracking, Local Mapping, and Loop Closing (a minimal sketch of this thread layout follows the list).
(1) Tracking: runs in the main thread. Input is the video stream, output is the camera pose, and the local map is tracked along the way. It extracts ORB features, estimates the pose from the previous frame or performs global relocalization, then tracks the local map to refine the pose and decides whether to insert a new keyframe.
(2) Local Mapping: maintains and optimizes the local map. Input is keyframes, output is refined keyframes and map points. It inserts keyframes, creates new map points, runs local BA, and culls redundant keyframes.
(3) Loop Closing: loop detection and loop correction. Loop candidates are detected with BoW, the similarity transform is computed with Sim3, and the loop is fused and corrected by optimizing the Essential Graph.
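To make the thread layout concrete, here is a minimal sketch (illustration only, not ORB-SLAM2 code): tracking runs in the thread that calls the system, while mapping and loop closing run as std::thread workers that are stopped on shutdown. This is exactly the structure System sets up below.

#include <atomic>
#include <thread>

// Hypothetical mini-system, just to show the three-thread layout.
struct MiniSlam
{
    std::atomic<bool> mbFinish{false};
    std::thread mtMapper, mtCloser;

    MiniSlam()
    {
        mtMapper = std::thread([this]{ while(!mbFinish) { /* local BA, keyframe culling */ } });
        mtCloser = std::thread([this]{ while(!mbFinish) { /* BoW query, Sim3, Essential Graph optimization */ } });
    }
    void TrackFrame() { /* runs in the caller's thread: feature extraction, pose estimation */ }
    void Shutdown()   { mbFinish = true; mtMapper.join(); mtCloser.join(); }
};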
#ifndef SYSTEM_H
#define SYSTEM_H
#include <string>
#include <thread>
#include <opencv2/core/core.hpp>
#include "Tracking.h"
#include "FrameDrawer.h"
#include "MapDrawer.h"
#include "Map.h"
#include "LocalMapping.h"
#include "LoopClosing.h"
#include "KeyFrameDatabase.h"
#include "ORBVocabulary.h"
#include "Viewer.h"
namespace ORB_SLAM2
{
class Viewer;
class FrameDrawer;
class Map;
class Tracking;
class LocalMapping;
class LoopClosing;
class System
{
public:
// The three supported sensor types
enum eSensor{
MONOCULAR=0,
STEREO=1,
RGBD=2
};
// Member functions
public:
// Constructor: initializes the system and launches the Local Mapping, Loop Closing and Viewer threads
System(const string &strVocFile, const string &strSettingsFile, const eSensor sensor, const bool bUseViewer = true);
// Track* functions: process a new frame and return the camera pose (empty cv::Mat if tracking fails)
cv::Mat TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp);
cv::Mat TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const double &timestamp);
cv::Mat TrackMonocular(const cv::Mat &im, const double &timestamp);
// Enable localization-only mode: Local Mapping is stopped and only camera tracking is performed
void ActivateLocalizationMode();
// Disable localization-only mode: Local Mapping resumes and full SLAM continues
void DeactivateLocalizationMode();
// Clear the map and restart the system
void Reset();
// Shut down all threads; call this before saving any trajectory
void Shutdown();
// Save the camera trajectory in the TUM format. Only for stereo and RGB-D (not monocular).
// Call Shutdown() first.
// See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
void SaveTrajectoryTUM(const string &filename);
// Save the camera trajectory in the KITTI format. Only for stereo and RGB-D (not monocular).
// See format details at: http://www.cvlibs.net/datasets/kitti/eval_odometry.php
void SaveTrajectoryKITTI(const string &filename);
// Save the keyframe poses in the TUM format (works for all sensors, including monocular)
// Call Shutdown() first.
// See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
void SaveKeyFrameTrajectoryTUM(const string &filename);
// Member variables
private:
// Input sensor
eSensor mSensor;
// ORB vocabulary used for place recognition and feature matching
ORBVocabulary* mpVocabulary;
// KeyFrame database for place recognition (relocalization and loop detection)
KeyFrameDatabase* mpKeyFrameDatabase;
// Map structure that stores pointers to all keyframes and map points
Map* mpMap;
// Tracker: receives a frame and computes the camera pose.
// It also decides when to insert a new keyframe, creates new map points,
// and performs relocalization if tracking is lost.
Tracking* mpTracker;
// Local Mapper: manages the local map and performs local bundle adjustment (BA)
LocalMapping* mpLocalMapper;
// Loop Closer: searches for loops with every new keyframe and corrects them when found
LoopClosing* mpLoopCloser;
// Viewer: draws the map and the current camera pose using Pangolin
Viewer* mpViewer;
// Drawers used by the Viewer to render the current frame and the map
FrameDrawer* mpFrameDrawer;
MapDrawer* mpMapDrawer;
// The three spawned threads: Local Mapping, Loop Closing, Viewer.
// Tracking runs in the thread that created the System object (the main thread).
std::thread* mptLocalMapping;
std::thread* mptLoopClosing;
std::thread* mptViewer;
// Reset flag, protected by mMutexReset; checked at the start of every Track* call
std::mutex mMutexReset;
bool mbReset;
// Mode-change flags (localization-only mode on/off), protected by mMutexMode
std::mutex mMutexMode;
bool mbActivateLocalizationMode;
bool mbDeactivateLocalizationMode;
};
}// namespace ORB_SLAM
#endif // SYSTEM_H
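The header above is the whole public interface. Before moving on to System.cc, here is a short usage sketch of the localization-only switch; the System instance, file names and frame variables are placeholders, not part of the header.

// Hypothetical usage of the localization-only switch (names and paths are placeholders).
ORB_SLAM2::System SLAM("ORBvoc.txt", "camera.yaml", ORB_SLAM2::System::MONOCULAR, true);
SLAM.ActivateLocalizationMode();                      // pause Local Mapping, track against the existing map
cv::Mat Tcw = SLAM.TrackMonocular(frame, timestamp);  // pose only, no new keyframes or map points
SLAM.DeactivateLocalizationMode();                    // resume full SLAM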
#include "System.h"
#include "Converter.h"
#include <thread>
#include <pangolin/pangolin.h>
#include <chrono>   // std::chrono::milliseconds
#include <iostream> // std::cout, std::fixed
#include <iomanip>  // std::setprecision
bool has_suffix(const std::string &str, const std::string &suffix) {
std::size_t index = str.find(suffix, str.size() - suffix.size());
return (index != std::string::npos);
}
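has_suffix is a small free helper that checks whether str ends with suffix (if str is shorter than suffix, find starts past the end and safely returns npos). It is used in the constructor to choose between loadFromTextFile and the binary loader, which appears to be a community patch rather than part of stock DBoW2.

// Quick illustration of the helper (values shown as comments, not executed here):
//   has_suffix("ORBvoc.bin", ".bin")  -> true   (picks the binary loader below)
//   has_suffix("ORBvoc.txt", ".bin")  -> false  (falls through to the .txt branch)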
namespace ORB_SLAM2
{
// Constructor (members set via the initializer list): vocabulary file, settings file, sensor type, whether to launch the Viewer
System::System(const string &strVocFile, const string &strSettingsFile, const eSensor sensor,
const bool bUseViewer):mSensor(sensor),mbReset(false),mbActivateLocalizationMode(false),
mbDeactivateLocalizationMode(false)
{
// Output welcome message
cout << endl <<
"ORB-SLAM2 Copyright (C) 2014-2016 Raul Mur-Artal, University of Zaragoza." << endl <<
"This program comes with ABSOLUTELY NO WARRANTY;" << endl <<
"This is free software, and you are welcome to redistribute it" << endl <<
"under certain conditions. See LICENSE.txt." << endl << endl;
cout << "Input sensor was set to: ";
if(mSensor==MONOCULAR)
cout << "Monocular" << endl;
else if(mSensor==STEREO)
cout << "Stereo" << endl;
else if(mSensor==RGBD)
cout << "RGB-D" << endl;
//1. Read the settings file (XXX.yaml): camera intrinsics, frame rate, baseline, depth threshold, etc.
cv::FileStorage fsSettings(strSettingsFile.c_str(), cv::FileStorage::READ);
if(!fsSettings.isOpened())
{
cerr << "Failed to open settings file at: " << strSettingsFile << endl;
exit(-1);
}
//2. Load the ORB vocabulary (.txt or .bin)
cout << endl << "Loading ORB Vocabulary. This could take a while..." << endl;
mpVocabulary = new ORBVocabulary();// ORBVocabulary is a typedef in ORBVocabulary.h for the DBoW2 templated vocabulary over ORB descriptors
bool bVocLoad = false; // choose the loader based on the file extension
if (has_suffix(strVocFile, ".txt"))
bVocLoad = mpVocabulary->loadFromTextFile(strVocFile);
else if(has_suffix(strVocFile, ".bin"))
bVocLoad = mpVocabulary->loadFromBinaryFile(strVocFile);
else
bVocLoad = false;
if(!bVocLoad)
{
cerr << "Wrong path to vocabulary. " << endl;
cerr << "Failed to open at: " << strVocFile << endl;
exit(-1);
}
cout << "Vocabulary loaded!" << endl << endl;
//3. Create the KeyFrame database (used for relocalization and loop detection)
mpKeyFrameDatabase = new KeyFrameDatabase(*mpVocabulary);
//4. Create the Map
mpMap = new Map();
//Create Drawers.
mpFrameDrawer = new FrameDrawer(mpMap);
mpMapDrawer = new MapDrawer(mpMap, strSettingsFile);
//5.1 Initialize Tracking
//(it will live in the main thread of execution, the one that called this constructor)
mpTracker = new Tracking(this, mpVocabulary, mpFrameDrawer, mpMapDrawer,
mpMap, mpKeyFrameDatabase, strSettingsFile, mSensor);
//5.2 Initialize Local Mapping and launch its thread
mpLocalMapper = new LocalMapping(mpMap, mSensor==MONOCULAR);
mptLocalMapping = new thread(&ORB_SLAM2::LocalMapping::Run,mpLocalMapper);
//5.3 Initialize Loop Closing and launch its thread
mpLoopCloser = new LoopClosing(mpMap, mpKeyFrameDatabase, mpVocabulary, mSensor!=MONOCULAR);
mptLoopClosing = new thread(&ORB_SLAM2::LoopClosing::Run, mpLoopCloser);
//5.4 Initialize the Viewer; its thread is launched only if bUseViewer is true
mpViewer = new Viewer(this, mpFrameDrawer,mpMapDrawer,mpTracker,strSettingsFile);
if(bUseViewer)
mptViewer = new thread(&Viewer::Run, mpViewer);
mpTracker->SetViewer(mpViewer);
//5.5 Wire the objects together: each one holds pointers to the others so the threads can exchange data and signals
mpTracker->SetLocalMapper(mpLocalMapper);
mpTracker->SetLoopClosing(mpLoopCloser);
mpLocalMapper->SetTracker(mpTracker);
mpLocalMapper->SetLoopCloser(mpLoopCloser);
mpLoopCloser->SetTracker(mpTracker);
mpLoopCloser->SetLocalMapper(mpLocalMapper);
}
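Note the thread-launching pattern used for Local Mapping, Loop Closing and the Viewer: std::thread is given a member-function pointer plus the object to run it on. A stripped-down illustration (MyWorker is hypothetical, not an ORB-SLAM2 class):

#include <thread>

// Hypothetical worker, just to show the &Class::Method + object-pointer form used above.
struct MyWorker
{
    void Run() { /* worker loop: process items until asked to finish */ }
};

int main()
{
    MyWorker worker;
    std::thread t(&MyWorker::Run, &worker); // same shape as new thread(&LocalMapping::Run, mpLocalMapper)
    t.join();                               // System keeps its thread objects as raw pointers and never joins them
    return 0;
}

Also worth noting: mpViewer is constructed unconditionally, but its thread is only started when bUseViewer is true.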
cv::Mat System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp)
{
if(mSensor!=STEREO)
{
cerr << "ERROR: you called TrackStereo but input sensor was not set to STEREO." << endl;
exit(-1);
}
// Check mode change
{
unique_lock<mutex> lock(mMutexMode);
if(mbActivateLocalizationMode)
{
mpLocalMapper->RequestStop();
// Wait until Local Mapping has effectively stopped
while(!mpLocalMapper->isStopped())
{
//usleep(1000);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
mpTracker->InformOnlyTracking(true);// localization-only: track but do not map
mbActivateLocalizationMode = false;
}
if(mbDeactivateLocalizationMode)
{
mpTracker->InformOnlyTracking(false);
mpLocalMapper->Release();
mbDeactivateLocalizationMode = false;
}
}
// Check reset
{
unique_lock<mutex> lock(mMutexReset);
if(mbReset)
{
mpTracker->Reset();
mbReset = false;
}
}
return mpTracker->GrabImageStereo(imLeft,imRight,timestamp);
}
cv::Mat System::TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const double &timestamp)
{
if(mSensor!=RGBD)
{
cerr << "ERROR: you called TrackRGBD but input sensor was not set to RGBD." << endl;
exit(-1);
}
// Check mode change
{
unique_lock<mutex> lock(mMutexMode);
if(mbActivateLocalizationMode)
{
mpLocalMapper->RequestStop();
// Wait until Local Mapping has effectively stopped
while(!mpLocalMapper->isStopped())
{
//usleep(1000);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
mpTracker->InformOnlyTracking(true);// localization-only: track but do not map
mbActivateLocalizationMode = false;
}
if(mbDeactivateLocalizationMode)
{
mpTracker->InformOnlyTracking(false);
mpLocalMapper->Release();
mbDeactivateLocalizationMode = false;
}
}
// Check reset
{
unique_lock<mutex> lock(mMutexReset);
if(mbReset)
{
mpTracker->Reset();
mbReset = false;
}
}
return mpTracker->GrabImageRGBD(im,depthmap,timestamp);
}
// Monocular tracking entry point
cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp)
{
if(mSensor!=MONOCULAR)
{
cerr << "ERROR: you called TrackMonocular but input sensor was not set to Monocular." << endl;
exit(-1);
}
// Check mode change
{
unique_lock<mutex> lock(mMutexMode);
// If localization-only mode was requested, stop Local Mapping and wait (sleeping 1 ms per iteration) until it has actually stopped
if(mbActivateLocalizationMode)
{
mpLocalMapper->RequestStop();
// Wait until Local Mapping has effectively stopped
while(!mpLocalMapper->isStopped())
{
//usleep(1000);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
mpTracker->InformOnlyTracking(true);// localization-only: track but do not map
mbActivateLocalizationMode = false;// clear the flag so the switch is not repeated
}
// If localization-only mode was deactivated, resume mapping
if(mbDeactivateLocalizationMode)
{
mpTracker->InformOnlyTracking(false);
mpLocalMapper->Release();
mbDeactivateLocalizationMode = false;// clear the flag so the switch is not repeated
}
}
// Check reset
{
unique_lock<mutex> lock(mMutexReset);
if(mbReset)
{
mpTracker->Reset();
mbReset = false;
}
}
// GrabImageMonocular is a member of Tracking (implemented in Tracking.cc): it converts the image to grayscale, builds a Frame, and runs the tracking pipeline
return mpTracker->GrabImageMonocular(im,timestamp);
}
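All three Track* functions begin with the same two guarded blocks: ActivateLocalizationMode(), DeactivateLocalizationMode() and Reset() only raise flags under a mutex, and the tracking thread consumes those flags at the start of the next frame. A stripped-down sketch of that handshake (names are illustrative only):

#include <mutex>

// Minimal sketch of the flag-under-mutex handshake used by the Track* functions (hypothetical class).
struct ModeSwitch
{
    std::mutex mMutexMode;
    bool mbActivate = false;

    // Called from any thread, like System::ActivateLocalizationMode(): just raise the flag.
    void Activate()
    {
        std::unique_lock<std::mutex> lock(mMutexMode);
        mbActivate = true;
    }

    // Called by the tracking thread at the top of every Track* call: perform the switch once.
    void CheckModeChange()
    {
        std::unique_lock<std::mutex> lock(mMutexMode);
        if(mbActivate)
        {
            // ... stop Local Mapping, tell the tracker to do localization only ...
            mbActivate = false; // consume the request so it is not applied twice
        }
    }
};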
// Request localization-only mode (the actual switch happens in the next Track* call)
void System::ActivateLocalizationMode()
{
unique_lock<mutex> lock(mMutexMode);
mbActivateLocalizationMode = true;
}
// Request to leave localization-only mode
void System::DeactivateLocalizationMode()
{
// lock the mode mutex
unique_lock<mutex> lock(mMutexMode);
mbDeactivateLocalizationMode = true;
}
// Request a system reset (handled in the next Track* call)
void System::Reset()
{
unique_lock<mutex> lock(mMutexReset);
mbReset = true;
}
// Shut down the whole system
void System::Shutdown()
{
mpLocalMapper->RequestFinish();
mpLoopCloser->RequestFinish();
if(mpViewer)
{
mpViewer->RequestFinish();
while(!mpViewer->isFinished())
std::this_thread::sleep_for(std::chrono::milliseconds(5));
}
// Wait until all thread have effectively stopped
while(!mpLocalMapper->isFinished() || !mpLoopCloser->isFinished() || mpLoopCloser->isRunningGBA())
{
//usleep(5000);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
}
if(mpViewer)
pangolin::BindToContext("ORB-SLAM2: Map Viewer");
}
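Shutdown() only asks the worker threads to finish and then spins until Local Mapping, Loop Closing and any running global BA report that they are done; it does not join the std::thread objects. The practical consequence is the call order (assuming a System instance named SLAM as in the earlier sketches; the file name is a placeholder):

// Typical teardown order (illustration only):
SLAM.Shutdown();                                           // wait for Local Mapping / Loop Closing / Viewer to finish
SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");  // only now is the map and trajectory final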
// Save the full camera trajectory in the TUM format
void System::SaveTrajectoryTUM(const string &filename)
{
cout << endl << "Saving camera trajectory to " << filename << " ..." << endl;
if(mSensor==MONOCULAR)
{
cerr << "ERROR: SaveTrajectoryTUM cannot be used for monocular." << endl;
return;
}
// Get all keyframes, sort them by id, and compute the transform that puts the first keyframe at the origin
// (after a loop closure the first keyframe might not be at the origin)
vector<KeyFrame*> vpKFs = mpMap->GetAllKeyFrames();
sort(vpKFs.begin(),vpKFs.end(),KeyFrame::lId);
cv::Mat Two = vpKFs[0]->GetPoseInverse();
ofstream f;
f.open(filename.c_str());
f << fixed;
// Each frame pose is stored relative to its reference keyframe, so first recover the keyframe pose
// and then concatenate the relative transformation. For every frame we keep its reference keyframe,
// its timestamp, and a flag that is true if tracking was lost for that frame.
list<ORB_SLAM2::KeyFrame*>::iterator lRit = mpTracker->mlpReferences.begin();
list<double>::iterator lT = mpTracker->mlFrameTimes.begin();
list<bool>::iterator lbL = mpTracker->mlbLost.begin();
for(list<cv::Mat>::iterator lit=mpTracker->mlRelativeFramePoses.begin(),
lend=mpTracker->mlRelativeFramePoses.end();lit!=lend;lit++, lRit++, lT++, lbL++)
{
if(*lbL)
continue;
KeyFrame* pKF = *lRit;
cv::Mat Trw = cv::Mat::eye(4,4,CV_32F);
// If the reference keyframe was culled, traverse the spanning tree to reach a valid keyframe
while(pKF->isBad())
{
Trw = Trw*pKF->mTcp;
pKF = pKF->GetParent();
}
Trw = Trw*pKF->GetPose()*Two;
cv::Mat Tcw = (*lit)*Trw;
cv::Mat Rwc = Tcw.rowRange(0,3).colRange(0,3).t();
cv::Mat twc = -Rwc*Tcw.rowRange(0,3).col(3);
vector<float> q = Converter::toQuaternion(Rwc);
f << setprecision(6) << *lT << " " << setprecision(9) << twc.at<float>(0) << " " << twc.at<float>(1) << " " << twc.at<float>(2) << " " << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << endl;
}
f.close();
cout << endl << "trajectory saved!" << endl;
}
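The core of SaveTrajectoryTUM is the pose chain: each frame stores only its pose relative to its reference keyframe (the cv::Mat entries of mlRelativeFramePoses), so the absolute pose is rebuilt by concatenating transforms, walking up the spanning tree via mTcp when the reference keyframe was culled, and finally applying Two so the trajectory starts at the first keyframe. Written out (comments only, as a worked form of the loop above):

//   Trw = mTcp(1) * ... * mTcp(n) * T_refKF_world * Two   (the mTcp factors appear only if the reference KF is bad)
//   Tcw = Tcr * Trw                                       (Tcr = *lit, the stored frame-to-reference pose)
//   Rwc = Rcw^T,  twc = -Rcw^T * tcw                      (inverse pose: camera orientation/position in the world,
//                                                          which is what the TUM line stores as translation + quaternion)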
void System::SaveKeyFrameTrajectoryTUM(const string &filename)
{
cout << endl << "Saving keyframe trajectory to " << filename << " ..." << endl;
vector<KeyFrame*> vpKFs = mpMap->GetAllKeyFrames();
sort(vpKFs.begin(),vpKFs.end(),KeyFrame::lId);
// Transform all keyframes so that the first keyframe is at the origin.
// After a loop closure the first keyframe might not be at the origin.
//cv::Mat Two = vpKFs[0]->GetPoseInverse();
ofstream f;
f.open(filename.c_str());
f << fixed;
for(size_t i=0; i<vpKFs.size(); i++)
{
KeyFrame* pKF = vpKFs[i];
// pKF->SetPose(pKF->GetPose()*Two);
if(pKF->isBad())
continue;
cv::Mat R = pKF->GetRotation().t();
vector<float> q = Converter::toQuaternion(R);
cv::Mat t = pKF->GetCameraCenter();
f << setprecision(6) << pKF->mTimeStamp << setprecision(7) << " " << t.at<float>(0) << " " << t.at<float>(1) << " " << t.at<float>(2)
<< " " << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << endl;
}
f.close();
cout << endl << "trajectory saved!" << endl;
}
void System::SaveTrajectoryKITTI(const string &filename)
{
cout << endl << "Saving camera trajectory to " << filename << " ..." << endl;
if(mSensor==MONOCULAR)
{
cerr << "ERROR: SaveTrajectoryKITTI cannot be used for monocular." << endl;
return;
}
vector<KeyFrame*> vpKFs = mpMap->GetAllKeyFrames();
sort(vpKFs.begin(),vpKFs.end(),KeyFrame::lId);
// Transform all keyframes so that the first keyframe is at the origin.
// After a loop closure the first keyframe might not be at the origin.
cv::Mat Two = vpKFs[0]->GetPoseInverse();
ofstream f;
f.open(filename.c_str());
f << fixed;
// Frame pose is stored relative to its reference keyframe (which is optimized by BA and pose graph).
// We need to get first the keyframe pose and then concatenate the relative transformation.
// Frames not localized (tracking failure) are not saved.
// For each frame we have a reference keyframe (lRit), the timestamp (lT) and a flag
// which is true when tracking failed (lbL).
list<ORB_SLAM2::KeyFrame*>::iterator lRit = mpTracker->mlpReferences.begin();
list<double>::iterator lT = mpTracker->mlFrameTimes.begin();
for(list<cv::Mat>::iterator lit=mpTracker->mlRelativeFramePoses.begin(), lend=mpTracker->mlRelativeFramePoses.end();lit!=lend;lit++, lRit++, lT++)
{
ORB_SLAM2::KeyFrame* pKF = *lRit;
cv::Mat Trw = cv::Mat::eye(4,4,CV_32F);
while(pKF->isBad())
{
// cout << "bad parent" << endl;
Trw = Trw*pKF->mTcp;
pKF = pKF->GetParent();
}
Trw = Trw*pKF->GetPose()*Two;
cv::Mat Tcw = (*lit)*Trw;
cv::Mat Rwc = Tcw.rowRange(0,3).colRange(0,3).t();
cv::Mat twc = -Rwc*Tcw.rowRange(0,3).col(3);
f << setprecision(9) << Rwc.at<float>(0,0) << " " << Rwc.at<float>(0,1) << " " << Rwc.at<float>(0,2) << " " << twc.at<float>(0) << " " <<
Rwc.at<float>(1,0) << " " << Rwc.at<float>(1,1) << " " << Rwc.at<float>(1,2) << " " << twc.at<float>(1) << " " <<
Rwc.at<float>(2,0) << " " << Rwc.at<float>(2,1) << " " << Rwc.at<float>(2,2) << " " << twc.at<float>(2) << endl;
}
f.close();
cout << endl << "trajectory saved!" << endl;
}
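SaveTrajectoryKITTI writes, for every tracked frame, the 3x4 camera-to-world matrix [Rwc | twc] as twelve values on one line in row-major order, with no timestamp column, which is the layout the KITTI odometry benchmark expects:

// One line of the KITTI trajectory file (row-major 3x4 matrix [Rwc | twc]):
// r11 r12 r13 tx   r21 r22 r23 ty   r31 r32 r33 tz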
} //namespace ORB_SLAM
As we can see, during the instantiation of this object the constructor creates the following (a rough driver sketch using this API follows the list):
(1) the ORB vocabulary object
(2) the KeyFrame database
(3) the Map object
(4) the two drawers (FrameDrawer and MapDrawer) used for visualization
(5) the Tracking object, which runs in the calling thread
(6) the Local Mapping object, launched in its own thread
(7) the Loop Closing object, launched in its own thread
(8) the Viewer, launched in its own thread to display the current frame and the map points
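A rough driver that exercises this API, modelled on the programs under Examples/; the vocabulary/settings paths and the image-loading loop are placeholders:

#include <opencv2/core/core.hpp>
#include "System.h"

int main()
{
    // "ORBvoc.txt" and "camera.yaml" are placeholders for the real vocabulary and settings files.
    ORB_SLAM2::System SLAM("ORBvoc.txt", "camera.yaml", ORB_SLAM2::System::MONOCULAR, true);

    while(true)
    {
        cv::Mat im;        // load the next frame here (placeholder)
        double t = 0.0;    // ... and its timestamp
        if(im.empty())
            break;
        cv::Mat Tcw = SLAM.TrackMonocular(im, t);  // world-to-camera pose; empty when tracking is lost
    }

    SLAM.Shutdown();                                          // stop all threads first
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt"); // monocular: only the keyframe trajectory can be saved
    return 0;
}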