Displaying the keypoints and feature matches extracted by SiftGPU with OpenCV mainly involves the following steps: read the input images, run SiftGPU to extract keypoints and descriptors, copy the SiftGPU keypoints and descriptors into OpenCV's KeyPoint and Mat types, match the descriptors with SiftMatchGPU, convert the resulting matches into OpenCV's DMatch type, and finally draw everything with drawKeypoints and drawMatches.
The complete code is as follows:
The code reads the paths of the images to be processed from image.txt; fill in this file according to the images you actually want to process.
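For example, image.txt may simply list one image path per line (the file names below are only placeholders):
1.jpg
2.jpg
3.jpg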
#include <cstdio>
#include <cstdlib>
#include <cmath>
#include <iostream>
#include <fstream>
#include <string>
#include <vector>
#include <map>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/features2d/features2d.hpp>
#include "compare.h"
using namespace std;
using namespace cv;
#if !defined(SIFTGPU_STATIC) && !defined(SIFTGPU_DLL_RUNTIME)
// SIFTGPU_STATIC comes from compiler
#define SIFTGPU_DLL_RUNTIME
// Load at runtime if the above macro defined
// comment the macro above to use static linking
#endif
#ifdef _WIN32
#ifdef SIFTGPU_DLL_RUNTIME
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#define FREE_MYLIB FreeLibrary
#define GET_MYPROC GetProcAddress
#else
//define this to get dll import definition for win32
#define SIFTGPU_DLL
#ifdef _DEBUG
#pragma comment(lib, "../lib/SIFTGPU.lib")
#else
#pragma comment(lib, "../lib/SIFTGPU.lib")
#endif
#endif
#else
#ifdef SIFTGPU_DLL_RUNTIME
#include <dlfcn.h>
#define FREE_MYLIB dlclose
#define GET_MYPROC dlsym
#endif
#endif
#include "SiftGPU.h" //adjust this include path to where SiftGPU.h is located in your project
int main(int argc, char** argv)
{
#ifdef SIFTGPU_DLL_RUNTIME
#ifdef _WIN32
#ifdef _DEBUG
HMODULE hsiftgpu = LoadLibrary("SiftGPU_d.dll");
#else
HMODULE hsiftgpu = LoadLibrary("siftgpu.dll");
#endif
#else
void * hsiftgpu = dlopen("libsiftgpu.so", RTLD_LAZY);
#endif
if (hsiftgpu == NULL) {
#ifdef _WIN32
DWORD error_id = GetLastError();
cout << "error loading SIFTGPU.dll" << endl;
cout << "error id is " << error_id << endl;
system("pause");
#else
cout << "error loading libsiftgpu.so: " << dlerror() << endl;
#endif
return 0;
}
#endif
//declare and initialize SiftGPU
SiftGPU sift;
char* myargv[4] = { "-fo", "-1", "-v", "1" };
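//"-fo -1" starts the pyramid at octave -1 (i.e. the input image is upsampled first); "-v 1" sets the verbosity level to 1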
sift.ParseParam(4, myargv);
//check whether the hardware supports SiftGPU
int support = sift.CreateContextGL();
if (support != SiftGPU::SIFTGPU_FULL_SUPPORTED)
{
cerr << "SiftGPU is not supported!" << endl;
return 2;
}
//read the image names listed in the txt file
vector<string> imageNames;
string fileName;
int imgCount = 0;
ifstream fin("image.txt");
while (getline(fin, fileName)){
imgCount++;
imageNames.push_back(fileName);
}
if (imgCount == 0){
cerr << "cannot get image files" << endl;
return 1;
}
else{
cout << "Read In " << imgCount << " Pictures" << endl;
}
//read the images with OpenCV
vector<Mat> images;
for (int i = 0; i < imgCount; ++i){
Mat temp = imread(imageNames[i]);
images.push_back(temp);
}
cout << "OpenCV Read in " << images.size() << " images" << endl;
//let SiftGPU read and process each image
vector<int> num;
vector<vector<float>> descriptors;
vector<vector<SiftGPU::SiftKeypoint>> keys;
for (int i = 0; i < imgCount; ++i){
const char* temp = imageNames[i].data();
sift.RunSIFT(temp);
int _num = sift.GetFeatureNum();
num.push_back(_num);
cout << "Keypoints Number of " << imageNames[i] << " is= " << _num << endl << endl;
vector<SiftGPU::SiftKeypoint> _keys;
vector<float> _descriptors;
_keys.resize(_num);
_descriptors.resize(128*_num);
sift.GetFeatureVector(&_keys[0], &_descriptors[0]);
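//GetFeatureVector copies the keypoints and their 128-dimensional float descriptors from the GPU into the two buffers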
keys.push_back(_keys);
descriptors.push_back(_descriptors);
}
//copy the SiftGPU keypoints into OpenCV's KeyPoint type
vector<vector<KeyPoint>> cvKeypoints;
for (int i = 0; i < imgCount; ++i){
vector<KeyPoint> cvKeys;
for (int j = 0; j < keys[i].size(); ++j){
KeyPoint cvTempKey;
cvTempKey.pt.x = keys[i][j].x;
cvTempKey.pt.y = keys[i][j].y;
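//(optional sketch) SiftGPU::SiftKeypoint also stores the scale s and orientation o (in radians);
//they could be copied too if rich keypoint drawing is wanted, e.g.:
//  cvTempKey.size = keys[i][j].s;
//  cvTempKey.angle = keys[i][j].o * 180.0f / (float)CV_PI;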
cvKeys.push_back(cvTempKey);
}
cvKeypoints.push_back(cvKeys);
}
//verify that the keypoint count of image i is the same in both containers
for (int i = 0; i < imgCount; ++i){
if (cvKeypoints[i].size() != keys[i].size())
cerr << "Image " << i << "'s Keypoints Number isn't correct." << endl;
}
//display the keypoints through the OpenCV interface
for (int i = 0; i < imgCount; ++i){
Mat featureImage;
drawKeypoints(images[i], cvKeypoints[i], featureImage, Scalar(255, 255, 255), DrawMatchesFlags::DEFAULT);
namedWindow("Sift Keypoints",WINDOW_NORMAL);
imshow("Sift Keypoints", featureImage);
waitKey(0);
}
//copy the SiftGPU descriptors into OpenCV's descriptor type
vector<Mat> cvDescriptors; //each Mat holds one image's descriptors as a num x 128 matrix; filled by push_back below
for (int inum = 0; inum < imgCount; ++inum){
//_des: the SiftGPU descriptors of image inum
vector<vector<float>> _des;
for (int sj = 0; sj < num[inum]; ++sj){
vector<float> temp(128, 0.0);
for (int sk = 0; sk < 128; ++sk){
temp[sk] = descriptors[inum][sk + 128 * sj];
}
_des.push_back(temp);
}
//_cvDes: the OpenCV descriptor matrix of image inum
Mat _cvDes(num[inum], 128, CV_32F);
for (int cj = 0; cj < num[inum]; ++cj){
float* pxDesMat = _cvDes.ptr<float>(cj);
for (int ck = 0; ck < 128; ++ck){
pxDesMat[ck] = _des[cj][ck];
}
}
//print the extrema of _des
//float smax = _des[0][0];
//float smin = _des[0][0];
//for (int simm = 0; simm < _des.size(); ++simm){
// for (int sjmm = 0; sjmm < 128; ++sjmm){
// if (smax < _des[simm][sjmm])
// smax = _des[simm][sjmm];
// if (smin > _des[simm][sjmm])
// smin = _des[simm][sjmm];
// }
//}
//cout << "SiftGPU Descriptors Max= " << smax << " , Min=" << smin << endl;
//print the extrema of _cvDes
//float cmax = _cvDes.at<float>(0, 0);
//float cmin = _cvDes.at<float>(0, 0);
//for (int cimm = 0; cimm < _cvDes.rows; ++cimm){
// for (int cjmm = 0; cjmm < 128; ++cjmm){
// if (cmax < _cvDes.at<float>(cimm, cjmm))
// cmax = _cvDes.at<float>(cimm, cjmm);
// if (cmin > _cvDes.at<float>(cimm, cjmm))
// cmin = _cvDes.at<float>(cimm, cjmm);
// }
//}
//cout << "OpenCV Descriptors Max= " << cmax << " , Min=" << cmin << endl;
cvDescriptors.push_back(_cvDes);
}
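//(optional sketch) cvDescriptors is not used further in this program; the matching below could
//also be done on the CPU with OpenCV's BFMatcher on these Mats, roughly like this:
//  BFMatcher bfMatcher(NORM_L2);
//  vector<DMatch> cvMatches;
//  bfMatcher.match(cvDescriptors[0], cvDescriptors[1], cvMatches);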
map<pair<int, int>, vector<DMatch>> matches_matrix;
map<pair<int, int>, int> matches_num;
SiftMatchGPU matcher;
matcher.VerifyContextGL();
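//VerifyContextGL() must be called once before uploading descriptors and matching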
for (int i = 0; i < imgCount - 1; ++i){
for (int j = i + 1; j < imgCount; ++j){
matcher.SetDescriptors(0, num[i], &descriptors[i][0]);
matcher.SetDescriptors(1, num[j], &descriptors[j][0]);
int(*match_buf)[2] = new int[num[i]][2];
int num_match = matcher.GetSiftMatch(num[i], match_buf);
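//each row of match_buf is one match: [0] is the feature index in image i, [1] is the feature index in image j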
matches_num[make_pair(i, j)] = num_match;
cout << "Matches between Image" << i << " and Image" << j << " are " << num_match << endl;
//build OpenCV DMatch objects and fill in queryIdx and trainIdx
vector<DMatch> matches_tmp;
for (int mk = 0; mk < num_match; ++mk){
DMatch match;
match.queryIdx = match_buf[mk][0];
match.trainIdx = match_buf[mk][1];
Point2f p1, p2;
p1.x = cvKeypoints[i][match.queryIdx].pt.x;
p1.y = cvKeypoints[i][match.queryIdx].pt.y;
p2.x = cvKeypoints[j][match.trainIdx].pt.x;
p2.y = cvKeypoints[j][match.trainIdx].pt.y;
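//note: DMatch::distance is filled with the pixel distance between the two matched keypoints here, not the descriptor distance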
float dist;
dist = (float)sqrtf(((p1.x - p2.x)*(p1.x - p2.x) + (p1.y - p2.y)*(p1.y - p2.y)));
match.distance = dist;
//cout << "Image" << i << " & Image" << j << " Match " << mk << ": " << endl;
//cout << "Idx: queryIdx is= " << match.queryIdx << " , trainIdx is= " << match.trainIdx << endl;
//cout << " Keypoint1: " << p1.x << " " << p1.y << endl;
//cout << " Keypoint2: " << p2.x << " " << p2.y << endl;
//cout << " Distance: " << match.distance << endl;
matches_tmp.push_back(match);
}
matches_matrix[make_pair(i, j)] = matches_tmp;
delete[] match_buf;
//display the matches with OpenCV
Mat cvImgMatches;
drawMatches(images[i], cvKeypoints[i], images[j], cvKeypoints[j], matches_tmp, cvImgMatches);
char showName[100];
sprintf(showName, "%s%d%s%d", "Matches between ", i, " and ", j);
namedWindow(showName, WINDOW_NORMAL);
imshow(showName, cvImgMatches);
waitKey(0);
}
}
system("pause");
return 0;
}
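On Linux the program can be built along these lines (the source file name, paths, and linked libraries below are assumptions; adjust them to your own OpenCV and SiftGPU installation):
g++ -std=c++11 main.cpp -o siftgpu_demo -I/path/to/SiftGPU/src/SiftGPU -L/path/to/SiftGPU/bin -lsiftgpu `pkg-config --cflags --libs opencv` -ldl -lGL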