OpenCV 3.3 ORB

// detect keypoints and compute descriptors in one call
// (rgb is a struct/pointer holding the image, its keypoints, and its descriptor Mat)
Ptr<ORB> orb = ORB::create();
orb->detectAndCompute(rgb->rgb, Mat(), rgb->keypoints, rgb->descriptor);

Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce");

vector<DMatch> matches;
vector<DMatch> goodmatches;

    matcher->match(descriptor1, descriptor2, matches);
     cout << " matches=" <double min_dist = 100;
    double max_dist = 0;

    for(int i = 0; i < descriptor1.rows; i++)
    {
        double  dist = matches[i].distance;
        if(dist < min_dist)
            min_dist = dist;
        if(dist > max_dist)
            max_dist = dist;

    }

    for(int i = 0; i < matches.size(); i++)
    {
        if(matches[i].distance < 2 * min_dist)
        {
            goodmatches.push_back(matches[i]);
        }
    }
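
Since ORB produces binary descriptors (32 bytes, CV_8U), Hamming distance is the usual metric rather than the L2 norm used by "BruteForce". A minimal sketch of the Hamming variant, reusing descriptor1/descriptor2 from above:

Ptr<DescriptorMatcher> matcher_hamming = DescriptorMatcher::create("BruteForce-Hamming");
vector<DMatch> matches_hamming;
// DMatch::distance now holds the Hamming distance between the binary descriptors
matcher_hamming->match(descriptor1, descriptor2, matches_hamming);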

Another matching approach: kNN

vector< vector<DMatch> > matches_knn;
matcher->knnMatch( descriptor1, descriptor2, matches_knn, 2 );

        //vector< cv::DMatch > matches;
        for ( size_t i=0; i<matches_knn.size(); i++ )
        {
            if (matches_knn[i][0].distance < 0.8 * matches_knn[i][1].distance )
            goodmatches.push_back( matches_knn[i][0] );
        }
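
Either filtering strategy leaves goodmatches ready for geometry estimation. A minimal sketch of feeding them into RANSAC homography fitting; keypoints1/keypoints2 are assumed to be the keypoint vectors of the two images, and opencv2/calib3d.hpp is assumed to be included:

vector<Point2f> pts1, pts2;
for ( size_t i = 0; i < goodmatches.size(); i++ )
{
    // queryIdx indexes the first image's keypoints, trainIdx the second's
    pts1.push_back( keypoints1[goodmatches[i].queryIdx].pt );
    pts2.push_back( keypoints2[goodmatches[i].trainIdx].pt );
}
// RANSAC rejects the remaining outliers while estimating the homography
Mat H = findHomography( pts1, pts2, RANSAC, 3.0 );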

FLANN

#include <iostream>
#include <vector>
#include <string>
#include <algorithm>
#include "opencv2/core/core.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/flann/flann.hpp"

using namespace std;
using namespace cv;

void readme();
string type2str(int type);

int main( int argc, char** argv )
{


  Mat img_1 = imread("left3.jpg",0);
  Mat img_2 = imread( "right3.jpg",0 );

  if( !img_1.data || !img_2.data )
  { 
    cout<< " --(!) Error reading images " << endl; 
    return -1; 
  }

  //-- Step 1: Detect the keypoints using ORB Detector

  cv::Ptr<cv::ORB> orb = cv::ORB::create(50);

  std::vector<KeyPoint> keypoints_1, keypoints_2;

  orb->detect( img_1, keypoints_1 );
  orb->detect( img_2, keypoints_2 );
  //-- Step 2: Calculate descriptors (feature vectors)

  Mat descriptors_1, descriptors_2;

  // descriptor is a cv::Mat, with rows the same as nFeatures, and cols as 32 (8UC1)
  orb->compute( img_1, keypoints_1, descriptors_1 );
  orb->compute( img_2, keypoints_2, descriptors_2 );
  cout << type2str(descriptors_1.type()) << " " << descriptors_1.rows << "*" << descriptors_1.cols << endl;

  //-- Step 3: Matching descriptor vectors using FLANN matcher
  FlannBasedMatcher matcher;
  std::vector<DMatch> matches;

  // the descriptors passed to FlannBasedMatcher must have CV_32F elements
  if( descriptors_1.type()!=CV_32F ) 
  {
    descriptors_1.convertTo( descriptors_1, CV_32F );
    descriptors_2.convertTo( descriptors_2, CV_32F );
  }
  matcher.match( descriptors_1, descriptors_2, matches );



  double min_dist = min_element( matches.begin(), 
                 matches.end(), 
                 []( const DMatch& d1, const DMatch& d2 ) -> bool
                 {
                   return d1.distance < d2.distance;
                 } )->distance;

  cout << min_dist << endl;

  vector<DMatch> good_matches;

  for( int i = 0; i < descriptors_1.rows; i++ )
  { 
    if( matches[i].distance < max<double>( min_dist*2, 60.0 ) )
    { 
      good_matches.push_back( matches[i]); 
    }
  }

  Mat img_matches;
  drawMatches( img_1, keypoints_1, img_2, keypoints_2,
               good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
               vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );

  //-- Show detected matches
  imshow( "Good Matches", img_matches );

  for( int i = 0; i < good_matches.size(); i++ )
  { 
    cout << good_matches[i].queryIdx << " --- " << good_matches[i].trainIdx << endl; 
  }

  waitKey(0);

  return 0;
}

void readme()
{ 
  cout << " Usage: ./ORB_test  " << endl; 
}

string type2str(int type) 
{
  string r;

  uchar depth = type & CV_MAT_DEPTH_MASK;
  uchar chans = 1 + (type >> CV_CN_SHIFT);

  switch ( depth ) {
    case CV_8U:  r = "8U"; break;
    case CV_8S:  r = "8S"; break;
    case CV_16U: r = "16U"; break;
    case CV_16S: r = "16S"; break;
    case CV_32S: r = "32S"; break;
    case CV_32F: r = "32F"; break;
    case CV_64F: r = "64F"; break;
    default:     r = "User"; break;
  }

  r += "C";
  r += (chans+'0');

  return r;
}
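
As an alternative to converting the binary ORB descriptors to CV_32F, FlannBasedMatcher can be constructed with an LSH index, which matches CV_8U descriptors directly using Hamming distance. A minimal sketch (the LSH parameters are common defaults, not tuned values):

FlannBasedMatcher lsh_matcher( makePtr<flann::LshIndexParams>( 12, 20, 2 ) );
std::vector<DMatch> lsh_matches;
// descriptors_1/descriptors_2 can stay CV_8U here; no convertTo is needed
lsh_matcher.match( descriptors_1, descriptors_2, lsh_matches );

Assuming an OpenCV 3.3 install visible to pkg-config (the source file name below is just illustrative), the program can be built along these lines:

g++ -std=c++11 orb_flann.cpp -o ORB_test `pkg-config --cflags --libs opencv`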
