3

I'm doing a project on SURF and so far I have implemented SURF features successfully, and I have done the feature evaluation correctly as well. But I don't know how to do the DESCRIPTOR evaluation... I'm using c++/opencv svn.

Here you can find the sample code from the OpenCV SVN (it shows how to use the evaluator), but I couldn't use it in my code.

My code:

#include "cv.h" // include standard OpenCV headers, same as before
#include "highgui.h"
#include "ml.h"
#include <stdio.h>
#include <iostream>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <vector>
//#include "precomp.hpp"


using namespace cv; // all the new API is put into "cv" namespace. Export its content
using namespace std;

using std::cout;
using std::cerr;
using std::endl;
using std::vector;

// enable/disable use of mixed API in the code below.
#define DEMO_MIXED_API_USE 1
void warpPerspectiveRand( const Mat& src, Mat& dst, Mat& H, RNG& rng )
{
    H.create(3, 3, CV_32FC1);
    H.at<float>(0,0) = rng.uniform( 0.8f, 1.2f);
    H.at<float>(0,1) = rng.uniform(-0.1f, 0.1f);
    H.at<float>(0,2) = rng.uniform(-0.1f, 0.1f)*src.cols;
    H.at<float>(1,0) = rng.uniform(-0.1f, 0.1f);
    H.at<float>(1,1) = rng.uniform( 0.8f, 1.2f);
    H.at<float>(1,2) = rng.uniform(-0.1f, 0.1f)*src.rows;
    H.at<float>(2,0) = rng.uniform( -1e-4f, 1e-4f);
    H.at<float>(2,1) = rng.uniform( -1e-4f, 1e-4f);
    H.at<float>(2,2) = rng.uniform( 0.8f, 1.2f);

    warpPerspective( src, dst, H, src.size() );
}




// Match query descriptors against train descriptors and return the wall-clock
// time in seconds that the match call took. The matches are written into
// 'matches'. The two keypoint parameters are unused and kept only so the
// signature matches the original sample's API.
double match(const vector<KeyPoint>& /*kpts_train*/, const vector<KeyPoint>& /*kpts_query*/, DescriptorMatcher& matcher,
            const Mat& train, const Mat& query, vector<DMatch>& matches)
{
  const double start = (double)getTickCount();
  matcher.match(query, train, matches); // features2d descriptor matching
  const double elapsed = ((double)getTickCount() - start) / getTickFrequency();
  return elapsed;
}


// Forward declaration only: simpleMatching is neither defined nor called
// anywhere in this snippet — presumably left over from the OpenCV sample
// this code was adapted from. Safe to remove once confirmed unused.
void simpleMatching( Ptr<DescriptorMatcher>& descriptorMatcher,
                     const Mat& descriptors1, const Mat& descriptors2,
                     vector<DMatch>& matches12 );

int main( int argc, char** argv )
{

string im1_name, im2_name;
  im1_name = "lena.jpg";
  im2_name = "lena.jpg";

Mat img1 = imread(im1_name, 1);
Mat img2 = imread(im2_name, 1);

RNG rng = theRNG();
Mat H12;
warpPerspectiveRand(img1, img2, H12, rng );




    SurfFeatureDetector detector(2000);
    vector<KeyPoint> keypoints1, keypoints2;
    detector.detect(img1, keypoints1);
    detector.detect(img2, keypoints2);


float repeatability;
int correspCount;
evaluateFeatureDetector( img1, img2, H12, &keypoints1, &keypoints2, repeatability, correspCount );

cout << "repeatability = " << repeatability << endl;
        cout << "correspCount = " << correspCount << endl;

    // computing descriptors
    SurfDescriptorExtractor extractor;
    Mat descriptors1, descriptors2;
    extractor.compute(img1, keypoints1, descriptors1);
    extractor.compute(img2, keypoints2, descriptors2);


  return 0;
}

So my question is: how do I evaluate the SURF descriptors? I have tried many approaches but couldn't get it to work.

Thank you so much

Mario
  • 1,469
  • 7
  • 29
  • 46

1 Answer

0

Use a descriptor matcher

cv::BruteForceMatcher< cv::L2<float> > matcher;
std::vector<cv::DMatch> matches;
matcher.match(descriptors1, descriptors2, matches);

This will get you a vector of matches. Have a look at the documentation for DMatch.

Also have a look at this function:

cv::drawMatches(image1, keypoints1, image2, keypoints2, matches, outimage);
cv::imshow("foo", outimage);
Unapiedra
  • 15,037
  • 12
  • 64
  • 93
  • I tried those functions and they worked successfully for me. My concern is this: when I call Ptr gdm = new VectorDescriptorMatcher( extractor, matcher ); it does not accept both the extractor and the matcher, and I need these two parameters in order to call the next function, evaluateGenericDescriptorMatcher( img1, img2, H12, keypoints1, keypoints2, 0, 0, curve, gdm ); Do you have any idea what I'm doing wrong here? – Mario Jul 18 '11 at 17:28
  • 2
    Nope. I suggest you close this question and ask a new one with a minimal example. – Unapiedra Jul 22 '11 at 10:04