I'm using OpenCV feature detection to estimate a robot's position by comparing a LIDAR scan against a virtual map. I've tried ORB feature detection followed by FlannBasedMatcher, but the match results come out wrong. Here's some of my code:
#include <opencv2/opencv.hpp>
using namespace cv;
using namespace std;

Ptr<ORB> orb_a = ORB::create();
Ptr<ORB> orb_b = ORB::create();
vector<cv::KeyPoint> kp1, kp2;
Mat desc1, desc2;
/* setORB arguments:
   1. ORB instance
   2. nfeatures
   3. nlevels
   4. edgeThreshold
   5. firstLevel
   6. WTA_K
   7. scoreType
   8. patchSize
   9. scaleFactor */
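// (For reference, setORB isn't shown here; it's a small helper of mine.
// A minimal sketch of it, assuming the parameter order listed above:)
void setORB(Ptr<ORB> &orb, int nfeatures, int nlevels, int edgeThreshold,
            int firstLevel, int wta_k, ORB::ScoreType scoreType,
            int patchSize, float scaleFactor)
{
    orb->setMaxFeatures(nfeatures);       // cap on keypoints returned
    orb->setNLevels(nlevels);             // pyramid levels
    orb->setEdgeThreshold(edgeThreshold); // border where features are ignored
    orb->setFirstLevel(firstLevel);
    orb->setWTA_K(wta_k);                 // points per descriptor comparison (2, 3 or 4)
    orb->setScoreType(scoreType);
    orb->setPatchSize(patchSize);         // patch used by the oriented BRIEF descriptor
    orb->setScaleFactor(scaleFactor);     // pyramid decimation ratio
}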
Mat hmap, hlidar;
setORB(orb_a, 500, 8, 100, 0, 4, ORB::HARRIS_SCORE, 31, 1.1); // map
orb_a->detectAndCompute(lidarmap, noArray(), kp1, desc1);
drawKeypoints(lidarmap, kp1, hmap, Scalar::all(-1), DrawMatchesFlags::DEFAULT);
setORB(orb_b, 50, 8, 30, 0, 4, ORB::HARRIS_SCORE, 10, 1.5); // LIDAR scan
orb_b->detectAndCompute(lidarused, noArray(), kp2, desc2);
drawKeypoints(lidarused, kp2, hlidar, Scalar::all(-1), DrawMatchesFlags::DEFAULT);
// FLANN matching
FlannBasedMatcher matcher;
std::vector<DMatch> matches;
matcher.match(desc1, desc2, matches);
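// (Aside: ORB descriptors are binary, CV_8U, while FlannBasedMatcher
// defaults to a KD-tree with L2 distance, which expects float descriptors.
// Two alternatives I'm considering, sketched here; the LshIndexParams
// values (12, 20, 2) are just ones I've seen suggested, not tuned:)
// FlannBasedMatcher matcher(makePtr<flann::LshIndexParams>(12, 20, 2));
// BFMatcher matcher(NORM_HAMMING2); // HAMMING2 since WTA_K is set to 4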
double maxdist = 0, mindist = 100000;
for (int i = 0; i < desc1.rows; i++)
{
    double dist = matches[i].distance;
    if (dist < mindist) mindist = dist;
    if (dist > maxdist) maxdist = dist;
}
if (mindist < 0.02) mindist = 0.02;
printf("min : %7.3f \t max : %7.3f \n", mindist, maxdist);
vector<DMatch> good_matches;
for (int i = 1; i < desc1.rows; i++)
{
    if (matches[i].distance >= 2 * mindist && matches[i].distance < maxdist / 2)
    {
        good_matches.push_back(matches[i]);
    }
}
Mat imgmatches;
drawMatches(lidarmap, kp1,
            lidarused, kp2,
            good_matches, imgmatches,
            Scalar::all(-1), Scalar::all(-1),
            vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
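// (Downstream, once good_matches is reliable, my plan for the actual
// position estimate is roughly this; a sketch only, and I haven't verified
// estimateAffinePartial2D is the right tool:)
std::vector<Point2f> pts_map, pts_scan;
for (const DMatch &m : good_matches)
{
    pts_map.push_back(kp1[m.queryIdx].pt);  // keypoint in the virtual map
    pts_scan.push_back(kp2[m.trainIdx].pt); // matched keypoint in the LIDAR scan
}
// 2x3 rigid(+scale) transform, robust to outliers via RANSAC
Mat T = estimateAffinePartial2D(pts_map, pts_scan, noArray(), RANSAC);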
Here's the drawMatches result. Detection seems okay, but the matching is terrible when I rotate the second image.
Does the FLANN matcher only work on unscaled and unrotated images? Can I use FLANN to match bicolor (black-and-white) images? Or can somebody point out where I'm going wrong? Thanks in advance.
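For what it's worth, the filter I was planning to try next, instead of the min/max-distance band above, is the usual ratio test with knnMatch; a minimal sketch (the 0.75 ratio is just the commonly quoted value, not tuned):

std::vector<std::vector<DMatch>> knn_matches;
matcher.knnMatch(desc1, desc2, knn_matches, 2); // two nearest neighbours per query descriptor
std::vector<DMatch> ratio_matches;
const float ratio = 0.75f;
for (size_t i = 0; i < knn_matches.size(); i++)
{
    if (knn_matches[i].size() == 2 &&
        knn_matches[i][0].distance < ratio * knn_matches[i][1].distance)
    {
        ratio_matches.push_back(knn_matches[i][0]); // keep clearly-best matches only
    }
}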