3

I'm trying to stitch two images together, using the OpenCV Java API. However, I get the wrong output and I cannot work out the problem. I use the following steps: 1. detect features 2. extract features 3. match features. 4. find homography 5. find perspective transform 6. warp perspective 7. 'stitch' the 2 images, into a combined image.

but somewhere I'm going wrong. I think it's the way I'm combining the 2 images, but I'm not sure. I get 214 good feature matches between the 2 images, but cannot stitch them.

public class ImageStitching {

static Mat image1;
static Mat image2;

static FeatureDetector fd;
static DescriptorExtractor fe;
static DescriptorMatcher fm;

public static void initialise(){
    fd = FeatureDetector.create(FeatureDetector.BRISK); 
    fe = DescriptorExtractor.create(DescriptorExtractor.SURF); 
    fm = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);

    //images
    image1 = Highgui.imread("room2.jpg");
    image2 = Highgui.imread("room3.jpg");

    //structures for the keypoints from the 2 images
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();

    //structures for the computed descriptors
    Mat descriptors1 = new Mat();
    Mat descriptors2 = new Mat();

    //structure for the matches
    MatOfDMatch matches = new MatOfDMatch();

    //getting the keypoints
    fd.detect(image1, keypoints1);
    fd.detect(image1, keypoints2);

    //getting the descriptors from the keypoints
    fe.compute(image1, keypoints1, descriptors1);
    fe.compute(image2,keypoints2,descriptors2);

    //getting the matches the 2 sets of descriptors 
    fm.match(descriptors2,descriptors1, matches);

    //turn the matches to a list
    List<DMatch> matchesList = matches.toList();

    Double maxDist = 0.0; //keep track of max distance from the matches
    Double minDist = 100.0; //keep track of min distance from the matches

    //calculate max & min distances between keypoints
    for(int i=0; i<keypoints1.rows();i++){
        Double dist = (double) matchesList.get(i).distance;
        if (dist<minDist) minDist = dist;
        if(dist>maxDist) maxDist=dist;
    }

    System.out.println("max dist: " + maxDist );
    System.out.println("min dist: " + minDist);

    //structure for the good matches
    LinkedList<DMatch> goodMatches = new LinkedList<DMatch>();

    //use only the good matches (i.e. whose distance is less than 3*min_dist)
    for(int i=0;i<descriptors1.rows();i++){
        if(matchesList.get(i).distance<3*minDist){
            goodMatches.addLast(matchesList.get(i));
        }
    }

    //structures to hold points of the good matches (coordinates)
    LinkedList<Point> objList = new LinkedList<Point>(); // image1
    LinkedList<Point> sceneList = new LinkedList<Point>(); //image 2

    List<KeyPoint> keypoints_objectList = keypoints1.toList();
    List<KeyPoint> keypoints_sceneList = keypoints2.toList();

    //putting the points of the good matches into above structures
    for(int i = 0; i<goodMatches.size(); i++){
        objList.addLast(keypoints_objectList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(keypoints_sceneList.get(goodMatches.get(i).trainIdx).pt);
    }

    System.out.println("\nNum. of good matches" +goodMatches.size());

    MatOfDMatch gm = new MatOfDMatch();
    gm.fromList(goodMatches);

    //converting the points into the appropriate data structure
    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    //finding the homography matrix
    Mat H = Calib3d.findHomography(obj, scene);

    //LinkedList<Point> cornerList = new LinkedList<Point>();
    Mat obj_corners = new Mat(4,1,CvType.CV_32FC2);
    Mat scene_corners = new Mat(4,1,CvType.CV_32FC2);

    obj_corners.put(0,0, new double[]{0,0});
    obj_corners.put(0,0, new double[]{image1.cols(),0});
    obj_corners.put(0,0,new double[]{image1.cols(),image1.rows()});
    obj_corners.put(0,0,new double[]{0,image1.rows()});

    Core.perspectiveTransform(obj_corners, scene_corners, H);

    //structure to hold the result of the homography matrix
    Mat result = new Mat();

    //size of the new image - i.e. image 1 + image 2
    Size s = new Size(image1.cols()+image2.cols(),image1.rows());

    //using the homography matrix to warp the two images
    Imgproc.warpPerspective(image1, result, H, s);
    int i = image1.cols();
    Mat m = new Mat(result,new Rect(i,0,image2.cols(), image2.rows()));

    image2.copyTo(m);

    Mat img_mat = new Mat();

    Features2d.drawMatches(image1, keypoints1, image2, keypoints2, gm, img_mat, new Scalar(254,0,0),new Scalar(254,0,0) , new MatOfByte(), 2);

    //creating the output file
    boolean imageStitched = Highgui.imwrite("imageStitched.jpg",result);
    boolean imageMatched = Highgui.imwrite("imageMatched.jpg",img_mat);
}


public static void main(String args[]){
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    initialise();
}

I cannot embed images or post more than 2 links because of reputation points, so I've linked the incorrectly stitched images and an image showing the matched features between the 2 images (to give an understanding of the issue):

incorrect stitched image: http://oi61.tinypic.com/11ac01c.jpg detected features: http://oi57.tinypic.com/29m3wif.jpg

user3019612
  • 259
  • 2
  • 7
  • 15

2 Answers2

2

It seems that you have a lot of outliers that make the estimation of the homography incorrect. So you can use the RANSAC method, which iteratively rejects those outliers.

It doesn't take much effort: just pass a third parameter to the findHomography function, as in:

Mat H = Calib3d.findHomography(obj, scene, Calib3d.RANSAC);

Edit

Then make sure that the images given to the detector are 8-bit grayscale images, as mentioned here.

Y.AL
  • 1,808
  • 13
  • 27
  • I have tried the above function, as such; Mat H = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, 1) - but not success. I have experimented with many different algorithm combinations for the detection, extraction and matching, but still no success. Also tried many different values for the re-projection error (4th parameter to findHomography()). Not quite sure how to move on. – user3019612 Feb 08 '14 at 23:12
1

The "incorrectly stitched image" you posted looks like the result of a badly conditioned H matrix. Apart from the suggestions above, run:

cv::determinant(H) > 0.01

To check if your H matrix is "usable". If the matrix is badly conditioned, you get the effect you are showing.

You are drawing onto a canvas only as big as a 2x2 grid of images. If that's the case, you won't see many stitching configurations — i.e. it works for image A to the left of image B, but not otherwise. Try drawing the output onto a 3x3 canvas instead, using the following snippet:

  // Use the Homography Matrix to warp the images, but offset it to the
  // center of the output canvas. Careful to pre-multiply, not post-multiply.
  cv::Mat Offset = (cv::Mat_<double>(3,3) << 1, 0, 
                    width, 0, 1, height, 0, 0, 1);
  H = Offset * H;

  cv::Mat result;
  cv::warpPerspective(mat_l,
                      result,
                      H,
                      cv::Size(3*width, 3*height));
  // Copy the reference image to the center of the 3x3 output canvas.
  cv::Mat roi = result.colRange(width,2*width).rowRange(height,2*height);
  mat_r.copyTo(roi);

Where width and height are those of the input images, supposedly both of the same size. Note that this warping assumes the mat_l unchanged (flat) and mat_r warping to get stitched on it.

miguelao
  • 772
  • 5
  • 12