// surf_matcher.cpp

#include <iostream>
#include <stdio.h>
#include "opencv2/core.hpp"
#include "opencv2/core/utility.hpp"
#include "opencv2/core/ocl.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/calib3d.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/xfeatures2d.hpp"

using namespace cv;
using namespace cv::xfeatures2d;

const int LOOP_NUM = 10;
const int GOOD_PTS_MAX = 50;
const float GOOD_PORTION = 0.15f;

int64 work_begin = 0;
int64 work_end = 0;

static void workBegin()
{
    work_begin = getTickCount();
}

static void workEnd()
{
    work_end = getTickCount() - work_begin;
}

static double getTime()
{
    return work_end / ((double)getTickFrequency()) * 1000.;
}
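
// Thin wrapper around cv::xfeatures2d::SURF so the detector can be invoked
// like a functor on either Mat or UMat inputs.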
struct SURFDetector
{
    Ptr<Feature2D> surf;
    SURFDetector(double hessian = 800.0)
    {
        surf = SURF::create(hessian);
    }
    template<class T>
    void operator()(const T& in, const T& mask, std::vector<cv::KeyPoint>& pts, T& descriptors, bool useProvided = false)
    {
        surf->detectAndCompute(in, mask, pts, descriptors, useProvided);
    }
};
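
// Generic matcher wrapper; KPMatcher can be BFMatcher, FlannBasedMatcher, etc.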
template<class KPMatcher>
struct SURFMatcher
{
    KPMatcher matcher;
    template<class T>
    void match(const T& in1, const T& in2, std::vector<cv::DMatch>& matches)
    {
        matcher.match(in1, in2, matches);
    }
};
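
// Keeps only the best matches, estimates a homography from them, and returns
// an image with the matches drawn and the object outlined in the scene.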
static Mat drawGoodMatches(
    const Mat& img1,
    const Mat& img2,
    const std::vector<KeyPoint>& keypoints1,
    const std::vector<KeyPoint>& keypoints2,
    std::vector<DMatch>& matches,
    std::vector<Point2f>& scene_corners_
    )
{
    //-- Sort matches and keep the best GOOD_PORTION (15%) of them, capped at GOOD_PTS_MAX
    std::sort(matches.begin(), matches.end());
    std::vector<DMatch> good_matches;
    double minDist = matches.front().distance;
    double maxDist = matches.back().distance;

    const int ptsPairs = std::min(GOOD_PTS_MAX, (int)(matches.size() * GOOD_PORTION));
    for( int i = 0; i < ptsPairs; i++ )
    {
        good_matches.push_back( matches[i] );
    }
    std::cout << "\nMax distance: " << maxDist << std::endl;
    std::cout << "Min distance: " << minDist << std::endl;

    std::cout << "Calculating homography using " << ptsPairs << " point pairs." << std::endl;

    //-- Draw the retained matches
    Mat img_matches;
    drawMatches( img1, keypoints1, img2, keypoints2,
                 good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
                 std::vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );

    //-- Localize the object
    std::vector<Point2f> obj;
    std::vector<Point2f> scene;

    for( size_t i = 0; i < good_matches.size(); i++ )
    {
        //-- Get the keypoints from the good matches
        obj.push_back( keypoints1[ good_matches[i].queryIdx ].pt );
        scene.push_back( keypoints2[ good_matches[i].trainIdx ].pt );
    }

    //-- Get the corners from image_1 ( the object to be "detected" )
    std::vector<Point2f> obj_corners(4);
    obj_corners[0] = Point( 0, 0 );
    obj_corners[1] = Point( img1.cols, 0 );
    obj_corners[2] = Point( img1.cols, img1.rows );
    obj_corners[3] = Point( 0, img1.rows );
    std::vector<Point2f> scene_corners(4);

    Mat H = findHomography( obj, scene, RANSAC );
    perspectiveTransform( obj_corners, scene_corners, H );

    scene_corners_ = scene_corners;

    //-- Draw lines between the corners (the mapped object in the scene - image_2 )
    line( img_matches,
          scene_corners[0] + Point2f( (float)img1.cols, 0), scene_corners[1] + Point2f( (float)img1.cols, 0),
          Scalar( 0, 255, 0), 2, LINE_AA );
    line( img_matches,
          scene_corners[1] + Point2f( (float)img1.cols, 0), scene_corners[2] + Point2f( (float)img1.cols, 0),
          Scalar( 0, 255, 0), 2, LINE_AA );
    line( img_matches,
          scene_corners[2] + Point2f( (float)img1.cols, 0), scene_corners[3] + Point2f( (float)img1.cols, 0),
          Scalar( 0, 255, 0), 2, LINE_AA );
    line( img_matches,
          scene_corners[3] + Point2f( (float)img1.cols, 0), scene_corners[0] + Point2f( (float)img1.cols, 0),
          Scalar( 0, 255, 0), 2, LINE_AA );
    return img_matches;
}
////////////////////////////////////////////////////
// This program demonstrates the usage of SURF_OCL.
// The CPU findHomography interface is used to calculate the transformation matrix.
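// Note: SURF lives in the opencv_contrib module xfeatures2d and is a
// patented/non-free algorithm, so OpenCV typically needs to be built with
// the contrib modules (and non-free algorithms enabled) for this to compile.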
int main(int argc, char* argv[])
{
    const char* keys =
        "{ h help     |                  | print help message }"
        "{ l left     | box.png          | specify left image }"
        "{ r right    | box_in_scene.png | specify right image }"
        "{ o output   | SURF_output.jpg  | specify output save path }"
        "{ m cpu_mode |                  | run without OpenCL }";

    CommandLineParser cmd(argc, argv, keys);
    if (cmd.has("help"))
    {
        std::cout << "Usage: surf_matcher [options]" << std::endl;
        std::cout << "Available options:" << std::endl;
        cmd.printMessage();
        return EXIT_SUCCESS;
    }
    if (cmd.has("cpu_mode"))
    {
        ocl::setUseOpenCL(false);
        std::cout << "OpenCL was disabled" << std::endl;
    }
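
    // The images are kept in UMat so that OpenCV's transparent API (T-API)
    // can dispatch work to OpenCL when it is available and enabled.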
    UMat img1, img2;

    std::string outpath = cmd.get<std::string>("o");

    std::string leftName = cmd.get<std::string>("l");
    imread(leftName, IMREAD_GRAYSCALE).copyTo(img1);
    if (img1.empty())
    {
        std::cout << "Couldn't load " << leftName << std::endl;
        cmd.printMessage();
        return EXIT_FAILURE;
    }

    std::string rightName = cmd.get<std::string>("r");
    imread(rightName, IMREAD_GRAYSCALE).copyTo(img2);
    if (img2.empty())
    {
        std::cout << "Couldn't load " << rightName << std::endl;
        cmd.printMessage();
        return EXIT_FAILURE;
    }

    double surf_time = 0.;

    //declare input/output
    std::vector<KeyPoint> keypoints1, keypoints2;
    std::vector<DMatch> matches;

    UMat _descriptors1, _descriptors2;
    Mat descriptors1 = _descriptors1.getMat(ACCESS_RW),
        descriptors2 = _descriptors2.getMat(ACCESS_RW);

    //instantiate detectors/matchers
    SURFDetector surf;
    SURFMatcher<BFMatcher> matcher;

    //-- start of timing section
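    // The first pass (i == 0) is a warm-up and is excluded from the timing;
    // the clock starts at the beginning of the second iteration.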
    for (int i = 0; i <= LOOP_NUM; i++)
    {
        if (i == 1) workBegin();
        surf(img1.getMat(ACCESS_READ), Mat(), keypoints1, descriptors1);
        surf(img2.getMat(ACCESS_READ), Mat(), keypoints2, descriptors2);
        matcher.match(descriptors1, descriptors2, matches);
    }
    workEnd();
    std::cout << "FOUND " << keypoints1.size() << " keypoints on first image" << std::endl;
    std::cout << "FOUND " << keypoints2.size() << " keypoints on second image" << std::endl;

    surf_time = getTime();
    std::cout << "SURF run time: " << surf_time / LOOP_NUM << " ms" << std::endl << "\n";

    std::vector<Point2f> corner;
    Mat img_matches = drawGoodMatches(img1.getMat(ACCESS_READ), img2.getMat(ACCESS_READ), keypoints1, keypoints2, matches, corner);

    //-- Show detected matches
    namedWindow("surf matches", 0);
    imshow("surf matches", img_matches);
    imwrite(outpath, img_matches);

    waitKey(0);
    return EXIT_SUCCESS;
}