Skip to content

Instantly share code, notes, and snippets.

@piedar
Last active December 9, 2018 06:08
Show Gist options
Save piedar/b250001eca2a58a7565a to your computer and use it in GitHub Desktop.
Keypoint Object Matching with SURF/BRISK - https://www.youtube.com/watch?v=-r9J1eO4qg4
// This file is hereby released into the public domain.
// This code contains a skeleton for Keypoint Object Matching with SURF/BRISK
// It was extracted from the code of an unreleased demo - https://www.youtube.com/watch?v=-r9J1eO4qg4
// It will not compile as-is, but may function as a starting point.
#include <set>
#include <string>
#include <vector>

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/nonfree/features2d.hpp>
#include <opencv2/video/tracking.hpp>
#include <opencv2/video/background_segm.hpp>

#include "Freenect.hpp"
/// Matches labeled object images against a scene image using keypoint
/// detection (SURF by default), binary BRISK descriptors, and brute-force
/// Hamming matching filtered by ratio and symmetry tests.
class ObjectMatcher {
public:
  /// The generically-named Object maps a label to an image and includes keypoints and descriptors.
  /// The scene is just another Object.
  struct Object {
    std::string label;
    cv::Mat image;
    std::vector<cv::KeyPoint> keypoints;
    cv::Mat descriptors;
    /// Detect keypoints and compute descriptors for @p image using the
    /// matcher's detector/extractor.  An empty image yields an empty feature
    /// set; this keeps label-only lookup keys (see removeObject()) cheap.
    Object(std::string label, const cv::Mat& image, ObjectMatcher* matcher) :
      label(label),
      image(image) {
      if (!image.empty()) {
        matcher->detector->detect(image, keypoints);
        matcher->extractor->compute(image, keypoints, descriptors);
      }
    }
    /// Objects are ordered (and deduplicated inside std::set) by label only.
    bool operator<(const Object& other) const { return (this->label < other.label); }
  };

  /// The (possibly empty) set of descriptor matches between one object and one scene.
  struct Match {
    Object object, scene;
    std::vector<cv::DMatch> dmatches;
    Match(Object object, Object scene, std::vector<cv::DMatch> dmatches) :
      object(object),
      scene(scene),
      dmatches(dmatches) { }
  };

  /// @param max_ratio Maximum nearest-neighbor distance ratio (optional)
  /// @param surf_threshold SURF keypoint detection threshold (optional, if < 0 use SURF default)
  ObjectMatcher(float max_ratio = 0.75, double surf_threshold = 0) :
    max_ratio(max_ratio)
  {
    if (surf_threshold >= 0)
      detector = new cv::SURF(surf_threshold);
    else
      detector = new cv::SURF;
    // BRISK detection is much faster but not as thorough:
    //detector = new cv::BRISK(0);
    // The BRISK extractor computes binary descriptors at the SURF keypoints.
    extractor = new cv::BRISK;
    // BRISK descriptors are binary strings, so they must be compared with the
    // plain Hamming norm.  NORM_HAMMING2 is only valid for ORB with
    // WTA_K == 3 or 4 and would silently produce wrong distances here.
    matcher = new cv::BFMatcher(cv::NORM_HAMMING, false);
    // Alternately (FLANN's default KD-tree index requires float descriptors;
    // binary descriptors would need an LSH index):
    //matcher = new cv::FlannBasedMatcher;
  }

  /// Load the image of an object into the tracker.
  /// @param label Unique string describing the object
  /// @param image Cropped image of object to match
  /// @return true iff the object was added (false if the label already exists)
  bool addObject(std::string label, const cv::Mat& image) { return objects.insert(Object(label, image, this)).second; }

  /// Remove a previously added object by label.
  /// @param object_name Label of the object to remove
  /// @return true iff an object with that label was present
  bool removeObject(std::string object_name) { return objects.erase(Object(object_name, cv::Mat(), this)) > 0; }

  /// @param ratio Maximum nearest-neighbor ratio
  void setRatio(float ratio) { max_ratio = ratio; }
  /// @param detect New detector
  void setDetector(cv::Ptr<cv::FeatureDetector>& detect) { detector = detect; }
  /// @param extract New extractor
  void setExtractor(cv::Ptr<cv::DescriptorExtractor>& extract) { extractor = extract; }
  /// @param match New matcher (its norm must suit the extractor's descriptors)
  void setMatcher(cv::Ptr<cv::DescriptorMatcher>& match) { matcher = match; }

  /// Match object features to scene using detector, extractor, matcher, and ratio and symmetry tests.
  /// @param scene Scene to search for match
  /// @param object Object to match
  /// @return Match (with filtered dmatches) between scene and object
  Match match(const Object& scene, const Object& object) {
    // knnMatch on an empty descriptor Mat throws; short-circuit to "no match".
    if (object.descriptors.empty() || scene.descriptors.empty())
      return Match(object, scene, std::vector<cv::DMatch>());
    // Match in both directions so the symmetry test can cross-check them.
    std::vector< std::vector<cv::DMatch> > os_matches, so_matches;
    matcher->knnMatch(object.descriptors, scene.descriptors, os_matches, 2);
    matcher->knnMatch(scene.descriptors, object.descriptors, so_matches, 2);
    os_matches = ratioTest(os_matches, max_ratio);
    so_matches = ratioTest(so_matches, max_ratio);
    std::vector<cv::DMatch> symmetric_matches = symmetryTest(os_matches, so_matches);
    return Match(object, scene, symmetric_matches);
  }

  /// Draw the best match between the scene and stored objects.
  /// @param scene_image Scene to search for match
  /// @return If matches are found, matches drawn on scene_image; otherwise, scene_image unmodified
  cv::Mat drawBestMatch(const cv::Mat& scene_image) {
    cv::Mat result;
    Object scene(std::string(), scene_image, this);
    Match best_match(Object(std::string(), cv::Mat(), this), Object(std::string(), cv::Mat(), this), std::vector<cv::DMatch>());
    // Keep whichever stored object produced the most filtered matches.
    for (const Object& object : objects) {
      Match this_match = match(scene, object);
      if (this_match.dmatches.size() > best_match.dmatches.size())
        best_match = this_match;
    }
    // findHomography needs at least 4 point correspondences.
    if (best_match.dmatches.size() < 4)
      return scene_image.clone();
    cv::drawMatches(best_match.object.image, best_match.object.keypoints, best_match.scene.image, best_match.scene.keypoints, best_match.dmatches, result);
    // localize matched points
    std::vector<cv::Point2f> object_points, scene_points;
    for (const cv::DMatch& dmatch : best_match.dmatches) {
      object_points.push_back(best_match.object.keypoints.at(dmatch.queryIdx).pt);
      scene_points.push_back(best_match.scene.keypoints.at(dmatch.trainIdx).pt);
    }
    // perspective shift
    cv::Mat H = cv::findHomography(object_points, scene_points, CV_RANSAC);
    if (H.empty())
      return result; // RANSAC failed; keep the drawn matches but skip the box
    std::vector<cv::Point2f> object_corners(4), scene_corners(4);
    object_corners[0] = cv::Point(0, 0);
    object_corners[1] = cv::Point(best_match.object.image.cols, 0);
    object_corners[2] = cv::Point(best_match.object.image.cols, best_match.object.image.rows);
    object_corners[3] = cv::Point(0, best_match.object.image.rows);
    cv::perspectiveTransform(object_corners, scene_corners, H);
    // drawMatches puts the object image to the left of the scene, so scene
    // coordinates are shifted right by the object image width.
    const cv::Point2f offset(static_cast<float>(best_match.object.image.cols), 0.f);
    for (int i = 0; i < 4; ++i)
      cv::line(result, scene_corners.at(i) + offset, scene_corners.at((i + 1) % 4) + offset, cv::Scalar(0, 255, 0), 4);
    return result;
  }

private:
  std::set<Object> objects;   // stored objects, keyed by label
  float max_ratio;            // nearest-neighbor ratio-test threshold
  cv::Ptr<cv::FeatureDetector> detector;
  cv::Ptr<cv::DescriptorExtractor> extractor;
  cv::Ptr<cv::DescriptorMatcher> matcher;

  /// @param matches Set of nearest-neighbor (k = 2) matches to test
  /// @param max_ratio Maximum nearest-neighbor ratio
  /// @return Input matches that have size >= 2 and matches[0].distance / matches[1].distance <= max_ratio
  static std::vector< std::vector<cv::DMatch> > ratioTest(std::vector< std::vector<cv::DMatch> > matches, float max_ratio) {
    for (std::vector< std::vector<cv::DMatch> >::iterator match_iter = matches.begin(); match_iter != matches.end(); ) {
      const std::vector<cv::DMatch>& match = *match_iter;
      // Multiply instead of divide: same test, but no NaN when the
      // second-nearest distance is 0 (e.g. duplicate descriptors).
      if ((match.size() >= 2) && (match.at(0).distance <= max_ratio * match.at(1).distance))
        ++match_iter;
      else
        match_iter = matches.erase(match_iter);
    }
    return matches;
  }

  /// @param matches1 First set of matches (object -> scene)
  /// @param matches2 Second set of matches (scene -> object)
  /// @return Matches whose best pairing agrees in both directions
  static std::vector<cv::DMatch> symmetryTest(const std::vector< std::vector<cv::DMatch> >& matches1, const std::vector< std::vector<cv::DMatch> >& matches2) {
    std::vector<cv::DMatch> symmetric_matches;
    for (const std::vector<cv::DMatch>& match1 : matches1) {
      if (match1.size() >= 2) {
        for (const std::vector<cv::DMatch>& match2 : matches2) {
          if ((match2.size() >= 2) && (match1.at(0).queryIdx == match2.at(0).trainIdx) && (match2.at(0).queryIdx == match1.at(0).trainIdx)) {
            symmetric_matches.push_back(cv::DMatch(match1.at(0).queryIdx, match1.at(0).trainIdx, match1.at(0).distance));
            break;
          }
        }
      }
    }
    return symmetric_matches;
  }
};
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment