@DennisMelamed · Created April 14, 2018 23:14
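// Monocular visual-odometry test node: subscribes to /cv_camera/image_raw,
// undistorts each frame with an equidistant (fisheye) camera model, detects
// FAST corners, tracks them against the previous frame with pyramidal LK
// optical flow, and prints the relative rotation recovered from the
// essential matrix.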
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/video/tracking.hpp>
#include <opencv2/calib3d/calib3d.hpp> // findEssentialMat, recoverPose
#include <opencv2/imgproc/imgproc.hpp> // cvtColor

#include <ros/ros.h>
#include "image_transport/image_transport.h"
#include "cv_bridge/cv_bridge.h"
#include <sensor_msgs/Image.h>
#include <sensor_msgs/image_encodings.h>

#include <Eigen/Core> // Eigen vector types (may already be pulled in via undistorter.h)
#include "undistorter.h"
#include <boost/bind.hpp>
using namespace cv;

// Previous/current frames and their feature points, shared between callbacks.
Mat img1, img2;
std::vector<Point2f> points1;
std::vector<Point2f> points2;
// Track _points1 from _img1 into _img2 with pyramidal Lucas-Kanade optical flow,
// dropping points that were lost or that drifted outside the image.
void featureTracking(Mat _img1, Mat _img2, std::vector<Point2f>& _points1,
                     std::vector<Point2f>& _points2, std::vector<uchar>& status)
{
    std::vector<float> err;
    Size winSize(21, 21);
    TermCriteria termcrit(TermCriteria::COUNT + TermCriteria::EPS, 30, 0.01);

    calcOpticalFlowPyrLK(_img1, _img2, _points1, _points2, status, err,
                         winSize, 3, termcrit, 0, 0.001);

    // Erase untracked points from both vectors; iCorr compensates for the
    // indices shifted by previous erase() calls.
    size_t iCorr = 0;
    for (size_t i = 0; i < status.size(); i++)
    {
        Point2f pt = _points2.at(i - iCorr);
        if (status.at(i) == 0 || pt.x < 0 || pt.y < 0)
        {
            if (pt.x < 0 || pt.y < 0)
            {
                status.at(i) = 0;
            }
            _points1.erase(_points1.begin() + (i - iCorr));
            _points2.erase(_points2.begin() + (i - iCorr));
            iCorr++;
        }
    }
}
// Detect FAST corners in img and return them as Point2f for the tracker.
void fast_features(Mat img, std::vector<Point2f>& points)
{
    std::vector<KeyPoint> keypoints;
    int threshold = 50;
    bool nonmaxSuppression = true;

    // FAST expects a single-channel 8-bit image.
    Mat gray;
    cvtColor(img, gray, COLOR_BGR2GRAY);
    FAST(gray, keypoints, threshold, nonmaxSuppression);

    // Draw on a copy so the markers do not end up in the image used for tracking.
    Mat vis;
    drawKeypoints(img, keypoints, vis, Scalar(255, 0, 0));
    imshow("points", vis);
    waitKey(30);

    KeyPoint::convert(keypoints, points, std::vector<int>());
}
// Per-frame callback: undistort the incoming image, detect FAST features,
// track them against the previous frame, and recover the relative camera rotation.
void image_callback(const sensor_msgs::ImageConstPtr& msg, const undistorter::PinholeUndistorter& undistorter)
{
    // Intrinsics of the calibrated camera: mean focal length and principal point.
    double focal_length = (352.7618998983656 + 362.2127413903864) / 2;
    Point2d pp(309.0169477654956, 231.17134547931778);
    try
    {
        // Keep a deep copy of the previous frame: undistortImage may reuse img1's buffer,
        // so a plain Mat assignment would alias the new frame.
        img2 = img1.clone();
        points2 = points1;

        cv_bridge::CvImagePtr img_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
        undistorter.undistortImage(img_ptr->image, img1);
        imshow("view", img1);
        waitKey(30);

        fast_features(img1, points1);

        std::vector<uchar> status;
        if (!img1.empty() && !img2.empty())
        {
            featureTracking(img1, img2, points1, points2, status);

            // The five-point algorithm needs at least 5 surviving correspondences.
            if (points1.size() >= 5)
            {
                Mat mask, R, t;
                Mat E = findEssentialMat(points2, points1, focal_length, pp, RANSAC, 0.999, 1.0, mask);
                recoverPose(E, points2, points1, R, t, focal_length, pp, mask);
                std::cout << R << std::endl << std::endl;
            }
        }
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("Could not convert from '%s' to 'bgr8': %s", msg->encoding.c_str(), e.what());
    }
}
int main(int argc, char** argv)
{
    // Pinhole camera model with an equidistant (fisheye) distortion model.
    Eigen::Vector2d fl(352.7618998983656, 362.2127413903864);
    Eigen::Vector2d pp(309.0169477654956, 231.17134547931778);
    Eigen::Vector2i res(640, 480);
    Eigen::Vector4d dist(-0.09188870979428575, 0.03877266717716036, -0.040007988295463065, 0.013430413696251163);
    undistorter::PinholeGeometry camera(fl, pp, res, undistorter::EquidistantDistortion::create(dist));

    double alpha = 1.0, scale = 1.0;
    int interp = INTER_LINEAR;
    undistorter::PinholeUndistorter undistorter(camera, alpha, scale, interp);

    ROS_INFO("initializing");
    ros::init(argc, argv, "image_listener");
    ROS_INFO("node init");
    ros::NodeHandle nh;
    ROS_INFO("node handle made");

    namedWindow("view", CV_WINDOW_AUTOSIZE);
    namedWindow("points", CV_WINDOW_AUTOSIZE);
    ROS_INFO("window made");
    startWindowThread();
    ROS_INFO("window thread made");

    image_transport::ImageTransport it(nh);
    ROS_INFO("image transport made");
    image_transport::Subscriber sub =
        it.subscribe("/cv_camera/image_raw", 1, boost::bind(image_callback, _1, undistorter));
    ROS_INFO("subscriber made");

    ros::spin();

    ROS_INFO("destroying window");
    destroyWindow("view");
    destroyWindow("points");
    return 0;
}