relative pose with opengv: estimates the relative rotation and translation between two calibrated views from matched 2D points, using OpenCV to normalize the points and OpenGV's central relative pose RANSAC (Stewenius five-point solver).
#include <cassert>
#include <cmath>
#include <iostream>
#include <vector>

#include <boost/shared_ptr.hpp>

#include <Eigen/Core>

#include <opencv2/opencv.hpp>

#include <opengv/relative_pose/CentralRelativeAdapter.hpp>
#include <opengv/sac/Ransac.hpp>
#include <opengv/sac_problems/relative_pose/CentralRelativePoseSacProblem.hpp>
#include <opengv/types.hpp>

// Returns the camera matrix given the image itself and the focal length.
// img The reference to the image.
// f   The focal length in pixels of the camera.
cv::Mat getCameraMatrix(const cv::Mat& img, const float f) {
  const int cx = img.cols / 2;
  const int cy = img.rows / 2;
  return (cv::Mat_<float>(3, 3) << f, 0, cx, 0, f, cy, 0, 0, 1);
}
// Converts a set of points to unit bearing vectors.
// points   Matrix of size Nx3 with the set of points.
// bearings Output vector of bearing vectors.
void points2bearings(const cv::Mat& points,
                     opengv::bearingVectors_t* bearings) {
  opengv::bearingVector_t bearing;
  for (int i = 0; i < points.rows; ++i) {
    // Normalize each point to unit length.
    const cv::Vec3f p = cv::Vec3f(points.row(i));
    const double l = std::sqrt(p[0]*p[0] + p[1]*p[1] + p[2]*p[2]);
    for (int j = 0; j < 3; ++j) bearing[j] = p[j] / l;
    bearings->push_back(bearing);
  }
}
// Converts an Eigen matrix to an OpenCV matrix (CV_32F).
void eigen2opencv(const Eigen::MatrixXd& eigenMat, cv::Mat* cvMat) {
  cvMat->create(eigenMat.rows(), eigenMat.cols(), CV_32F);
  for (int i = 0; i < eigenMat.rows(); ++i) {
    for (int j = 0; j < eigenMat.cols(); ++j) {
      cvMat->ptr<float>(i)[j] = static_cast<float>(eigenMat(i, j));
    }
  }
}
// Computes the relative pose between two sets of corresponding 2D points.
// points1 Vector of N left image points.
// points2 Vector of N right image points.
// K1      Matrix of size 3x3 with the left image camera calibration.
// K2      Matrix of size 3x3 with the right image camera calibration.
// R       Output vector of size 3x1 with the relative rotation in
//         axis-angle (Rodrigues) form.
// t       Output vector of size 3x1 with the relative translation.
void computeRelativePose(const std::vector<cv::Point2f>& points1,
                         const std::vector<cv::Point2f>& points2,
                         const cv::Mat& K1,
                         const cv::Mat& K2,
                         cv::Vec3f* R,
                         cv::Vec3f* t) {
  assert(!points1.empty() && points1.size() == points2.size());
  // Undistort and normalize the image points (zero distortion assumed).
  cv::Mat points1_rect, points2_rect;
  cv::Mat D = (cv::Mat_<float>(1, 4) << 0, 0, 0, 0);
  cv::undistortPoints(points1, points1_rect, K1, D);
  cv::undistortPoints(points2, points2_rect, K2, D);
  // Reshape the 2-channel output to an Nx2 single-channel matrix.
  const int n_points = static_cast<int>(points1.size());
  cv::Mat points1_mat = points1_rect.reshape(1, n_points);
  cv::Mat points2_mat = points2_rect.reshape(1, n_points);
  // Construct homogeneous points by appending a column of ones.
  cv::Mat ones_col1 = cv::Mat::ones(points1_mat.rows, 1, CV_32F);
  cv::Mat ones_col2 = cv::Mat::ones(points2_mat.rows, 1, CV_32F);
  cv::hconcat(points1_mat, ones_col1, points1_mat);
  cv::hconcat(points2_mat, ones_col2, points2_mat);
  // Compute unit bearing vectors for both views.
  opengv::bearingVectors_t bearingVectors1;
  opengv::bearingVectors_t bearingVectors2;
  points2bearings(points1_mat, &bearingVectors1);
  points2bearings(points2_mat, &bearingVectors2);
  using namespace opengv::sac_problems::relative_pose;  // NOLINT
  // Create a central relative adapter from the two sets of bearing vectors.
  opengv::relative_pose::CentralRelativeAdapter adapter(
      bearingVectors1,
      bearingVectors2);
  // Use the Stewenius five-point solver inside RANSAC.
  CentralRelativePoseSacProblem::algorithm_t algorithm =
      CentralRelativePoseSacProblem::STEWENIUS;
  boost::shared_ptr<CentralRelativePoseSacProblem> relposeproblem_ptr(
      new CentralRelativePoseSacProblem(adapter, algorithm));
  // Create a RANSAC solver for the problem. The threshold is an angular
  // reprojection error of about 0.006 rad, expressed as 1 - cos(angle).
  opengv::sac::Ransac<CentralRelativePoseSacProblem> ransac;
  ransac.sac_model_ = relposeproblem_ptr;
  ransac.threshold_ = 1 - std::cos(0.006);
  ransac.max_iterations_ = 1000;
  // Solve and print the estimated 3x4 transformation [R|t].
  ransac.computeModel();
  std::cout << ransac.model_coefficients_ << std::endl;
  // Extract the rotation and translation from the 3x4 model.
  cv::Mat R_mat, t_mat;
  eigen2opencv(ransac.model_coefficients_.block(0, 0, 3, 3), &R_mat);
  eigen2opencv(ransac.model_coefficients_.col(3), &t_mat);
  // Convert to the OpenSfM convention: invert the transformation and store
  // the rotation in axis-angle (Rodrigues) form.
  cv::Mat R_t = R_mat.t();
  cv::Rodrigues(R_t, *R);
  (*t) = cv::Mat(-R_t * t_mat);
}
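// ---------------------------------------------------------------------------
// Usage sketch: a minimal main() showing how getCameraMatrix() and
// computeRelativePose() could be driven. The image paths, the 800 px focal
// length, and the ORB + brute-force matching step are illustrative
// assumptions rather than part of the pose-estimation code above.
// ---------------------------------------------------------------------------
int main() {
  // Load the two views (placeholder paths).
  const cv::Mat img1 = cv::imread("left.jpg", cv::IMREAD_GRAYSCALE);
  const cv::Mat img2 = cv::imread("right.jpg", cv::IMREAD_GRAYSCALE);
  const float focal = 800.0f;  // assumed focal length in pixels
  const cv::Mat K1 = getCameraMatrix(img1, focal);
  const cv::Mat K2 = getCameraMatrix(img2, focal);

  // Detect and match ORB features to obtain 2D-2D correspondences.
  cv::Ptr<cv::ORB> orb = cv::ORB::create(2000);
  std::vector<cv::KeyPoint> kpts1, kpts2;
  cv::Mat desc1, desc2;
  orb->detectAndCompute(img1, cv::noArray(), kpts1, desc1);
  orb->detectAndCompute(img2, cv::noArray(), kpts2, desc2);
  cv::BFMatcher matcher(cv::NORM_HAMMING, true);  // cross-check matching
  std::vector<cv::DMatch> matches;
  matcher.match(desc1, desc2, matches);

  // Collect the matched point coordinates in the same order for both views.
  std::vector<cv::Point2f> points1, points2;
  for (size_t i = 0; i < matches.size(); ++i) {
    points1.push_back(kpts1[matches[i].queryIdx].pt);
    points2.push_back(kpts2[matches[i].trainIdx].pt);
  }

  // Estimate the relative pose (axis-angle rotation and translation).
  cv::Vec3f R, t;
  computeRelativePose(points1, points2, K1, K2, &R, &t);
  std::cout << "R (Rodrigues): " << R << std::endl;
  std::cout << "t: " << t << std::endl;
  return 0;
}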