@Mugichoko445 · Created February 16, 2021
KinFu + RealSense
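A minimal depth-only KinectFusion demo: OpenCV's contrib rgbd module fuses a RealSense depth stream into a TSDF volume, shows the raycast rendering in a HighGUI window, and displays the growing point cloud in a Viz window.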
// Ref 1: https://gist.github.com/UnaNancyOwen/7ef170a48b6b73016810d5ba926bf145
// Ref 2: https://github.com/opencv/opencv_contrib/blob/master/modules/rgbd/samples/kinfu_demo.cpp
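//
// Build sketch (an assumption, not part of the original gist): needs OpenCV
// built with the contrib rgbd and viz modules, plus librealsense2; the
// pkg-config package names may differ on your system.
//   g++ -std=c++11 kinfu_realsense.cpp -o kinfu_realsense \
//       $(pkg-config --cflags --libs opencv4 realsense2)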
#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/rgbd.hpp>
#include <opencv2/viz.hpp>
#include <opencv2/core/ocl.hpp>
#include <librealsense2/rs.hpp>
class RealSense
{
public:
    RealSense()
    {
        rs2::log_to_console(RS2_LOG_SEVERITY_ERROR);

        // Depth stream only; KinFu needs nothing else
        cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
        pipe.start(cfg);

        auto prof = pipe.get_active_profile();
        auto device = prof.get_device();
        auto sensors = device.query_sensors();
        if (sensors.empty())
        {
            std::cerr << "[RealSense::RealSense] No sensor found!" << std::endl;
            depthFactor = 1000.0f; // fall back to the common 1 mm depth unit
        }
        else
        {
            // get_depth_scale() returns meters per raw unit, so its inverse is
            // the raw-units-per-meter factor KinFu expects (1000 when units are
            // mm); this assumes the first sensor reported is the depth sensor
            auto depthSensor = sensors[0].as<rs2::depth_sensor>();
            depthFactor = 1.0f / depthSensor.get_depth_scale();
        }
    }
    ~RealSense() { }
    // Copies the latest depth frame into a CV_16U UMat (raw depth units)
    void GrabFrame(cv::UMat& depth)
    {
        auto data = pipe.wait_for_frames(); // blocks until a new frameset arrives
        auto depthFrame = data.get_depth_frame();
        auto w = depthFrame.get_width();
        auto h = depthFrame.get_height();

        // Wrap librealsense's buffer without copying, then copy into the
        // caller's UMat so KinFu gets its own (possibly GPU-backed) data
        auto cpuDepth = cv::Mat(h, w, CV_16U, (void*)depthFrame.get_data());
        cpuDepth.copyTo(depth);
    }
    rs2_intrinsics GetIntrinsics()
    {
        auto stream = pipe.get_active_profile().get_stream(RS2_STREAM_DEPTH);
        return stream.as<rs2::video_stream_profile>().get_intrinsics();
    }
    float GetDepthFactor()
    {
        return depthFactor;
    }

private:
    rs2::pipeline pipe;
    rs2::config cfg;
    float depthFactor;
};
void InitParams(cv::kinfu::Params& params, const int w, const int h, const float fx, const float fy, float depthFactor)
{
    // Principal point is approximated by the image center
    const float cx = w / 2.0f - 0.5f;
    const float cy = h / 2.0f - 0.5f;
    const cv::Matx33f cameraMatrix = cv::Matx33f(
        fx, 0.0f, cx,
        0.0f, fy, cy,
        0.0f, 0.0f, 1.0f);

    params.frameSize = cv::Size(w, h);
    params.intr = cameraMatrix;
    params.depthFactor = depthFactor; // raw depth units per meter
}
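// Note (an assumption, not from the original gist): rs2_intrinsics also carries
// the calibrated principal point (ppx, ppy), which could replace the
// image-center approximation above, e.g.:
//
//   params.intr = cv::Matx33f(ip.fx, 0.0f, ip.ppx,
//                             0.0f, ip.fy, ip.ppy,
//                             0.0f, 0.0f, 1.0f);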
int main()
{
    bool useOpenCL = true;
    bool coarse = false;      // coarse = faster but lower-resolution volume
    bool useHashTSDF = false; // HashTSDF = spatially unbounded volume

    cv::setUseOptimized(true);
    cv::ocl::setUseOpenCL(useOpenCL); // KinFu runs much faster via the T-API

    RealSense rs;
    auto ip = rs.GetIntrinsics();

    // Initialize KinFu parameters from the sensor's resolution and focal lengths
    cv::Ptr<cv::kinfu::Params> params;
    if (!coarse) params = cv::kinfu::Params::defaultParams();
    else params = cv::kinfu::Params::coarseParams();
    if (useHashTSDF) params = cv::kinfu::Params::hashTSDFParams(coarse);
    InitParams(*params, ip.width, ip.height, ip.fx, ip.fy, rs.GetDepthFactor());

    // Create KinFu
    auto kinfu = cv::kinfu::KinFu::create(params);

    cv::namedWindow("Kinect Fusion");
    cv::viz::Viz3d viewer("Kinect Fusion");
    while (!viewer.wasStopped())
    {
        cv::UMat frame;
        rs.GrabFrame(frame);
        if (frame.empty()) continue;

        // Update KinFu; on tracking failure, clear the volume and start over
        if (!kinfu->update(frame))
        {
            std::cout << "reset" << std::endl;
            kinfu->reset();
            continue;
        }

        // Raycast the current TSDF volume from the tracked camera pose
        cv::UMat render;
        kinfu->render(render);

        // Retrieve the point cloud accumulated so far
        cv::UMat points;
        kinfu->getPoints(points);

        // Show the rendered image and the point cloud
        cv::imshow("Kinect Fusion", render);
        if (!points.empty())
        {
            cv::viz::WCloud cloud(points, cv::viz::Color::white());
            viewer.showWidget("cloud", cloud);
        }
        viewer.spinOnce();

        auto key = cv::waitKey(1);
        if (key == 'r') kinfu->reset();
        else if (key == 'q') break;
    }
    cv::destroyAllWindows();

    return 0;
}
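// Controls: 'r' resets the reconstruction, 'q' quits; closing the Viz window
// also ends the loop.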