# Install build dependencies (Ubuntu/Debian)
apt-get install -y git cmake build-essential libgl1-mesa-dev libglew-dev libblas-dev liblapack-dev libopencv-dev python-opencv libeigen3-dev

# Build and install Pangolin (the viewer dependency of ORB-SLAM2)
cd /tmp && git clone https://github.com/stevenlovegrove/Pangolin.git && cd Pangolin && mkdir build && cd build && cmake .. && make -j && make install

# Clone and build ORB-SLAM2 itself
git clone https://github.com/raulmur/ORB_SLAM2.git ORB_SLAM2
cd ORB_SLAM2
chmod +x build.sh
./build.sh
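With the build finished, the modified mono_tum example below can be run on a TUM-style sequence directory (a folder of images plus an rgb.txt index, as produced by the script and note that follow). A sketch of the invocation, assuming the vocabulary has been extracted to Vocabulary/ORBvoc.txt and reusing the stock TUM1.yaml camera settings; adjust the settings file and sequence path to your own camera and data:

./Examples/Monocular/mono_tum Vocabulary/ORBvoc.txt Examples/Monocular/TUM1.yaml /headless/Downloads/dm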
# Generate TUM-style "timestamp filename" lines for a directory of images,
# spacing the frames 0.1 s apart starting from the current time.
import datetime
from os import listdir
from os.path import isfile, join

mypath = "/headless/Downloads/dm/"

current = datetime.datetime.now()
# Sort so frames are listed in filename order rather than arbitrary listdir() order.
onlyfiles = sorted(f for f in listdir(mypath) if isfile(join(mypath, f)))
for f in onlyfiles:
    current += datetime.timedelta(seconds=0.1)
    print(str(current.timestamp()) + " " + f)
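Each printed line is a "timestamp filename" pair. Note that LoadImages() in mono_tum.cc below skips the first three lines of rgb.txt before parsing, so the script's output should be saved into the sequence directory as rgb.txt with three header lines prepended, mirroring the TUM RGB-D layout. A sketch with hypothetical timestamps and frame names:

# color images
# generated by the timestamp script above
# timestamp filename
1527673800.1 frame0001.png
1527673800.2 frame0002.png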
/**
* This file is part of ORB-SLAM2.
*
* Copyright (C) 2014-2016 Raúl Mur-Artal <raulmur at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/raulmur/ORB_SLAM2>
*
* ORB-SLAM2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
*/
#include<iostream>
#include<algorithm>
#include<fstream>
#include<chrono>
#include<thread>
#include<unistd.h>

#include<opencv2/core/core.hpp>

#include<System.h>
using namespace std;
void LoadImages(const string &strFile, vector<string> &vstrImageFilenames,
                vector<double> &vTimestamps);
int main(int argc, char **argv)
{
    if(argc != 4)
    {
        cerr << endl << "Usage: ./mono_tum path_to_vocabulary path_to_settings path_to_sequence" << endl;
        return 1;
    }

    // Retrieve paths to images
    vector<string> vstrImageFilenames;
    vector<double> vTimestamps;
    string strFile = string(argv[3])+"/rgb.txt";
    LoadImages(strFile, vstrImageFilenames, vTimestamps);

    int nImages = vstrImageFilenames.size();

    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::MONOCULAR,true);

    // Vector for tracking time statistics
    vector<float> vTimesTrack;
    vTimesTrack.resize(nImages);

    cout << endl << "-------" << endl;
    cout << "Start processing sequence ..." << endl;
    cout << "Images in the sequence: " << nImages << endl << endl;

    // Main loop
    cv::Mat im;
    for(int ni=0; ni<nImages; ni++)
    {
        // Read image from file
        im = cv::imread(string(argv[3])+"/"+vstrImageFilenames[ni],CV_LOAD_IMAGE_UNCHANGED);
        double tframe = vTimestamps[ni];

        if(im.empty())
        {
            cerr << endl << "Failed to load image at: "
                 << string(argv[3]) << "/" << vstrImageFilenames[ni] << endl;
            return 1;
        }

#ifdef COMPILEDWITHC11
        std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
#else
        std::chrono::monotonic_clock::time_point t1 = std::chrono::monotonic_clock::now();
#endif

        // Pass the image to the SLAM system
        SLAM.TrackMonocular(im,tframe);

#ifdef COMPILEDWITHC11
        std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
#else
        std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
#endif

        double ttrack = std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();

        vTimesTrack[ni] = ttrack;

        // Wait to load the next frame
        double T = 0;
        if(ni<nImages-1)
            T = vTimestamps[ni+1]-tframe;
        else if(ni>0)
            T = tframe-vTimestamps[ni-1];

        if(ttrack<T)
            usleep((T-ttrack)*1e6);
    }
std::cout << "recogn stoppped!" << std::endl;
auto map = SLAM.mpMap;
const auto & vpMPs = map->GetAllMapPoints();
ofstream myFileMap("pointCloud.xyz");
for(size_t i=0, iend=vpMPs.size(); i<iend;i++) {
if(vpMPs[i]->isBad()) {
continue;
}
auto pos = vpMPs[i]->GetWorldPos();
myFileMap << pos.at<float>(2) << "\t" <<pos.at<float>(0)<< "\t" << pos.at<float>(1) << std::endl;
}
myFileMap.close();
std::cout << "cloud saved as pointCloud.xyz" << std::endl;
return 0; //uncomment to create a utilety out of it!
while(true) {
std::this_thread::sleep_for(std::chrono::milliseconds(100));
}
    // Stop all threads
    SLAM.Shutdown();

    // Tracking time statistics
    sort(vTimesTrack.begin(),vTimesTrack.end());
    float totaltime = 0;
    for(int ni=0; ni<nImages; ni++)
    {
        totaltime += vTimesTrack[ni];
    }
    cout << "-------" << endl << endl;
    cout << "median tracking time: " << vTimesTrack[nImages/2] << endl;
    cout << "mean tracking time: " << totaltime/nImages << endl;

    // Save camera trajectory
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");

    return 0;
}
void LoadImages(const string &strFile, vector<string> &vstrImageFilenames, vector<double> &vTimestamps)
{
    ifstream f;
    f.open(strFile.c_str());

    // skip first three lines
    string s0;
    getline(f,s0);
    getline(f,s0);
    getline(f,s0);

    while(!f.eof())
    {
        string s;
        getline(f,s);
        if(!s.empty())
        {
            stringstream ss;
            ss << s;
            double t;
            string sRGB;
            ss >> t;
            vTimestamps.push_back(t);
            ss >> sRGB;
            vstrImageFilenames.push_back(sRGB);
        }
    }
}
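The exported pointCloud.xyz is a plain text file with one tab-separated coordinate triple per line, so most point-cloud viewers can open it directly, for example (assuming MeshLab is installed):

meshlab pointCloud.xyz

The header below is the stock ORB-SLAM2 System.h with a single modification: the mpMap pointer is moved into a public section so the example above can read the map points after tracking.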
/**
* This file is part of ORB-SLAM2.
*
* Copyright (C) 2014-2016 Raúl Mur-Artal <raulmur at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/raulmur/ORB_SLAM2>
*
* ORB-SLAM2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef SYSTEM_H
#define SYSTEM_H
#include<string>
#include<thread>
#include<opencv2/core/core.hpp>
#include "Tracking.h"
#include "FrameDrawer.h"
#include "MapDrawer.h"
#include "Map.h"
#include "LocalMapping.h"
#include "LoopClosing.h"
#include "KeyFrameDatabase.h"
#include "ORBVocabulary.h"
#include "Viewer.h"
namespace ORB_SLAM2
{

class Viewer;
class FrameDrawer;
class Map;
class Tracking;
class LocalMapping;
class LoopClosing;

class System
{
public:
    // Input sensor
    enum eSensor{
        MONOCULAR=0,
        STEREO=1,
        RGBD=2
    };

public:
    // Initialize the SLAM system. It launches the Local Mapping, Loop Closing and Viewer threads.
    System(const string &strVocFile, const string &strSettingsFile, const eSensor sensor, const bool bUseViewer = true);

    // Process the given stereo frame. Images must be synchronized and rectified.
    // Input images: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp);

    // Process the given RGB-D frame. The depth map must be registered to the RGB frame.
    // Input image: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Input depthmap: Float (CV_32F).
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const double &timestamp);

    // Process the given monocular frame.
    // Input images: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackMonocular(const cv::Mat &im, const double &timestamp);

    // This stops the local mapping thread (map building) and performs only camera tracking.
    void ActivateLocalizationMode();
    // This resumes the local mapping thread and performs SLAM again.
    void DeactivateLocalizationMode();

    // Returns true if there has been a big map change (loop closure, global BA)
    // since the last call to this function.
    bool MapChanged();

    // Reset the system (clear map)
    void Reset();

    // All threads will be requested to finish.
    // It waits until all threads have finished.
    // This function must be called before saving the trajectory.
    void Shutdown();

    // Save camera trajectory in the TUM RGB-D dataset format.
    // Only for stereo and RGB-D. This method does not work for monocular.
    // Call Shutdown() first.
    // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
    void SaveTrajectoryTUM(const string &filename);

    // Save keyframe poses in the TUM RGB-D dataset format.
    // This method works for all sensor inputs.
    // Call Shutdown() first.
    // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
    void SaveKeyFrameTrajectoryTUM(const string &filename);

    // Save camera trajectory in the KITTI dataset format.
    // Only for stereo and RGB-D. This method does not work for monocular.
    // Call Shutdown() first.
    // See format details at: http://www.cvlibs.net/datasets/kitti/eval_odometry.php
    void SaveTrajectoryKITTI(const string &filename);

    // TODO: Save/Load functions
    // SaveMap(const string &filename);
    // LoadMap(const string &filename);

    // Information from the most recently processed frame.
    // You can call this right after TrackMonocular (or stereo or RGBD).
    int GetTrackingState();
    std::vector<MapPoint*> GetTrackedMapPoints();
    std::vector<cv::KeyPoint> GetTrackedKeyPointsUn();
private:
    // Input sensor
    eSensor mSensor;

    // ORB vocabulary used for place recognition and feature matching.
    ORBVocabulary* mpVocabulary;

    // KeyFrame database for place recognition (relocalization and loop detection).
    KeyFrameDatabase* mpKeyFrameDatabase;

public:
    // Map structure that stores the pointers to all KeyFrames and MapPoints.
    // NOTE: made public in this modified header so the map points can be exported from main().
    Map* mpMap;

private:
    // Tracker. It receives a frame and computes the associated camera pose.
    // It also decides when to insert a new keyframe, create some new MapPoints and
    // performs relocalization if tracking fails.
    Tracking* mpTracker;

    // Local Mapper. It manages the local map and performs local bundle adjustment.
    LocalMapping* mpLocalMapper;

    // Loop Closer. It searches loops with every new keyframe. If there is a loop it performs
    // a pose graph optimization and full bundle adjustment (in a new thread) afterwards.
    LoopClosing* mpLoopCloser;

    // The viewer draws the map and the current camera pose. It uses Pangolin.
    Viewer* mpViewer;

    FrameDrawer* mpFrameDrawer;
    MapDrawer* mpMapDrawer;

    // System threads: Local Mapping, Loop Closing, Viewer.
    // The Tracking thread "lives" in the main execution thread that creates the System object.
    std::thread* mptLocalMapping;
    std::thread* mptLoopClosing;
    std::thread* mptViewer;

    // Reset flag
    std::mutex mMutexReset;
    bool mbReset;

    // Change mode flags
    std::mutex mMutexMode;
    bool mbActivateLocalizationMode;
    bool mbDeactivateLocalizationMode;

    // Tracking state
    int mTrackingState;
    std::vector<MapPoint*> mTrackedMapPoints;
    std::vector<cv::KeyPoint> mTrackedKeyPointsUn;
    std::mutex mMutexState;
};

}// namespace ORB_SLAM

#endif // SYSTEM_H