Synchronized playback of rosbag files recorded with multiple cameras (https://github.com/IntelRealSense/librealsense/issues/1810)
#include <iostream>
#include <string>
#include <vector>
#include <librealsense2/rs.hpp>
// loadbagfile: load a bag file into a pipeline
void loadbagfile(rs2::pipeline & pipe, std::string filename)
{
    rs2::config cfg;
    cfg.enable_device_from_file(filename);
    pipe.start(cfg);
    auto device = pipe.get_active_profile().get_device();
    rs2::playback playback = device.as<rs2::playback>();
    // pause immediately and disable real-time playback so no frames are dropped while reading
    playback.pause();
    playback.set_real_time(false);
}
// readframe: read the next frame of a bag-file playback pipeline
int readframe(rs2::pipeline & pipe, size_t & framenumber, rs2::frameset & frameset)
{
    auto device = pipe.get_active_profile().get_device();
    rs2::playback playback = device.as<rs2::playback>();
    playback.resume();
    frameset = pipe.wait_for_frames();
    playback.pause();
    // when playback loops back to the beginning, the frame number decreases: treat this as end of file
    if (frameset[0].get_frame_number() < framenumber) return EOF;
    framenumber = frameset[0].get_frame_number();
    return 0;
}
// readframeto: read frames until reaching the first frame at or after the specified timestamp
int readframeto(double timestamp, rs2::pipeline & pipe, size_t & framenumber, rs2::frameset & frameset)
{
    while (true)
    {
        if (timestamp <= frameset.get_timestamp()) return 0;
        int r = readframe(pipe, framenumber, frameset);
        if (r == EOF) return EOF;
    }
    return 0;
}
int main()
{
    std::vector<rs2::pipeline> pipes;
    std::vector<size_t> framenumbers;
    std::vector<rs2::frameset> framesets;
    int num_file = 2;
    std::string bagfiles[] = { "camera0.bag", "camera1.bag" };
    // step 1: load bag files //////////////////////////////////////////////////////
    pipes.clear();
    for (int i = 0; i < num_file; i++)
    {
        rs2::pipeline pipe;
        loadbagfile(pipe, bagfiles[i]);
        pipes.push_back(pipe);
        framenumbers.push_back(0ULL);
    }
    // step 1 end /////////////////////////////////////////////////////////////////
    // step 2: skip frames until the time when every camera is ready (t_start). ///
    for (int i = 0; i < num_file; i++)
    {
        rs2::frameset frameset;
        readframe(pipes[i], framenumbers[i], frameset);
        framesets.push_back(frameset);
    }
    double t_start = 0;
    for (int i = 0; i < num_file; i++)
    {
        if (t_start < framesets[i].get_timestamp()) t_start = framesets[i].get_timestamp();
    }
    for (int i = 0; i < num_file; i++)
    {
        readframeto(t_start, pipes[i], framenumbers[i], framesets[i]);
    }
    // step 2 end /////////////////////////////////////////////////////////////////
    // step 3: read the synchronized frames until the file end ////////////////////
    int framecnt = 0;
    while (1)
    {
        int r;
        bool flag_end = false;
        r = readframe(pipes[0], framenumbers[0], framesets[0]);
        if (r == EOF) flag_end = true;
        for (int i = 1; i < num_file; i++)
        {
            r = readframeto(framesets[0].get_timestamp(), pipes[i], framenumbers[i], framesets[i]);
            if (r == EOF) flag_end = true;
        }
        if (flag_end) break;
        for (int i = 1; i < num_file; i++)
        {
            // show time lag between synchronized frames of cameras (which should be < 1/fps).
            std::cout << "Frame count: " << framecnt << ", Camera No: " << i << ", time lag compared with Camera0 = " << (size_t)(framesets[i].get_timestamp() - framesets[0].get_timestamp()) << "ms" << std::endl;
        }
        /*
        DO SOMETHING HERE WITH THE SYNCHRONIZED FRAMES (framesets)
        */
        framecnt++;
    }
    // step 3 end ///////////////////////////////////////////////////////////////////////
    return 0;
}
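
For reference, a minimal sketch of the kind of code that could go in the DO SOMETHING HERE block above, assuming each bag file was recorded with a depth stream enabled (the variable names follow the loop in step 3):

    // Example use of the synchronized framesets: sample the depth at the image
    // center of every camera for the current synchronized frame set.
    for (int i = 0; i < num_file; i++)
    {
        rs2::depth_frame depth = framesets[i].get_depth_frame();
        float d = depth.get_distance(depth.get_width() / 2, depth.get_height() / 2);
        std::cout << "Camera " << i << ": center depth = " << d << " m"
                  << " (timestamp " << depth.get_timestamp() << " ms)" << std::endl;
    }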

metanamo commented Nov 18, 2018

Thank you,

I also want to synchronize multiple D415 cameras. I found your script and was happy to have a good starting point based on it.
However, your script has to wait while reading through the queue until it reaches the frame number matching (or close to) the desired timestamp.

What about this code using playback.seek()?
It jumps straight to the start position without that delay.

auto dev = pipe.get_active_profile().get_device();
rs2::playback playback = dev.as<rs2::playback>();
auto seek_time = std::chrono::duration<double, std::nano>((timestamp));
playback.seek(std::chrono::duration_cast<std::chrono::nanoseconds>(seek_time));
int r = readframe(pipe, framenumber, frameset);
auto tt = frameset.get_depth_frame().get_frame_metadata(RS2_FRAME_METADATA_TIME_OF_ARRIVAL);
std::cout << "timestamp: " << tt << " framenumber: " << framenumber << std::endl;
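
For what it's worth, a sketch of how seek() might be folded into the gist's helpers as a hypothetical seekframeto() function. Note that playback.seek() and playback.get_position() are measured from the beginning of the recording, while frameset.get_timestamp() uses a different origin, so the caller would need to convert the target into an offset from the start of the file (e.g. target timestamp minus the first frame's timestamp) before seeking:

    // (requires #include <chrono> in addition to the headers above)
    // seekframeto: hypothetical seek-based variant of readframeto. offset_from_start
    // is the target position measured from the beginning of the recording; the caller
    // must compute this offset, since frame timestamps and playback positions do not
    // share the same origin.
    int seekframeto(std::chrono::nanoseconds offset_from_start, rs2::pipeline & pipe,
                    size_t & framenumber, rs2::frameset & frameset)
    {
        auto device = pipe.get_active_profile().get_device();
        rs2::playback playback = device.as<rs2::playback>();
        playback.seek(offset_from_start);               // jump close to the target position
        return readframe(pipe, framenumber, frameset);  // read the frame at or after the seek point
    }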
