Skip to content

Instantly share code, notes, and snippets.

@MojamojaK
Created October 30, 2019 07:07
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save MojamojaK/54f85b5f3da83ce8ed5f69ec13bd1c72 to your computer and use it in GitHub Desktop.
Save MojamojaK/54f85b5f3da83ce8ed5f69ec13bd1c72 to your computer and use it in GitHub Desktop.
// Standard library
#include <algorithm>
#include <atomic>
#include <chrono>
#include <cstdlib>
#include <deque>
#include <experimental/filesystem>
#include <fstream>
#include <functional>
#include <iostream>
#include <iterator>
#include <map>
#include <memory>
#include <mutex>
#include <sstream>
#include <string>
#include <thread>
#include <utility>
#include <vector>
// Platform (Windows console keyboard polling: _kbhit/_getch)
#include <conio.h>
// Third-party
#include <fmt/format.h>
#include <spdlog/spdlog.h>
#include <Open3D/Open3D.h>
#include <librealsense2/rs.hpp>
// Project
#include "board.h"
#include "calibration.h"
#include "camera_info.h"
#include "device_manager.h"
#include "exception.h"
#include "pointcloud.h"
#include "preset.h"
#include "sensor_info.h"
#include "stream_config.h"
#include "streamer.h"
#include "utility.h"
// Re-joins the whitespace-split command tokens after `offset` into a single
// path. Needed because paths typed at the prompt may contain spaces and have
// already been tokenized by utility::split.
// @param commands tokenized command line (commands[0] is the command itself)
// @param offset   index of the first token that belongs to the path
// @return the reassembled path, or an empty path when no tokens follow offset
std::experimental::filesystem::path extract_path_in_commands(const std::vector<std::string> &commands, const uint offset = 1)
{
    // BUG FIX: with no tokens after `offset` the original called
    // s.erase(s.size() - 1) on an empty string, where size()-1 wraps to
    // SIZE_MAX and throws std::out_of_range.
    if (commands.size() <= offset)
    {
        return std::experimental::filesystem::path();
    }
    std::ostringstream oss;
    std::copy(commands.begin() + offset, commands.end(), std::ostream_iterator<std::string>(oss, " "));
    auto joined = oss.str();
    // Drop the trailing separator the ostream_iterator appended after the
    // last token.
    joined.pop_back();
    return std::experimental::filesystem::path(joined);
}
// CameraInfo augmented with the mutable per-device state this app accumulates
// across commands: calibration results ("cc"), the last captured frameset
// ("c"), and the sensor's depth scale (queried when streams are opened).
class CameraInfoWithData : public CameraInfo
{
public:
CameraInfoWithData(const std::string &serial, const std::string &id, const std::string &location, const int level) : CameraInfo(serial, id, location, level){};
// Calibration result for this camera; written by the "cc" command,
// consumed when building point clouds ("s", "pp").
CalibrationData calibration;
// Frames captured by the "c" command; consumed by "s".
Frameset frameset;
// Depth unit scale reported by the depth sensor; 0 until first queried.
float depth_scale = 0;
};
std::vector<std::string> extract_serial(std::vector<std::shared_ptr<CameraInfoWithData>> &exceptions)
{
std::vector<std::string> res;
res.reserve(exceptions.size());
for (auto &&info : exceptions)
{
res.emplace_back(info->serial());
}
return res;
}
// Blocks until every camera in `disconnected` is reported connected by the
// DeviceManager, polling every 100 ms.
// `disconnected` is deliberately taken by value: entries are shuffled between
// local connected/disconnected lists without touching the caller's vector.
// A background keyboard poller accepts:
//   R/r - force reset of the cameras still waiting (logged as unrecommended)
//   Q/q - abort the whole application via exit(EXIT_SUCCESS)
void wait_for_connections(DeviceManager &manager, std::vector<std::shared_ptr<CameraInfoWithData>> disconnected)
{
    SPDLOG_INFO("Waiting for connections");
    std::vector<std::shared_ptr<CameraInfoWithData>> connected;
    auto reset = std::chrono::steady_clock::now();
    auto time = std::chrono::steady_clock::now();
    // BUG FIX: these flags are shared with the poller thread; the originals
    // were plain bools written on one thread and read on another (data race).
    std::atomic<bool> running{true};
    std::atomic<bool> quitting{false};
    // BUG FIX: guards `disconnected`, which the poller reads (extract_serial)
    // while this thread erases/appends entries.
    std::mutex list_mutex;
    std::thread resetter([&]() {
        while (running)
        {
            if (_kbhit())
            {
                const char input = static_cast<char>(_getch());
                if (input == 'R' || input == 'r')
                {
                    SPDLOG_INFO("Force Resetting Cameras");
                    // Snapshot the serials under the lock, reset outside it
                    // so a slow hardware reset doesn't stall the main loop.
                    std::vector<std::string> serials;
                    {
                        std::lock_guard<std::mutex> _(list_mutex);
                        serials = extract_serial(disconnected);
                    }
                    manager.reset(serials);
                }
                else if (input == 'Q' || input == 'q')
                {
                    quitting = true;
                    break;
                }
                else
                {
                    SPDLOG_INFO("Invalid Command Available Commands: R(eset) or Q(uit)");
                }
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
    });
    while (!quitting)
    {
        {
            std::lock_guard<std::mutex> _(list_mutex);
            // Done once nothing is left waiting.
            if (disconnected.empty())
            {
                break;
            }
            // Promote cameras that appeared since the last poll.
            for (auto itr = disconnected.begin(); itr != disconnected.end();)
            {
                if (manager.is_connected((*itr)->serial()))
                {
                    connected.emplace_back(*itr);
                    itr = disconnected.erase(itr);
                }
                else
                {
                    ++itr;
                }
            }
            // Demote cameras that dropped off the bus again.
            for (auto itr = connected.begin(); itr != connected.end();)
            {
                if (!manager.is_connected((*itr)->serial()))
                {
                    disconnected.emplace_back(*itr);
                    itr = connected.erase(itr);
                }
                else
                {
                    ++itr;
                }
            }
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
        // Progress report roughly every 5 seconds.
        if (std::chrono::duration_cast<std::chrono::seconds>(std::chrono::steady_clock::now() - time).count() > 5)
        {
            std::lock_guard<std::mutex> _(list_mutex);
            const auto camera_count = disconnected.size();
            for (auto i = 0ull; i < camera_count; ++i)
            {
                SPDLOG_INFO("{:>2}/{} Waiting connections for {}", i + 1, camera_count, disconnected[i]->name());
            }
            time = std::chrono::steady_clock::now();
        }
        // Remind the operator of the force-reset escape hatch every ~20 s.
        if (std::chrono::duration_cast<std::chrono::seconds>(std::chrono::steady_clock::now() - reset).count() > 20)
        {
            SPDLOG_INFO("Input \"R\" to try force reset of remaining cameras (Unrecommended)");
            reset = std::chrono::steady_clock::now();
        }
    }
    running = false;
    // BUG FIX: join instead of the original detach. A detached thread kept
    // referencing these stack locals (running/quitting/disconnected/manager)
    // after this function returned — undefined behavior. The poller wakes at
    // least every 100 ms and observes running == false, so the join is bounded.
    resetter.join();
    if (quitting)
    {
        exit(EXIT_SUCCESS);
    }
    SPDLOG_INFO("All Cameras Connected");
}
// Logs every (camera, error) pair collected by a MultiThreader pass, and
// hardware-resets the cameras whose error demands it (error.requiresReset()).
// Calibration failures that carry a frame additionally pop up the failed
// infrared image so the operator can see why marker detection failed.
// @param manager device manager used to reset faulty cameras
// @param errors  (camera, Error) pairs; logged, never rethrown
void handle_exception(DeviceManager &manager, std::vector<std::pair<std::shared_ptr<CameraInfoWithData>, Error>> &errors)
{
// Serials of cameras that must be hardware-reset after their error.
std::vector<std::string> reset;
for (auto &&itr : errors)
{
const auto info = itr.first;
const auto error = itr.second;
// Dispatch on the project error taxonomy; several branches use a
// default: label deliberately fused with the DEFAULT enum case.
switch (error.getErrorType())
{
case ErrorType::BAD_STATE:
SPDLOG_WARN("{} in bad state (probably due to bad programming) [{}]", info->name(), error.what());
break;
case ErrorType::INVALID_PARAMETER:
SPDLOG_WARN("{} in threw invalid parameter error [{}]", info->name(), error.what());
break;
case ErrorType::CALIBRATION:
{
// Calibration errors may carry the offending frame for display below.
Frame frame;
switch (error.getCalibrationErrorType())
{
case CalibrationErrorType::NO_MARKERS:
{
const auto extended = error.as<NoMarkersFoundError>();
frame = extended.getFrame();
SPDLOG_WARN("{} found no markers in frame", info->name());
break;
}
case CalibrationErrorType::INSUFFICIENT_MARKERS:
{
const auto extended = error.as<InsufficientMarkersError>();
frame = extended.getFrame();
SPDLOG_WARN("{} unable to find 5 markers. {} markers found.", info->name(), extended.getMarkersFound());
break;
}
case CalibrationErrorType::FAILED_CORNER_ESTIMATION:
{
const auto extended = error.as<CornerEstimationError>();
frame = extended.getFrame();
SPDLOG_WARN("{} unable to estimate corners. {} markers found.", info->name(), extended.getMarkersFound());
break;
}
default:
case CalibrationErrorType::DEFAULT:
{
SPDLOG_WARN("{} threw [{}]", info->name(), error.what());
break;
}
}
// Show the failed infrared frame (if one was attached) and block until
// the operator dismisses it.
if (frame)
{
if (frame.from_stream(rs2_stream::RS2_STREAM_INFRARED))
{
// NOTE(review): the two extra arguments are unused by this format
// string — harmless with fmt, but probably a leftover.
SPDLOG_WARN("Displaying Failed Frame. Press Any key to close Window", info->serial(), error.what());
const auto window_name = fmt::format("Calibration Failed {} [Press Any key to close window]", info->serial());
cv::namedWindow(window_name, cv::WINDOW_AUTOSIZE);
cv::imshow(window_name, frame.get_image(rs2_stream::RS2_STREAM_INFRARED));
cv::waitKey(0);
cv::destroyWindow(window_name);
}
}
break;
}
case ErrorType::SENSOR_INFO:
switch (error.getOptionType())
{
case OptionErrorType::OUT_OF_RANGE:
{
const auto extended = error.as<OptionOutOfRangeError>();
SPDLOG_WARN("{} threw [{}]", info->name(), extended.what());
break;
}
case OptionErrorType::UNSUPPORTED:
{
const auto extended = error.as<OptionUnsupportedError>();
SPDLOG_WARN("{} threw [{}]", info->name(), extended.what());
break;
}
default:
case OptionErrorType::DEFAULT:
{
SPDLOG_WARN("{} threw default option error [{}]", info->name(), error.what());
break;
}
}
break;
case ErrorType::UNKNOWN:
SPDLOG_WARN("{} threw unknown error [{}]", info->name(), error.what());
break;
default:
case ErrorType::DEFAULT:
SPDLOG_WARN("{} threw default error [{}]", info->name(), error.what());
break;
}
if (error.requiresReset())
{
reset.emplace_back(info->serial());
}
}
if (!reset.empty())
{
SPDLOG_WARN("Resetting {} devices", reset.size());
manager.reset(reset);
}
}
// Runs `lambda` once per camera, each call on its own thread, and joins every
// thread before the constructor returns. Outcomes are partitioned into:
//   success - cameras whose lambda returned normally
//   errors  - (camera, Error) pairs for lambdas that threw
class MultiThreader
{
public:
    MultiThreader(std::vector<std::shared_ptr<CameraInfoWithData>> &objs, std::function<void(std::shared_ptr<CameraInfoWithData>)> &lambda)
    {
        std::vector<std::thread> threads;
        threads.reserve(objs.size());
        std::mutex m_; // guards both result vectors
        for (const auto &obj : objs)
        {
            // Capture obj by value: the copied shared_ptr keeps the camera
            // alive for the thread's lifetime and does not depend on the loop
            // variable (the original captured it by reference).
            threads.emplace_back(std::thread([&, obj]() {
                bool ok = false;
                try
                {
                    lambda(obj);
                    ok = true;
                }
                catch (const Error &e)
                {
                    std::lock_guard<std::mutex> _(m_);
                    errors.emplace_back(std::make_pair(obj, e));
                }
                catch (const rs2::error &e)
                {
                    // librealsense errors carry the failing call and its args.
                    std::lock_guard<std::mutex> _(m_);
                    errors.emplace_back(std::make_pair(obj, UnknownError(fmt::format("{}({}): {}", e.get_failed_function(), e.get_failed_args(), e.what()))));
                }
                catch (const std::exception &e)
                {
                    std::lock_guard<std::mutex> _(m_);
                    errors.emplace_back(std::make_pair(obj, UnknownError(e.what())));
                }
                catch (...)
                {
                    std::lock_guard<std::mutex> _(m_);
                    errors.emplace_back(std::make_pair(obj, UnknownError("Unknown Error")));
                }
                if (ok)
                {
                    // BUG FIX: the original pushed into `success`
                    // unconditionally after the catch chain, so a camera
                    // whose lambda threw ended up in BOTH `errors` and
                    // `success`, and follow-up stages re-ran failed devices.
                    std::lock_guard<std::mutex> _(m_);
                    success.push_back(obj);
                }
            }));
        }
        for (auto &&thread : threads)
        {
            thread.join();
        }
    }
    // Members are plain vectors; the compiler-generated special members are
    // correct. (The original's hand-written copy ctor performed a pointless
    // this != &other self-check, and its destructor just cleared the vectors.)
    MultiThreader(const MultiThreader &other) = default;
    ~MultiThreader() = default;
    std::vector<std::shared_ptr<CameraInfoWithData>> success;
    std::vector<std::pair<std::shared_ptr<CameraInfoWithData>, Error>> errors;
};
// Entry point: interactive console for a multi-camera RealSense capture rig
// (20 cameras in 4 banks). Presents a command loop:
//   cc / cc <dir> - calibrate all cameras, or load calibration from disk
//   cs            - save current calibration data
//   p <json>      - apply a camera preset
//   c [n]         - capture n framesets (default 4)
//   s             - save capture data and build/merge point clouds
//   vi / vd / pp  - live image / depth / point-cloud views (front cameras)
//   ds [r]        - dump connection status, optionally reset disconnected
//   q             - quit
// NOTE: function-level try block — the catch clauses at the bottom handle any
// exception escaping the body.
int main() try
{
// Set the console output code page to UTF-8 so the Japanese camera-location
// labels log correctly (Windows-only call).
SetConsoleOutputCP(65001);
SPDLOG_INFO("Start capture app");
// Save paths: session data goes under ./sessions/session_<timestamp>;
// auxiliary tools and presets live in ../../sandbox.
const auto current_path = std::experimental::filesystem::current_path();
const auto source_path = absolute(std::experimental::filesystem::current_path() / "../../sandbox");
const auto session_data_dir = current_path / fmt::format("sessions/session_{}", utility::get_date_time());
// Soft reset of the PCI-USB hubs via Windows devcon before touching cameras.
system(fmt::format("{} restart PCI\\VEN_1912", (source_path / "devcon.exe").string()).c_str());
// Calibration board parameters (squares + markers — presumably a ChArUco
// board; confirm in board.h). Units for lengths appear to be meters.
const int square_count_x = 8;
const int square_count_y = 35;
const float square_length = 0.05f;
const float marker_length = 0.03f;
const int marker_size = 6;
const float thickness = 0.003f;
CalibrationBoard calibration_board;
const auto calibration_board_dir = current_path / fmt::format("board_data_{}_{}_{}_{}_{}_{}", square_count_x, square_count_y, square_length, marker_length, marker_size, thickness);
// Generate and cache the board only when no saved board with these
// parameters exists (the directory name encodes the parameters).
if (!calibration_board.load(calibration_board_dir))
{
calibration_board.generate(square_count_x, square_count_y, square_length, marker_length, marker_size, thickness);
calibration_board.save(calibration_board_dir);
calibration_board.save_image(calibration_board_dir);
}
// Camera roster: serial / id / mounting location / level. Locations are
// 右前=right-front, 右後=right-rear, 左後=left-rear, 左前=left-front.
// "fronts" double as the camera set used by the live-view commands.
std::vector<std::shared_ptr<CameraInfoWithData>> fronts;
std::vector<std::shared_ptr<CameraInfoWithData>> rears;
fronts.emplace_back(std::make_shared<CameraInfoWithData>("840412060507", "A1", "右前", 1));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("839112062252", "A2", "右前", 2));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("839112061752", "A3", "右前", 3));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("821312062349", "A4", "右前", 4));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("840412060541", "A5", "右前", 5));
rears.emplace_back(std::make_shared<CameraInfoWithData>("821312062588", "B1", "右後", 1));
rears.emplace_back(std::make_shared<CameraInfoWithData>("821312062610", "B2", "右後", 2));
rears.emplace_back(std::make_shared<CameraInfoWithData>("821312062543", "B3", "右後", 3));
rears.emplace_back(std::make_shared<CameraInfoWithData>("821312060506", "B4", "右後", 4));
rears.emplace_back(std::make_shared<CameraInfoWithData>("823112060208", "B5", "右後", 5));
rears.emplace_back(std::make_shared<CameraInfoWithData>("821312062503", "C1", "左後", 1));
rears.emplace_back(std::make_shared<CameraInfoWithData>("821312062739", "C2", "左後", 2));
rears.emplace_back(std::make_shared<CameraInfoWithData>("840412060529", "C3", "左後", 3));
rears.emplace_back(std::make_shared<CameraInfoWithData>("839112062188", "C4", "左後", 4));
rears.emplace_back(std::make_shared<CameraInfoWithData>("840412060557", "C5", "左後", 5));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("821212061829", "D1", "左前", 1));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("822512061093", "D2", "左前", 2));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("839112062232", "D3", "左前", 3));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("822512061144", "D4", "左前", 4));
fronts.emplace_back(std::make_shared<CameraInfoWithData>("839112061604", "D5", "左前", 5));
// Combined list used by the bulk operations.
std::vector<std::shared_ptr<CameraInfoWithData>> infos;
infos.reserve(fronts.size() + rears.size());
infos.insert(infos.end(), fronts.begin(), fronts.end());
infos.insert(infos.end(), rears.begin(), rears.end());
DeviceManager manager;
// Reset everything, wait for re-enumeration, then wait again after the
// settle delay (cameras can drop and reappear during the reset).
manager.reset();
wait_for_connections(manager, infos);
std::this_thread::sleep_for(std::chrono::seconds(8));
wait_for_connections(manager, infos);
{
// Load the default camera preset (JSON) and push it to every camera.
std::ifstream j(source_path / "tmp.json");
const std::string json((std::istreambuf_iterator<char>(j)), std::istreambuf_iterator<char>());
j.close();
std::function<void(std::shared_ptr<CameraInfoWithData>)> set_preset_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
const rs2::device device = manager.get_rs2_device(info->serial());
Preset::set_preset(device, json);
};
SPDLOG_DEBUG("Setting Preset");
auto exp = MultiThreader(infos, set_preset_lambda).errors;
SPDLOG_DEBUG("Setting Preset Complete");
handle_exception(manager, exp);
wait_for_connections(manager, infos);
}
{
// Baseline sensor options: emitter on, auto-exposure on, auto-WB off.
std::function<void(std::shared_ptr<CameraInfoWithData>)> set_options_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
SensorInfo sensor_info(manager.get_rs2_device(info->serial()).first<rs2::depth_sensor>());
sensor_info.set_option(rs2_option::RS2_OPTION_EMITTER_ENABLED, 1.0f);
sensor_info.set_option(rs2_option::RS2_OPTION_ENABLE_AUTO_EXPOSURE, 1.0f);
sensor_info.set_option(rs2_option::RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE, 0.0f);
};
auto exp = MultiThreader(infos, set_options_lambda).errors;
handle_exception(manager, exp);
wait_for_connections(manager, infos);
}
// Capture volume of interest in meters (x/y/z min..max); points outside are
// discarded when building point clouds.
const utility::Range3Df range{{-0.5f, 0.5f}, {-0.24f, 1.8f}, {-0.5f, 0.5f}};
// Interactive command loop: print the menu, read a line, dispatch.
while (true)
{
SPDLOG_INFO("cc: Calibrate all cameras");
SPDLOG_INFO("cc <directory>: Load calibration data from file");
SPDLOG_INFO("cs: Save current calibration data");
SPDLOG_INFO("p <json file>: Load camera preset");
SPDLOG_INFO("c: Capture (frame count: 4)");
SPDLOG_INFO("c <frame count>: Capture");
SPDLOG_INFO("s: Save current capture data");
SPDLOG_INFO("vi: View live front camera image");
SPDLOG_INFO("vd: View live front camera depth");
SPDLOG_INFO("pp: View live front camera pointcloud");
SPDLOG_INFO("ds: Dump camera connection status");
SPDLOG_INFO("ds r: Dump camera connection status / Attempt re-connection of disconnected");
SPDLOG_INFO("q: Quit");
std::cout << "> ";
std::string input;
std::getline(std::cin, input);
if (input.empty())
{
continue;
}
std::vector<std::string> commands = utility::split(input, " ");
std::string &command = commands.at(0);
// Commands are case-insensitive.
transform(command.begin(), command.end(), command.begin(), ::tolower);
if (command == "cc")
{
if (commands.size() == 1)
{
// cc: calibrate every camera from a fresh infrared capture.
std::function<void(std::shared_ptr<CameraInfoWithData>)> open_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
{
// Emitter off: the projected IR dot pattern would confuse
// marker detection on the infrared image.
SensorInfo sensor_info(manager.get_rs2_device(info->serial()).first<rs2::depth_sensor>());
sensor_info.set_option(rs2_option::RS2_OPTION_EMITTER_ENABLED, 0.0f);
}
std::vector<StreamConfig> calibration_config{StreamConfig(rs2_stream::RS2_STREAM_INFRARED, rs2_format::RS2_FORMAT_Y8, 6, 1280, 720, 1)};
Frameset frameset;
// TODO Can crash
Streamer streamer(calibration_config, manager.get_rs2_device(info->serial()));
streamer.start();
// Retry get_frameset until it succeeds or `timeout` ms elapse;
// on timeout the streamer is torn down and the error rethrown.
const auto capture = [&](const int frame_count, const long long timeout = 5000) {
const auto start = std::chrono::steady_clock::now();
while (true)
{
try
{
frameset = streamer.get_frameset(frame_count, timeout);
break;
}
catch (const std::exception &e)
{
SPDLOG_DEBUG("{} Get Frameset Failed {}", info->name(), e.what());
if (std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count() > timeout)
{
try
{
streamer.stop();
streamer.close();
}
catch (...)
{
}
throw;
}
}
catch (...)
{
SPDLOG_DEBUG("{} Get Frameset Failed Unknown Failure", info->name());
throw;
}
}
};
// First capture warms the stream up; the second is the one used.
capture(1, 5000);
SPDLOG_TRACE("{} capturing", info->name());
auto start = std::chrono::steady_clock::now();
capture(1, 5000);
SPDLOG_TRACE("{} capturing done took {}ms", info->name(), std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count());
streamer.stop();
SPDLOG_TRACE("{} closing", info->name());
start = std::chrono::steady_clock::now();
streamer.close();
SPDLOG_TRACE("{} closing done took {}ms", info->name(), std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count());
// Restore the emitter for subsequent depth captures.
SensorInfo sensor_info(manager.get_rs2_device(info->serial()).first<rs2::depth_sensor>());
sensor_info.set_option(rs2_option::RS2_OPTION_EMITTER_ENABLED, 1.0f);
const auto depth_scale = sensor_info.get_depth_scale();
const auto intrinsics = streamer.get_intrinsics(rs2_stream::RS2_STREAM_INFRARED);
const Frame frame = frameset[0];
info->calibration = CalibrationData(calibration_board, depth_scale, intrinsics, frame, info->side());
};
{
SPDLOG_DEBUG("Open and Capture");
auto start = std::chrono::steady_clock::now();
MultiThreader res1(infos, open_lambda);
handle_exception(manager, res1.errors);
SPDLOG_DEBUG("Open and Capture Complete {}ms", std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count());
}
}
else if (commands.size() == 2)
{
// cc <dir>: load previously saved calibration data per camera.
const auto dir = extract_path_in_commands(commands);
if (std::experimental::filesystem::is_directory(dir))
{
std::function<void(std::shared_ptr<CameraInfoWithData>)> loader_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
const auto xml_path = dir / fmt::format("{}_{}.xml", info->id(), info->serial());
const auto bef_path = dir / fmt::format("{}_{}_before.png", info->id(), info->serial());
const auto aft_path = dir / fmt::format("{}_{}_after.png", info->id(), info->serial());
info->calibration = CalibrationData(xml_path, bef_path, aft_path);
};
auto exp = MultiThreader(infos, loader_lambda).errors;
handle_exception(manager, exp);
}
else
{
SPDLOG_ERROR("INVALID DIR PATH {}", dir.string());
}
}
}
else if (command == "cs")
{
const auto dir = session_data_dir / fmt::format("calibration/calibration_{}", utility::get_date_time());
std::function<void(std::shared_ptr<CameraInfoWithData>)> calibrate_saver_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
// Save this camera's calibration data (XML + before/after images).
const auto xml_path = dir / fmt::format("{}_{}.xml", info->id(), info->serial());
const auto bef_path = dir / fmt::format("{}_{}_before.png", info->id(), info->serial());
const auto aft_path = dir / fmt::format("{}_{}_after.png", info->id(), info->serial());
info->calibration.save_xml(xml_path);
info->calibration.save_png(bef_path, aft_path);
};
SPDLOG_DEBUG("Calibration Save");
auto exp = MultiThreader(infos, calibrate_saver_lambda).errors;
SPDLOG_DEBUG("Calibration Save Complete");
handle_exception(manager, exp);
// TODO Vector Destructor Crashes (Memory Access Violation)
}
else if (command == "p")
{
// p <json>: load a camera preset file and push it to every camera.
if (commands.size() >= 2)
{
auto path = extract_path_in_commands(commands);
if (path.extension().string() == ".json" && std::experimental::filesystem::exists(path) && !std::experimental::filesystem::is_directory(path))
{
std::ifstream j(path);
const std::string json((std::istreambuf_iterator<char>(j)), std::istreambuf_iterator<char>());
j.close();
std::function<void(std::shared_ptr<CameraInfoWithData>)> set_preset_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
const rs2::device device = manager.get_rs2_device(info->serial());
Preset::set_preset(device, json);
};
SPDLOG_DEBUG("Setting Preset");
auto exp = MultiThreader(infos, set_preset_lambda).errors;
SPDLOG_DEBUG("Setting Preset Complete");
handle_exception(manager, exp);
}
else
{
SPDLOG_WARN("INVALID PATH {} ", commands[1]);
}
}
else
{
SPDLOG_WARN("INSUFFICIENT ARGUMENTS (expected 2)");
}
}
else if (command == "c")
{
int frame_count = 4;
if (commands.size() >= 2)
{
try
{
// NOTE(review): std::atoi never throws — it returns 0 on bad
// input — so this catch block is dead; frame_count silently
// becomes 0 for non-numeric arguments.
frame_count = std::atoi(commands[1].c_str());
}
catch (const std::exception &e)
{
SPDLOG_DEBUG("Invalid Parameter {} {}", commands[1], e.what());
}
}
// c [n]: capture n framesets of depth+color from every camera.
// Three stages, each fanned out by MultiThreader: open, capture, close.
std::mutex m;
std::map<std::shared_ptr<CameraInfoWithData>, std::shared_ptr<Streamer>> streamers;
std::function<void(std::shared_ptr<CameraInfoWithData>)> open_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
std::vector<StreamConfig> capture_config;
capture_config.emplace_back(StreamConfig(rs2_stream::RS2_STREAM_DEPTH, rs2_format::RS2_FORMAT_Z16, 6, 1280, 720));
capture_config.emplace_back(StreamConfig(rs2_stream::RS2_STREAM_COLOR, rs2_format::RS2_FORMAT_BGR8, 6, 1280, 720));
// TODO Can crash
auto streamer = std::make_shared<Streamer>(capture_config, manager.get_rs2_device(info->serial()));
info->depth_scale = SensorInfo(manager.get_rs2_device(info->serial()).first<rs2::depth_sensor>()).get_depth_scale();
streamer->start();
{
std::lock_guard<std::mutex> _(m);
streamers.insert(std::make_pair(info, streamer));
}
// Warm-up: pull a couple of frames to confirm the stream is live.
const long long timeout = 8000;
const auto start = std::chrono::steady_clock::now();
while (true)
{
try
{
streamer->get_frameset(2, timeout);
break;
}
catch (const std::exception &e)
{
SPDLOG_DEBUG("{} Get Frameset Failed {}", info->name(), e.what());
if (std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count() > timeout)
{
try
{
streamer->stop();
streamer->close();
}
catch (...)
{
}
throw;
}
}
}
};
std::function<void(std::shared_ptr<CameraInfoWithData>)> capture_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
SPDLOG_TRACE("{} capturing", info->name());
const auto start = std::chrono::steady_clock::now();
auto streamer = streamers[info];
const long long timeout = 8000;
while (true)
{
try
{
info->frameset = streamer->get_frameset(frame_count, timeout);
break;
}
catch (const std::exception &e)
{
SPDLOG_DEBUG("{} Get Frameset Failed {}", info->name(), e.what());
if (std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count() > timeout)
{
try
{
streamer->stop();
streamer->close();
}
catch (...)
{
}
throw;
}
}
catch (...)
{
SPDLOG_DEBUG("{} Get Frameset Failed Unknown Failure", info->name());
throw;
}
}
SPDLOG_TRACE("{} capturing done took {}ms", info->name(), std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count());
};
// Closing is kept as a separate stage from open/capture.
std::function<void(std::shared_ptr<CameraInfoWithData>)> close_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
auto streamer = streamers[info];
streamer->stop();
SPDLOG_TRACE("{} closing", info->name());
const auto start = std::chrono::steady_clock::now();
streamer->close();
SPDLOG_TRACE("{} closing done took {}ms", info->name(), std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count());
};
{
// Each stage only proceeds with the cameras that succeeded in the
// previous one (res1.success -> res2.success).
SPDLOG_DEBUG("Opening Sensors");
auto start = std::chrono::steady_clock::now();
MultiThreader res1(infos, open_lambda);
handle_exception(manager, res1.errors);
SPDLOG_DEBUG("Opening Sensors Complete {}ms", std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count());
SPDLOG_DEBUG("Capturing Frames");
start = std::chrono::steady_clock::now();
MultiThreader res2(res1.success, capture_lambda);
SPDLOG_DEBUG("Capturing Frames Complete {}ms", std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count());
handle_exception(manager, res2.errors);
SPDLOG_DEBUG("Closing Sensors");
start = std::chrono::steady_clock::now();
MultiThreader res3(res2.success, close_lambda);
handle_exception(manager, res3.errors);
SPDLOG_DEBUG("Closing Sensors Complete {}ms", std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count());
}
streamers.clear();
}
else if (command == "s")
{
// s: save the captured data — raw frames, filtered frames, and per-camera
// point clouds — then merge, save, and display the combined cloud.
const auto dir = session_data_dir / fmt::format("capture/frame_{}", utility::get_date_time());
utility::mkdir_if_not_exist(dir);
std::mutex m;
std::vector<PointCloud> point_clouds;
std::function<void(std::shared_ptr<CameraInfoWithData>)> capture_saver_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
const size_t size = info->frameset.size();
for (size_t i = 0; i < size; ++i)
{
// Individual frames can be accessed and manipulated directly.
const Frame frame = info->frameset[static_cast<int>(i)];
cv::imwrite((dir / fmt::format("{}_depth_raw_{}.png", info->id(), i)).string(), frame.get_image(rs2_stream::RS2_STREAM_DEPTH));
cv::imwrite((dir / fmt::format("{}_color_raw_{}.png", info->id(), i)).string(), frame.get_image(rs2_stream::RS2_STREAM_COLOR));
// const rs2::frameset rs2_frameset = frameset[i]; // explicitly typing it lets you operate on it as an rs2::frameset as well.
}
// TODO: memory management around align seems broken — splitting filter and align corrupts the align result, so get_filtered currently bundles align(depth) internally.
const Frame filtered = info->frameset.get_filtered(/* TODO filter options*/);
// const Frame depth_aligned = filtered.get_aligned_to(rs2_stream::RS2_STREAM_DEPTH);
// const Frame color_aligned = filtered.get_aligned_to(rs2_stream::RS2_STREAM_COLOR);
cv::imwrite((dir / fmt::format("{}_aligned_depth_depth_filtered.png", info->id())).string(), filtered.get_image(rs2_stream::RS2_STREAM_DEPTH));
cv::imwrite((dir / fmt::format("{}_aligned_depth_color_filtered.png", info->id())).string(), filtered.get_image(rs2_stream::RS2_STREAM_COLOR));
// cv::imwrite((dir / fmt::format("{}_aligned_color_depth_filtered.png", info->id())).string(), color_aligned.get_image(rs2_stream::RS2_STREAM_DEPTH));
// cv::imwrite((dir / fmt::format("{}_aligned_color_color_filtered.png", info->id())).string(), filtered.get_image(rs2_stream::RS2_STREAM_COLOR));
// Build this camera's point cloud in world coordinates (via its
// calibration), clipped to the capture volume.
PointCloud pointcloud = PointCloud(info->calibration, filtered, info->depth_scale, range);
pointcloud.downSample(0.003);
pointcloud.removeStatisticalOutliers(100, 1.5);
pointcloud.estimateNormals(30);
pointcloud.orientNormals(info->calibration.get_camera_coordinate());
{
std::lock_guard<std::mutex> _(m);
point_clouds.emplace_back(pointcloud);
// WritePointCloudToPLY cannot be called from multiple threads,
// hence it runs under the lock.
open3d::io::WritePointCloudToPLY((dir / fmt::format("pointcloud_{}.ply", info->id())).string(), pointcloud, false, false);
}
};
// Silence Open3D's per-write logging during the parallel saves.
open3d::utility::SetVerbosityLevel(open3d::utility::VerbosityLevel::VerboseError);
auto exp = MultiThreader(infos, capture_saver_lambda).errors;
handle_exception(manager, exp);
// Merge all per-camera clouds, save, and show interactively.
PointCloud combined(point_clouds);
combined.downSample(0.003);
open3d::io::WritePointCloudToPLY((dir / "pointcloud_combined.ply").string(), combined, false, false);
open3d::utility::SetVerbosityLevel(open3d::utility::VerbosityLevel::VerboseInfo);
open3d::visualization::Visualizer visualizer;
visualizer.CreateVisualizerWindow("PointCloud", 1600, 900);
visualizer.AddGeometry(combined.ptr());
visualizer.Run();
visualizer.DestroyVisualizerWindow();
point_clouds.clear();
}
else if (command.size() == 2 && command[0] == 'v')
{
// vi / vd: live color or depth view from the front cameras, one OpenCV
// window per camera; any keypress in a window closes it.
std::vector<StreamConfig> video_config;
rs2_stream stream_type = rs2_stream::RS2_STREAM_COLOR;
if (command[1] == 'i')
{
stream_type = rs2_stream::RS2_STREAM_COLOR;
video_config.emplace_back(StreamConfig(rs2_stream::RS2_STREAM_COLOR, rs2_format::RS2_FORMAT_BGR8, 30, 1280, 720));
}
else if (command[1] == 'd')
{
stream_type = rs2_stream::RS2_STREAM_DEPTH;
video_config.emplace_back(StreamConfig(rs2_stream::RS2_STREAM_DEPTH, rs2_format::RS2_FORMAT_Z16, 30, 1280, 720));
}
else
{
SPDLOG_WARN("INVALID COMMAND");
continue;
}
std::function<void(std::shared_ptr<CameraInfoWithData>)> live_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
Streamer streamer(video_config, manager.get_rs2_device(info->serial()));
streamer.start();
Frameset live_frameset;
// Same retry-until-timeout pattern as the capture commands.
const auto capture = [&](const int count, const long long timeout) {
const auto start = std::chrono::steady_clock::now();
while (true)
{
try
{
live_frameset = streamer.get_frameset(count, timeout);
break;
}
catch (const std::exception &e)
{
SPDLOG_DEBUG("{} Get Frameset Failed {}", info->name(), e.what());
if (std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count() > timeout)
{
try
{
streamer.stop();
streamer.close();
}
catch (...)
{
}
throw;
}
}
}
};
const auto window_name = fmt::format("Live {}", info->name());
cv::namedWindow(window_name, cv::WINDOW_AUTOSIZE);
while (cv::waitKey(1) == -1)
{
capture(1, 8000);
const Frame frame = live_frameset[0];
cv::imshow(window_name, frame.get_image(stream_type));
}
cv::destroyWindow(window_name);
streamer.stop();
streamer.close();
};
auto exp = MultiThreader(fronts, live_lambda).errors;
handle_exception(manager, exp);
}
else if (command == "pp")
{
// pp: live 2D projection of the combined front-camera point cloud.
// Each camera thread rasterizes its cloud into a per-camera bitmap; a
// viewer thread ANDs the bitmaps together and displays the result.
//spdlog::set_level(spdlog::level::debug);
std::vector<std::shared_ptr<CameraInfoWithData>> &source_cams = fronts;
std::vector<StreamConfig> video_config{StreamConfig(rs2_stream::RS2_STREAM_DEPTH, rs2_format::RS2_FORMAT_Z16, 30, 640, 360)};
bool showing = true;
std::mutex m;
std::map<std::shared_ptr<CameraInfoWithData>, cv::Mat> sources;
const int width = 450;
const int height = 800;
for (const auto &info : source_cams)
{
// Start each camera with a blank (all-white) bitmap.
sources.insert(std::make_pair(info, cv::Mat(height, width, CV_8UC1, cv::Scalar(255))));
}
std::function<void(std::shared_ptr<CameraInfoWithData>)> live_lambda = [&](const std::shared_ptr<CameraInfoWithData> &info) -> void {
try
{
Streamer streamer(video_config, manager.get_rs2_device(info->serial()));
const float depth_scale = SensorInfo(manager.get_rs2_device(info->serial()).first<rs2::depth_sensor>()).get_depth_scale();
streamer.start();
Frameset live_frameset;
const auto capture = [&](const int count, const long long timeout) {
const auto start = std::chrono::steady_clock::now();
while (true)
{
try
{
live_frameset = streamer.get_frameset(count, timeout);
break;
}
catch (const std::exception &e)
{
SPDLOG_DEBUG("{} Get Frameset Failed {}", info->name(), e.what());
if (std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start).count() > timeout)
{
try
{
streamer.stop();
streamer.close();
}
catch (...)
{
}
throw;
}
}
}
};
// Timestamps of recent frames, used for the FPS log below.
std::deque<std::chrono::steady_clock::time_point> timestamps;
while (showing)
{
timestamps.emplace_front(std::chrono::steady_clock::now());
capture(1, 8000);
const Frame frame = live_frameset[0];
{
PointCloud pointcloud(info->calibration, frame, depth_scale, range);
pointcloud.downSample(0.01);
// Project each 3D point onto an x/y bitmap: map world x/y
// (clamped to the capture range) to pixel coordinates and
// mark the pixel black.
auto image = cv::Mat(height, width, CV_8UC1, cv::Scalar(255));
for (const auto &point : pointcloud.ptr()->points_)
{
const auto max = [](float a, float b) { return a > b ? a : b; };
const auto min = [](float a, float b) { return a < b ? a : b; };
const int x_coordinate = static_cast<int>((min(max(static_cast<float>(point[0]), range.x.min), range.x.max) - range.x.min) / (range.x.max - range.x.min) * width);
const int y_coordinate =
height - static_cast<int>((min(max(static_cast<float>(point[1]), range.y.min), range.y.max) - range.y.min) / (range.y.max - range.y.min) * height);
image.at<uchar>(y_coordinate, x_coordinate) = 0;
}
{
std::lock_guard<std::mutex> _(m);
sources.at(info) = image;
}
// SPDLOG_DEBUG("Update PointCloud addr: {}", reinterpret_cast<long>(sources.at(info).ptr().get()));
}
// Keep only the most recent 50 timestamps for the FPS average.
while (timestamps.size() > 50)
{
timestamps.erase(timestamps.end() - (timestamps.size() - 50));
}
if (timestamps.size() == 50)
{
SPDLOG_INFO("{} actual FPS {}", info->serial(),
static_cast<int>(
1000 / (std::chrono::duration_cast<std::chrono::milliseconds>(*timestamps.begin() - *(timestamps.end() - 1)).count() / static_cast<float>(timestamps.size()))));
}
}
streamer.stop();
streamer.close();
}
catch (...)
{
// Any camera failure stops the whole view (shared flag).
showing = false;
throw;
}
};
std::this_thread::sleep_for(std::chrono::milliseconds(1500));
// Viewer thread: AND all per-camera bitmaps into one image ~30x/s.
std::thread viewer([&]() {
auto *data = static_cast<uint8_t *>(malloc(sizeof(uint8_t) * height * width));
while (showing)
{
memset(data, 255, sizeof(uint8_t) * height * width);
try
{
auto image = cv::Mat(height, width, CV_8UC1, data);
{
std::lock_guard<std::mutex> _(m);
const auto size = width * height;
for (const auto &source : sources)
{
for (auto i = 0, b = 0; b < size; i++, b += width)
{
for (auto j = 0; j < width; j++)
{
data[b + j] &= source.second.at<uchar>(i, j);
}
}
}
}
SPDLOG_TRACE("Refresh Frame");
cv::imshow("image", image);
const auto key = cv::waitKey(33);
if (key != -1)
showing = false;
}
catch (...)
{
showing = false;
}
}
cv::destroyAllWindows();
free(data);
});
auto exp = MultiThreader(source_cams, live_lambda).errors;
handle_exception(manager, exp);
viewer.join();
spdlog::set_level(spdlog::level::trace);
}
else if (command == "ds")
{
// ds [r]: report per-camera connection status; "r" also resets the
// disconnected ones.
std::vector<std::string> disconnected;
for (auto &&info : infos)
{
SPDLOG_INFO("{} is {}", info->name(), manager.is_connected(info->serial()) ? "Connected" : "Disconnected");
if (!manager.is_connected(info->serial()))
{
disconnected.emplace_back(info->serial());
}
}
if (commands.size() >= 2)
{
if (commands[1] == "r")
{
if (!disconnected.empty())
{
manager.reset(disconnected);
}
}
}
disconnected.clear();
}
else if (command == "q")
{
// q: quit the command loop.
commands.clear();
break;
}
else
{
SPDLOG_ERROR("INVALID COMMAND");
}
}
SPDLOG_INFO("Stopping capture app");
infos.clear();
fronts.clear();
rears.clear();
return EXIT_SUCCESS;
}
// Function-try-block handlers: log, wait for a keypress so the console
// window stays readable, then exit with failure.
catch (const rs2::error &e)
{
SPDLOG_CRITICAL("RS2 Error at main [{}({}):{}]", e.get_failed_function(), e.get_failed_args(), e.what());
char c;
std::cin >> c;
return EXIT_FAILURE;
}
catch (const std::exception &e)
{
// NOTE(review): "Excpetion" typo is in the runtime log string; left
// byte-identical here.
SPDLOG_CRITICAL("Excpetion at main [{}]", e.what());
char c;
std::cin >> c;
return EXIT_FAILURE;
}
catch (...)
{
SPDLOG_CRITICAL("Unknown Excpetion at main");
return EXIT_FAILURE;
}
@MojamojaK
Copy link
Author

WIP

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment