@ixtiyoruz
Created December 17, 2019 07:37
Deep SORT implemented with threads
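// Overview (added note): this file wires a multi-threaded YOLO + Deep SORT pipeline:
// capture -> prepare (resize) -> detect -> draw -> show, with the stages connected by
// single-slot channels (send_one_replaceable_object_t below). The Detector class is
// presumably the darknet C++ API (yolo_v2_class.hpp), and the Deep_sort / DS_* types come
// from a Deep SORT library pulled in via yolo_console_dll.h, which is not shown in this gist.
// Assumed invocation, based on the argc > 4 parsing in main():
//   ./app ./data/coco.names ./yolov3.cfg ./yolov3.weights test.mp4 [skip_rate] [thresh]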
#include "yolo_console_dll.h"
#include <iostream>
#include <iomanip>
#include <string>
#include <vector>
#include <deque>     // std::deque (line_point)
#include <queue>
#include <fstream>
#include <thread>
#include <future>
#include <chrono>    // std::chrono timing and sleep_for
#include <algorithm> // for std::find
#include <iterator>  // for std::begin, std::end
#include <atomic>
#include <memory>    // std::unique_ptr, std::shared_ptr
#include <mutex>     // std::mutex, std::unique_lock
#include <cmath>
#include <opencv2/opencv.hpp> // C++
#include <opencv2/core/version.hpp>
#ifndef CV_VERSION_EPOCH // OpenCV 3.x and 4.x
#include <opencv2/videoio/videoio.hpp>
#define OPENCV_VERSION CVAUX_STR(CV_VERSION_MAJOR)"" CVAUX_STR(CV_VERSION_MINOR)"" CVAUX_STR(CV_VERSION_REVISION)
#ifndef USE_CMAKE_LIBS
#pragma comment(lib, "opencv_world" OPENCV_VERSION ".lib")
#ifdef TRACK_OPTFLOW
#pragma comment(lib, "opencv_cudaoptflow" OPENCV_VERSION ".lib")
#pragma comment(lib, "opencv_cudaimgproc" OPENCV_VERSION ".lib")
#pragma comment(lib, "opencv_core" OPENCV_VERSION ".lib")
#pragma comment(lib, "opencv_imgproc" OPENCV_VERSION ".lib")
#pragma comment(lib, "opencv_highgui" OPENCV_VERSION ".lib")
#endif // TRACK_OPTFLOW
#endif // USE_CMAKE_LIBS
#else // OpenCV 2.x
#define OPENCV_VERSION CVAUX_STR(CV_VERSION_EPOCH)"" CVAUX_STR(CV_VERSION_MAJOR)"" CVAUX_STR(CV_VERSION_MINOR)
#ifndef USE_CMAKE_LIBS
#pragma comment(lib, "opencv_core" OPENCV_VERSION ".lib")
#pragma comment(lib, "opencv_imgproc" OPENCV_VERSION ".lib")
#pragma comment(lib, "opencv_highgui" OPENCV_VERSION ".lib")
#pragma comment(lib, "opencv_video" OPENCV_VERSION ".lib")
#endif // USE_CMAKE_LIBS
#endif // CV_VERSION_EPOCH
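// send_one_replaceable_object_t is a single-slot "latest value" channel built on an
// atomic pointer. send() publishes a heap copy of the object and deletes whatever was
// still sitting in the slot; receive() spins (sleeping 3 ms) until a value appears and
// takes ownership of it. With sync == true the sender also waits until the previous value
// has been consumed, so no frame is dropped (used for video files); with sync == false
// the newest value simply replaces an unconsumed one (used for the web camera).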
template<typename T>
class send_one_replaceable_object_t {
    const bool sync;
    std::atomic<T *> a_ptr;
public:
    void send(T const& _obj) {
        T *new_ptr = new T;
        *new_ptr = _obj;
        if (sync) {
            while (a_ptr.load()) std::this_thread::sleep_for(std::chrono::milliseconds(3));
        }
        std::unique_ptr<T> old_ptr(a_ptr.exchange(new_ptr));
    }

    T receive() {
        std::unique_ptr<T> ptr;
        do {
            while (!a_ptr.load()) std::this_thread::sleep_for(std::chrono::milliseconds(3));
            ptr.reset(a_ptr.exchange(nullptr));
        } while (!ptr);
        T obj = *ptr;
        return obj;
    }

    bool is_object_present() {
        return (a_ptr.load() != nullptr);
    }

    send_one_replaceable_object_t(bool _sync) : sync(_sync), a_ptr(nullptr)
    {}
};
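// Minimal usage sketch of the channel above (illustrative only, not part of the program):
//
//   send_one_replaceable_object_t<int> chan(/*sync=*/true);
//   std::thread producer([&] { for (int i = 0; i < 10; ++i) chan.send(i); });
//   std::thread consumer([&] { for (int i = 0; i < 10; ++i) std::cout << chan.receive() << "\n"; });
//   producer.join(); consumer.join();
//
// With sync == false the consumer may skip values, because send() overwrites any value
// that has not yet been received.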
DS_DetectObjects det2detobj(std::vector<bbox_t> result_vec) {
    DS_DetectObjects result;
    // keep only the COCO classes of interest:
    // 0 person, 1 bicycle, 2 car, 3 motorbike, 5 bus, 6 train, 7 truck
    int a[] = {0, 1, 2, 3, 5, 6, 7};
    // without person:
    // int a[] = {1, 2, 3, 5, 6, 7};
    for (auto &i : result_vec) {
        bool exists = std::find(std::begin(a), std::end(a), i.obj_id) != std::end(a);
        if (exists) {
            DS_Rect rec;
            DS_DetectObject obj;
            rec.x = i.x;
            rec.y = i.y;
            rec.width = i.w;
            rec.height = i.h;
            obj.class_id = i.obj_id;
            obj.rect = rec;
            obj.confidence = i.prob;
            result.push_back(obj);
        }
    }
    return result;
}
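// Note: DS_DetectObject / DS_Rect / DS_DetectObjects and the Deep_sort class used below
// are presumably declared in yolo_console_dll.h or a Deep SORT header it includes; they
// are not part of this gist.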
std::deque<cv::Point> line_point; // polyline handed to Tracker.update(); never populated in this file
std::vector<std::string> objects_names_from_file(std::string const filename) {
    std::ifstream file(filename);
    std::vector<std::string> file_lines;
    if (!file.is_open()) return file_lines;
    for (std::string line; getline(file, line);) file_lines.push_back(line);
    std::cout << "object names loaded \n";
    return file_lines;
}
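// The names file is the standard darknet format: one class name per line, where the line
// index is the class id reported in bbox_t::obj_id (so coco.names line 0 is "person").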
int main(int argc, char *argv[])
{
    std::string names_file = "./data/coco.names";
    std::string cfg_file = "./yolov3.cfg";
    std::string weights_file = "./yolov3.weights";
    std::string filename = "test.mp4";
    if (argc > 4) { // names, cfg, weights, videopath
        names_file = argv[1];
        cfg_file = argv[2];
        weights_file = argv[3];
        filename = argv[4];
    }
    // int const skip_rate = (argc > 5) ? std::stoi(argv[5]) : 3;
    float const thresh = (argc > 6) ? std::stof(argv[6]) : 0.2f;
    std::cout << filename << std::endl;

    Detector detector(cfg_file, weights_file);
    std::cout << "weights are loaded" << std::endl;
    auto obj_names = objects_names_from_file(names_file);
    std::string out_videofile = "result.avi";

    DS_DetectObjects detect_objects;
    Deep_sort Tracker = Deep_sort();
    DS_TrackObjects track_objects;
    bool detection_sync = false;
    int font_face = cv::FONT_HERSHEY_COMPLEX;
    while (true) {
        char text[30];
        try {
            bool show_small_boxes = false;

            std::string const file_ext = filename.substr(filename.find_last_of(".") + 1);
            std::string const protocol = filename.substr(0, 7);

            cv::Mat cur_frame;
            std::atomic<int> fps_cap_counter(0), fps_det_counter(0);
            std::atomic<int> current_fps_cap(0), current_fps_det(0);
            std::atomic<bool> exit_flag(false);
            std::chrono::steady_clock::time_point steady_start, steady_end;
            int video_fps = 25;

            cv::VideoCapture cap;
            if (filename == "web_camera") {
                cap.open(0);
                cap >> cur_frame;
            } else {
                detection_sync = true;
                cap.open(filename);
                cap >> cur_frame;
            }
            video_fps = cap.get(cv::CAP_PROP_FPS);
            cv::Size const frame_size = cur_frame.size();
            //cv::Size const frame_size(cap.get(CV_CAP_PROP_FRAME_WIDTH), cap.get(CV_CAP_PROP_FRAME_HEIGHT));
            std::cout << "\n Video size: " << frame_size << std::endl;
            struct detection_data_t {
                cv::Mat cap_frame;
                std::shared_ptr<image_t> det_image;
                std::vector<bbox_t> result_vec;
                cv::Mat draw_frame;
                bool new_detection;
                uint64_t frame_id;
                bool exit_flag;
                detection_data_t() : new_detection(false), frame_id(0), exit_flag(false) {}
            };

            const bool sync = detection_sync; // sync data exchange
            send_one_replaceable_object_t<detection_data_t> cap2prepare(sync), cap2draw(sync),
                prepare2detect(sync), detect2draw(sync), draw2show(sync), draw2write(sync), draw2net(sync);
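            // Pipeline wiring (added note): t_cap reads frames, t_prepare resizes them for the
            // network, t_detect runs YOLO plus the Deep SORT update, t_draw renders the tracked
            // boxes, and the main thread shows the result. Each hand-off uses one of the channels
            // above; the draw2write / draw2net channels and the t_post / t_write / t_network
            // threads declared below are never used in this version of the code.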
            std::thread t_cap, t_prepare, t_detect, t_post, t_draw, t_write, t_network;

            if (t_cap.joinable()) t_cap.join();
            t_cap = std::thread([&]()
            {
                uint64_t frame_id = 0;
                detection_data_t detection_data;
                do {
                    cap >> detection_data.cap_frame;
                    fps_cap_counter++;
                    detection_data.frame_id = frame_id++;
                    if (detection_data.cap_frame.empty() || exit_flag) {
                        std::cout << " exit_flag: detection_data.cap_frame.size = " << detection_data.cap_frame.size() << std::endl;
                        detection_data.exit_flag = true;
                        detection_data.cap_frame = cv::Mat(frame_size, CV_8UC3);
                    }
                    if (!detection_sync) {
                        cap2draw.send(detection_data); // skip detection
                    }
                    cap2prepare.send(detection_data);
                } while (!detection_data.exit_flag);
                std::cout << " t_cap exit \n";
            });
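            // Added note: in async mode (web camera) the freshly captured frame is also sent
            // straight to the draw thread via cap2draw, so the display keeps running at capture
            // rate even when detection is slower; in sync mode (video file) every frame goes
            // through the full detection path.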
            // pre-processing video frame (resize, conversion)
            t_prepare = std::thread([&]()
            {
                std::shared_ptr<image_t> det_image;
                detection_data_t detection_data;
                do {
                    detection_data = cap2prepare.receive();
                    det_image = detector.mat_to_image_resize(detection_data.cap_frame);
                    detection_data.det_image = det_image;
                    prepare2detect.send(detection_data); // detection
                } while (!detection_data.exit_flag);
                std::cout << " t_prepare exit \n";
            });
            // detection by Yolo
            if (t_detect.joinable()) t_detect.join();
            t_detect = std::thread([&]()
            {
                std::shared_ptr<image_t> det_image;
                detection_data_t detection_data;
                do {
                    detection_data = prepare2detect.receive();
                    det_image = detection_data.det_image;
                    std::vector<bbox_t> result_vec;
                    if (det_image) {
                        result_vec = detector.detect_resized(*det_image, frame_size.width, frame_size.height, thresh, true); // true
                        detect_objects = det2detobj(result_vec);
                        Tracker.update(detect_objects, line_point, detection_data.cap_frame);
                    }
                    fps_det_counter++;
                    //std::this_thread::sleep_for(std::chrono::milliseconds(150));
                    detection_data.new_detection = true;
                    detection_data.result_vec = result_vec;
                    detect2draw.send(detection_data);
                } while (!detection_data.exit_flag);
                std::cout << " t_detect exit \n";
            });
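            // Added note: the Deep SORT step runs inside the detection thread. Raw YOLO boxes
            // are converted with det2detobj() and pushed into Tracker.update() together with
            // line_point and the original frame, while the draw thread later pulls the tracked
            // objects back out with Tracker.get_detect_obj(). The exact Deep_sort API is taken
            // from the library used by the author and is not shown in this gist.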
            // draw rectangles (and track objects)
            t_draw = std::thread([&]()
            {
                detection_data_t detection_data;
                do {
                    // for Video-file
                    if (detection_sync) {
                        detection_data = detect2draw.receive();
                    }
                    // for Video-camera
                    else
                    {
                        // get new Detection result if present
                        if (detect2draw.is_object_present()) {
                            cv::Mat old_cap_frame = detection_data.cap_frame; // use old captured frame
                            detection_data = detect2draw.receive();
                            if (!old_cap_frame.empty()) detection_data.cap_frame = old_cap_frame;
                        }
                        // get new Captured frame
                        else {
                            std::vector<bbox_t> old_result_vec = detection_data.result_vec; // use old detections
                            detection_data = cap2draw.receive();
                            detection_data.result_vec = old_result_vec;
                        }
                    }

                    cv::Mat cap_frame = detection_data.cap_frame;
                    cv::Mat draw_frame = detection_data.cap_frame.clone();
                    std::vector<bbox_t> result_vec = detection_data.result_vec;

                    track_objects = Tracker.get_detect_obj();
                    for (auto oloop : track_objects)
                    {
                        int col = int(oloop.track_id % 9);
                        // cv::Scalar color = color_map[col];
                        cv::Scalar color;
                        if (oloop.outside) {
                            sprintf(text, "%d outside", oloop.track_id);
                            color = cv::Scalar(255, 0, 0);
                        } else {
                            sprintf(text, "%d inside", oloop.track_id);
                            color = cv::Scalar(0, 0, 255);
                        }
                        // clamp the tracked box to the frame borders
                        if (oloop.rect.x < 0) {
                            oloop.rect.x = 0;
                        }
                        if (oloop.rect.y < 0) {
                            oloop.rect.y = 0;
                        }
                        if ((oloop.rect.x + oloop.rect.width) > draw_frame.size[1]) {
                            oloop.rect.width = draw_frame.size[1] - oloop.rect.x;
                        }
                        if ((oloop.rect.y + oloop.rect.height) > draw_frame.size[0]) {
                            oloop.rect.height = draw_frame.size[0] - oloop.rect.y;
                        }
                        cv::Rect box = cv::Rect(oloop.rect.x, oloop.rect.y, oloop.rect.width, oloop.rect.height);
                        cv::rectangle(draw_frame, box, color, 2, 1);
                        cv::Point origin = cv::Point(oloop.rect.x, oloop.rect.y - 2);
                        cv::putText(draw_frame, std::to_string(oloop.track_id), cv::Point(oloop.rect.x, oloop.rect.y - 5), font_face, 0.7, cv::Scalar(255, 0, 255), 2, 8, 0);
                    }

                    detection_data.result_vec = result_vec;
                    detection_data.draw_frame = draw_frame;
                    draw2show.send(detection_data);
                } while (!detection_data.exit_flag);
                std::cout << " t_draw exit \n";
            });
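            // Added note: the main thread below acts as the "show" stage. It recomputes the
            // capture and detection FPS roughly once per second, displays the drawn frame, and
            // handles the keyboard: 'f' toggles show_small_boxes, 'p' pauses until 'p' is
            // pressed again, and ESC (27) raises exit_flag so every thread shuts down.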
            // show detection
            detection_data_t detection_data;
            do {
                steady_end = std::chrono::steady_clock::now();
                float time_sec = std::chrono::duration<double>(steady_end - steady_start).count();
                if (time_sec >= 1) {
                    current_fps_det = fps_det_counter.load() / time_sec;
                    current_fps_cap = fps_cap_counter.load() / time_sec;
                    steady_start = steady_end;
                    fps_det_counter = 0;
                    fps_cap_counter = 0;
                }

                detection_data = draw2show.receive();
                cv::Mat draw_frame = detection_data.draw_frame;

                //if (extrapolate_flag) {
                //    cv::putText(draw_frame, "extrapolate", cv::Point2f(10, 40), cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cv::Scalar(50, 50, 0), 2);
                //}

                cv::imshow("window name", draw_frame);
                int key = cv::waitKey(3); // 3 or 16 ms
                if (key == 'f') show_small_boxes = !show_small_boxes;
                if (key == 'p') while (true) if (cv::waitKey(100) == 'p') break;
                //if (key == 'e') extrapolate_flag = !extrapolate_flag;
                if (key == 27) { exit_flag = true; }
                //std::cout << " current_fps_det = " << current_fps_det << ", current_fps_cap = " << current_fps_cap << std::endl;
            } while (!detection_data.exit_flag);
            std::cout << " show detection exit \n";
            cv::destroyWindow("window name");
            // wait for all threads
            if (t_cap.joinable()) t_cap.join();
            if (t_prepare.joinable()) t_prepare.join();
            if (t_detect.joinable()) t_detect.join();
            if (t_draw.joinable()) t_draw.join();

            break;
        }
        catch (std::exception &e) { std::cerr << "exception: " << e.what() << "\n"; getchar(); }
        catch (...) { std::cerr << "unknown exception \n"; getchar(); }
        filename.clear();
    }

    return 0;
}