@sorashido
Last active June 5, 2021
opencv3_multitracking_sample
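Two OpenCV 3 multi-object tracking samples: a cv::MultiTracker demo that tracks manually selected ROIs, and a HOG pedestrian detector combined with one cv::Tracker per person, plus the createTrackerByName helper and the CMake project used to build them.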
# CMakeLists.txt (requires an OpenCV build that includes the contrib tracking module)
cmake_minimum_required(VERSION 2.8)
project(sample)

add_executable(pedestrian
    main.cpp
    # kalman.cpp
)

set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W -Wall")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=native")
set (CMAKE_BUILD_TYPE Debug)

find_package (OpenCV REQUIRED)
include_directories(${OpenCV_INCLUDE_DIRS})
target_link_libraries(pedestrian ${OpenCV_LIBS})
// main.cpp: cv::MultiTracker sample (select ROIs by hand, then track them all)
#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/tracking.hpp>
#include "samples_utility.hpp"

using namespace std;
using namespace cv;

int main() {
    Mat img;
    VideoCapture cap("C:/opencv/samples/data/vtest.avi");
    cap >> img;
    if (img.empty()) return -1;

    int max_frame = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_COUNT));
    cv::String trackingAlg = "MEDIAN_FLOW";
    MultiTracker trackers;

    // Select the initial targets by hand
    vector<Rect2d> objects;
    vector<Rect> ROIs;
    selectROIs("tracker", img, ROIs);
    if (ROIs.size() < 1) return 0;

    // One tracker instance per selected ROI
    std::vector<Ptr<Tracker>> algorithms;
    for (size_t i = 0; i < ROIs.size(); i++) {
        algorithms.push_back(createTrackerByName(trackingAlg));
        objects.push_back(ROIs[i]);
    }
    trackers.add(algorithms, img, objects);

    for (int i = 0; i < max_frame - 1; i++) {
        cap >> img;
        // Update all trackers with the new frame
        trackers.update(img);
        // Draw the tracked bounding boxes
        for (unsigned j = 0; j < trackers.getObjects().size(); j++)
            rectangle(img, trackers.getObjects()[j], Scalar(255, 0, 0), 2, 1);
        imshow("tracker", img);
        int key = cv::waitKey(10);
        if (key == 'q')
            break;
    }
    return 0;
}
// Second sample: HOG pedestrian detection combined with one cv::Tracker per person
#include <iostream>
#include <algorithm>
#include <iterator>
#include <vector>
#include <opencv2/opencv.hpp>
#include <opencv2/tracking/tracker.hpp>

const cv::Size MAX_DETECT_SIZE = cv::Size(100, 200);
const int MAX_MISS_FRAME = 10;
const double MIN_NEW_DETECT_INTERSECTION_RATE = 0.5;

// Thin wrapper around cv::Tracker.
class MyTracker {
private:
    static int next_id;
    int id;
    int n_miss_frame = 0;
    cv::Rect2d rect;
    cv::Ptr<cv::Tracker> cv_tracker;
public:
    // Initialize with a frame image and the target rectangle
    MyTracker(const cv::Mat& _frame, const cv::Rect2d& _rect)
        : id(next_id++), rect(_rect) {
        cv_tracker = cv::TrackerBoosting::create(); // or e.g. cv::TrackerMIL
        cv_tracker->init(_frame, _rect);
    }
    // Track the target in the next frame (returns true on success).
    // Fails (returns false) once no detection has been registered for MAX_MISS_FRAME frames.
    bool update(const cv::Mat& _frame) {
        n_miss_frame++;
        return cv_tracker->update(_frame, rect) && n_miss_frame < MAX_MISS_FRAME;
    }
    // Register a new detection (Rect). If it lies close enough to the current position,
    // accept it and reset n_miss_frame (returns true); otherwise return false.
    bool registerNewDetect(const cv::Rect2d& _new_detect) {
        double intersection_rate = 1.0 * (_new_detect & rect).area() / (_new_detect | rect).area();
        bool is_registered = intersection_rate > MIN_NEW_DETECT_INTERSECTION_RATE;
        if (is_registered) n_miss_frame = 0;
        return is_registered;
    }
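    // Worked example of the acceptance test above, with assumed numbers:
    // rect = (100, 100, 50, 100) and _new_detect = (110, 105, 50, 100) give
    //   (_new_detect & rect).area() = 40 * 95  = 3800   (overlap)
    //   (_new_detect | rect).area() = 60 * 105 = 6300   (bounding box of both rects, not their set union)
    //   rate = 3800 / 6300 ≈ 0.60 > MIN_NEW_DETECT_INTERSECTION_RATE, so the detection is accepted.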
    // Draw the tracker's current position onto _image
    void draw(cv::Mat& _image) const {
        cv::rectangle(_image, rect, cv::Scalar(255, 0, 0), 2, 1);
        cv::putText(_image, cv::format("%03d", id), cv::Point(rect.x + 5, rect.y + 17),
                    cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(255, 0, 0), 1, cv::LINE_AA);
    }
};
int MyTracker::next_id = 0;

int main(int argc, char* argv[]) {
    // Open the input video
    cv::VideoCapture cap("C:/opencv/samples/data/vtest.avi");
    // Declare the person detector and the list of trackers
    cv::HOGDescriptor detector;
    detector.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());
    std::vector<MyTracker> trackers;
    // Process one frame at a time
    for (;;) {
        cv::Mat frame;
        cap >> frame;
        if (frame.empty()) break;
        // Detect people
        std::vector<cv::Rect> detections;
        detector.detectMultiScale(frame, detections);
        // Update the trackers (erase the ones whose tracking failed)
        for (auto t_it = trackers.begin(); t_it != trackers.end();) {
            t_it = (t_it->update(frame)) ? std::next(t_it) : trackers.erase(t_it);
        }
        // Create a tracker for each new detection (detections overlapping an existing tracker are ignored)
        for (auto& d_rect : detections) {
            if (d_rect.size().area() > MAX_DETECT_SIZE.area()) continue;
            bool is_existing = std::any_of(trackers.begin(), trackers.end(),
                [&d_rect](MyTracker& t) { return t.registerNewDetect(d_rect); });
            if (!is_existing) trackers.push_back(MyTracker(frame, d_rect));
        }
        // Show the tracking (blue) and detection (green) results
        cv::Mat image = frame.clone();
        for (auto& t : trackers) t.draw(image);
        for (auto& d_rect : detections) cv::rectangle(image, d_rect, cv::Scalar(0, 255, 0), 2, 1);
        cv::imshow("demo", image);
        cv::waitKey(1);
    }
    return 0;
}
// samples_utility.hpp: helper that maps an algorithm name to a cv::Tracker instance
#pragma once
#include <opencv2/tracking.hpp>

inline cv::Ptr<cv::Tracker> createTrackerByName(cv::String name)
{
    cv::Ptr<cv::Tracker> tracker;
    if (name == "KCF")
        tracker = cv::TrackerKCF::create();
    else if (name == "TLD")
        tracker = cv::TrackerTLD::create();
    else if (name == "BOOSTING")
        tracker = cv::TrackerBoosting::create();
    else if (name == "MEDIAN_FLOW")
        tracker = cv::TrackerMedianFlow::create();
    else if (name == "MIL")
        tracker = cv::TrackerMIL::create();
    else if (name == "GOTURN")
        tracker = cv::TrackerGOTURN::create();
    else if (name == "MOSSE")
        tracker = cv::TrackerMOSSE::create();
    else
        CV_Error(cv::Error::StsBadArg, "Invalid tracking algorithm name\n");
    return tracker;
}
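For reference, a minimal single-object sketch that uses the same createTrackerByName helper; it assumes OpenCV 3.x with the contrib tracking module and reuses the sample video path from above.

// single_tracker.cpp: track one manually selected target with a named algorithm
#include <opencv2/opencv.hpp>
#include "samples_utility.hpp"

int main() {
    cv::VideoCapture cap("C:/opencv/samples/data/vtest.avi");
    cv::Mat frame;
    cap >> frame;
    if (frame.empty()) return -1;

    // Pick one target by hand and create a tracker for it
    cv::Rect2d roi = cv::selectROI("single", frame);
    cv::Ptr<cv::Tracker> tracker = createTrackerByName("KCF");
    tracker->init(frame, roi);

    for (;;) {
        cap >> frame;
        if (frame.empty()) break;
        if (tracker->update(frame, roi))            // roi is updated in place
            cv::rectangle(frame, roi, cv::Scalar(255, 0, 0), 2, 1);
        cv::imshow("single", frame);
        if (cv::waitKey(10) == 'q') break;
    }
    return 0;
}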