Created
August 21, 2019 10:36
-
-
Save carrotflakes/4a5414f1b07884fa22f0e7bb166db279 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* | |
* Dual-fisheye 形式の動画を横長の動画にして出力するプログラム | |
* | |
* コンパイル: | |
* $ g++ convert_dfe.cpp `pkg-config opencv --cflags --libs` -std=c++11 -O3 -o convert_dfe | |
* | |
* 使い方: | |
* $ convert_dfe 入力動画パス 出力動画パス | |
*/ | |
// _USE_MATH_DEFINES must be defined before the first (possibly transitive)
// inclusion of <cmath>/<math.h>, or M_PI is unavailable on MSVC.
#define _USE_MATH_DEFINES
#include <chrono>
#include <cmath>
#include <iostream>
#include <vector>
#include "opencv2/core/core.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
using namespace cv; | |
//// Settings ////
// Frame rate of the output video
const static double fps = 29.97;
// Side length (px) each fisheye is mapped onto; raising this raises the
// output video's resolution
const static int mapped_size = 640;
// Number of rows trimmed from both the top and the bottom of the output
const static int trim_size = mapped_size / 4;
// Output video size — do not change this (the remap loop in main assumes it)
const static Size output_size(mapped_size * 2, mapped_size - trim_size * 2);
//////////////
int main(int argc, char *argv[]) { | |
if (argc <= 2) { | |
std::cout << "Please call me as following: $ " << argv[0] << " input_movie.mp4 output_movie.mp4" << std::endl; | |
return 0; | |
} | |
std::cout << "input: " << argv[1] << std::endl; | |
std::cout << "output: " << argv[2] << std::endl; | |
VideoCapture video(argv[1]); | |
// CV_FOURCC で出力動画のコーデックを指定しています | |
// ここを参照: https://gist.github.com/takuma7/44f9ecb028ff00e2132e | |
// 大文字小文字区別するようなので注意 | |
VideoWriter writer(argv[2], CV_FOURCC('m','p','4','v'), fps, output_size, true); | |
// プレビュー用ウィンドウ | |
//cv::namedWindow("movie"); | |
Mat img(output_size.height, output_size.width, CV_8UC3); | |
auto lastTime = std::chrono::system_clock::now(); | |
for (int frame_count = 0; ; ++frame_count) { | |
// 進捗を表示 | |
if (frame_count % 100 == 0) { | |
auto elapsedTime = std::chrono::duration_cast<std::chrono::milliseconds> | |
(std::chrono::system_clock::now() - lastTime).count(); | |
std::cout << "current frame: " << frame_count | |
<< ", elapsed: " << elapsedTime << " ms" | |
<< std::endl; | |
lastTime = std::chrono::system_clock::now(); | |
} | |
Mat frame(1080, 1920, CV_8UC3); | |
video >> frame; | |
//if (frame.empty() || waitKey(30) >= 0 || video.get(CV_CAP_PROP_POS_AVI_RATIO) == 1) { | |
if (frame.empty() || video.get(CV_CAP_PROP_POS_AVI_RATIO) == 1) { | |
break; | |
} | |
auto frame_data = frame.data; | |
auto img_data = img.data; | |
int frame_step = frame.step; | |
int img_step = img.step; | |
int frame_elemSize = frame.elemSize(); | |
int img_elemSize = img.elemSize(); | |
for (int y = trim_size; y < mapped_size - trim_size; ++y) { | |
for (int x = 0; x < mapped_size; ++x) { | |
double ax = static_cast<double>(y - mapped_size / 2) / mapped_size; | |
double ay = static_cast<double>(x - mapped_size / 2) / mapped_size; | |
double cx = std::sin(ax * M_PI); | |
double cy = std::sin(ay * M_PI); | |
int lsx = std::round(cx * 428) + 480; | |
int lsy = std::round(-cy * std::cos(ax * M_PI) * 428) + 480; | |
int rsx = std::round(-cx * 428) + 1440; | |
int rsy = std::round(cy * std::cos(ax * M_PI) * 428) + 480; | |
for (int c = 0; c < 3; ++c) | |
img_data[(y - trim_size) * img_step + x * img_elemSize + c] = | |
frame_data[lsy * frame_step + lsx * frame_elemSize + c]; | |
for (int c = 0; c < 3; ++c) | |
img_data[(y - trim_size) * img_step + (x + mapped_size) * img_elemSize + c] = | |
frame_data[rsy * frame_step + rsx * frame_elemSize + c]; | |
// ↑for 文を2つに分けると早い (ループ展開 が効いてる?) | |
/* | |
{ // 左半分 | |
double ax = static_cast<double>(y - mapped_size / 2) / mapped_size; | |
double ay = -static_cast<double>(x - mapped_size / 2) / mapped_size; | |
double cx = std::sin(ax * M_PI); | |
double cy = std::sin(ay * M_PI); | |
int sx = std::round(cx * 428) + 480; | |
int sy = std::round(cy * std::cos(ax * M_PI) * 428) + 480; | |
for (int c = 0; c < 3; ++c) | |
img_data[(y - trim_size) * img.step + x * img.elemSize() + c] = | |
frame_data[sy * frame.step + sx * frame.elemSize() + c]; | |
} | |
{ // 右半分 | |
double ax = -static_cast<double>(y - mapped_size / 2) / mapped_size; | |
double ay = static_cast<double>(x - mapped_size / 2) / mapped_size; | |
double cx = std::sin(ax * M_PI); | |
double cy = std::sin(ay * M_PI); | |
int sx = std::round(cx * 428) + 1440; | |
int sy = std::round(cy * std::cos(ax * M_PI) * 428) + 480; | |
for (int c = 0; c < 3; ++c) | |
img_data[(y - trim_size) * img.step + (x + mapped_size) * img.elemSize() + c] = | |
frame_data[sy * frame.step + sx * frame.elemSize() + c]; | |
}//*/ | |
} | |
} | |
writer << img; | |
// プレビュー | |
//waitKey(1); | |
//imshow("movie", img); | |
} | |
std::cout << std::endl << "done." << std::endl; | |
return 0; | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment