Last active
May 17, 2020 11:10
-
-
Save staybuzz/26c39f367a668f700d2876be9ddf077d to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{ | |
"cells": [ | |
{ | |
"cell_type": "code", | |
"execution_count": 1, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"from anymotion_sdk import Client\n", | |
"import cv2\n", | |
"import matplotlib.pyplot as plt\n", | |
"import ffmpeg\n", | |
"from PIL import Image, ImageDraw\n", | |
"\n", | |
"import numpy as np" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 2, | |
"metadata": { | |
"scrolled": true | |
}, | |
"outputs": [], | |
"source": [ | |
def is_fusion_phase1(pos, a, b, c, d, e, f):
    """Return True when the six joint angles match phase 1 of the fusion pose.

    pos selects which dancer to check: "left" checks the person standing on
    the left; anything else checks the person on the right.  a..d are arm
    angles, e/f are leg angles; a missing (falsy) leg angle falls back to a
    plausible default of 70 degrees.
    """
    def hits(angle, lo, hi):
        # `in range(...)` is False for None, so missing arm angles fail safely
        return angle in range(lo, hi)

    if pos == "left":
        leg = e or 70  # leg angle could not be measured -> assume 70 degrees
        bounds = ((a, 10, 90), (b, 90, 180), (c, 120, 200),
                  (d, 150, 200), (leg, 10, 80))
    else:
        leg = f or 70  # leg angle could not be measured -> assume 70 degrees
        bounds = ((c, 10, 90), (d, 90, 180), (a, 120, 200),
                  (b, 150, 200), (leg, 10, 80))
    return all(hits(angle, lo, hi) for angle, lo, hi in bounds)
"\n", | |
def is_fusion_phase2(pos, a, b, c, d, e, f):
    """Return True when the arm angles match phase 2 of the fusion pose.

    pos selects which dancer to check ("left" or right otherwise).  The leg
    angles e and f are accepted for signature parity with phase 1 but are
    not used in this phase.
    """
    if pos == "left":
        bounds = ((a, 130, 180), (b, 40, 130), (c, 50, 150), (d, 100, 170))
    else:
        bounds = ((c, 130, 180), (d, 40, 130), (a, 50, 150), (b, 100, 170))
    # `in range(...)` is False for None, so missing angles fail safely
    return all(angle in range(lo, hi) for angle, lo, hi in bounds)
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 3, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
def check_fusion(angles, position):
    """Scan the per-frame angle series for the two fusion phases.

    angles: list of six per-frame angle lists (order matches
        analyze_angles_rule / the a..f parameters of the phase checks)
    position: "left" or "right" -- which dancer to evaluate

    Returns (detected, phase1_frames, phase2_frames); detected is True only
    when at least one phase-1 frame was seen before a phase-2 frame.
    """
    # Frames in which each phase was detected
    phase1_frames = []
    phase2_frames = []

    for frame in range(len(angles[0])):
        frame_angles = [angles[k][frame] for k in range(6)]
        if is_fusion_phase1(position, *frame_angles):
            print(frame, "Phase1!!!")
            phase1_frames.append(frame)
        elif phase1_frames and is_fusion_phase2(position, *frame_angles):
            # Phase 2 only counts once phase 1 has already been seen
            print(frame, "Phase2!!!")
            phase2_frames.append(frame)

    detected = bool(phase1_frames) and bool(phase2_frames)
    if detected:
        print("Fusion!!!!!!")

    return (detected, phase1_frames, phase2_frames)
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 4, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
# Determine how a video must be rotated for upright display
def check_rotation(path_video_file):
    """Return the cv2 rotate code for the video's rotation metadata.

    Reads the 'rotate' tag of the first stream via ffprobe.  Returns one of
    cv2.ROTATE_90_CLOCKWISE / ROTATE_180 / ROTATE_90_COUNTERCLOCKWISE, or
    None when no (or unparsable) rotation metadata is present.
    """
    meta_dict = ffmpeg.probe(path_video_file)

    try:
        # Hoist the nested lookup so it runs (and can fail) only once
        rotation = int(meta_dict['streams'][0]['tags']['rotate'])
    except (KeyError, IndexError, ValueError):
        # Missing streams/tags/rotate entry or a non-numeric value:
        # treat as "no rotation needed" (was a bare except, which also
        # swallowed SystemExit/KeyboardInterrupt)
        return None

    rotate_codes = {
        90: cv2.ROTATE_90_CLOCKWISE,
        180: cv2.ROTATE_180,
        270: cv2.ROTATE_90_COUNTERCLOCKWISE,
    }
    return rotate_codes.get(rotation)
"\n", | |
# Grab the specified frame of a video, honoring its rotation metadata
def get_frame_img(filename, frame_num):
    """Return frame `frame_num` of `filename` as a BGR ndarray, or None.

    The frame is rotated per the video's metadata so it appears upright.
    Returns None when the frame cannot be read.
    """
    reader = cv2.VideoCapture(filename)
    rotate_code = check_rotation(filename)
    try:
        # Seek by frame index (was the magic number 1 == CAP_PROP_POS_FRAMES)
        reader.set(cv2.CAP_PROP_POS_FRAMES, frame_num)
        ret, frame_img = reader.read()
    finally:
        # Release the capture even if read() raises
        reader.release()

    if not ret:
        return None
    # Must compare against None: cv2.ROTATE_90_CLOCKWISE == 0 is falsy, so
    # the original truthiness test silently skipped 90-degree rotations
    if rotate_code is not None:
        frame_img = cv2.rotate(frame_img, rotate_code)

    return frame_img
"\n", | |
# Display the specified frame of a video inline with matplotlib
def show_frame_img(filename, frame_num: int):
    """Show frame `frame_num` of `filename`, converted BGR -> RGB."""
    frame_img = get_frame_img(filename, frame_num)
    # get_frame_img returns None on a failed read; fail with a clear error
    # instead of an opaque cv2.error inside cvtColor
    if frame_img is None:
        raise ValueError(f"could not read frame {frame_num} from {filename}")
    plt.imshow(cv2.cvtColor(frame_img, cv2.COLOR_BGR2RGB))
"\n", | |
# Display two frames joined side by side
def show_frame_img_hconcat(l_filename, r_filename, l_framenum, r_framenum):
    """Show frame `l_framenum` of `l_filename` and frame `r_framenum` of
    `r_filename` concatenated horizontally, converted BGR -> RGB.
    """
    # Delegate to get_frame_img_hconcat (defined in this cell) instead of
    # duplicating the read-and-concatenate logic
    img = get_frame_img_hconcat(l_filename, r_filename, l_framenum, r_framenum)
    plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
" \n", | |
# Concatenate two frames horizontally
def get_frame_img_hconcat(l_filename, r_filename, l_framenum, r_framenum):
    """Return frame `l_framenum` of `l_filename` and frame `r_framenum` of
    `r_filename` joined left-to-right as one BGR image.

    Raises ValueError when either frame cannot be read -- cv2.hconcat would
    otherwise fail on a None input with an opaque error.
    """
    l_img = get_frame_img(l_filename, l_framenum)
    r_img = get_frame_img(r_filename, r_framenum)

    if l_img is None or r_img is None:
        raise ValueError("could not read one of the requested frames")
    return cv2.hconcat([l_img, r_img])
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 5, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
# Set up the AnyMotion API client
# NOTE(review): "CLIENT_ID"/"CLIENT_SECRET" are placeholders -- substitute
# real credentials before running
client = Client(client_id="CLIENT_ID", 
                client_secret="CLIENT_SECRET")
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 6, | |
"metadata": {}, | |
"outputs": [ | |
{ | |
"name": "stdout", | |
"output_type": "stream", | |
"text": [ | |
"movie_id: 7529\n", | |
"keypoint_id: 21951\n" | |
] | |
} | |
], | |
"source": [ | |
# Upload the video (left-hand dancer)
left_filename = "fusion_left.mp4"
left_movie_id = client.upload(left_filename).movie_id
print(f"movie_id: {left_movie_id}")

# Extract the skeleton (keypoints) and wait for completion
left_keypoint_id = client.extract_keypoint(movie_id=left_movie_id)
left_extraction_result = client.wait_for_extraction(left_keypoint_id)
print(f"keypoint_id: {left_keypoint_id}")
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 8, | |
"metadata": {}, | |
"outputs": [ | |
{ | |
"name": "stdout", | |
"output_type": "stream", | |
"text": [ | |
"movie_id: 7530\n", | |
"keypoint_id: 21952\n" | |
] | |
} | |
], | |
"source": [ | |
# Upload the video (right-hand dancer)
right_filename = "fusion_right.mp4"
right_movie_id = client.upload(right_filename).movie_id
print(f"movie_id: {right_movie_id}")

# Extract the skeleton (keypoints) and wait for completion
right_keypoint_id = client.extract_keypoint(movie_id=right_movie_id)
right_extraction_result = client.wait_for_extraction(right_keypoint_id)
print(f"keypoint_id: {right_keypoint_id}")
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 9, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
# Definition of the angle-analysis rules: six vectorAngle rules whose order
# matches the a..f parameters expected by the is_fusion_phase* checks
analyze_angles_rule = [
    # left arm
    {
        "analysisType": "vectorAngle",
        "points": ["rightShoulder", "leftShoulder", "leftElbow"]
    },
    {
        "analysisType": "vectorAngle",
        "points": ["leftShoulder", "leftElbow", "leftWrist"]
    },
    # right arm
    {
        "analysisType": "vectorAngle",
        "points": ["leftShoulder", "rightShoulder", "rightElbow"]
    },
    {
        "analysisType": "vectorAngle",
        "points": ["rightShoulder", "rightElbow", "rightWrist"]
    },
    # left leg
    {
        "analysisType": "vectorAngle",
        "points": ["rightHip", "leftHip", "leftKnee"]
    },
    # right leg
    {
        "analysisType": "vectorAngle",
        "points": ["leftHip", "rightHip", "rightKnee"]
    },
]
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 10, | |
"metadata": {}, | |
"outputs": [ | |
{ | |
"name": "stdout", | |
"output_type": "stream", | |
"text": [ | |
"angles analyzed.\n" | |
] | |
} | |
], | |
"source": [ | |
# Start the angle analysis (left side)
left_analysis_id = client.analyze_keypoint(left_keypoint_id, rule=analyze_angles_rule)
# Fetch the angle data
left_analysis_result = client.wait_for_analysis(left_analysis_id).json
# Convert the dict-form result to lists (casting floats to ints)
# NOTE(review): `int(v) if v else None` also maps a measured angle of 0 to
# None, not just missing values -- confirm this is intended
left_angles = [list(map(lambda v: int(v) if v else None, x["values"])) for x in left_analysis_result["result"]]
print("angles analyzed.")
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 11, | |
"metadata": {}, | |
"outputs": [ | |
{ | |
"name": "stdout", | |
"output_type": "stream", | |
"text": [ | |
"angles analyzed.\n" | |
] | |
} | |
], | |
"source": [ | |
# Start the angle analysis (right side) and fetch the angle data
right_analysis_id = client.analyze_keypoint(right_keypoint_id, rule=analyze_angles_rule)
right_analysis_result = client.wait_for_analysis(right_analysis_id).json
# Same dict-to-list / float-to-int conversion as the left side
right_angles = [list(map(lambda v: int(v) if v else None, x["values"])) for x in right_analysis_result["result"]]
print("angles analyzed.")
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 12, | |
"metadata": { | |
"scrolled": true | |
}, | |
"outputs": [ | |
{ | |
"name": "stdout", | |
"output_type": "stream", | |
"text": [ | |
"37 Phase1!!!\n", | |
"38 Phase1!!!\n", | |
"39 Phase1!!!\n", | |
"40 Phase1!!!\n", | |
"41 Phase1!!!\n", | |
"42 Phase1!!!\n", | |
"91 Phase1!!!\n", | |
"92 Phase1!!!\n", | |
"93 Phase1!!!\n", | |
"94 Phase1!!!\n", | |
"95 Phase1!!!\n", | |
"96 Phase1!!!\n", | |
"99 Phase1!!!\n", | |
"103 Phase1!!!\n", | |
"104 Phase1!!!\n", | |
"105 Phase1!!!\n", | |
"106 Phase1!!!\n", | |
"107 Phase1!!!\n", | |
"156 Phase2!!!\n", | |
"157 Phase2!!!\n", | |
"158 Phase2!!!\n", | |
"159 Phase2!!!\n", | |
"160 Phase2!!!\n", | |
"Fusion!!!!!!\n" | |
] | |
} | |
], | |
"source": [ | |
# Detect the fusion pose in the left-hand dancer's angle series
left_result, left_p1, left_p2 = check_fusion(left_angles, "left")
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 13, | |
"metadata": {}, | |
"outputs": [ | |
{ | |
"name": "stdout", | |
"output_type": "stream", | |
"text": [ | |
"54 Phase1!!!\n", | |
"55 Phase1!!!\n", | |
"56 Phase1!!!\n", | |
"57 Phase1!!!\n", | |
"58 Phase1!!!\n", | |
"80 Phase2!!!\n", | |
"81 Phase2!!!\n", | |
"82 Phase2!!!\n", | |
"83 Phase2!!!\n", | |
"84 Phase2!!!\n", | |
"85 Phase2!!!\n", | |
"86 Phase2!!!\n", | |
"Fusion!!!!!!\n" | |
] | |
} | |
], | |
"source": [ | |
# Detect the fusion pose in the right-hand dancer's angle series
right_result, right_p1, right_p2 = check_fusion(right_angles, "right")
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": 15, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
# Generate a GIF animation from the detected fusion frames
## Take the middle detected frame of each phase
# NOTE(review): raises IndexError if a phase list is empty -- only run this
# cell when both check_fusion calls reported a detection
left_p1_center = left_p1[int(len(left_p1)/2)]
left_p2_center = left_p2[int(len(left_p2)/2)]
right_p1_center = right_p1[int(len(right_p1)/2)]
right_p2_center = right_p2[int(len(right_p2)/2)]
## Concatenate the Phase 1 images horizontally
p1_img = get_frame_img_hconcat(left_filename, right_filename, left_p1_center, right_p1_center)
## Concatenate the Phase 2 images horizontally
p2_img = get_frame_img_hconcat(left_filename, right_filename, left_p2_center, right_p2_center)
## Convert the numpy arrays to PIL Images (BGR -> RGB)
im1 = Image.fromarray(cv2.cvtColor(p1_img, cv2.COLOR_BGR2RGB))
im2 = Image.fromarray(cv2.cvtColor(p2_img, cv2.COLOR_BGR2RGB))
## Write the two frames out as a looping GIF animation
im1.save('fusion.gif', save_all=True, append_images=[im2], optimize=False, duration=700, loop=0)
] | |
} | |
], | |
"metadata": { | |
"kernelspec": { | |
"display_name": "Python 3", | |
"language": "python", | |
"name": "python3" | |
}, | |
"language_info": { | |
"codemirror_mode": { | |
"name": "ipython", | |
"version": 3 | |
}, | |
"file_extension": ".py", | |
"mimetype": "text/x-python", | |
"name": "python", | |
"nbconvert_exporter": "python", | |
"pygments_lexer": "ipython3", | |
"version": "3.8.0" | |
} | |
}, | |
"nbformat": 4, | |
"nbformat_minor": 4 | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment