Skip to content

Instantly share code, notes, and snippets.

@Mahedi-61
Last active March 24, 2020 02:48
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save Mahedi-61/7d54378b2abd49baf02d8040a53fdea5 to your computer and use it in GitHub Desktop.
Notebook for running openpose for my Bangla Sign Language Project in Google Colaboratory
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "bslr.ipynb",
"provenance": [],
"toc_visible": true,
"authorship_tag": "ABX9TyPhXTa+sjrpdTSU3tJ6uH0i",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"accelerator": "GPU"
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/Mahedi-61/7d54378b2abd49baf02d8040a53fdea5/bslr.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "7s-6HrO-5reA",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 35
},
"outputId": "e752d167-4767-41a4-b001-ad7c93e65c01"
},
"source": [
"!pwd"
],
"execution_count": 1,
"outputs": [
{
"output_type": "stream",
"text": [
"/content\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "L821Bl1J7Ffa",
"colab_type": "code",
"colab": {}
},
"source": [
"! apt update\n",
"! apt install -y sudo libopencv-dev"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "4Ae0tUFZ7R67",
"colab_type": "code",
"colab": {}
},
"source": [
"import os\n",
"from os.path import exists, join, basename, splitext\n",
"\n",
"git_repo_url = 'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git'\n",
"project_name = splitext(basename(git_repo_url))[0]\n",
"\n",
"# see: https://github.com/CMU-Perceptual-Computing-Lab/openpose/issues/949\n",
"# install new CMake because of CUDA10\n",
"!wget -q https://cmake.org/files/v3.13/cmake-3.13.0-Linux-x86_64.tar.gz\n",
"!tar xfz cmake-3.13.0-Linux-x86_64.tar.gz --strip-components=1 -C /usr/local\n",
"# clone openpose\n",
"!git clone -q --depth 1 $git_repo_url\n",
"!sed -i 's/execute_process(COMMAND git checkout master WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}\\/3rdparty\\/caffe)/execute_process(COMMAND git checkout f019d0dfe86f49d1140961f8c7dec22130c83154 WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}\\/3rdparty\\/caffe)/g' openpose/CMakeLists.txt\n",
"# install system dependencies\n",
"!apt-get -qq install -y libatlas-base-dev libprotobuf-dev libleveldb-dev libsnappy-dev libhdf5-serial-dev protobuf-compiler libgflags-dev libgoogle-glog-dev liblmdb-dev opencl-headers ocl-icd-opencl-dev libviennacl-dev\n",
"# install python dependencies\n",
"# build openpose\n",
"!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. && make -j`nproc`"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "KR2if8mH-1Jk",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 126
},
"outputId": "3369acd1-ae6e-40b0-ff95-9579fc1dbf3d"
},
"source": [
"from google.colab import drive\n",
"drive.mount('/content/drive')"
],
"execution_count": 5,
"outputs": [
{
"output_type": "stream",
"text": [
"Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n",
"\n",
"Enter your authorization code:\n",
"··········\n",
"Mounted at /content/drive\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "gCBYKw-T_Zjy",
"colab_type": "code",
"colab": {}
},
"source": [
"!unzip \"/content/drive/My Drive/50_Words.zip\""
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "CrT2SYVy_8Lz",
"colab_type": "code",
"colab": {}
},
"source": [
"# python packages\n",
"import os\n",
"import numpy as np\n",
"\n",
"# for training CASIA-B Dataset\n",
"def get_all_video_files_for_train():\n",
"    \"\"\"Collect the path of every sign-language clip under ./50_Words.\n",
"\n",
"    Returns a list of paths shaped ./50_Words/<word>/<video-file>,\n",
"    one entry per clip, in os.listdir order.\n",
"    \"\"\"\n",
"    dataset_root = os.path.join(\".\", \"50_Words\")\n",
"    return [\n",
"        os.path.join(dataset_root, word, clip)\n",
"        for word in os.listdir(dataset_root)\n",
"        for clip in os.listdir(os.path.join(dataset_root, word))\n",
"    ]"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "7CtJtpJcAyGl",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 55
},
"outputId": "528d1fb1-05e1-4b9e-bb93-c3b1759596bb"
},
"source": [
"ls_video_files = get_all_video_files_for_train()\n",
"print(ls_video_files)"
],
"execution_count": 11,
"outputs": [
{
"output_type": "stream",
"text": [
"['./50_Words/Growth/4.mp4', './50_Words/Growth/1.mp4', './50_Words/Growth/2.mp4', './50_Words/Growth/3.mp4', './50_Words/Picture/4.mp4', './50_Words/Picture/1.mp4', './50_Words/Picture/2.mp4', './50_Words/Picture/3.mp4', './50_Words/Pencil/4.mp4', './50_Words/Pencil/1.mp4', './50_Words/Pencil/2.mp4', './50_Words/Pencil/3.mp4', './50_Words/Clean/4.mp4', './50_Words/Clean/1.mp4', './50_Words/Clean/2.mp4', './50_Words/Clean/3.mp4', './50_Words/Meat/4.mp4', './50_Words/Meat/1.mp4', './50_Words/Meat/2.mp4', './50_Words/Meat/3.mp4', './50_Words/Snake/4.mp4', './50_Words/Snake/1.mp4', './50_Words/Snake/2.mp4', './50_Words/Snake/3.mp4', './50_Words/Short/4.mp4', './50_Words/Short/1.mp4', './50_Words/Short/2.mp4', './50_Words/Short/3.mp4', './50_Words/Me/4.mp4', './50_Words/Me/1.mp4', './50_Words/Me/2.mp4', './50_Words/Me/3.mp4', './50_Words/Food/4.mp4', './50_Words/Food/1.mp4', './50_Words/Food/2.mp4', './50_Words/Food/3.mp4', './50_Words/Wet/4.mp4', './50_Words/Wet/1.mp4', './50_Words/Wet/2.mp4', './50_Words/Wet/3.mp4', './50_Words/Fish/4.mp4', './50_Words/Fish/1.mp4', './50_Words/Fish/2.mp4', './50_Words/Fish/3.mp4', './50_Words/March/4.mp4', './50_Words/March/1.mp4', './50_Words/March/2.mp4', './50_Words/March/3.mp4', './50_Words/Degrade/4.mp4', './50_Words/Degrade/1.mp4', './50_Words/Degrade/2.mp4', './50_Words/Degrade/3.mp4', './50_Words/Table/4.mp4', './50_Words/Table/1.mp4', './50_Words/Table/2.mp4', './50_Words/Table/3.mp4', './50_Words/Telephone/4.mp4', './50_Words/Telephone/1.mp4', './50_Words/Telephone/2.mp4', './50_Words/Telephone/3.mp4', './50_Words/Small/4.mp4', './50_Words/Small/1.mp4', './50_Words/Small/2.mp4', './50_Words/Small/3.mp4', './50_Words/Goodbye/4.mp4', './50_Words/Goodbye/1.mp4', './50_Words/Goodbye/2.mp4', './50_Words/Goodbye/3.mp4', './50_Words/Sit/4.mp4', './50_Words/Sit/1.mp4', './50_Words/Sit/2.mp4', './50_Words/Sit/3.mp4', './50_Words/Mobile/4.mp4', './50_Words/Mobile/1.mp4', './50_Words/Mobile/2.mp4', './50_Words/Mobile/3.mp4', 
'./50_Words/Good/4.mp4', './50_Words/Good/1.mp4', './50_Words/Good/2.mp4', './50_Words/Good/3.mp4', './50_Words/Rose_Color/4.mp4', './50_Words/Rose_Color/1.mp4', './50_Words/Rose_Color/2.mp4', './50_Words/Rose_Color/3.mp4', './50_Words/Caram/4.mp4', './50_Words/Caram/1.mp4', './50_Words/Caram/2.mp4', './50_Words/Caram/3.mp4', './50_Words/Thanks/4.mp4', './50_Words/Thanks/1.mp4', './50_Words/Thanks/2.mp4', './50_Words/Thanks/3.mp4', './50_Words/Book/4.mp4', './50_Words/Book/1.mp4', './50_Words/Book/2.mp4', './50_Words/Book/3.mp4', './50_Words/Together/4.mp4', './50_Words/Together/1.mp4', './50_Words/Together/2.mp4', './50_Words/Together/3.mp4', './50_Words/Come/4.mp4', './50_Words/Come/1.mp4', './50_Words/Come/2.mp4', './50_Words/Come/3.mp4', './50_Words/Bread/4.mp4', './50_Words/Bread/1.mp4', './50_Words/Bread/2.mp4', './50_Words/Bread/3.mp4', './50_Words/Quick/4.mp4', './50_Words/Quick/1.mp4', './50_Words/Quick/2.mp4', './50_Words/Quick/3.mp4', './50_Words/Fate/4.mp4', './50_Words/Fate/1.mp4', './50_Words/Fate/2.mp4', './50_Words/Fate/3.mp4', './50_Words/Exercise_Book/4.mp4', './50_Words/Exercise_Book/1.mp4', './50_Words/Exercise_Book/2.mp4', './50_Words/Exercise_Book/3.mp4', './50_Words/February/4.mp4', './50_Words/February/1.mp4', './50_Words/February/2.mp4', './50_Words/February/3.mp4', './50_Words/Listen/4.mp4', './50_Words/Listen/1.mp4', './50_Words/Listen/2.mp4', './50_Words/Listen/3.mp4', './50_Words/Break/4.mp4', './50_Words/Break/1.mp4', './50_Words/Break/2.mp4', './50_Words/Break/3.mp4', './50_Words/Chair/4.mp4', './50_Words/Chair/1.mp4', './50_Words/Chair/2.mp4', './50_Words/Chair/3.mp4', './50_Words/Procession/4.mp4', './50_Words/Procession/1.mp4', './50_Words/Procession/2.mp4', './50_Words/Procession/3.mp4', './50_Words/Door/4.mp4', './50_Words/Door/1.mp4', './50_Words/Door/2.mp4', './50_Words/Door/3.mp4', './50_Words/Large/4.mp4', './50_Words/Large/1.mp4', './50_Words/Large/2.mp4', './50_Words/Large/3.mp4', './50_Words/Up/4.mp4', 
'./50_Words/Up/1.mp4', './50_Words/Up/2.mp4', './50_Words/Up/3.mp4', './50_Words/Bird/4.mp4', './50_Words/Bird/1.mp4', './50_Words/Bird/2.mp4', './50_Words/Bird/3.mp4', './50_Words/More/4.mp4', './50_Words/More/1.mp4', './50_Words/More/2.mp4', './50_Words/More/3.mp4', './50_Words/Egg/4.mp4', './50_Words/Egg/1.mp4', './50_Words/Egg/2.mp4', './50_Words/Egg/3.mp4', './50_Words/Khoda_Hafez/4.mp4', './50_Words/Khoda_Hafez/1.mp4', './50_Words/Khoda_Hafez/2.mp4', './50_Words/Khoda_Hafez/3.mp4', './50_Words/Black/4.mp4', './50_Words/Black/1.mp4', './50_Words/Black/2.mp4', './50_Words/Black/3.mp4', './50_Words/Bitter/4.mp4', './50_Words/Bitter/1.mp4', './50_Words/Bitter/2.mp4', './50_Words/Bitter/3.mp4', './50_Words/Remember/4.mp4', './50_Words/Remember/1.mp4', './50_Words/Remember/2.mp4', './50_Words/Remember/3.mp4', './50_Words/Exercise/4.mp4', './50_Words/Exercise/1.mp4', './50_Words/Exercise/2.mp4', './50_Words/Exercise/3.mp4', './50_Words/January/4.mp4', './50_Words/January/1.mp4', './50_Words/January/2.mp4', './50_Words/January/3.mp4', './50_Words/Hearing_Impaired/4.mp4', './50_Words/Hearing_Impaired/1.mp4', './50_Words/Hearing_Impaired/2.mp4', './50_Words/Hearing_Impaired/3.mp4', './50_Words/Salam/4.mp4', './50_Words/Salam/1.mp4', './50_Words/Salam/2.mp4', './50_Words/Salam/3.mp4', './50_Words/Tiger/4.mp4', './50_Words/Tiger/1.mp4', './50_Words/Tiger/2.mp4', './50_Words/Tiger/3.mp4']\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "KAGK5_hMBE18",
"colab_type": "code",
"colab": {}
},
"source": [
"import cv2\n",
"\n",
"def gen_video_frames(ls_video_files):\n",
"    \"\"\"Extract every frame of each video into frames/<word>/<clip>/NNN.jpg.\n",
"\n",
"    ls_video_files: paths shaped <...>/<word>/<video-file>, as returned\n",
"    by get_all_video_files_for_train().\n",
"    \"\"\"\n",
"    for video_file in ls_video_files:\n",
"        # last two path components identify the word and the clip,\n",
"        # e.g. ['Growth', '4.mp4']\n",
"        word, vid_name = video_file.split(\"/\")[-2:]\n",
"\n",
"        file_name = vid_name.split(\".\")[0]\n",
"        # makedirs creates the intermediate frames/<word> level too\n",
"        out_vid_dir = os.path.join(\"frames\", word, file_name)\n",
"        os.makedirs(out_vid_dir, exist_ok = True)\n",
"\n",
"        # capturing video and saving every frame as a JPEG file\n",
"        vidcap = cv2.VideoCapture(video_file)\n",
"        if vidcap.isOpened():\n",
"            # BUGFIX: the old version read one frame before the loop and\n",
"            # then overwrote it on the first loop iteration, silently\n",
"            # dropping the first frame of every video\n",
"            count = 0\n",
"            success, image = vidcap.read()\n",
"            while success:\n",
"                count += 1\n",
"                print('reading a new frame: ', success)\n",
"                cv2.imwrite(os.path.join(out_vid_dir, \"%03d.jpg\" % count), image)\n",
"                success, image = vidcap.read()\n",
"        vidcap.release()  # free the video handle before the next clip"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "NXmMqHSsBrPI",
"colab_type": "code",
"colab": {}
},
"source": [
"ls_video_files = get_all_video_files_for_train()\n",
"print(len(ls_video_files))\n",
"\n",
"# making video frames for given videos\n",
"gen_video_frames(ls_video_files)"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "6J7KOpBfF-bj",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 35
},
"outputId": "10cb930e-f828-4450-99cb-663fd0b25ed2"
},
"source": [
"!pwd"
],
"execution_count": 22,
"outputs": [
{
"output_type": "stream",
"text": [
"/content/openpose\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "XLJTCDuDGyLV",
"colab_type": "code",
"colab": {}
},
"source": [
"import numpy as np\n",
"# running openpose for dataset\n",
"def run_openpose(word_list):\n",
"    \"\"\"Run the openpose binary over every extracted frame directory.\n",
"\n",
"    word_list: names of the word-level directories under ../frames.\n",
"    Writes JSON keypoint files to ../pose/<word>/<clip>/; assumes the\n",
"    current working directory is the openpose checkout.\n",
"    \"\"\"\n",
"    for word_ID in word_list:\n",
"        word_dir = os.path.join(\"../frames\", word_ID)\n",
"\n",
"        for vid_each_word in os.listdir(word_dir):\n",
"            vid_dir = os.path.join(word_dir, vid_each_word)\n",
"\n",
"            # save_dir for saving pose keypoints data\n",
"            save_dir = os.path.join(\"../pose\", word_ID, vid_each_word)\n",
"            os.makedirs(save_dir, exist_ok = True)\n",
"\n",
"            print(\"\\ncalculating pose...\")  # typo fix: was 'calculationg'\n",
"            # NOTE(review): command is built by string concatenation; safe\n",
"            # only while every path component is a local directory name\n",
"            os.system(\"./build/examples/openpose/openpose.bin --image_dir \" +\n",
"                      vid_dir + \" --number_people_max 1 \" + \" --write_json \" +\n",
"                      save_dir + \" --display 0 --render_pose 0 --hand\")"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "5GgGCBBRHzdL",
"colab_type": "code",
"colab": {}
},
"source": [
"word_list = os.listdir(\"../frames\")\n",
"print(word_list)\n",
"run_openpose(word_list)"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "C-K-Oi3daKZJ",
"colab_type": "code",
"colab": {}
},
"source": [
"!tar -czf t3.tar.gz pose\n"
],
"execution_count": 0,
"outputs": []
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment