Last active
July 20, 2016 11:49
-
-
Save cosacog/1e74b106b718394064ebdee30580bfec to your computer and use it in GitHub Desktop.
psychopy: detect finger extension using openCV and send trigger
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# -*- coding: utf-8 -*- | |
from psychopy import visual, core, event, parallel | |
import numpy as np | |
import cv2 | |
# --- Key codes as returned by cv2.waitKey ---------------------------------
K_ESC = 27
K_ARROW_UP = 2490368 # arrow up (extended code; NOTE(review): may require cv2.waitKeyEx on some builds -- confirm on target platform)
K_ARROW_DOWN = 2621440 # arrow down (extended code)
K_RIGHT = 114 # "r"
K_LEFT = 108 # "l"
K_UP = 117 # "u"
K_DOWN = 100 # "d"
# --- Experiment settings --------------------------------------------------
p_port = parallel.ParallelPort(address='0x0378')# set to the parallel-port address shown in Device Manager
trialClock = core.Clock()
trig = int('00000001',2) # binary -> decimal; set the bit to match the trigger channel
dur_trig = 0.01 # sec trigger duration
t_wait = 3 # sec; refractory period after each trigger before the next can fire
pix_thr = 3 # pixel. movement threshold (in pixels) that fires a trigger
pix_mov = 5 # pixel. step size for moving the crop window up/down/left/right
# --- SimpleBlobDetector configuration -------------------------------------
# The detector tracks one small marker blob. Intensity thresholds select
# low-brightness pixels (the frame is inverted with 255-gray before
# detection, so in practice this matches a bright/white marker); the
# remaining filters reject blobs of the wrong size or shape.
params = cv2.SimpleBlobDetector_Params()
# Intensity thresholds (0-55 after inversion).
params.minThreshold = 0
params.maxThreshold = 55
# Filter by area (pixels).
params.filterByArea = True
params.minArea = 10
params.maxArea = 50
# Filter by circularity.
params.filterByCircularity = True
params.minCircularity = 0.1
# Filter by convexity.
params.filterByConvexity = True
params.minConvexity = 0.87
# Filter by inertia (rejects very elongated blobs).
params.filterByInertia = True
params.minInertiaRatio = 0.01
# Create the detector: the factory function was renamed in OpenCV 3.
ver = cv2.__version__.split('.')
if int(ver[0]) < 3:
    detector = cv2.SimpleBlobDetector(params)
else:
    detector = cv2.SimpleBlobDetector_create(params)
# --- Camera and run-time state ---------------------------------------------
cap = cv2.VideoCapture(0)  # open the default camera
print(cap)
#
key_point_y = np.zeros(10)  # history of the last 10 marker y positions (newest at [0])
p_port.setData(0)  # start with the trigger line low
is_p_up = False  # True while a trigger pulse / refractory period is active
trig_set = 0  # value currently on the parallel port (echoed to the console)
trialClock.reset()
frm_size_center = [320,320]  # (w, h) of the cropped/zoomed analysis area
frm_size_orig = (640,480) # original frame size
shiftX = 0  # horizontal offset of the crop window from frame center
shiftY = 0  # vertical offset of the crop window from frame center
cv2.namedWindow('keypoints: press R/L, U/D or Arrow Up/Down',cv2.WINDOW_NORMAL)
level_rectangle = 0  # rectangle gray level: 255 while a trigger is active, else 0
col_kpoints = (0,0,255)  # keypoint color (BGR): red when idle, green on trigger
# Main loop: grab a frame, detect the marker blob in the cropped area,
# fire a parallel-port trigger when the marker moves up past the threshold,
# and handle keyboard control of the crop window. Exit with Esc.
while(True):
    ret, frame = cap.read()
    if ret:
        # Top-left corner of the cropped (zoomed) area (X, Y).
        # Cast to int: np.round returns a float, and float slice indices
        # raise an error on modern NumPy.
        idxX = int(round((frm_size_orig[0] - frm_size_center[0]) / 2)) + shiftX
        idxY = int(round((frm_size_orig[1] - frm_size_center[1]) / 2)) + shiftY
        # Convert to gray scale and crop the center analysis region.
        gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        gray_frame_cntr = gray_frame[idxY:(idxY+frm_size_center[1]), idxX:(idxX+frm_size_center[0])]
        # Detect blobs on the inverted image (detector matches dark blobs).
        keypoints = detector.detect(255-gray_frame_cntr)
        if len(keypoints) > 0:
            # Shift the history buffer and store the newest marker y coordinate.
            key_point_y[1:] = key_point_y[:-1]
            key_point_y[0] = keypoints[0].pt[1]
            # Threshold: mean of recent y positions minus pix_thr pixels.
            thr = np.mean(key_point_y) - pix_thr
            is_thr_up = key_point_y[0] < thr  # smaller y == marker moved up
            if is_thr_up and (not is_p_up):
                # Rising edge: raise the trigger once, then hold off until
                # the refractory period has passed.
                p_port.setData(trig)
                is_p_up = True
                trig_set = trig
                trialClock.reset()
                print(is_thr_up)  # threshold crossing on the y coordinate
                level_rectangle = 255
                col_kpoints = (0,255,0)
            im_with_keypoints = cv2.drawKeypoints(gray_frame_cntr, keypoints, np.array([]), col_kpoints, cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
            cv2.imshow('keypoints: press R/L, U/D or Arrow Up/Down', im_with_keypoints)
        else:
            cv2.imshow('keypoints: press R/L, U/D or Arrow Up/Down', gray_frame_cntr)
        cv2.rectangle(gray_frame, (idxX,idxY), ((idxX+frm_size_center[0]), (idxY+frm_size_center[1])), level_rectangle,3)
        cv2.imshow("original frame", gray_frame)
        print(trig_set)
    ## trigger
    t = trialClock.getTime()
    # Lower the trigger after the chosen pulse duration.
    if (t > dur_trig):
        p_port.setData(0)
        trig_set = 0
    # Re-arm once the refractory period has passed.
    if (t > t_wait):
        is_p_up = False
        level_rectangle = 0
        col_kpoints = (0,0,255)
    # Single waitKey call both pumps the GUI event loop and reads the key.
    # The original had two calls (waitKey(1) then waitKey(2)), so key
    # presses were randomly consumed by the first call and lost.
    key_input = cv2.waitKey(2)
    if key_input == K_ESC:
        break
    elif key_input == K_ARROW_UP:
        # Enlarge the crop area (bounded by the 480-pixel frame height).
        frm_size_center[0] += 10 if frm_size_center[1] <= (480-10) else 0
        frm_size_center[1] += 10 if (frm_size_center[1] <= (480-10)) else 0
    elif key_input == K_ARROW_DOWN:
        # Shrink the crop area, keeping at least ~50 pixels.
        frm_size_center[0] -= 10 if (frm_size_center[1] >= 50) else 0
        frm_size_center[1] -= 10 if (frm_size_center[1] >= 50) else 0
    elif key_input == K_RIGHT:
        # Move the crop window right, staying inside the frame.
        shiftX += pix_mov if (idxX + frm_size_center[0] < (640-10)) else 0
    elif key_input == K_LEFT:
        # Move the crop window left.
        shiftX -= pix_mov if (idxX > 5) else 0
    elif key_input == K_UP:
        # Move the crop window up.
        shiftY -= pix_mov if (idxY > 5) else 0
    elif key_input == K_DOWN:
        # Move the crop window down.
        shiftY += pix_mov if (idxY + frm_size_center[1] < (480-10)) else 0
# Release the camera stream and close all windows when done.
cap.release()
cv2.destroyAllWindows()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
psychopy+opencv2でマーカーの上への動きを検出してトリガーを出します.
要psychopy (windows), opencv2 (160714時点ではopencv2は外部からインポートする必要があると思います)
想定している使い方
このスクリプトでやってること
当方での利用機材、環境
動作確認時に気づいたこと