Input lag video analysis using OpenCV
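# Measure input lag from a 1280x720 / 60 fps capture of gameplay footage.
# The script scans every frame for a green marker line (assumed to be drawn by the
# test setup at the moment the input is registered), then counts how many frames
# pass until the character visibly moves, using dense optical flow over a region
# of interest. Results are printed as CSV: frame index, marker offset, lag.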
import numpy as np
import cv2
video = 'D:\\InputLag\\RECentral\\SFV 3.060.mp4'
cap = cv2.VideoCapture(video)
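# Average a single channel over each scanline (the capture is 1280 pixels wide).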
def average_by_line(img):
    return img.sum(axis=1) / 1280
DEBUG = 0
i = 0
ret, frame = cap.read()
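# Region of interest, used below as frame[MIN_X:MAX_X, MIN_Y:MAX_Y]: rows 200-600
# and columns 0-640 of the 720p frame (note the *_X constants index rows and the
# *_Y constants index columns).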
MIN_Y = 0
MAX_Y = 1280//2
MIN_X = 200
MAX_X = 600
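# HSV buffer reused in DEBUG mode to visualise the optical flow
# (hue = flow direction, value = normalised flow magnitude).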
hsv = np.zeros_like(frame[MIN_X:MAX_X, MIN_Y:MAX_Y])
hsv[...,1] = 255
hsv[...,0] = 255
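# Scan the whole video; whenever a green marker line shows up, measure the lag
# with the inner loop below.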
while cap.isOpened():
    ret, green_frame = cap.read()
    if green_frame is None:
        break
    i += 1
    if i % 60 == 0 and DEBUG:
        print(i / 60)
    # Useful when a detection looks buggy: skip ahead and turn DEBUG on from that frame
    # if i < 2586:
    #     continue
    # else:
    #     DEBUG = 1
    #
    # DEBUG frame by frame
    # cv2.imshow('frame', green_frame[MIN_X:MAX_X, MIN_Y:MAX_Y])
    # cv2.waitKey()
    # convert to HLS to ease the detection of green/magenta lines
    hsl_frame = cv2.cvtColor(green_frame, cv2.COLOR_BGR2HLS)
    hue = average_by_line(hsl_frame[..., 0])
    sat = average_by_line(hsl_frame[..., 2])
    lum = average_by_line(hsl_frame[..., 1])
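    # hue/sat/lum are per-scanline averages over the full frame width, so a solid
    # coloured line across the screen stands out as a strong peak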
    # green line selector
    selector = (50 <= hue) & (hue <= 65) & (sat > 200) & (lum < 180) & (lum > 50)
    # if a magenta line is also present, just ignore this frame
    if np.any(selector) and not np.any((140 <= hue) & (hue <= 160) & (sat > 200) & (lum < 180) & (lum > 50)):
        # we have an input: take the first matching scanline to get the timing
        green_lines = np.nonzero(selector)
        if DEBUG:
            print("[%.02f] Found one!" % (i / 60), 16.6666 * green_lines[0][0] / 720)
            cv2.imshow('frame', green_frame)
        # opticflow is computed on grayscale
        prvs = cv2.cvtColor(green_frame, cv2.COLOR_BGR2GRAY)
        # pure black & white :D
        # prvs = (prvs // 128) * 255
        lag = 0
        if DEBUG:
            cur_key = cv2.waitKey()
        previous_metric = None
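        # count frames (up to 15) until the optical flow says the character moved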
        while lag < 15:
            ret, frame = cap.read()
            if frame is None:
                # guard against hitting the end of the video mid-measurement
                lag = None
                break
            i += 1
            lag += 1
            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            # pure black & white :D
            # gray = (gray // 128) * 255
            # we target only the center left part of the game
            # and we compute the dense optical flow between consecutive frames
            flow = cv2.calcOpticalFlowFarneback(prvs[MIN_X:MAX_X, MIN_Y:MAX_Y], gray[MIN_X:MAX_X, MIN_Y:MAX_Y], None, 0.5, 3, 15, 3, 5, 1.1, 0)
            # sum the absolute horizontal-flow values; if the total is high, the character moved
            change_metric = np.abs(flow[..., 0]).sum()
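            # in DEBUG mode, print the metric and render the flow field frame by frame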
            if DEBUG:
                print("\t", change_metric, end=" / ")
                if previous_metric is not None:
                    print(change_metric / previous_metric)
                else:
                    print(None)
                mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
                # display the frame then the opticflow representation
                hsv[..., 0] = ang * 180 / np.pi / 2
                hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
                print("\t\t", (hsv[..., 2] > MIN_X).sum())
                rgb = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
                cv2.imshow('frame', frame)
                cv2.waitKey()
                cv2.imshow('frame', rgb)
                cv2.waitKey()
            # if lag == 1 and change_metric > 200000:
            #     # too much movement to do anything (tekken 7)
            #     lag = None
            #     break
            # adjust the threshold below to the game being measured
            if previous_metric is not None and change_metric > 150000:  # and change_metric > (previous_metric * 2):
                if DEBUG:
                    print("\t Found lag: ", lag)
                break
            prvs = gray
            previous_metric = change_metric
        if lag is None:
            continue
        if lag == 15:
            if DEBUG:
                print("\t ERROR COULDN'T FIND LAG!")
                lag = None
                cv2.imshow('frame', green_frame)
                cv2.waitKey()
            continue
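        # CSV output: frame index, marker-line offset within the frame (ms, assuming
        # 16.6666 ms per 720-line frame), measured lag in frames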
print("%i,%f,%i" % (i, 16.6666 * green_lines[0][0] / 720, lag))
cap.release()
cv2.destroyAllWindows()