@chuanenlin
Created April 12, 2019 02:44
import cv2 as cv
import numpy as np

# The video feed is read in as a VideoCapture object
cap = cv.VideoCapture("shibuya.mp4")
# ret = a boolean return value from getting the frame, first_frame = the first frame in the entire video sequence
ret, first_frame = cap.read()
# Converts frame to grayscale because we only need the luminance channel - less computationally expensive
prev_gray = cv.cvtColor(first_frame, cv.COLOR_BGR2GRAY)

while cap.isOpened():
    # ret = a boolean return value from getting the frame, frame = the current frame being projected in the video
    ret, frame = cap.read()
    # Stops the loop once the video runs out of frames
    if not ret:
        break
    # Opens a new window and displays the input frame
    cv.imshow("input", frame)
    # Converts each frame to grayscale - we previously only converted the first frame to grayscale
    gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
    # Calculates dense optical flow by Farneback method
    # https://docs.opencv.org/3.0-beta/modules/video/doc/motion_analysis_and_object_tracking.html#calcopticalflowfarneback
    flow = cv.calcOpticalFlowFarneback(prev_gray, gray, None, 0.5, 3, 15, 3, 5, 1.2, 0)
    # Updates previous frame
    prev_gray = gray
    # Frames are read at intervals of 1 millisecond. The program breaks out of the while loop when the user presses the 'q' key
    if cv.waitKey(1) & 0xFF == ord('q'):
        break

# The following frees up resources and closes all windows
cap.release()
cv.destroyAllWindows()
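The flow array returned by cv.calcOpticalFlowFarneback holds a 2-D displacement vector (dx, dy) for every pixel of prev_gray. A minimal sketch of one way to turn that field into something displayable is below; it is not part of this gist. It uses the standard cv.cartToPolar and cv.cvtColor calls, and the mapping of hue to direction and value to magnitude is a common convention, assumed here for illustration.

import cv2 as cv
import numpy as np

def flow_to_bgr(flow):
    # Splits the per-pixel displacement field into magnitude and angle (radians)
    magnitude, angle = cv.cartToPolar(flow[..., 0], flow[..., 1])
    # Builds an HSV image: hue encodes direction, value encodes speed (assumed visualization convention)
    hsv = np.zeros((flow.shape[0], flow.shape[1], 3), dtype=np.uint8)
    hsv[..., 0] = (angle * 180 / np.pi / 2).astype(np.uint8)   # OpenCV hue range is [0, 180)
    hsv[..., 1] = 255                                          # full saturation
    hsv[..., 2] = cv.normalize(magnitude, None, 0, 255, cv.NORM_MINMAX).astype(np.uint8)
    # Converts to BGR so cv.imshow can display it alongside the input frame
    return cv.cvtColor(hsv, cv.COLOR_HSV2BGR)

Inside the loop, something like cv.imshow("dense optical flow", flow_to_bgr(flow)) would then show moving regions as colored patches, with color indicating direction of motion and brightness indicating speed.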