@onkoe
Created September 21, 2023 20:01
a reimplementation of ARTracker with pep8 standards and without the YOLO crud
import cv2
import cv2.aruco as aruco
from configparser import ConfigParser

from loguru import logger


class ArucoTracker:
    """
    ## ArucoTracker

    Finds ARUCO tags and returns the distance to them.
    """

    def __init__(
        self,
        cameras: list[int],
        save_video: bool = False,
        config_file: str = "config.ini",
    ):
        """
        # Constructor

        Creates a new ArucoTracker.
        Ends up initializing several libraries, including `OpenCV`.

        Args:
        - cameras (list[int]): The device IDs of the ARUCO-detecting cameras (webcams) plugged in.
        - save_video (bool, optional): Whether or not to save video to disk.
          Defaults to False.
        - config_file (str, optional): Path to config. Defaults to "config.ini".
        """
        # Set some values
        self.save_video: bool = save_video
        self.distance_to_marker: float = -1.0
        self.angle_to_marker: float = -999.99
        self.cameras: list[int] = cameras
        self.main_aruco_tag: int = -1  # for most aruco tags
        self.alt_aruco_tag: int = -1  # for the second tag on a gate

        # Set additional values from config
        config = ConfigParser()
        config.read(config_file)
        # ConfigParser reads need a section name; "ARTRACKER" is assumed here.
        sect = "ARTRACKER"
        self.degrees_per_pixel: float = config.getfloat(sect, "DEGREES_PER_PIXEL")  # horizontal
        self.vdegrees_per_pixel: float = config.getfloat(sect, "VDEGREES_PER_PIXEL")  # vertical
        self.focal_len: float = config.getfloat(sect, "FOCAL_LEN")  # focal length
        self.focal_len_30h: float = config.getfloat(sect, "FOCAL_LENGTH30H")  # focal: 30°v, 0°h
        self.focal_len_30v: float = config.getfloat(sect, "FOCAL_LENGTH30V")  # focal: 0°v, 30°h
        self.known_tag_width: float = config.getfloat(sect, "KNOWN_TAG_WIDTH")
        self.format: str = config.get(sect, "FORMAT")
        self.frame_width: int = config.getint(sect, "FRAME_WIDTH")
        self.frame_height: int = config.getint(sect, "FRAME_HEIGHT")
        self.marker_dict: cv2.aruco.Dictionary = aruco.Dictionary_get(aruco.DICT_4X4_50)
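
        # For reference, a config.ini that would satisfy the reads above could
        # look roughly like the block below. The "ARTRACKER" section name and
        # all values shown are placeholders/assumptions, not values from the
        # original project:
        #
        #   [ARTRACKER]
        #   DEGREES_PER_PIXEL = 0.0
        #   VDEGREES_PER_PIXEL = 0.0
        #   FOCAL_LEN = 0.0
        #   FOCAL_LENGTH30H = 0.0
        #   FOCAL_LENGTH30V = 0.0
        #   KNOWN_TAG_WIDTH = 0.0
        #   FORMAT = MJPG
        #   FRAME_WIDTH = 640
        #   FRAME_HEIGHT = 480
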
        # Save video to disk (if requested)
        if self.save_video:
            form: str = self.format
            # TODO(bray): impl consistent naming scheme for video files
            #             bc like. who wants a random-ass avi file that gets overwritten??? 🥶
            self.video_writer = cv2.VideoWriter(
                filename="autonomous.avi",
                fourcc=cv2.VideoWriter_fourcc(form[0], form[1], form[2], form[3]),
                fps=5,
                frameSize=(self.frame_width, self.frame_height),
                isColor=False,
            )

        # Initialize cameras
        self.caps: list[cv2.VideoCapture] = []
        for camera in cameras:
            while True:
                cam = cv2.VideoCapture(camera)

                # Make sure webcam isn't busy
                if not cam.isOpened():
                    logger.error(f"Failed to open camera {camera}.")
                    cam.release()
                    continue

                # Set some camera device settings in OpenCV
                form: str = self.format
                cam.set(cv2.CAP_PROP_FRAME_HEIGHT, self.frame_height)
                cam.set(cv2.CAP_PROP_FRAME_WIDTH, self.frame_width)
                cam.set(cv2.CAP_PROP_BUFFERSIZE, 1)  # Request 1-frame buffer for speed
                cam.set(
                    cv2.CAP_PROP_FOURCC,
                    cv2.VideoWriter_fourcc(form[0], form[1], form[2], form[3]),
                )  # set correct 'fourcc' codec

                readable, _frame = cam.read()
                if not readable:
                    logger.error(f"Failed to read from camera {camera}. Releasing...")
                    cam.release()
                else:
                    self.caps.append(cam)
                    logger.debug(
                        f"Camera {camera} is working. Adding to capture devices..."
                    )
                    break

    # FIXME(BRAY): this isn't done at all. fold into a step function and
    #              optimize our lighting algo from there. might be worth
    #              creating a lib for this?
    def handle_marker(
        self,
        main_aruco_tag: int,
        image: cv2.Mat,
        alt_aruco_tag: int = -1,
        cameras: int = -1,
    ) -> bool:
        """Analyzes the given image for markers and returns whether or not a tag was found.

        Args:
            main_aruco_tag (int): The 'main' ARUCO tag ID to look for.
            image (cv2.Mat): An image captured by a cv2 VideoCapture.
            alt_aruco_tag (int, optional): A secondary ARUCO tag ID used for gates.
            cameras (int, optional): The camera ID to utilize.
        """
        # Convert the image to grayscale before detection
        image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
        if self.marker_ids is None:
            print()

    def find_grayscale_sweetspot(
        self, image: cv2.Mat, main_aruco_tag: int, alt_aruco_tag: int = -1
    ):
        """Sweeps the image through several brightness thresholds until markers show up.

        Args:
            image (cv2.Mat): A grayscale image to threshold and search for markers.
            main_aruco_tag (int): The 'main' ARUCO tag ID to look for.
            alt_aruco_tag (int, optional): A secondary ARUCO tag ID used for gates.
        """
        # check visibility at brightness 40, 100, 160, and 220
        for light in range(40, 221, 60):
            self.main_aruco_tag = -1
            self.alt_aruco_tag = -1
            bw = cv2.threshold(image, light, 255, cv2.THRESH_BINARY)[1]
            (self.corners, self.marker_ids, self.rejected) = aruco.detectMarkers(
                bw, self.marker_dict
            )

            if self.marker_ids is not None:  # check if brightness is good enough
                logger.debug(f"Checking for markers at brightness level {light}...")
                self.main_aruco_tag = 1

                if alt_aruco_tag == -1:  # not a gate
                    # See if we have the correct marker
                    for marker in range(len(self.marker_ids)):
                        if self.marker_ids[marker] == main_aruco_tag:
                            logger.debug(f"Found main marker: {main_aruco_tag}")
                            self.main_aruco_tag = marker
                            break
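

# A minimal usage sketch (an assumption-heavy example, not part of the original
# ARTracker/ArucoTracker code): it presumes a config.ini with the options read
# in __init__ above, a webcam at device index 0, and an arbitrary tag ID of 1.
if __name__ == "__main__":
    tracker = ArucoTracker(cameras=[0], save_video=False, config_file="config.ini")

    # Grab one frame from the first working capture device, convert it to
    # grayscale, and sweep brightness thresholds looking for the tag.
    readable, frame = tracker.caps[0].read()
    if readable:
        gray = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)
        tracker.find_grayscale_sweetspot(gray, main_aruco_tag=1)
        logger.info(f"marker index after sweep: {tracker.main_aruco_tag}")
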
@Brasilius
eyes bleed less
