-
-
Save Erol444/9767b2b63d5829cf9469496ea291c541 to your computer and use it in GitHub Desktop.
FSYNC Y-adapter demo
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3 | |
import cv2 | |
import math | |
import depthai as dai | |
import contextlib | |
import argparse | |
from datetime import timedelta | |
# Command-line configuration: one --fps flag applied to every camera sensor.
parser = argparse.ArgumentParser(epilog='Press C to capture a set of frames.')
parser.add_argument('-f', '--fps', type=float, default=30,
                    help='Camera sensor FPS, applied to all cams')
args = parser.parse_args()

# Friendly stream name -> physical board socket.
cam_socket_opts = {
    'rgb'  : dai.CameraBoardSocket.RGB,
    'left' : dai.CameraBoardSocket.LEFT,
    'right': dai.CameraBoardSocket.RIGHT,
}

# Friendly stream name -> numeric camera instance id.
# NOTE(review): not referenced anywhere else in this file — confirm before removing.
cam_instance = {
    'rgb'  : 0,
    'left' : 1,
    'right': 2,
}
def create_pipeline():
    """Build a depthai pipeline with one XLinkOut stream per camera.

    The rgb stream is a downscaled ColorCamera preview; left/right are
    800P MonoCamera outputs. All sensors get fixed manual exposure and
    FrameSyncMode.INPUT so an external FSYNC signal triggers them together.
    """
    pipeline = dai.Pipeline()

    def add_cam(name):
        # One output stream per camera, named after the camera.
        xout = pipeline.create(dai.node.XLinkOut)
        xout.setStreamName(name)
        if name == 'rgb':
            cam = pipeline.create(dai.node.ColorCamera)
            cam.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
            cam.setIspScale(1, 2)  # downscale ISP 2x: 1080P -> 540P
            cam.preview.link(xout.input)
        else:
            cam = pipeline.create(dai.node.MonoCamera)
            cam.setResolution(dai.MonoCameraProperties.SensorResolution.THE_800_P)
            cam.out.link(xout.input)
        # Fixed exposure (1000us, ISO 800) + external frame-sync input,
        # applied to every sensor so they all fire on the shared FSYNC line.
        cam.initialControl.setManualExposure(1000, 800)
        cam.initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.INPUT)
        cam.setBoardSocket(cam_socket_opts[name])
        cam.setFps(args.fps)

    # Same creation order as before: left, rgb, right.
    for name in ('left', 'rgb', 'right'):
        add_cam(name)
    return pipeline
# MxIDs (device serials) of the OAK devices to connect to, in connection order.
ips = [
    '1944301091BD751300',
    '18443010A1F4C31200',
    # '14442C1031B0C8D600',
]
# https://docs.python.org/3/library/contextlib.html#contextlib.ExitStack
# ExitStack closes every opened dai.Device when the block exits (or raises).
with contextlib.ExitStack() as stack:
    device_infos = dai.Device.getAllAvailableDevices()
    if len(device_infos) == 0:
        raise RuntimeError("No devices found!")
    else:
        print("Found", len(device_infos), "devices")

    queues = []
    for ip in ips:
        device_info = dai.DeviceInfo(ip)
        # Note: the pipeline isn't set here, as we don't know yet what device it is.
        # The extra arguments passed are required by the existing overload variants
        openvino_version = dai.OpenVINO.Version.VERSION_2021_4
        usb2_mode = False
        device = stack.enter_context(dai.Device(openvino_version, device_info, usb2_mode))

        cam_list = {'left', 'right', 'rgb'}
        # cam_list = {'left', 'right'}
        print('Starting pipeline for', device.getMxId())
        # Get a customized pipeline based on identified device type
        device.startPipeline(create_pipeline())

        # Output queue will be used to get the rgb frames from the output defined above
        for cam in cam_list:
            queues.append({
                'queue': device.getOutputQueue(name=cam, maxSize=4, blocking=False),
                'msgs': [],  # Frame msgs buffered per stream until a synced set exists
                'mx': device.getMxId(),
                'cam': cam,
            })

    # Loop-invariant sync window, hoisted out of check_sync: frames within
    # half a frame period of each other count as synced (~17 ms @ 30 FPS).
    sync_threshold = timedelta(milliseconds=math.ceil(500 / args.fps))

    def check_sync(queues, timestamp):
        """Return True when every queue buffers a frame within sync_threshold
        of `timestamp`. On success, drop each queue's older frames so the
        matching frame sits at index 0, ready to be popped by the caller.
        """
        matching_frames = []
        for q in queues:
            for i, msg in enumerate(q['msgs']):
                if abs(msg.getTimestamp() - timestamp) <= sync_threshold:
                    matching_frames.append(i)
                    break  # at most one match recorded per queue
        if len(matching_frames) != len(queues):
            return False  # at least one stream has no frame near this timestamp
        # We have all frames synced. Remove the excess (older) ones.
        for i, q in enumerate(queues):
            q['msgs'] = q['msgs'][matching_frames[i]:]
        return True

    while True:
        for q in queues:
            new_msg = q['queue'].tryGet()  # non-blocking; None when no frame yet
            if new_msg is None:
                continue
            print(f"[{q['mx']}] Got {q['cam']} frame, seq: {new_msg.getSequenceNum()} TS: {new_msg.getTimestamp()}")
            q['msgs'].append(new_msg)
            if not check_sync(queues, new_msg.getTimestamp()):
                continue
            # A full synced set is available: show one frame per stream.
            # (renamed from `q` — the original shadowed the outer loop variable)
            for sq in queues:
                imgFrame: dai.ImgFrame = sq['msgs'].pop(0)
                frame = imgFrame.getCvFrame()
                # Overlay mid-exposure timestamp and sequence number for visual sync checks
                frame = cv2.putText(frame, f"TS {imgFrame.getTimestamp(dai.CameraExposureOffset.MIDDLE)}", (10, 30), cv2.FONT_HERSHEY_TRIPLEX, 1.0, color=(127, 255, 0))
                frame = cv2.putText(frame, f"Seq {imgFrame.getSequenceNum()}", (10, 80), cv2.FONT_HERSHEY_TRIPLEX, 1.0, color=(127, 255, 0))
                cv2.imshow(f"{sq['cam']} - {sq['mx']}", frame)
        if cv2.waitKey(1) == ord('q'):
            break
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment