flash.py (@alex-luxonis, forked from lucasmediaflow/flash.py; last active August 2, 2021)
Trying to synchronize camera exposure with external flash
''' Current output:
6: fps: 1.420, latency: 0.062988 s, next exposure will start in 0.541045 s
7: fps: 1.420, latency: 0.062930 s, next exposure will start in 0.541103 s
8: fps: 1.420, latency: 0.063203 s, next exposure will start in 0.54083 s
Sent capture-still command. Last preview frame sequence number was: 8
Waiting before trigger (+1 extra frame): 1.2446962702088058
trigger flash
9: fps: 1.420, latency: 0.720992 s, next exposure will start in -0.116959 s
10: fps: 1.420, latency: 0.089930 s, next exposure will start in 0.514103 s
11: fps: 1.420, latency: 0.063053 s, next exposure will start in 0.540981 s
Got JPEG still capture, sequence number: 10
12: fps: 1.420, latency: 0.062415 s, next exposure will start in 0.541618 s
13: fps: 1.420, latency: 0.063283 s, next exposure will start in 0.54075 s
'''
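# Rough arithmetic behind the log above (a sketch, using the values printed there and set below):
#   frame period ~= 1 / 1.420 fps ~= 0.704 s
#   exposure      = 100000 us     =  0.100 s  (exposureTimeUs)
#   latency      ~= 0.063 s  (host time when the preview arrives minus its capture timestamp)
#   "next exposure will start in" ~= frame period - exposure - latency
#                                 ~= 0.704 - 0.100 - 0.063 ~= 0.541 s, matching the output.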
onRPi = 1 # Note: was tested with this disabled
if onRPi: from gpiozero import LED
import depthai as dai
import cv2
from time import sleep
import time
if onRPi: flashpin = LED(23)
pipeline = dai.Pipeline()
if onRPi: pipeline.setCameraTuningBlobPath('/home/pi/oak/tuning_color_low_light.bin')
camFps = 1 # Note: this low FPS can't actually be achieved, it will be capped to about 1.42
camera = pipeline.createColorCamera()
camera.setResolution(dai.ColorCameraProperties.SensorResolution.THE_12_MP)
camera.setPreviewSize(400, 300) # Can also reduce to say (40, 30)
camera.setInterleaved(True) # Applies to `preview`. Just so OpenCV can display it directly without converting
camera.setFps(camFps)
camera.setPreviewKeepAspectRatio(False)
camera.initialControl.setManualFocus(135)
camera.initialControl.setAutoFocusMode(dai.RawCameraControl.AutoFocusMode.OFF)
jpgEncoder = pipeline.createVideoEncoder()
jpgEncoder.setDefaultProfilePreset(camera.getStillSize(), 1, dai.VideoEncoderProperties.Profile.MJPEG)
jpgEncoderXLinkOut = pipeline.createXLinkOut()
jpgEncoderXLinkOut.setStreamName('jpg')
camControl = pipeline.createXLinkIn()
camControl.setStreamName('camControl')
camPreview = pipeline.createXLinkOut()
camPreview.setStreamName("preview")
camera.still.link(jpgEncoder.input)
camera.preview.link(camPreview.input)
jpgEncoder.bitstream.link(jpgEncoderXLinkOut.input)
camControl.out.link(camera.inputControl)
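# Pipeline topology set up above:
#   camera.still   -> jpgEncoder -> jpgEncoderXLinkOut  (stream "jpg",     full-res MJPEG stills to host)
#   camera.preview -> camPreview                        (stream "preview", small preview frames to host)
#   camControl (stream "camControl") -> camera.inputControl  (CameraControl messages from host)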
def triggerflash():
    print('trigger flash')
    if onRPi: flashpin.on()
    sleep(0.1)
    if onRPi: flashpin.off()
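# Note (assumption): the 0.1 s flash pulse above roughly matches the 100 ms manual exposure
# (exposureTimeUs = 100000) configured below; if the exposure is lowered, the pulse width
# likely needs to be shortened to stay within the exposure window.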
with dai.Device(pipeline) as dev:
    exposureTimeUs = 100000 # TODO can be lowered
    camControlQ = dev.getInputQueue('camControl', maxSize = 1, blocking = False)
    jpgQ = dev.getOutputQueue('jpg', maxSize = 1, blocking = False)
    prvQ = dev.getOutputQueue('preview', maxSize = 1, blocking = False)

    ctrl = dai.CameraControl()
    ctrl.setManualExposure(exposureTimeUs, 100)
    ctrl.setAutoWhiteBalanceMode(dai.RawCameraControl.AutoWhiteBalanceMode.INCANDESCENT)
    camControlQ.send(ctrl)

    frame = None
    previousCaptureTime = -1
    previousSequenceNum = -1
    doCapture = False
    while True:
        for jpgFrame in jpgQ.tryGetAll():
            print("Got JPEG still capture, sequence number:", jpgFrame.getSequenceNum())
            frame = cv2.imdecode(jpgFrame.getData(), cv2.IMREAD_UNCHANGED)
            cv2.imshow('aasdf', frame)
            logo = cv2.imread('/home/pi/logo.png')
            cv2.imshow('logo', logo)

        preview = prvQ.get()
        tnow = time.monotonic()
        tcap = preview.getTimestamp().total_seconds()
        seq = preview.getSequenceNum()
        if previousCaptureTime >= 0:
            seqDiff = seq - previousSequenceNum # Normally should be 1
            if seqDiff != 1: print("Lost", seqDiff - 1, "preview frames")
            frameTime = (tcap - previousCaptureTime) / seqDiff
            fps = 1 / frameTime
            latency = tnow - tcap
            # Note: getTimestamp() returns a host-synced time captured at the MIPI readout start event,
            # which should happen right after the exposure ended
            nextExposureStartTime = frameTime + tcap - exposureTimeUs/1e6
            nextExposureIn = nextExposureStartTime - tnow # Consider getting `time.monotonic()` again
            print("{}: fps: {:.3f}, latency: {:.6f} s, next exposure will start in {:.6} s".format(
                seq, fps, latency, nextExposureIn))
            if doCapture:
                doCapture = False
                ctrl.setCaptureStill(True)
                camControlQ.send(ctrl)
                print("Sent capture-still command. Last preview frame sequence number was:", seq)
                captureCommandDelay = 1 # Ideally this should be zero, but apparently from tests
                                        # there's one extra frame delay. TODO: improve in FW
                timeToWaitUntilFlashing = nextExposureStartTime - time.monotonic() + captureCommandDelay * frameTime
                print("Waiting before trigger (+1 extra frame):", timeToWaitUntilFlashing)
                # Note: consider making this wait asynchronous, it will delay next preview frame
                sleep(timeToWaitUntilFlashing)
                triggerflash()
        previousCaptureTime = tcap
        previousSequenceNum = seq
        if 1: # Optional preview display
            cv2.imshow('preview', preview.getFrame()) # Note: use getCvFrame() if not in BGR interleaved format already
            key = cv2.waitKey(1)
            if key == ord('q'):
                print('quitting')
                break
            elif key == ord('c'):
                doCapture = True
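# Usage: with the preview window focused, press 'c' to send a capture-still command and fire
# the flash at the predicted start of the next exposure; press 'q' to quit.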