Defaults to 13 MP resolution; optionally (by changing if 1: to if 0:) the camera can be switched to 4K downscaled to 1080p. Captured exposures are merged into HDR images and saved to disk.
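# Usage (keys handled in the main loop below):
#   'a' / 's' / 'd' - request one of the manual exposure/ISO presets defined below
#   'c'             - capture the current frame with the currently requested exposure
#   'q'             - quit
# After three frames have been captured, they are merged into HDR/fused images
# (Debevec, Robertson and Mertens) and the results are written to
# res_debevec_8bit.png and res_mertens_8bit.png.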
import depthai as dai
import numpy as np
import cv2
import queue
# Create pipeline
pipeline = dai.Pipeline()
# Define source and output
camRgb = pipeline.create(dai.node.ColorCamera)
xoutVideo = pipeline.create(dai.node.XLinkOut)
camControlIn = pipeline.create(dai.node.XLinkIn)
camControlIn.setStreamName('camControl')
camControlIn.out.link(camRgb.inputControl)
xoutVideo.setStreamName("video")
# Properties
camRgb.setBoardSocket(dai.CameraBoardSocket.RGB)
if 1:
    camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_13_MP)
    camRgb.setVideoSize(4192, 3120) # FIXME FW
else:
    camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4_K)
    camRgb.setIspScale(1, 2)
camRgb.setFps(10)
xoutVideo.input.setBlocking(False)
xoutVideo.input.setQueueSize(10)
# Linking
camRgb.video.link(xoutVideo.input)
# exposure_times = np.array([100, 250, 500], dtype=np.float32)
# exposure_times = np.array([], dtype=np.float32)
# Manual exposure presets: exposure time in microseconds, sensor ISO.
# Selected at runtime with the 'a', 's' and 'd' keys.
expTimeA = 100000
expTimeS = 80000
expTimeD = 40000
sensIsoA = 400
sensIsoS = 400
sensIsoD = 700
# Currently requested exposure/ISO (defaults before any key is pressed)
expTime = 20000
sensIso = 100
setExposure = False
setCapture = False
# Buffers for the three captured frames and their exposure times
q = queue.Queue(3)
exp = queue.Queue(3)
with dai.Device(pipeline) as device:
    video = device.getOutputQueue(name="video", maxSize=1, blocking=False)
    controlQueue = device.getInputQueue('camControl')
    while True:
        # Get BGR frame from NV12 encoded video frame to show with opencv
        # Visualizing the frame on slower hosts might have overhead
        videoIn = video.get()
        cv2.imshow("video", videoIn.getCvFrame())
        if setExposure:
            print('Setting exposure -> {} and ISO -> {}'.format(expTime, sensIso))
            ctrl = dai.CameraControl()
            ctrl.setManualExposure(expTime, sensIso)
            controlQueue.send(ctrl)
            setExposure = False
        if setCapture:
            q.put(videoIn.getCvFrame())
            exp.put(expTime)
            setCapture = False
        if q.full():
            img_list = list(q.queue)
            exp_times = np.array(list(exp.queue), dtype=np.float32)
            print(exp_times)
            with exp.mutex:
                exp.queue.clear()
            with q.mutex:
                q.queue.clear()
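            # Optional sketch (not in the original gist): the merges below use the raw
            # exposure times without an estimated camera response function (CRF).
            # OpenCV's CalibrateDebevec could be used to estimate one first; change
            # if 0: to if 1: to try it. Note that exp_times here are in microseconds,
            # as sent to the camera; the relative ratios between exposures are what matter.
            if 0:
                calibrate_debevec = cv2.createCalibrateDebevec()
                response_debevec = calibrate_debevec.process(img_list, times=exp_times.copy())
                # response_debevec could then be passed as the third argument, e.g.
                # merge_debevec.process(img_list, times=exp_times.copy(), response=response_debevec)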
            merge_debevec = cv2.createMergeDebevec()
            merge_robertson = cv2.createMergeRobertson()
            hdr_debevec = merge_debevec.process(img_list, times=exp_times.copy())
            hdr_robertson = merge_robertson.process(img_list, times=exp_times.copy())
            # Tonemap HDR image
            tonemap1 = cv2.createTonemap(gamma=2.2)
            res_debevec = tonemap1.process(hdr_debevec.copy())
            # Exposure fusion using Mertens
            merge_mertens = cv2.createMergeMertens()
            res_mertens = merge_mertens.process(img_list)
            # Convert datatype to 8-bit and save
            res_debevec_8bit = np.clip(res_debevec*255, 0, 255).astype('uint8')
            res_mertens_8bit = np.clip(res_mertens*255, 0, 255).astype('uint8')
            cv2.imshow('res_debevec', res_debevec)
            cv2.imshow('res_mertens_8bit', res_mertens_8bit)
            #cv2.imwrite("res_debevec.png", res_debevec) # why not displayed properly?
            cv2.imwrite("res_debevec_8bit.png", res_debevec_8bit)
            cv2.imwrite("res_mertens_8bit.png", res_mertens_8bit)
        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        elif key == ord('a'):
            print('Getting Key a')
            expTime = expTimeA
            sensIso = sensIsoA
            setExposure = True
        elif key == ord('s'):
            print('Getting Key s')
            expTime = expTimeS
            sensIso = sensIsoS
            setExposure = True
        elif key == ord('d'):
            print('Getting Key d')
            expTime = expTimeD
            sensIso = sensIsoD
            setExposure = True
        elif key == ord('c'):
            print('Capturing')
            setCapture = True