@Tony363
Created September 20, 2020 11:02
import cv2
import numpy as np
import imutils


def Manual(
        left_image,
        right_image,
        cached=None,
        work_megapix=0.69,
        seam_megapix=0.01,
        ba_refine_mask='xxxxx',
        finder=cv2.ORB.create(),
        blender=cv2.detail.Blender_createDefault(cv2.detail.Blender_NO),
        compensator=cv2.detail.ExposureCompensator_createDefault(cv2.detail.ExposureCompensator_GAIN_BLOCKS),
):
    """
    Manually stitch two images with OpenCV's detail (stitching) API.

    Alternative feature finders (see
    https://docs.opencv.org/master/db/d27/tutorial_py_table_of_contents_feature2d.html):
        finder = cv2.xfeatures2d_SURF.create()
        finder = cv2.ORB.create()
        finder = cv2.xfeatures2d_SIFT.create()
        finder = cv2.BRISK_create()
        finder = cv2.AKAZE_create()
        finder = cv2.FastFeatureDetector_create()

    Note: the default finder, blender and compensator are created once at
    definition time and are therefore shared across calls.
    """
    img_names = np.asarray([left_image, right_image])
    if cached is not None:
        # Fast path: reuse the registration (warper, cameras, corners, seam masks)
        # produced by a previous call and only re-warp, compensate and blend.
        dst_sz, warper, cameras, corners, masks_warped = cached
        blender.prepare(dst_sz)
        for idx, name in enumerate(img_names):
            K = cameras[idx].K().astype(np.float32)
            corner, image_warped = warper.warp(name, K, cameras[idx].R, cv2.INTER_LINEAR, cv2.BORDER_REFLECT)
            p, mask_warped = warper.warp(255 * np.ones((name.shape[0], name.shape[1]), np.uint8), K, cameras[idx].R, cv2.INTER_NEAREST, cv2.BORDER_CONSTANT)
            compensator.apply(idx, corners[idx], image_warped, mask_warped)
            seam_mask = cv2.resize(cv2.dilate(masks_warped[idx], None), (mask_warped.shape[1], mask_warped.shape[0]), 0, 0, cv2.INTER_LINEAR_EXACT)
            mask_warped = cv2.bitwise_and(seam_mask, mask_warped)
            blender.feed(cv2.UMat(image_warped.astype(np.int16)), mask_warped, corners[idx])
        result, result_mask = blender.blend(None, None)
        dst = cv2.normalize(src=result, dst=None, alpha=255., norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
        # Note: the cached path returns two values, the full path below returns three.
        return False, dst
    # Both inputs are assumed to have the same dimensions, so the work and seam
    # scales are derived from the left image only.
    work_scale = min(1.0, np.sqrt(work_megapix * 1e6 / (left_image.shape[0] * left_image.shape[1])))
    seam_scale = min(1.0, np.sqrt(seam_megapix * 1e6 / (left_image.shape[0] * left_image.shape[1])))
    seam_work_aspect = seam_scale / work_scale
    full_img_sizes = np.asarray([(name.shape[1], name.shape[0]) for name in img_names])
    # Detect and describe features on the work-scale copies, keep seam-scale copies for seam finding.
    features = np.asarray([cv2.detail.computeImageFeatures2(finder, cv2.resize(src=name, dsize=None, fx=work_scale, fy=work_scale, interpolation=cv2.INTER_LINEAR_EXACT)) for name in img_names])
    images = np.asarray([cv2.resize(src=name, dsize=None, fx=seam_scale, fy=seam_scale, interpolation=cv2.INTER_LINEAR_EXACT) for name in img_names])
    matcher = cv2.detail.BestOf2NearestMatcher_create(False, 0.65)
    p = matcher.apply2(features)
    matcher.collectGarbage()
    indices = cv2.detail.leaveBiggestComponent(features, p, 0.3)  # trivial with only two images
    estimator = cv2.detail_HomographyBasedEstimator()
    b, cameras = estimator.apply(features, p, None)
    for cam in cameras:
        # TODO: vectorize over the cv2.detail.CameraParams objects instead of looping.
        cam.R = cam.R.astype(np.float32)
    # Bundle adjustment; other adjusters and refinement-mask settings could be explored.
    adjuster = cv2.detail_BundleAdjusterRay()
    adjuster.setConfThresh(1)
    refine_mask = np.zeros((3, 3), np.uint8)
    if ba_refine_mask[0] == 'x':
        refine_mask[0, 0] = 1
    if ba_refine_mask[1] == 'x':
        refine_mask[0, 1] = 1
    if ba_refine_mask[2] == 'x':
        refine_mask[0, 2] = 1
    if ba_refine_mask[3] == 'x':
        refine_mask[1, 1] = 1
    if ba_refine_mask[4] == 'x':
        refine_mask[1, 2] = 1
    adjuster.setRefinementMask(refine_mask)
    b, cameras = adjuster.apply(features, p, cameras)
    # Use the median focal length as the warping scale (sorted first, as in
    # OpenCV's stitching_detailed.py sample).
    focals = np.sort([cam.focal for cam in cameras])
    warped_image_scale = focals[len(focals) // 2] if len(focals) % 2 == 1 else (focals[len(focals) // 2] + focals[len(focals) // 2 - 1]) / 2
    # Wave correction straightens the panorama horizon.
    rmats = np.asarray([np.copy(cam.R) for cam in cameras])
    rmats = cv2.detail.waveCorrect(rmats, cv2.detail.WAVE_CORRECT_HORIZ)
    for idx, cam in enumerate(cameras):
        cam.R = rmats[idx]
    # Warp the seam-scale images and their masks once per camera.
    # Kseam_work_aspect() (see below) scales the intrinsics to the seam scale.
    warper = cv2.PyRotationWarper('spherical', warped_image_scale * seam_work_aspect)
    masks = [cv2.UMat(255 * np.ones((images[i].shape[0], images[i].shape[1]), np.uint8)) for i in range(img_names.shape[0])]
    corners = []
    images_warped = []
    masks_warped = []
    for idx in range(img_names.shape[0]):
        K = Kseam_work_aspect(cameras[idx].K().astype(np.float32), seam_work_aspect)
        corner, image_wp = warper.warp(images[idx], K, cameras[idx].R, cv2.INTER_LINEAR, cv2.BORDER_REFLECT)
        corners.append(corner)
        images_warped.append(image_wp)
        p, mask_wp = warper.warp(masks[idx], K, cameras[idx].R, cv2.INTER_NEAREST, cv2.BORDER_CONSTANT)
        masks_warped.append(mask_wp.get())
    images_warped_f = [img.astype(np.float32) for img in images_warped]
    compensator.feed(corners=corners, images=images_warped, masks=masks_warped)
    seam_finder = cv2.detail_GraphCutSeamFinder('COST_COLOR')
    seam_finder.find(images_warped_f, corners, masks_warped)
    # Re-create the warper at compose (full) resolution and recompute the
    # per-image corners and sizes of the final panorama.
    warped_image_scale *= 1 / work_scale
    warper = cv2.PyRotationWarper('spherical', warped_image_scale)
    corners = []
    sizes = []
    for i in range(len(img_names)):
        # Scale the camera parameters from work scale back to full resolution.
        cameras[i].focal *= 0.9999 / work_scale
        cameras[i].ppx *= 1 / work_scale
        cameras[i].ppy *= 1 / work_scale
        sz = (full_img_sizes[i][0], full_img_sizes[i][1])
        K = cameras[i].K().astype(np.float32)
        roi = warper.warpRoi(sz, K, cameras[i].R)
        corners.append(roi[0:2])
        sizes.append(roi[2:4])
    dst_sz = cv2.detail.resultRoi(corners=corners, sizes=sizes)
    blend_width = np.sqrt(dst_sz[2] * dst_sz[3]) * 5 / 100  # unused with the default no-op blender
    blender.prepare(dst_sz)
"""Panorama construction step"""
# https://github.com/opencv/opencv/blob/master/samples/cpp/stitching_detailed.cpp#L725 ?
for idx, name in enumerate(img_names):
corner, image_warped = warper.warp(name, cameras[idx].K().astype(np.float32), cameras[idx].R, cv2.INTER_LINEAR, cv2.BORDER_REFLECT)
p, mask_warped = warper.warp(255 * np.ones((name.shape[0], name.shape[1]), np.uint8), cameras[idx].K().astype(np.float32), cameras[idx].R, cv2.INTER_NEAREST, cv2.BORDER_CONSTANT)
compensator.apply(idx, corners[idx], image_warped, mask_warped)
mask_warped = cv2.bitwise_and(cv2.resize(cv2.dilate(masks_warped[idx], None), (mask_warped.shape[1], mask_warped.shape[0]), 0, 0, cv2.INTER_LINEAR_EXACT), mask_warped)
blender.feed(cv2.UMat(image_warped.astype(np.int16)), mask_warped, corners[idx])
result, result_mask = blender.blend(None, None)
dst = cv2.normalize(src=result, dst=None, alpha=255., norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
dst = imutils.resize(dst,width=1080)
cached = (dst_sz,warper,cameras,corners,masks_warped)
return False,dst,cached
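

# Kseam_work_aspect() is called above but not defined in this snippet. Below is
# a minimal sketch, assuming it mirrors the intrinsics-scaling step of OpenCV's
# stitching_detailed.py sample: focal lengths and principal point are multiplied
# by the seam/work aspect so K matches the seam-scale images.
def Kseam_work_aspect(K, seam_work_aspect):
    K[0, 0] *= seam_work_aspect  # fx
    K[0, 2] *= seam_work_aspect  # cx
    K[1, 1] *= seam_work_aspect  # fy
    K[1, 2] *= seam_work_aspect  # cy
    return K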
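

# Hypothetical usage sketch (the image paths are placeholders, not part of the
# original gist):
if __name__ == '__main__':
    left = cv2.imread('left.jpg')
    right = cv2.imread('right.jpg')

    # The first call runs the full registration pipeline and returns the cache.
    _, pano, cached = Manual(left, right)

    # Later frames from the same rig can reuse the cached geometry, which skips
    # feature detection, matching and bundle adjustment (the fast path above
    # returns only two values).
    _, pano_fast = Manual(left, right, cached=cached)

    # A different feature finder can also be passed in, e.g.:
    # _, pano_akaze, _ = Manual(left, right, finder=cv2.AKAZE_create())

    cv2.imwrite('panorama.jpg', pano)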