import cv2
import numpy as np
from matplotlib import pyplot as plt
left_image_address = '/home/deveshdatwani/Desktop/left.jpg'
right_image_address = '/home/deveshdatwani/Desktop/right.jpg'
left_image = cv2.imread(left_image_address, cv2.IMREAD_GRAYSCALE)
right_image = cv2.imread(right_image_address, cv2.IMREAD_GRAYSCALE)
# imread returns None on failure, so check before touching the images
assert left_image is not None and right_image is not None
left_image = cv2.resize(left_image, (448, 448))
right_image = cv2.resize(right_image, (448, 448))

def getFeatures(image):
    # Detect ORB keypoints, then compute their binary descriptors
    orb = cv2.ORB_create()
    features = {}
    keypoints = orb.detect(image)
    keypoints, descriptors = orb.compute(image, keypoints)
    features['keypoints'] = keypoints
    features['descriptors'] = descriptors
    return features
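
# Note: the detect/compute pair above is equivalent to OpenCV's one-call form
#   keypoints, descriptors = orb.detectAndCompute(image, None)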

def drawKeypoints(image, features):
    # None fills cv2.drawKeypoints' output-image slot; the color must be
    # passed after it, not in its place
    new_image = cv2.drawKeypoints(image, features['keypoints'], None, color=(0, 255, 0))
    plt.imshow(new_image)
    plt.show()
    return None

def matchFeatures(features_1, features_2):
    # Hamming distance suits ORB's binary descriptors; crossCheck keeps
    # only mutual best matches
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
    matches = bf.match(features_1['descriptors'], features_2['descriptors'])
    return matches
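
# A possible alternative, not in the original gist: Lowe's ratio test over
# knnMatch often rejects ambiguous matches better than crossCheck. A minimal
# sketch; the 0.75 threshold is a common default, not something this gist uses.
def matchFeaturesRatioTest(features_1, features_2, ratio=0.75):
    bf = cv2.BFMatcher(cv2.NORM_HAMMING)
    pairs = bf.knnMatch(features_1['descriptors'], features_2['descriptors'], k=2)
    # Keep a match only when its best distance clearly beats the second best
    return [p[0] for p in pairs if len(p) == 2 and p[0].distance < ratio * p[1].distance]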

def drawMatches(img1, kp1, img2, kp2, matches):
    matches = sorted(matches, key=lambda x: x.distance)
    img3 = cv2.drawMatches(img1, kp1, img2, kp2, matches[:10], None,
                           flags=cv2.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS)
    plt.imshow(img3)
    plt.show()
    return None

def getHomography(points1, points2):
    # Least-squares fit by default; the inlier mask is unused here
    T, mask = cv2.findHomography(points1, points2)
    return T
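
# Usage note (an assumption about intent, since getHomography is never called
# below): points1 and points2 should be matching N x 2 float32 arrays with
# N >= 4; passing cv2.RANSAC as a third argument to cv2.findHomography makes
# the fit robust to outlier matches.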

def merge(image1, image2, M):
    # A minimal sketch of the unfinished body: warp image2 into image1's
    # frame on a double-width canvas, then paste image1 over the left half
    image3 = cv2.warpPerspective(image2, M, (image1.shape[1] * 2, image1.shape[0]))
    image3[:image1.shape[0], :image1.shape[1]] = image1
    return image3

features_left_image = getFeatures(left_image)
features_right_image = getFeatures(right_image)
matches = matchFeatures(features_left_image, features_right_image)
# drawMatches(left_image, features_left_image['keypoints'], right_image, features_right_image['keypoints'], matches)

# Take the 4 best matches; queryIdx indexes the left (first) descriptor set
# and trainIdx the right (second) one
matches = sorted(matches, key=lambda x: x.distance)
left_idx = np.array([m.queryIdx for m in matches[:4]], dtype=np.uint32)
right_idx = np.array([m.trainIdx for m in matches[:4]], dtype=np.uint32)
k_points1 = np.float32([kp.pt for kp in features_left_image['keypoints']])[left_idx]
k_points2 = np.float32([kp.pt for kp in features_right_image['keypoints']])[right_idx]

# Map right-image points onto their left-image counterparts, so warping the
# right image brings it into the left image's frame
T = cv2.getPerspectiveTransform(k_points2, k_points1)
warp = cv2.warpPerspective(right_image, T, (800, 800))
plt.imshow(warp, cmap='gray')
plt.show()
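
# A hedged end-to-end sketch, not in the original gist: estimate the homography
# from all matches with RANSAC rather than the 4 best, then stitch with merge().
# The 5.0-pixel reprojection threshold is a common default, an assumption here.
all_left = np.float32([features_left_image['keypoints'][m.queryIdx].pt for m in matches])
all_right = np.float32([features_right_image['keypoints'][m.trainIdx].pt for m in matches])
H, inlier_mask = cv2.findHomography(all_right, all_left, cv2.RANSAC, 5.0)
panorama = merge(left_image, right_image, H)
plt.imshow(panorama, cmap='gray')
plt.show()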