from typing import Any, Dict

import numpy as np

# NOTE: CameraConfig, get_camera_extrinsic_matrix and get_camera_intrinsic_matrix
# are assumed to be provided by the surrounding module (this snippet follows
# argoverse.utils.calibration).


class Calibration:
    """Calibration matrices and utils.

    3D XYZ points are in the 3D egovehicle coordinate frame.
    2D box xy coordinates are in the image frame, normalized by width and height.
    Point clouds are in the egovehicle coordinate frame.
    ::

        xy_image = K * [R|T] * xyz_ego
        xyz_image = [R|T] * xyz_ego

    image coord:
        ----> x-axis (u)
        |
        |
        v y-axis (v)

    egovehicle coord:
        front x, left y, up z
    """

    def __init__(self, camera_config: CameraConfig, calib: Dict[str, Any]) -> None:
        """Create a Calibration instance.

        Args:
            camera_config: A camera config
            calib: Calibration data
        """
        self.camera_config = camera_config
        self.calib_data = calib

        # Egovehicle -> camera extrinsic (4x4) and 3x4 intrinsic matrix.
        self.extrinsic = get_camera_extrinsic_matrix(calib["value"])
        self.R = self.extrinsic[0:3, 0:3]
        self.T = self.extrinsic[0:3, 3]
        self.K = get_camera_intrinsic_matrix(calib["value"])

        self.cu = self.calib_data["value"]["focal_center_x_px_"]
        self.cv = self.calib_data["value"]["focal_center_y_px_"]
        self.fu = self.calib_data["value"]["focal_length_x_px_"]
        self.fv = self.calib_data["value"]["focal_length_y_px_"]

        # Baseline-style offsets derived from the 4th column of K
        # (zero when K carries no translation column).
        self.bx = self.K[0, 3] / (-self.fu)
        self.by = self.K[1, 3] / (-self.fv)

        self.d = camera_config.distortion_coeffs
        # Camera name: drop the first 10 characters (sensor prefix) of the calibration key.
        self.camera = calib["key"][10:]

    def cart2hom(self, pts_3d: np.array) -> np.ndarray:
        """Convert Cartesian coordinates to homogeneous.

        Args:
            pts_3d: nx3 points in Cartesian

        Returns:
            nx4 points in homogeneous, obtained by appending a column of ones
        """
        n = pts_3d.shape[0]
        pts_3d_hom = np.hstack((pts_3d, np.ones((n, 1))))
        return pts_3d_hom

    def project_ego_to_image(self, pts_3d_ego: np.array) -> np.ndarray:
        """Project egovehicle coordinates to the image.

        Args:
            pts_3d_ego: nx3 points in egovehicle coord

        Returns:
            nx3 points in image coord + depth
        """
        uv_cam = self.project_ego_to_cam(pts_3d_ego)
        return self.project_cam_to_image(uv_cam)

    def project_ego_to_cam(self, pts_3d_ego: np.array) -> np.ndarray:
        """Project egovehicle points onto the camera frame.

        Args:
            pts_3d_ego: nx3 points in egovehicle coord.

        Returns:
            nx3 points in camera coord.
        """
        uv_cam = self.extrinsic.dot(self.cart2hom(pts_3d_ego).transpose())
        return uv_cam.transpose()[:, 0:3]

    def project_cam_to_ego(self, pts_3d_rect: np.array) -> np.ndarray:
        """Project points in the camera frame to the egovehicle frame.

        Args:
            pts_3d_rect (np.array): nx3 points in cam coord.

        Returns:
            np.array: nx3 points in ego coord.
        """
        return np.linalg.inv(self.extrinsic).dot(self.cart2hom(pts_3d_rect).transpose()).transpose()[:, 0:3]

    def project_image_to_ego(self, uv_depth: np.array) -> np.ndarray:
        """Project 2D image points with depth to the egovehicle coordinate frame.

        Args:
            uv_depth: nx3 array; the first two channels are uv, the 3rd channel
                is depth in the camera coord (i.e. image coordinates + depth).

        Returns:
            nx3 points in ego coord.
        """
        uv_cam = self.project_image_to_cam(uv_depth)
        return self.project_cam_to_ego(uv_cam)

    def project_image_to_cam(self, uv_depth: np.array) -> np.ndarray:
        """Project 2D image points with depth to the camera coordinate frame.

        Args:
            uv_depth: nx3 array; the first two channels are uv, the 3rd channel
                is depth in the camera coord.

        Returns:
            nx3 points in camera coord.
        """
        n = uv_depth.shape[0]
        # Invert the pinhole model: x = (u - cu) * depth / fu (+ baseline offset bx).
        x = ((uv_depth[:, 0] - self.cu) * uv_depth[:, 2]) / self.fu + self.bx
        y = ((uv_depth[:, 1] - self.cv) * uv_depth[:, 2]) / self.fv + self.by
        pts_3d_cam = np.zeros((n, 3))
        pts_3d_cam[:, 0] = x
        pts_3d_cam[:, 1] = y
        pts_3d_cam[:, 2] = uv_depth[:, 2]
        return pts_3d_cam

    def project_cam_to_image(self, pts_3d_rect: np.array) -> np.ndarray:
        """Project camera-frame coordinates to the image.

        Args:
            pts_3d_rect: nx3 points in camera coord

        Returns:
            nx3 points in image coord + depth
        """
        uv_cam = self.cart2hom(pts_3d_rect).T
        uv = self.K.dot(uv_cam)
        uv[0:2, :] /= uv[2, :]
        return uv.transpose()
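

# -----------------------------------------------------------------------------
# Usage sketch (not part of the original gist): a minimal round-trip check of
# the projections above. It bypasses __init__ (which expects an Argoverse-style
# CameraConfig and calib dict plus the get_camera_*_matrix helpers) and fills
# the derived attributes directly, so the intrinsics, extrinsic and test points
# below are illustrative assumptions, not real calibration data.
if __name__ == "__main__":
    fu, fv, cu, cv = 1400.0, 1400.0, 960.0, 600.0
    # 3x4 pinhole intrinsic matrix (zero 4th column => bx = by = 0).
    K = np.array([[fu, 0.0, cu, 0.0],
                  [0.0, fv, cv, 0.0],
                  [0.0, 0.0, 1.0, 0.0]])
    # Egovehicle -> camera extrinsic: camera z looks along ego +x,
    # camera x points along ego -y, camera y points along ego -z.
    extrinsic = np.eye(4)
    extrinsic[:3, :3] = np.array([[0.0, -1.0, 0.0],
                                  [0.0, 0.0, -1.0],
                                  [1.0, 0.0, 0.0]])
    extrinsic[:3, 3] = np.array([0.0, 0.0, 1.5])

    calib = Calibration.__new__(Calibration)  # skip __init__ on purpose
    calib.K, calib.extrinsic = K, extrinsic
    calib.R, calib.T = extrinsic[:3, :3], extrinsic[:3, 3]
    calib.fu, calib.fv, calib.cu, calib.cv = fu, fv, cu, cv
    calib.bx = K[0, 3] / (-fu)
    calib.by = K[1, 3] / (-fv)

    # Points in front of the vehicle (ego frame: x forward, y left, z up).
    pts_ego = np.array([[10.0, 1.0, 0.5],
                        [25.0, -2.0, 1.0]])
    uvd = calib.project_ego_to_image(pts_ego)    # columns: u, v, camera depth
    recovered = calib.project_image_to_ego(uvd)  # invert using the stored depth
    print(np.allclose(recovered, pts_ego))       # expected: True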