# usage: bin/directorPython -m director.mainwindowapp --script test_nuscenes.py /path/to/dataset

import os

import numpy as np

from utils.nuscenes import NuScenes
from utils.data_classes import PointCloud

from director import transformUtils
from director import vtkNumpy as vnp
from director import visualization as vis
from director import objectmodel as om
from director import ioUtils
from director import vtkAll as vtk
from director.debugVis import DebugData


def load_scene(nusc, scene):

    pointclouds = []
    ego_poses = []
    lidar_to_ego = []
    timestamps = []

    # Walk the scene's samples (a linked list followed via sample['next']), collecting
    # the top lidar sweep, the ego pose, and the lidar extrinsics for each sample.
    sample_token = scene['first_sample_token']
    while sample_token:
        sample = nusc.get('sample', sample_token)
        sample_data = sample['data']
        lidar_data = nusc.get('sample_data', sample_data['LIDAR_TOP'])
        ego_pose = nusc.get('ego_pose', lidar_data['ego_pose_token'])
        cs_record = nusc.get('calibrated_sensor', lidar_data['calibrated_sensor_token'])

        timestamps.append(sample['timestamp'])
        pointclouds.append(PointCloud.from_file(os.path.join(nusc.dataroot, lidar_data['filename'])))
        ego_poses.append(transformUtils.transformFromPose(ego_pose['translation'], ego_pose['rotation']))
        lidar_to_ego.append(transformUtils.transformFromPose(cs_record['translation'], cs_record['rotation']))

        sample_token = sample['next']

    scenes_folder = om.getOrCreateContainer('scenes', parentObj=om.getOrCreateContainer('nuscenes'))
    ego_poses_folder = om.getOrCreateContainer('ego_poses', parentObj=scenes_folder)
    scene_folder = om.getOrCreateContainer(scene['name'], parentObj=scenes_folder)

    for i in range(len(pointclouds)):

        # PointCloud stores points as (dims x N); transpose so each row is a point (x, y, z, intensity, ...)
        pointcloud_data = pointclouds[i].points.transpose()
        points = pointcloud_data[:, :3]
        intensity = pointcloud_data[:, 3]
        point_data = {
            'range': np.linalg.norm(points[:, :2], axis=1),
            'height': points[:, 2],
            'intensity': intensity,
            'log_intensity': np.log1p(intensity) / np.log(256)}

        pd = vnp.numpyToPolyData(points, point_data)
        obj = vis.showPolyData(pd, 'pointcloud {}'.format(timestamps[i]), parent=scene_folder, alpha=0.2,
                               colorByName='log_intensity', colorByRange=[0.2, 0.7])

        # place each sweep at its global pose: lidar frame -> ego frame -> global frame
        frame = vis.addChildFrame(obj)
        frame.setProperty('Visible', True)
        frame.copyFrame(transformUtils.concatenateTransforms([lidar_to_ego[i], ego_poses[i]]))

        vis.showFrame(ego_poses[i], 'ego_pose {}'.format(timestamps[i]), parent=ego_poses_folder, visible=False)


def load_map(nusc, map_token):
    map = nusc.get('map', map_token)
    mask = map['mask']
    img = ioUtils.readImage(mask.img_file)

    # Build a flat plane sized to the map mask image (mask.precision is meters per
    # pixel), centered at half the map extent, and texture it with the mask image.
    d = DebugData()
    d.addPlane((img.GetDimensions()[0]*mask.precision*0.5, img.GetDimensions()[1]*mask.precision*0.5, 0),
               (0, 0, 1),
               img.GetDimensions()[0]*mask.precision,
               img.GetDimensions()[1]*mask.precision)

    map_folder = om.getOrCreateContainer('maps', parentObj=om.getOrCreateContainer('nuscenes'))
    obj = vis.showPolyData(d.getPolyData(), 'map {}'.format(map_token), parent=map_folder, visible=False)

    tex = vtk.vtkTexture()
    tex.SetInputData(img)
    obj.actor.SetTexture(tex)


def load_nuscenes(dataroot):
    nusc = NuScenes(version='v0.1', dataroot=dataroot, verbose=True)

    # Load only the scenes recorded in Boston seaport, and collect the map tokens
    # referenced by those scenes so each map is loaded once.
    maps = set()
    for scene in nusc.scene:
        log = nusc.get('log', scene['log_token'])
        location = log['location']
        if location == 'boston-seaport':
            load_scene(nusc, scene)
            maps.add(log['map_token'])

    for map_token in maps:
        load_map(nusc, map_token)


if __name__ == '__main__':
    # _argv and _fields are expected in the script globals when run through director.mainwindowapp (see usage note above)
    load_nuscenes(_argv[1])
    _fields.view.resetCamera()
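

# ---------------------------------------------------------------------------
# Hedged sketch (not part of the original gist): the frame attached to each
# pointcloud above composes lidar -> ego -> global. The same transform can be
# reproduced with plain numpy from the nuScenes pose records, which store a
# translation [x, y, z] and a rotation quaternion in [w, x, y, z] order.
# pose_to_matrix is a hypothetical helper, not part of the nuScenes devkit or
# director.
def pose_to_matrix(translation, rotation_wxyz):
    """Return a 4x4 homogeneous transform from a translation and unit quaternion (w, x, y, z)."""
    w, x, y, z = rotation_wxyz
    T = np.eye(4)
    T[:3, :3] = np.array([
        [1 - 2*(y*y + z*z), 2*(x*y - w*z),     2*(x*z + w*y)],
        [2*(x*y + w*z),     1 - 2*(x*x + z*z), 2*(y*z - w*x)],
        [2*(x*z - w*y),     2*(y*z + w*x),     1 - 2*(x*x + y*y)]])
    T[:3, 3] = translation
    return T

# Usage, equivalent to concatenateTransforms([lidar_to_ego[i], ego_poses[i]]) above:
#   T_global = pose_to_matrix(ego_pose['translation'], ego_pose['rotation']).dot(
#       pose_to_matrix(cs_record['translation'], cs_record['rotation']))
#   points_global = T_global.dot(np.vstack((points.T, np.ones(len(points)))))[:3].T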