Created
August 12, 2018 17:03
-
-
Save naoya-chiba/b77c6a88a7a40fad005668de6bce0d0d to your computer and use it in GitHub Desktop.
「SSII2018のTSを例題に，PCL (C++) と Open3D (Python) の比較」のソースコード
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Transcription of Dr. Akizuki's SSII tutorial\n",
    "# https://www.slideshare.net/SSII_Slides/3d-101077557\n",
    "\n",
    "import open3d\n",
    "import numpy as np\n",
    "import copy"
   ]
  },
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# p.53 読み込みと表示\n", | |
"pointcloud = open3d.read_point_cloud(\"scene1.ply\")\n", | |
"open3d.draw_geometries([pointcloud])" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# p.53 保存\n", | |
"open3d.write_point_cloud(\"output.pcd\", pointcloud)" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# p.53 ダウンサンプリング\n", | |
"pointcloud_ds = open3d.voxel_down_sample(pointcloud, voxel_size = 0.01)\n", | |
"open3d.draw_geometries([pointcloud_ds])" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# p.54 法線推定\n", | |
"# estimate_normals を叩いてから draw_geometries をすると,'n'キーで法線が可視化される.\n", | |
"open3d.estimate_normals(\n", | |
" pointcloud,\n", | |
" search_param = open3d.KDTreeSearchParamHybrid(\n", | |
" radius = 0.1, max_nn = 30))\n", | |
"open3d.draw_geometries([pointcloud])\n", | |
"\n", | |
"# ちなみに法線推定後は has_normals() が True になる\n", | |
"print(pointcloud.has_normals())" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# p.54 orient_normal_towards_camera_location\n", | |
"# 法線の方向を視点ベースでそろえる\n", | |
"open3d.orient_normals_towards_camera_location(\n", | |
" pointcloud,\n", | |
" camera_location = np.array([0., 10., 10.], dtype=\"float64\"))\n", | |
"open3d.draw_geometries([pointcloud])" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# p.55 NumPyとの連携\n", | |
"# ランダムな点を生成\n", | |
"data = np.random.rand(10, 3)\n", | |
"\n", | |
"# Open3Dの点群に変換\n", | |
"pointcloud = open3d.PointCloud()\n", | |
"pointcloud.points = open3d.Vector3dVector(data)\n", | |
"\n", | |
"# ランダムに打った点を可視化\n", | |
"open3d.draw_geometries([pointcloud])\n", | |
"\n", | |
"# numpy.ndarrayに変換.元のデータともちろん一致する\n", | |
"xyz = np.asarray(pointcloud.points)\n", | |
"print(np.linalg.norm(data - xyz))" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# p.56-62 モデルベースマッチング\n", | |
"import open3d\n", | |
"import numpy as np\n", | |
"import copy\n", | |
"\n", | |
"# 位置姿勢推定結果の表示\n", | |
"def draw_registration_result(source, target, transformation):\n", | |
" source_temp = copy.deepcopy(source)\n", | |
" target_temp = copy.deepcopy(target)\n", | |
" source_temp.paint_uniform_color([1, 0.706, 0])\n", | |
" target_temp.paint_uniform_color([0, 0.651, 0.929])\n", | |
" source_temp.transform(transformation)\n", | |
" open3d.draw_geometries([source_temp, target_temp])\n", | |
"\n", | |
"# 法線推定と特徴量計算\n", | |
"def preprocess_point_cloud(pointcloud, voxel_size):\n", | |
" # Keypoint を Voxel Down Sample で生成\n", | |
" keypoints = open3d.voxel_down_sample(pointcloud, voxel_size)\n", | |
"\n", | |
" # 法線推定\n", | |
" radius_normal = voxel_size * 2\n", | |
" view_point = np.array([0., 10., 10.], dtype=\"float64\")\n", | |
" open3d.estimate_normals(\n", | |
" keypoints,\n", | |
" search_param = open3d.KDTreeSearchParamHybrid(\n", | |
" radius = radius_normal,\n", | |
" max_nn = 30))\n", | |
" open3d.orient_normals_towards_camera_location(\n", | |
" keypoints,\n", | |
" camera_location = view_point)\n", | |
"\n", | |
" # FPFH特徴量計算\n", | |
" radius_feature = voxel_size * 5\n", | |
" fpfh = open3d.compute_fpfh_feature(\n", | |
" keypoints,\n", | |
" search_param = open3d.KDTreeSearchParamHybrid(\n", | |
" radius = radius_feature,\n", | |
" max_nn = 100))\n", | |
"\n", | |
" return keypoints, fpfh\n", | |
"\n", | |
"# RANSAC による Global Registration\n", | |
"def execute_global_registration(kp1, kp2, fpfh1, fpfh2, voxel_size):\n", | |
" distance_threshold = voxel_size * 2.5\n", | |
" result = open3d.registration_ransac_based_on_feature_matching(\n", | |
" kp1, kp2, fpfh1, fpfh2,\n", | |
" distance_threshold,\n", | |
" open3d.TransformationEstimationPointToPoint(False), 4,\n", | |
" [open3d.CorrespondenceCheckerBasedOnEdgeLength(0.9),\n", | |
" open3d.CorrespondenceCheckerBasedOnDistance(distance_threshold)],\n", | |
" open3d.RANSACConvergenceCriteria(500000, 1000))\n", | |
" return result\n", | |
"\n", | |
"# ICP によるRegistration\n", | |
"def refine_registration(scene1, scene2, trans, voxel_size):\n", | |
" distance_threshold = voxel_size * 0.4\n", | |
" result = open3d.registration_icp(\n", | |
" scene1, scene2, distance_threshold, trans,\n", | |
" open3d.TransformationEstimationPointToPoint())\n", | |
" return result\n", | |
"\n", | |
"# 読み込み\n", | |
"scene1 = open3d.read_point_cloud(\"scene1.ply\")\n", | |
"scene2 = open3d.read_point_cloud(\"scene2.ply\")\n", | |
"\n", | |
"# scene2 を適当に回転・並進\n", | |
"transform_matrix = np.asarray([\n", | |
" [1., 0., 0., -0.1],\n", | |
" [0., 0., -1., 0.1],\n", | |
" [0., 1., 0., -0.1],\n", | |
" [0., 0., 0., 1.]], dtype=\"float64\")\n", | |
"scene2.transform(transform_matrix)\n", | |
"\n", | |
"# 位置合わせ前の点群の表示\n", | |
"draw_registration_result(scene1, scene2, np.eye(4))\n", | |
"\n", | |
"voxel_size = 0.01\n", | |
"\n", | |
"# RANSAC による Global Registration\n", | |
"scene1_kp, scene1_fpfh = preprocess_point_cloud(scene1, voxel_size)\n", | |
"scene2_kp, scene2_fpfh = preprocess_point_cloud(scene2, voxel_size)\n", | |
"result_ransac = execute_global_registration(\n", | |
" scene1_kp, scene2_kp, scene1_fpfh, scene2_fpfh, voxel_size)\n", | |
"draw_registration_result(scene1, scene2, result_ransac.transformation)\n", | |
"\n", | |
"# ICP による refine\n", | |
"result = refine_registration(scene1, scene2, result_ransac.transformation, voxel_size)\n", | |
"draw_registration_result(scene1, scene2, result.transformation)" | |
] | |
} | |
], | |
"metadata": { | |
"kernelspec": { | |
"display_name": "Python 3", | |
"language": "python", | |
"name": "python3" | |
}, | |
"language_info": { | |
"codemirror_mode": { | |
"name": "ipython", | |
"version": 3 | |
}, | |
"file_extension": ".py", | |
"mimetype": "text/x-python", | |
"name": "python", | |
"nbconvert_exporter": "python", | |
"pygments_lexer": "ipython3", | |
"version": "3.6.5" | |
} | |
}, | |
"nbformat": 4, | |
"nbformat_minor": 2 | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment