diff --git a/runners/cad_strategy.py b/runners/cad_strategy.py
index 5cc7518..078678a 100644
--- a/runners/cad_strategy.py
+++ b/runners/cad_strategy.py
@@ -10,8 +10,8 @@ from PytorchBoot.status import status_manager
 
 import sys
-sys.path.append("/home/user/nbv_rec/nbv_rec_control")
-from utils.control_util import ControlUtil
+sys.path.append(r"C:\Document\Local Project\nbv_rec_control")
+#from utils.control_util import ControlUtil
 from utils.communicate_util import CommunicateUtil
 from utils.pts_util import PtsUtil
 from utils.view_sample_util import ViewSampleUtil
@@ -99,8 +99,8 @@ class CADStrategyRunner(Runner):
             scan_points_indices_list = scan_points_idx_list,
             init_view=0,
             threshold=self.voxel_size,
-            soft_overlap_threshold= self.soft_overlap_threshold,
-            hard_overlap_threshold= self.hard_overlap_threshold,
+            soft_overlap_threshold = self.soft_overlap_threshold,
+            hard_overlap_threshold = self.hard_overlap_threshold,
             scan_points_threshold = self.scan_points_threshold,
             status_info=self.status_info
         )
@@ -136,13 +136,22 @@ class CADStrategyRunner(Runner):
 
 
 if __name__ == "__main__":
-    model_path = "/home/user/nbv_rec/data/mesh.obj"
+    model_path = r"C:\Users\hofee\Downloads\mesh.obj"
     model = trimesh.load(model_path)
-    test_pts_L = np.loadtxt("/home/user/nbv_rec/data/cam_pts_0_L.txt")
-    test_pts_R = np.loadtxt("/home/user/nbv_rec/data/cam_pts_0_R.txt")
-    cad_to_cam_L = PtsUtil.register_icp(test_pts_L, model)
-    cad_to_cam_R = PtsUtil.register_icp(test_pts_R, model)
-    cad_pts_L = PtsUtil.transform_point_cloud(test_pts_L, cad_to_cam_L)
-    cad_pts_R = PtsUtil.transform_point_cloud(test_pts_R, cad_to_cam_R)
-    np.savetxt("/home/user/nbv_rec/data/cad_pts_0_L.txt", cad_pts_L)
-    np.savetxt("/home/user/nbv_rec/data/cad_pts_0_R.txt", cad_pts_R)
\ No newline at end of file
+    test_pts_L = np.load(r"C:\Users\hofee\Downloads\0.npy")
+
+    import open3d as o3d
+    def add_noise(points, translation, rotation):
+        R = o3d.geometry.get_rotation_matrix_from_axis_angle(rotation)
+        noisy_points = points @ R.T + translation
+        return noisy_points
+
+    translation_noise = np.random.uniform(-0.5, 0.5, size=3)
+    rotation_noise = np.random.uniform(-np.pi/4, np.pi/4, size=3)
+    noisy_pts_L = add_noise(test_pts_L, translation_noise, rotation_noise)
+
+    cad_to_cam_L = PtsUtil.register(noisy_pts_L, model)
+
+    cad_pts_L = PtsUtil.transform_point_cloud(noisy_pts_L, cad_to_cam_L)
+    np.savetxt(r"test.txt", cad_pts_L)
+    np.savetxt(r"src.txt", noisy_pts_L)
\ No newline at end of file
diff --git a/runners/inference.py b/runners/inference.py
index 82d329b..d57ce9a 100644
--- a/runners/inference.py
+++ b/runners/inference.py
@@ -23,6 +23,7 @@ class Inferencer(Runner):
         self.load_experiment("inferencer")
         self.reconstruct_config = ConfigManager.get("runner", "reconstruct")
         self.voxel_size = self.reconstruct_config["voxel_size"]
+        self.max_iter = self.reconstruct_config["max_iter"]
 
     def create_experiment(self, backup_name=None):
         super().create_experiment(backup_name)
@@ -45,6 +46,7 @@ class Inferencer(Runner):
             "combined_scanned_pts": combined_pts
         }
         ''' enter loop '''
+        iter = 0
         while True:
             ''' inference '''
             inference_result = CommunicateUtil.get_inference_data(input_data)
@@ -58,9 +60,14 @@ class Inferencer(Runner):
             ''' update combined pts '''
             combined_pts = np.concatenate([combined_pts, curr_cam_pts], axis=0)
             combined_pts = PtsUtil.voxel_downsample_point_cloud(combined_pts, voxel_size=self.voxel_size)
-
-            ''' update input data '''
+            input_data["combined_scanned_pts"] = combined_pts
+            input_data["scanned_target_points_num"].append(curr_cam_pts.shape[0])
+            input_data["scanned_n_to_world_pose_9d"].append(curr_cam_pose)
+
+            ''' check stop condition '''
+            if iter >= self.max_iter:
+                break
 
     def run(self):
diff --git a/test.py b/test.py
deleted file mode 100644
index 8770598..0000000
--- a/test.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import flask
-
-app = flask.Flask(__name__)
-
-@app.route('/hello')
-def hello():
-    return "Hello, World!"
-
-if __name__ == '__main__':
-    app.run(host="0.0.0.0", port=7999)
\ No newline at end of file
diff --git a/utils/pts_util.py b/utils/pts_util.py
index 0d8860c..47dc938 100644
--- a/utils/pts_util.py
+++ b/utils/pts_util.py
@@ -4,6 +4,7 @@ import torch
 import trimesh
 from scipy.spatial import cKDTree
 
+
 class PtsUtil:
 
     @staticmethod
@@ -12,7 +13,7 @@ class PtsUtil:
         o3d_pc.points = o3d.utility.Vector3dVector(point_cloud)
         downsampled_pc = o3d_pc.voxel_down_sample(voxel_size)
         return np.asarray(downsampled_pc.points)
-    
+
     @staticmethod
     def random_downsample_point_cloud(point_cloud, num_points, require_idx=False):
         if point_cloud.shape[0] == 0:
@@ -23,7 +24,7 @@ class PtsUtil:
         if require_idx:
            return point_cloud[idx], idx
         return point_cloud[idx]
-    
+
     @staticmethod
     def fps_downsample_point_cloud(point_cloud, num_points, require_idx=False):
         N = point_cloud.shape[0]
@@ -31,25 +32,29 @@ class PtsUtil:
 
         sampled_indices = np.zeros(num_points, dtype=int)
         sampled_indices[0] = np.random.randint(0, N)
-        distances = np.linalg.norm(point_cloud - point_cloud[sampled_indices[0]], axis=1)
+        distances = np.linalg.norm(
+            point_cloud - point_cloud[sampled_indices[0]], axis=1
+        )
         for i in range(1, num_points):
             farthest_index = np.argmax(distances)
             sampled_indices[i] = farthest_index
             mask[farthest_index] = True
-            new_distances = np.linalg.norm(point_cloud - point_cloud[farthest_index], axis=1)
+            new_distances = np.linalg.norm(
+                point_cloud - point_cloud[farthest_index], axis=1
+            )
             distances = np.minimum(distances, new_distances)
-    
+
         sampled_points = point_cloud[sampled_indices]
         if require_idx:
             return sampled_points, sampled_indices
         return sampled_points
-    
+
     @staticmethod
     def random_downsample_point_cloud_tensor(point_cloud, num_points):
         idx = torch.randint(0, len(point_cloud), (num_points,))
         return point_cloud[idx]
-    
+
     @staticmethod
     def voxelize_points(points, voxel_size):
         voxel_indices = np.floor(points / voxel_size).astype(np.int32)
@@ -61,9 +66,11 @@ class PtsUtil:
         points_h = np.concatenate([points, np.ones((points.shape[0], 1))], axis=1)
         points_h = np.dot(pose_mat, points_h.T).T
         return points_h[:, :3]
-    
+
     @staticmethod
-    def get_overlapping_points(point_cloud_L, point_cloud_R, voxel_size=0.005, require_idx=False):
+    def get_overlapping_points(
+        point_cloud_L, point_cloud_R, voxel_size=0.005, require_idx=False
+    ):
         voxels_L, indices_L = PtsUtil.voxelize_points(point_cloud_L, voxel_size)
         voxels_R, _ = PtsUtil.voxelize_points(point_cloud_R, voxel_size)
 
@@ -77,48 +84,88 @@ class PtsUtil:
         if require_idx:
             return overlapping_points, mask_L
         return overlapping_points
-    
+
     @staticmethod
-    def filter_points(points, points_normals, cam_pose, voxel_size=0.002, theta=45, z_range=(0.2, 0.45)):
-        
-        """ filter with z range """
+    def filter_points(
+        points,
+        points_normals,
+        cam_pose,
+        voxel_size=0.002,
+        theta=45,
+        z_range=(0.2, 0.45),
+    ):
+        """filter with z range"""
         points_cam = PtsUtil.transform_point_cloud(points, np.linalg.inv(cam_pose))
         idx = (points_cam[:, 2] > z_range[0]) & (points_cam[:, 2] < z_range[1])
         z_filtered_points = points[idx]
-        
+
         """ filter with normal """
-        sampled_points = PtsUtil.voxel_downsample_point_cloud(z_filtered_points, voxel_size)
-        kdtree = cKDTree(points_normals[:,:3])
+        sampled_points = PtsUtil.voxel_downsample_point_cloud(
+            z_filtered_points, voxel_size
+        )
+        kdtree = cKDTree(points_normals[:, :3])
         _, indices = kdtree.query(sampled_points)
         nearest_points = points_normals[indices]
         normals = nearest_points[:, 3:]
-        camera_axis = -cam_pose[:3, 2]  
+        camera_axis = -cam_pose[:3, 2]
         normals_normalized = normals / np.linalg.norm(normals, axis=1, keepdims=True)
         cos_theta = np.dot(normals_normalized, camera_axis)
         theta_rad = np.deg2rad(theta)
         idx = cos_theta > np.cos(theta_rad)
-        filtered_sampled_points= sampled_points[idx]
+        filtered_sampled_points = sampled_points[idx]
         return filtered_sampled_points[:, :3]
-    
+
     @staticmethod
-    def register_icp(pcl: np.ndarray, model: trimesh.Trimesh, threshold=0.5) -> np.ndarray:
-        mesh_points = np.asarray(model.vertices)
-        downsampled_mesh_points = PtsUtil.random_downsample_point_cloud(mesh_points, pcl.shape[0])
-        mesh_point_cloud = o3d.geometry.PointCloud()
-        mesh_point_cloud.points = o3d.utility.Vector3dVector(downsampled_mesh_points)
-        np.savetxt("mesh_point_cloud.txt", downsampled_mesh_points)
-        pcl_point_cloud = o3d.geometry.PointCloud()
-        pcl_point_cloud.points = o3d.utility.Vector3dVector(pcl)
-        initial_transform = np.eye(4)
-        reg_icp = o3d.pipelines.registration.registration_icp(
-            pcl_point_cloud, mesh_point_cloud, threshold,
-            initial_transform,
-            o3d.pipelines.registration.TransformationEstimationPointToPoint()
+    def register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.01):
+        radius_normal = voxel_size * 2
+        pipreg = o3d.pipelines.registration
+        model_pcd = o3d.geometry.PointCloud()
+        model_pcd.points = o3d.utility.Vector3dVector(model.vertices)
+        model_downsampled = model_pcd.voxel_down_sample(voxel_size)
+        model_downsampled.estimate_normals(
+            search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                radius=radius_normal, max_nn=30
+            )
         )
-        if np.allclose(reg_icp.transformation, np.eye(4)):
-            print("Registration failed. Check your initial alignment and point cloud quality.")
-        else:
-            print("Registration successful.")
-            print(reg_icp.transformation)
-        return reg_icp.transformation
\ No newline at end of file
+        model_fpfh = pipreg.compute_fpfh_feature(
+            model_downsampled,
+            o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
+        )
+
+        source_pcd = o3d.geometry.PointCloud()
+        source_pcd.points = o3d.utility.Vector3dVector(pcl)
+        source_downsampled = source_pcd.voxel_down_sample(voxel_size)
+        source_downsampled.estimate_normals(
+            search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                radius=radius_normal, max_nn=30
+            )
+        )
+        source_fpfh = pipreg.compute_fpfh_feature(
+            source_downsampled,
+            o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
+        )
+
+        reg_ransac = pipreg.registration_ransac_based_on_feature_matching(
+            source_downsampled,
+            model_downsampled,
+            source_fpfh,
+            model_fpfh,
+            mutual_filter=True,
+            max_correspondence_distance=voxel_size * 1.5,
+            estimation_method=pipreg.TransformationEstimationPointToPoint(False),
+            ransac_n=4,
+            checkers=[pipreg.CorrespondenceCheckerBasedOnEdgeLength(0.9)],
+            criteria=pipreg.RANSACConvergenceCriteria(4000000, 500),
+        )
+
+        reg_icp = pipreg.registration_icp(
+            source_downsampled,
+            model_downsampled,
+            voxel_size * 2,
+            reg_ransac.transformation,
+            pipreg.TransformationEstimationPointToPlane(),
+            pipreg.ICPConvergenceCriteria(max_iteration=200),
+        )
+
+        return reg_icp.transformation
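
Note: the new PtsUtil.register introduced in utils/pts_util.py is a coarse-to-fine pipeline: both clouds are voxel-downsampled, FPFH features drive a RANSAC global alignment, and point-to-plane ICP refines the result into a 4x4 rigid transform. A minimal usage sketch, mirroring the __main__ block in cad_strategy.py (the file names here are placeholders, not part of this change):

    import numpy as np
    import trimesh
    from utils.pts_util import PtsUtil

    model = trimesh.load("mesh.obj")      # CAD model of the scanned part (placeholder path)
    cam_pts = np.load("cam_pts.npy")      # partial point cloud of the part (placeholder path)
    transform = PtsUtil.register(cam_pts, model, voxel_size=0.01)   # 4x4 transform aligning cam_pts to the model
    aligned = PtsUtil.transform_point_cloud(cam_pts, transform)     # apply the transform to the scanned points
    np.savetxt("aligned_pts.txt", aligned)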