finish register

hofee 2024-10-08 00:24:22 +08:00
parent 2209acce1b
commit 825f8652d5
4 changed files with 116 additions and 63 deletions

View File

@@ -10,8 +10,8 @@ from PytorchBoot.status import status_manager
 import sys
-sys.path.append("/home/user/nbv_rec/nbv_rec_control")
-from utils.control_util import ControlUtil
+sys.path.append(r"C:\Document\Local Project\nbv_rec_control")
+#from utils.control_util import ControlUtil
 from utils.communicate_util import CommunicateUtil
 from utils.pts_util import PtsUtil
 from utils.view_sample_util import ViewSampleUtil
@@ -99,8 +99,8 @@ class CADStrategyRunner(Runner):
     scan_points_indices_list = scan_points_idx_list,
     init_view=0,
     threshold=self.voxel_size,
-    soft_overlap_threshold= self.soft_overlap_threshold,
-    hard_overlap_threshold= self.hard_overlap_threshold,
+    soft_overlap_threshold = self.soft_overlap_threshold,
+    hard_overlap_threshold = self.hard_overlap_threshold,
     scan_points_threshold = self.scan_points_threshold,
     status_info=self.status_info
 )
@@ -136,13 +136,22 @@ class CADStrategyRunner(Runner):
 if __name__ == "__main__":
-    model_path = "/home/user/nbv_rec/data/mesh.obj"
+    model_path = r"C:\Users\hofee\Downloads\mesh.obj"
     model = trimesh.load(model_path)
-    test_pts_L = np.loadtxt("/home/user/nbv_rec/data/cam_pts_0_L.txt")
-    test_pts_R = np.loadtxt("/home/user/nbv_rec/data/cam_pts_0_R.txt")
-    cad_to_cam_L = PtsUtil.register_icp(test_pts_L, model)
-    cad_to_cam_R = PtsUtil.register_icp(test_pts_R, model)
-    cad_pts_L = PtsUtil.transform_point_cloud(test_pts_L, cad_to_cam_L)
-    cad_pts_R = PtsUtil.transform_point_cloud(test_pts_R, cad_to_cam_R)
-    np.savetxt("/home/user/nbv_rec/data/cad_pts_0_L.txt", cad_pts_L)
-    np.savetxt("/home/user/nbv_rec/data/cad_pts_0_R.txt", cad_pts_R)
+    test_pts_L = np.load(r"C:\Users\hofee\Downloads\0.npy")
+
+    import open3d as o3d
+    def add_noise(points, translation, rotation):
+        R = o3d.geometry.get_rotation_matrix_from_axis_angle(rotation)
+        noisy_points = points @ R.T + translation
+        return noisy_points
+
+    translation_noise = np.random.uniform(-0.5, 0.5, size=3)
+    rotation_noise = np.random.uniform(-np.pi/4, np.pi/4, size=3)
+    noisy_pts_L = add_noise(test_pts_L, translation_noise, rotation_noise)
+    cad_to_cam_L = PtsUtil.register(noisy_pts_L, model)
+    cad_pts_L = PtsUtil.transform_point_cloud(noisy_pts_L, cad_to_cam_L)
+    np.savetxt(r"test.txt", cad_pts_L)
+    np.savetxt(r"src.txt", noisy_pts_L)

View File

@@ -23,6 +23,7 @@ class Inferencer(Runner):
         self.load_experiment("inferencer")
         self.reconstruct_config = ConfigManager.get("runner", "reconstruct")
         self.voxel_size = self.reconstruct_config["voxel_size"]
+        self.max_iter = self.reconstruct_config["max_iter"]

     def create_experiment(self, backup_name=None):
         super().create_experiment(backup_name)
@@ -45,6 +46,7 @@ class Inferencer(Runner):
             "combined_scanned_pts": combined_pts
         }
         ''' enter loop '''
+        iter = 0
         while True:
             ''' inference '''
             inference_result = CommunicateUtil.get_inference_data(input_data)
@@ -58,9 +60,14 @@ class Inferencer(Runner):
             ''' update combined pts '''
             combined_pts = np.concatenate([combined_pts, curr_cam_pts], axis=0)
             combined_pts = PtsUtil.voxel_downsample_point_cloud(combined_pts, voxel_size=self.voxel_size)

             ''' update input data '''
+            input_data["combined_scanned_pts"] = combined_pts
+            input_data["scanned_target_points_num"].append(curr_cam_pts.shape[0])
+            input_data["scanned_n_to_world_pose_9d"].append(curr_cam_pose)
+
+            ''' check stop condition '''
+            if iter >= self.max_iter:
+                break
     def run(self):
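For reference, a minimal self-contained sketch of how the new stop condition is meant to behave, assuming the counter is advanced once per pass (the increment itself is not visible in this hunk; max_iter stands in for reconstruct_config["max_iter"]):

# Toy illustration of the bounded while-True loop added to the Inferencer.
max_iter = 5          # in the runner this comes from reconstruct_config["max_iter"]
iter = 0
while True:
    # ... one inference / reconstruction step would run here ...
    iter += 1         # assumption: counter advanced once per pass
    if iter >= max_iter:
        break
print("stopped after", iter, "iterations")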

test.py
View File

@@ -1,10 +0,0 @@
-import flask
-
-app = flask.Flask(__name__)
-
-@app.route('/hello')
-def hello():
-    return "Hello, World!"
-
-if __name__ == '__main__':
-    app.run(host="0.0.0.0", port=7999)

View File

@@ -4,6 +4,7 @@ import torch
 import trimesh
 from scipy.spatial import cKDTree

 class PtsUtil:

     @staticmethod
@@ -31,13 +32,17 @@ class PtsUtil:
         sampled_indices = np.zeros(num_points, dtype=int)
         sampled_indices[0] = np.random.randint(0, N)
-        distances = np.linalg.norm(point_cloud - point_cloud[sampled_indices[0]], axis=1)
+        distances = np.linalg.norm(
+            point_cloud - point_cloud[sampled_indices[0]], axis=1
+        )
         for i in range(1, num_points):
             farthest_index = np.argmax(distances)
             sampled_indices[i] = farthest_index
             mask[farthest_index] = True
-            new_distances = np.linalg.norm(point_cloud - point_cloud[farthest_index], axis=1)
+            new_distances = np.linalg.norm(
+                point_cloud - point_cloud[farthest_index], axis=1
+            )
             distances = np.minimum(distances, new_distances)

         sampled_points = point_cloud[sampled_indices]
@@ -63,7 +68,9 @@ class PtsUtil:
         return points_h[:, :3]

     @staticmethod
-    def get_overlapping_points(point_cloud_L, point_cloud_R, voxel_size=0.005, require_idx=False):
+    def get_overlapping_points(
+        point_cloud_L, point_cloud_R, voxel_size=0.005, require_idx=False
+    ):
         voxels_L, indices_L = PtsUtil.voxelize_points(point_cloud_L, voxel_size)
         voxels_R, _ = PtsUtil.voxelize_points(point_cloud_R, voxel_size)
@@ -79,16 +86,24 @@ class PtsUtil:
         return overlapping_points

     @staticmethod
-    def filter_points(points, points_normals, cam_pose, voxel_size=0.002, theta=45, z_range=(0.2, 0.45)):
-        """ filter with z range """
+    def filter_points(
+        points,
+        points_normals,
+        cam_pose,
+        voxel_size=0.002,
+        theta=45,
+        z_range=(0.2, 0.45),
+    ):
+        """filter with z range"""
         points_cam = PtsUtil.transform_point_cloud(points, np.linalg.inv(cam_pose))
         idx = (points_cam[:, 2] > z_range[0]) & (points_cam[:, 2] < z_range[1])
         z_filtered_points = points[idx]

         """ filter with normal """
-        sampled_points = PtsUtil.voxel_downsample_point_cloud(z_filtered_points, voxel_size)
-        kdtree = cKDTree(points_normals[:,:3])
+        sampled_points = PtsUtil.voxel_downsample_point_cloud(
+            z_filtered_points, voxel_size
+        )
+        kdtree = cKDTree(points_normals[:, :3])
         _, indices = kdtree.query(sampled_points)
         nearest_points = points_normals[indices]
@@ -98,27 +113,59 @@ class PtsUtil:
         cos_theta = np.dot(normals_normalized, camera_axis)
         theta_rad = np.deg2rad(theta)
         idx = cos_theta > np.cos(theta_rad)
-        filtered_sampled_points= sampled_points[idx]
+        filtered_sampled_points = sampled_points[idx]
         return filtered_sampled_points[:, :3]

     @staticmethod
-    def register_icp(pcl: np.ndarray, model: trimesh.Trimesh, threshold=0.5) -> np.ndarray:
-        mesh_points = np.asarray(model.vertices)
-        downsampled_mesh_points = PtsUtil.random_downsample_point_cloud(mesh_points, pcl.shape[0])
-        mesh_point_cloud = o3d.geometry.PointCloud()
-        mesh_point_cloud.points = o3d.utility.Vector3dVector(downsampled_mesh_points)
-        np.savetxt("mesh_point_cloud.txt", downsampled_mesh_points)
-        pcl_point_cloud = o3d.geometry.PointCloud()
-        pcl_point_cloud.points = o3d.utility.Vector3dVector(pcl)
-        initial_transform = np.eye(4)
-        reg_icp = o3d.pipelines.registration.registration_icp(
-            pcl_point_cloud, mesh_point_cloud, threshold,
-            initial_transform,
-            o3d.pipelines.registration.TransformationEstimationPointToPoint()
-        )
-        if np.allclose(reg_icp.transformation, np.eye(4)):
-            print("Registration failed. Check your initial alignment and point cloud quality.")
-        else:
-            print("Registration successful.")
-        print(reg_icp.transformation)
+    def register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.01):
+        radius_normal = voxel_size * 2
+        pipreg = o3d.pipelines.registration
+        model_pcd = o3d.geometry.PointCloud()
+        model_pcd.points = o3d.utility.Vector3dVector(model.vertices)
+        model_downsampled = model_pcd.voxel_down_sample(voxel_size)
+        model_downsampled.estimate_normals(
+            search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                radius=radius_normal, max_nn=30
+            )
+        )
+        model_fpfh = pipreg.compute_fpfh_feature(
+            model_downsampled,
+            o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
+        )
+        source_pcd = o3d.geometry.PointCloud()
+        source_pcd.points = o3d.utility.Vector3dVector(pcl)
+        source_downsampled = source_pcd.voxel_down_sample(voxel_size)
+        source_downsampled.estimate_normals(
+            search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                radius=radius_normal, max_nn=30
+            )
+        )
+        source_fpfh = pipreg.compute_fpfh_feature(
+            source_downsampled,
+            o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
+        )
+        reg_ransac = pipreg.registration_ransac_based_on_feature_matching(
+            source_downsampled,
+            model_downsampled,
+            source_fpfh,
+            model_fpfh,
+            mutual_filter=True,
+            max_correspondence_distance=voxel_size * 1.5,
+            estimation_method=pipreg.TransformationEstimationPointToPoint(False),
+            ransac_n=4,
+            checkers=[pipreg.CorrespondenceCheckerBasedOnEdgeLength(0.9)],
+            criteria=pipreg.RANSACConvergenceCriteria(4000000, 500),
+        )
+        reg_icp = pipreg.registration_icp(
+            source_downsampled,
+            model_downsampled,
+            voxel_size * 2,
+            reg_ransac.transformation,
+            pipreg.TransformationEstimationPointToPlane(),
+            pipreg.ICPConvergenceCriteria(max_iteration=200),
+        )
         return reg_icp.transformation
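A minimal usage sketch for the new PtsUtil.register (coarse RANSAC alignment on FPFH features, refined by point-to-plane ICP): it returns a 4x4 transform that maps the scanned points into the model frame. File paths below are placeholders, not from the commit:

import numpy as np
import trimesh
from utils.pts_util import PtsUtil

model = trimesh.load("mesh.obj")        # CAD model (placeholder path)
scan = np.load("scan.npy")              # (N, 3) scanned point cloud (placeholder path)
T = PtsUtil.register(scan, model, voxel_size=0.01)   # 4x4 scan-to-model transform
aligned = PtsUtil.transform_point_cloud(scan, T)
np.savetxt("aligned.txt", aligned)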