update normal strategy
@@ -5,7 +5,7 @@ import torch
 class PtsUtil:
 
     @staticmethod
-    def voxel_downsample_point_cloud(point_cloud, voxel_size=0.005):
+    def voxel_downsample_point_cloud(point_cloud, voxel_size=0.005, require_idx=False):
         voxel_indices = np.floor(point_cloud / voxel_size).astype(np.int32)
         unique_voxels = np.unique(voxel_indices, axis=0, return_inverse=True)
         return unique_voxels[0]*voxel_size
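The hunk only shows the new require_idx parameter; how it changes the return value lies outside this diff. A plausible sketch of the intended behavior, assuming the function also returns indices of the kept points so callers can subsample per-point attributes (later hunks use it exactly that way for normals); the return_index trick and the name first_idx are illustrative, not taken from the repository:

import numpy as np

def voxel_downsample_point_cloud(point_cloud, voxel_size=0.005, require_idx=False):
    # Quantize each point to its voxel cell and keep one representative per cell.
    voxel_indices = np.floor(point_cloud / voxel_size).astype(np.int32)
    unique_voxels, first_idx = np.unique(voxel_indices, axis=0, return_index=True)
    downsampled = unique_voxels * voxel_size
    if require_idx:
        # first_idx maps each kept voxel back to one source point,
        # so matching normals (or colors) can be selected with the same index.
        return downsampled, first_idx
    return downsampled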
@@ -8,14 +8,15 @@ class ReconstructionUtil:
     def compute_coverage_rate(target_point_cloud, combined_point_cloud, threshold=0.01):
         kdtree = cKDTree(combined_point_cloud)
         distances, _ = kdtree.query(target_point_cloud)
-        covered_points_num = np.sum(distances < threshold)
+        covered_points_num = np.sum(distances < threshold*2)
         coverage_rate = covered_points_num / target_point_cloud.shape[0]
         return coverage_rate, covered_points_num
 
     @staticmethod
     def compute_coverage_rate_with_normal(target_point_cloud, combined_point_cloud, target_normal, combined_normal, threshold=0.01, normal_threshold=0.1):
         kdtree = cKDTree(combined_point_cloud)
         distances, indices = kdtree.query(target_point_cloud)
-        is_covered_by_distance = distances < threshold
+        is_covered_by_distance = distances < threshold*2
         normal_dots = np.einsum('ij,ij->i', target_normal, combined_normal[indices])
         is_covered_by_normal = normal_dots > normal_threshold
         covered_points_num = np.sum(is_covered_by_distance & is_covered_by_normal)
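For intuition, a toy example of the normal-aware coverage test (synthetic arrays, not repository data): a target point only counts as covered when a reconstructed point lies within threshold*2 and its normal roughly agrees with the target normal.

import numpy as np
from scipy.spatial import cKDTree

target_pts = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
target_nrm = np.array([[0.0, 0.0, 1.0], [0.0, 0.0, 1.0]])
rec_pts = np.array([[0.005, 0.0, 0.0], [1.002, 0.0, 0.0]])
rec_nrm = np.array([[0.0, 0.0, 1.0], [1.0, 0.0, 0.0]])   # second normal disagrees

distances, indices = cKDTree(rec_pts).query(target_pts)
is_near = distances < 0.01 * 2                            # both points pass the distance test
dots = np.einsum('ij,ij->i', target_nrm, rec_nrm[indices])
is_aligned = dots > 0.1                                   # only the first passes the normal test
print(np.sum(is_near & is_aligned))                       # 1 covered point out of 2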
@@ -25,15 +26,14 @@ class ReconstructionUtil:
 
 
     @staticmethod
-    def compute_overlap_rate(new_point_cloud, combined_point_cloud, threshold=0.01):
+    def check_overlap(new_point_cloud, combined_point_cloud, overlap_area_threshold=25, voxel_size=0.01):
         kdtree = cKDTree(combined_point_cloud)
         distances, _ = kdtree.query(new_point_cloud)
-        overlapping_points = np.sum(distances < threshold)
-        if new_point_cloud.shape[0] == 0:
-            overlap_rate = 0
-        else:
-            overlap_rate = overlapping_points / new_point_cloud.shape[0]
-        return overlap_rate
+        overlapping_points = np.sum(distances < voxel_size*2)
+        cm = 0.01
+        voxel_size_cm = voxel_size / cm
+        overlap_area = overlapping_points * voxel_size_cm * voxel_size_cm
+        return overlap_area > overlap_area_threshold
 
 
     @staticmethod
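The relative overlap rate is replaced by an absolute overlap area: each overlapping point contributes roughly one voxel footprint of (voxel_size / 1 cm)^2 square centimeters, so with the defaults (voxel_size = 0.01 m, overlap_area_threshold = 25) a candidate view passes once more than 25 of its points land within voxel_size*2 of the current reconstruction. A small self-contained check with synthetic clouds (not repository data):

import numpy as np
from scipy.spatial import cKDTree

voxel_size = 0.01                          # 1 cm voxels
existing = np.random.rand(500, 3)          # current reconstruction
new_view = existing[:30] + 0.001           # 30 points nearly coincide with existing ones

distances, _ = cKDTree(existing).query(new_view)
overlapping_points = np.sum(distances < voxel_size * 2)
overlap_area = overlapping_points * (voxel_size / 0.01) ** 2   # in cm^2
print(overlap_area, overlap_area > 25)     # 30.0 True -> the view would be accepted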
@@ -49,7 +49,7 @@ class ReconstructionUtil:
         return new_added_points
 
     @staticmethod
-    def compute_next_best_view_sequence_with_overlap(target_point_cloud, point_cloud_list, scan_points_indices_list, threshold=0.01, soft_overlap_threshold=0.5, hard_overlap_threshold=0.7, init_view = 0, scan_points_threshold=5, status_info=None):
+    def compute_next_best_view_sequence(target_point_cloud, point_cloud_list, scan_points_indices_list, threshold=0.01, overlap_area_threshold=25, init_view = 0, scan_points_threshold=5, status_info=None):
         selected_views = [init_view]
         combined_point_cloud = point_cloud_list[init_view]
         history_indices = [scan_points_indices_list[init_view]]
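The sequence builder is renamed and its soft/hard overlap-rate thresholds collapse into the single overlap_area_threshold. A hedged sketch of how a call site would change (the surrounding variable names are made up for illustration):

# before this commit (old signature):
# seq, rest, rec_pc = ReconstructionUtil.compute_next_best_view_sequence_with_overlap(
#     target_pts, pc_list, scan_idx_list, threshold=0.01,
#     soft_overlap_threshold=0.5, hard_overlap_threshold=0.7)

# after this commit: one absolute overlap-area requirement (in cm^2)
seq, rest, rec_pc = ReconstructionUtil.compute_next_best_view_sequence(
    target_pts, pc_list, scan_idx_list, threshold=0.01, overlap_area_threshold=25)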
@@ -83,22 +83,16 @@ class ReconstructionUtil:
                 if selected_views:
                     new_scan_points_indices = scan_points_indices_list[view_index]
                     if not ReconstructionUtil.check_scan_points_overlap(history_indices, new_scan_points_indices, scan_points_threshold):
-                        overlap_threshold = hard_overlap_threshold
+                        curr_overlap_area_threshold = overlap_area_threshold
                     else:
-                        overlap_threshold = soft_overlap_threshold
-                    start = time.time()
-                    overlap_rate = ReconstructionUtil.compute_overlap_rate(point_cloud_list[view_index],combined_point_cloud, threshold)
-                    end = time.time()
-                    # print(f"overlap_rate Time: {end-start}")
-                    if overlap_rate < overlap_threshold:
+                        curr_overlap_area_threshold = overlap_area_threshold * 0.5
+
+                    if not ReconstructionUtil.check_overlap(point_cloud_list[view_index], combined_point_cloud, overlap_area_threshold = curr_overlap_area_threshold, voxel_size=threshold):
                         continue
 
-                start = time.time()
                 new_combined_point_cloud = np.vstack([combined_point_cloud, point_cloud_list[view_index]])
                 new_downsampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(new_combined_point_cloud,threshold)
                 new_coverage, new_covered_num = ReconstructionUtil.compute_coverage_rate(downsampled_max_rec_pts, new_downsampled_combined_point_cloud, threshold)
-                end = time.time()
-                #print(f"compute_coverage_rate Time: {end-start}")
                 coverage_increase = new_coverage - current_coverage
                 if coverage_increase > best_coverage_increase:
                     best_coverage_increase = coverage_increase
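Distilled, the per-view gate now works like this: a candidate view must share a minimum overlap area with the current reconstruction, and that requirement is halved when the view's scan points already overlap with previously selected views. The helper below is a hypothetical paraphrase for readability, not repository code:

# Hypothetical condensation of the acceptance test applied to each candidate view.
def view_passes_overlap_gate(new_pc, combined_pc, shares_scan_points,
                             overlap_area_threshold=25, voxel_size=0.01):
    # Views whose scan points already overlap the history only need half the area.
    required_area = overlap_area_threshold * 0.5 if shares_scan_points else overlap_area_threshold
    return ReconstructionUtil.check_overlap(new_pc, combined_pc,
                                            overlap_area_threshold=required_area,
                                            voxel_size=voxel_size)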
@@ -107,6 +101,101 @@ class ReconstructionUtil:
                     best_combined_point_cloud = new_downsampled_combined_point_cloud
 
 
+            if best_view is not None:
+                if best_coverage_increase <=1e-3 or best_covered_num - current_covered_num <= 5:
+                    break
+
+                selected_views.append(best_view)
+                best_rec_pts_num = best_combined_point_cloud.shape[0]
+                print(f"Current rec pts num: {curr_rec_pts_num}, Best rec pts num: {best_rec_pts_num}, Best cover pts: {best_covered_num}, Max rec pts num: {max_rec_pts_num}")
+                print(f"Current coverage: {current_coverage+best_coverage_increase}, Best coverage increase: {best_coverage_increase}, Max Real coverage: {max_real_rec_pts_coverage}")
+                current_covered_num = best_covered_num
+                curr_rec_pts_num = best_rec_pts_num
+                combined_point_cloud = best_combined_point_cloud
+                remaining_views.remove(best_view)
+                history_indices.append(scan_points_indices_list[best_view])
+                current_coverage += best_coverage_increase
+                cnt_processed_view += 1
+                if status_info is not None:
+                    sm = status_info["status_manager"]
+                    app_name = status_info["app_name"]
+                    runner_name = status_info["runner_name"]
+                    sm.set_status(app_name, runner_name, "current coverage", current_coverage)
+                    sm.set_progress(app_name, runner_name, "processed view", cnt_processed_view, len(point_cloud_list))
+
+                view_sequence.append((best_view, current_coverage))
+
+            else:
+                break
+        if status_info is not None:
+            sm = status_info["status_manager"]
+            app_name = status_info["app_name"]
+            runner_name = status_info["runner_name"]
+            sm.set_progress(app_name, runner_name, "processed view", len(point_cloud_list), len(point_cloud_list))
+        return view_sequence, remaining_views, combined_point_cloud
+
+    @staticmethod
+    def compute_next_best_view_sequence_with_normal(target_point_cloud, target_normal, point_cloud_list, normal_list, scan_points_indices_list, threshold=0.01, overlap_area_threshold=25, init_view = 0, scan_points_threshold=5, status_info=None):
+        selected_views = [init_view]
+        combined_point_cloud = point_cloud_list[init_view]
+        combined_normal = normal_list[init_view]
+        history_indices = [scan_points_indices_list[init_view]]
+
+        max_rec_pts = np.vstack(point_cloud_list)
+        max_rec_nrm = np.vstack(normal_list)
+        downsampled_max_rec_pts, idx = PtsUtil.voxel_downsample_point_cloud(max_rec_pts, threshold, require_idx=True)
+        downsampled_max_rec_nrm = max_rec_nrm[idx]
+
+        max_rec_pts_num = downsampled_max_rec_pts.shape[0]
+        try:
+            max_real_rec_pts_coverage, _ = ReconstructionUtil.compute_coverage_rate_with_normal(target_point_cloud, downsampled_max_rec_pts, target_normal, downsampled_max_rec_nrm, threshold)
+        except:
+            import ipdb; ipdb.set_trace()
+
+        new_coverage, new_covered_num = ReconstructionUtil.compute_coverage_rate_with_normal(downsampled_max_rec_pts, combined_point_cloud, downsampled_max_rec_nrm, combined_normal, threshold)
+        current_coverage = new_coverage
+        current_covered_num = new_covered_num
+
+        remaining_views = list(range(len(point_cloud_list)))
+        view_sequence = [(init_view, current_coverage)]
+        cnt_processed_view = 0
+        remaining_views.remove(init_view)
+        curr_rec_pts_num = combined_point_cloud.shape[0]
+
+        while remaining_views:
+            best_view = None
+            best_coverage_increase = -1
+            best_combined_point_cloud = None
+            best_combined_normal = None
+            best_covered_num = 0
+
+            for view_index in remaining_views:
+                if point_cloud_list[view_index].shape[0] == 0:
+                    continue
+                if selected_views:
+                    new_scan_points_indices = scan_points_indices_list[view_index]
+                    if not ReconstructionUtil.check_scan_points_overlap(history_indices, new_scan_points_indices, scan_points_threshold):
+                        curr_overlap_area_threshold = overlap_area_threshold
+                    else:
+                        curr_overlap_area_threshold = overlap_area_threshold * 0.5
+
+                    if not ReconstructionUtil.check_overlap(point_cloud_list[view_index], combined_point_cloud, overlap_area_threshold = curr_overlap_area_threshold, voxel_size=threshold):
+                        continue
+
+                new_combined_point_cloud = np.vstack([combined_point_cloud, point_cloud_list[view_index]])
+                new_combined_normal = np.vstack([combined_normal, normal_list[view_index]])
+                new_downsampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(new_combined_point_cloud,threshold)
+                new_downsampled_combined_normal = new_combined_normal[idx]
+                new_coverage, new_covered_num = ReconstructionUtil.compute_coverage_rate_with_normal(downsampled_max_rec_pts, new_downsampled_combined_point_cloud, downsampled_max_rec_nrm, new_downsampled_combined_normal, threshold)
+                coverage_increase = new_coverage - current_coverage
+                if coverage_increase > best_coverage_increase:
+                    best_coverage_increase = coverage_increase
+                    best_view = view_index
+                    best_covered_num = new_covered_num
+                    best_combined_point_cloud = new_downsampled_combined_point_cloud
+                    best_combined_normal = new_downsampled_combined_normal
+
+
             if best_view is not None:
                 if best_coverage_increase <=1e-3 or best_covered_num - current_covered_num <= 5:
                     break
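Both sequence builders implement the same greedy strategy: repeatedly add the view with the largest coverage gain, and stop once the gain drops to about 0.1 percentage points or at most five new points would be covered. A condensed, hypothetical paraphrase of that loop (interface names are invented; the real methods additionally track scan-point history, normals, and status reporting):

def greedy_view_sequence(num_views, coverage_fn, overlap_ok, init_view=0,
                         min_gain=1e-3, min_new_points=5):
    # coverage_fn(selected) -> (coverage_rate, covered_point_count)
    # overlap_ok(selected, candidate) -> bool, the overlap-area gate shown above
    selected = [init_view]
    remaining = [v for v in range(num_views) if v != init_view]
    coverage, covered = coverage_fn(selected)
    sequence = [(init_view, coverage)]
    while remaining:
        candidates = []
        for v in remaining:
            if not overlap_ok(selected, v):
                continue
            new_cov, new_covered = coverage_fn(selected + [v])
            candidates.append((new_cov - coverage, new_covered, v))
        if not candidates:
            break
        gain, new_covered, best = max(candidates)
        if gain <= min_gain or new_covered - covered <= min_new_points:
            break
        selected.append(best)
        remaining.remove(best)
        coverage, covered = coverage + gain, new_covered
        sequence.append((best, coverage))
    return sequence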
@@ -118,6 +207,7 @@ class ReconstructionUtil:
                 current_covered_num = best_covered_num
                 curr_rec_pts_num = best_rec_pts_num
                 combined_point_cloud = best_combined_point_cloud
+                combined_normal = best_combined_normal
                 remaining_views.remove(best_view)
                 history_indices.append(scan_points_indices_list[best_view])
                 current_coverage += best_coverage_increase
utils/vis.py
@@ -47,6 +47,42 @@ class visualizeUtil:
         all_combined_pts = np.vstack(all_combined_pts)
         downsampled_all_pts = PtsUtil.voxel_downsample_point_cloud(all_combined_pts, 0.001)
         np.savetxt(os.path.join(output_dir, "all_combined_pts.txt"), downsampled_all_pts)
+
+    @staticmethod
+    def save_seq_cam_pos_and_cam_axis(root, scene, frame_idx_list, output_dir):
+        all_cam_pos = []
+        all_cam_axis = []
+        for i in frame_idx_list:
+            path = DataLoadUtil.get_path(root, scene, i)
+            cam_info = DataLoadUtil.load_cam_info(path, binocular=True)
+            cam_pose = cam_info["cam_to_world"]
+            cam_pos = cam_pose[:3, 3]
+            cam_axis = cam_pose[:3, 2]
+
+            num_samples = 10
+            sample_points = [cam_pos + 0.02*t * cam_axis for t in range(num_samples)]
+            sample_points = np.array(sample_points)
+
+            all_cam_pos.append(cam_pos)
+            all_cam_axis.append(sample_points)
+
+        all_cam_pos = np.array(all_cam_pos)
+        all_cam_axis = np.array(all_cam_axis).reshape(-1, 3)
+        np.savetxt(os.path.join(output_dir, "seq_cam_pos.txt"), all_cam_pos)
+        np.savetxt(os.path.join(output_dir, "seq_cam_axis.txt"), all_cam_axis)
+
+    @staticmethod
+    def save_seq_combined_pts(root, scene, frame_idx_list, output_dir):
+        all_combined_pts = []
+        for i in frame_idx_list:
+            path = DataLoadUtil.get_path(root, scene, i)
+            pts = DataLoadUtil.load_from_preprocessed_pts(path,"npy")
+            if pts.shape[0] == 0:
+                continue
+            all_combined_pts.append(pts)
+        all_combined_pts = np.vstack(all_combined_pts)
+        downsampled_all_pts = PtsUtil.voxel_downsample_point_cloud(all_combined_pts, 0.001)
+        np.savetxt(os.path.join(output_dir, "seq_combined_pts.txt"), downsampled_all_pts)
 
     @staticmethod
     def save_target_mesh_at_world_space(
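The new seq_cam_axis.txt output is simply ten points per frame stepped 2 cm apart along the camera's viewing direction (the third column of the camera-to-world rotation), which makes each view axis visible as a short ray in a point-cloud viewer. For a hypothetical identity pose the sampled points look like this:

import numpy as np

cam_pose = np.eye(4)                  # made-up camera-to-world pose
cam_pos = cam_pose[:3, 3]             # camera center, here the origin
cam_axis = cam_pose[:3, 2]            # viewing direction, here +Z
samples = np.array([cam_pos + 0.02 * t * cam_axis for t in range(10)])
print(samples[:, 2])                  # [0.   0.02 0.04 ... 0.18]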
@@ -126,12 +162,14 @@ class visualizeUtil:
 # ------ Debug ------
 
 if __name__ == "__main__":
-    root = r"/home/yan20/nbv_rec/project/franka_control/temp"
+    root = r"C:\Document\Local Project\nbv_rec\nbv_reconstruction\temp"
     model_dir = r"H:\\AI\\Datasets\\scaled_object_box_meshes"
     scene = "box"
     output_dir = r"C:\Document\Local Project\nbv_rec\nbv_reconstruction\test"
 
     #visualizeUtil.save_all_cam_pos_and_cam_axis(root, scene, output_dir)
     visualizeUtil.save_all_combined_pts(root, scene, output_dir)
+    visualizeUtil.save_seq_combined_pts(root, scene, [0, 121, 286, 175, 111,366,45,230,232,225,255,17,199,78,60], output_dir)
+    visualizeUtil.save_seq_cam_pos_and_cam_axis(root, scene, [0, 121, 286, 175, 111,366,45,230,232,225,255,17,199,78,60], output_dir)
     visualizeUtil.save_target_mesh_at_world_space(root, model_dir, scene)
-    #visualizeUtil.save_points_and_normals(root, scene,"10", output_dir, binocular=True)
+    #visualizeUtil.save_points_and_normals(root, scene,"10", output_dir, binocular=True)