update normal in computing strategy

commit a1226eb294 (parent 9d0119549e)
@@ -13,7 +13,7 @@ runner:
   generate:
     voxel_threshold: 0.003
     overlap_area_threshold: 25
-    compute_with_normal: True
+    compute_with_normal: False
     scan_points_threshold: 10
     overwrite: False
     seq_num: 15
@@ -164,9 +164,9 @@ def save_scene_data(root, scene, scene_idx=0, scene_total=1,file_type="txt"):
 
 if __name__ == "__main__":
     #root = "/media/hofee/repository/new_data_with_normal"
-    root = r"C:\Document\Local Project\nbv_rec\nbv_reconstruction\temp"
+    root = r"C:\Document\Datasets\nbv_rec_part2"
     scene_list = os.listdir(root)
-    from_idx = 0 # 1000
+    from_idx = 600 # 1000
     to_idx = len(scene_list) # 1500
 
 
utils/pts.py (11 changed lines)
@@ -7,8 +7,15 @@ class PtsUtil:
     @staticmethod
     def voxel_downsample_point_cloud(point_cloud, voxel_size=0.005, require_idx=False):
         voxel_indices = np.floor(point_cloud / voxel_size).astype(np.int32)
-        unique_voxels = np.unique(voxel_indices, axis=0, return_inverse=True)
-        return unique_voxels[0]*voxel_size
+        if require_idx:
+            _, inverse, counts = np.unique(voxel_indices, axis=0, return_inverse=True, return_counts=True)
+            idx_sort = np.argsort(inverse)
+            idx_unique = idx_sort[np.cumsum(counts)-counts]
+            downsampled_points = point_cloud[idx_unique]
+            return downsampled_points, idx_unique
+        else:
+            unique_voxels = np.unique(voxel_indices, axis=0, return_inverse=True)
+            return unique_voxels[0]*voxel_size
 
     @staticmethod
     def random_downsample_point_cloud(point_cloud, num_points, require_idx=False):
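With require_idx=True the function now returns the surviving original points together with their indices, so per-point attributes such as normals can be downsampled in lockstep. A minimal usage sketch with synthetic data (the import path and the array names are illustrative, not taken from the repo):

import numpy as np
from utils.pts import PtsUtil  # assumes utils/ is importable as a package

pts = np.random.rand(1000, 3)  # synthetic point cloud, for illustration only
nrm = np.random.rand(1000, 3)  # matching per-point normals

# new path: keep real points and get the indices of the ones that survive
ds_pts, idx = PtsUtil.voxel_downsample_point_cloud(pts, voxel_size=0.003, require_idx=True)
ds_nrm = nrm[idx]  # normals stay aligned with the downsampled points

# old path (unchanged): quantized voxel coordinates only, no indices
voxel_coords = PtsUtil.voxel_downsample_point_cloud(pts, voxel_size=0.003)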
@@ -19,6 +19,12 @@ class ReconstructionUtil:
         is_covered_by_distance = distances < threshold*2
         normal_dots = np.einsum('ij,ij->i', target_normal, combined_normal[indices])
         is_covered_by_normal = normal_dots > normal_threshold
+
+        pts_nrm_target = np.hstack([target_point_cloud, target_normal])
+        np.savetxt("pts_nrm_target.txt", pts_nrm_target)
+        pts_nrm_combined = np.hstack([combined_point_cloud, combined_normal])
+        np.savetxt("pts_nrm_combined.txt", pts_nrm_combined)
+        import ipdb; ipdb.set_trace()
         covered_points_num = np.sum(is_covered_by_distance & is_covered_by_normal)
         coverage_rate = covered_points_num / target_point_cloud.shape[0]
 
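For context, the coverage test above combines a nearest-neighbor distance check with a normal-agreement check. A standalone sketch of that logic (the cKDTree query and the default threshold values are assumptions for illustration; the repo's compute_coverage_rate_with_normal may obtain distances and indices differently):

import numpy as np
from scipy.spatial import cKDTree

def coverage_with_normal(target_pts, target_nrm, combined_pts, combined_nrm,
                         threshold=0.003, normal_threshold=0.7):
    # nearest reconstructed point for every target point
    distances, indices = cKDTree(combined_pts).query(target_pts, k=1)
    is_covered_by_distance = distances < threshold * 2
    # dot product between each target normal and its nearest neighbor's normal
    normal_dots = np.einsum('ij,ij->i', target_nrm, combined_nrm[indices])
    is_covered_by_normal = normal_dots > normal_threshold
    covered = np.sum(is_covered_by_distance & is_covered_by_normal)
    return covered / target_pts.shape[0], covered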
@@ -145,7 +151,6 @@ class ReconstructionUtil:
         max_rec_nrm = np.vstack(normal_list)
         downsampled_max_rec_pts, idx = PtsUtil.voxel_downsample_point_cloud(max_rec_pts, threshold, require_idx=True)
         downsampled_max_rec_nrm = max_rec_nrm[idx]
-
         max_rec_pts_num = downsampled_max_rec_pts.shape[0]
         try:
             max_real_rec_pts_coverage, _ = ReconstructionUtil.compute_coverage_rate_with_normal(target_point_cloud, downsampled_max_rec_pts, target_normal, downsampled_max_rec_nrm, threshold)
@@ -184,7 +189,7 @@ class ReconstructionUtil:
 
             new_combined_point_cloud = np.vstack([combined_point_cloud, point_cloud_list[view_index]])
             new_combined_normal = np.vstack([combined_normal, normal_list[view_index]])
-            new_downsampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(new_combined_point_cloud,threshold)
+            new_downsampled_combined_point_cloud, idx = PtsUtil.voxel_downsample_point_cloud(new_combined_point_cloud,threshold, require_idx=True)
             new_downsampled_combined_normal = new_combined_normal[idx]
             new_coverage, new_covered_num = ReconstructionUtil.compute_coverage_rate_with_normal(downsampled_max_rec_pts, new_downsampled_combined_point_cloud, downsampled_max_rec_nrm, new_downsampled_combined_normal, threshold)
             coverage_increase = new_coverage - current_coverage
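The one-line change above is the crux of the commit: before it, the downsample call did not return indices, so the idx used on the following line could not correspond to the newly downsampled cloud. The pattern in isolation (variable names shortened for illustration):

# downsample points and keep the indices of the survivors,
# then select the matching normals with those same indices
pts_ds, idx = PtsUtil.voxel_downsample_point_cloud(combined_pts, threshold, require_idx=True)
nrm_ds = combined_nrm[idx]
assert pts_ds.shape == nrm_ds.shape  # one normal per surviving point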
utils/vis.py (22 changed lines)
@@ -157,6 +157,17 @@ class visualizeUtil:
         np.savetxt(os.path.join(output_dir, "target_pts.txt"), sampled_target_points)
         np.savetxt(os.path.join(output_dir, "target_normal.txt"), sampled_visualized_normal)
 
+    @staticmethod
+    def save_pts_nrm(pts_nrm, output_dir):
+        pts = pts_nrm[:, :3]
+        nrm = pts_nrm[:, 3:]
+        visualized_nrm = []
+        num_samples = 10
+        for i in range(len(pts)):
+            visualized_nrm.append(pts[i] + 0.02*t * nrm[i] for t in range(num_samples))
+        visualized_nrm = np.array(visualized_nrm).reshape(-1, 3)
+        np.savetxt(os.path.join(output_dir, "nrm.txt"), visualized_nrm)
+        np.savetxt(os.path.join(output_dir, "pts.txt"), pts)
 
 
     # ------ Debug ------
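Note that save_pts_nrm as committed appends one generator object per point, so the later reshape(-1, 3) and savetxt would most likely fail at runtime. A minimal sketch of the presumably intended behavior, written as a plain function and using extend so each normal becomes a short strip of sample points (the fix itself is my assumption, the names mirror the diff):

import os
import numpy as np

def save_pts_nrm(pts_nrm, output_dir):
    pts = pts_nrm[:, :3]
    nrm = pts_nrm[:, 3:]
    visualized_nrm = []
    num_samples = 10
    for i in range(len(pts)):
        # sample points along each normal so it can be inspected as a line segment
        visualized_nrm.extend(pts[i] + 0.02 * t * nrm[i] for t in range(num_samples))
    visualized_nrm = np.array(visualized_nrm).reshape(-1, 3)
    np.savetxt(os.path.join(output_dir, "nrm.txt"), visualized_nrm)
    np.savetxt(os.path.join(output_dir, "pts.txt"), pts)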
@@ -168,8 +179,11 @@ if __name__ == "__main__":
     output_dir = r"C:\Document\Local Project\nbv_rec\nbv_reconstruction\test"
 
     #visualizeUtil.save_all_cam_pos_and_cam_axis(root, scene, output_dir)
-    visualizeUtil.save_all_combined_pts(root, scene, output_dir)
-    visualizeUtil.save_seq_combined_pts(root, scene, [0, 121, 286, 175, 111,366,45,230,232,225,255,17,199,78,60], output_dir)
-    visualizeUtil.save_seq_cam_pos_and_cam_axis(root, scene, [0, 121, 286, 175, 111,366,45,230,232,225,255,17,199,78,60], output_dir)
-    visualizeUtil.save_target_mesh_at_world_space(root, model_dir, scene)
+    # visualizeUtil.save_all_combined_pts(root, scene, output_dir)
+    # visualizeUtil.save_seq_combined_pts(root, scene, [0, 121, 286, 175, 111,366,45,230,232,225,255,17,199,78,60], output_dir)
+    # visualizeUtil.save_seq_cam_pos_and_cam_axis(root, scene, [0, 121, 286, 175, 111,366,45,230,232,225,255,17,199,78,60], output_dir)
+    # visualizeUtil.save_target_mesh_at_world_space(root, model_dir, scene)
     #visualizeUtil.save_points_and_normals(root, scene,"10", output_dir, binocular=True)
+    pts_nrm = np.loadtxt(r"C:\Document\Local Project\nbv_rec\nbv_reconstruction\pts_nrm_target.txt")
+    visualizeUtil.save_pts_nrm(pts_nrm, output_dir)
+