hofee 2024-10-12 16:39:00 +08:00
parent cd85fed3a0
commit 3fe74eb6eb
5 changed files with 226 additions and 32 deletions

move_cam_to.py (new file, +33 lines)

@@ -0,0 +1,33 @@
from utils.data_load import DataLoadUtil
from utils.control_util import ControlUtil
from utils.view_util import ViewUtil
from utils.pts_util import PtsUtil
from utils.communicate_util import CommunicateUtil
import numpy as np
import os
if __name__ == "__main__":
idx = "2"
ControlUtil.connect_robot()
ControlUtil.franka_reset()
root_path = "/home/yan20/nbv_rec/project/franka_control/temp_output/cad_model_world"
frame_path = os.path.join(root_path, idx)
cam_info = DataLoadUtil.load_cam_info(frame_path, binocular=True)
render_camL_to_world = cam_info["cam_to_world"]
render_camO_to_world = cam_info["cam_to_world_O"]
L_to_O = np.dot(np.linalg.inv(render_camO_to_world), render_camL_to_world)
ControlUtil.set_pose(render_camO_to_world)
real_camO_to_world = ControlUtil.get_pose()
real_camL_to_world = np.dot(real_camO_to_world,L_to_O)
view_data = CommunicateUtil.get_view_data(init=True)
cam_pts = ViewUtil.get_pts(view_data)
np.savetxt(f"cam_pts_{idx}.txt", cam_pts)
world_pts = PtsUtil.transform_point_cloud(cam_pts, render_camL_to_world)
print(real_camL_to_world)
np.savetxt(f"world_pts_{idx}.txt", world_pts)
# import ipdb;ipdb.set_trace()
# view_data = CommunicateUtil.get_view_data()
# cam_pts = ViewUtil.get_pts(view_data)
# np.savetxt(f"cam_pts_{idx}.txt", cam_pts)
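For context, the pose algebra above is plain homogeneous-transform composition: L_to_O = inv(render_camO_to_world) @ render_camL_to_world is the fixed left-camera offset expressed in the reference camera frame, so multiplying the measured real_camO_to_world by it yields real_camL_to_world. A minimal self-contained sketch of that identity, using toy poses rather than values from this repo:

import numpy as np

# T_L: left camera -> world, T_O: reference camera -> world (toy values)
T_L = np.eye(4); T_L[:3, 3] = [0.05, 0.0, 0.0]
T_O = np.eye(4); T_O[:3, 3] = [0.00, 0.0, 0.1]
L_to_O = np.linalg.inv(T_O) @ T_L        # fixed camera-to-camera offset
assert np.allclose(T_O @ L_to_O, T_L)    # composing the offset with the O pose recovers the L pose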

register.py (new file, +109 lines)

@@ -0,0 +1,109 @@
import os
import numpy as np
import open3d as o3d
import torch
import trimesh
from scipy.spatial import cKDTree
from utils.pts_util import PtsUtil
def register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.002):
radius_normal = voxel_size * 2
pipreg = o3d.pipelines.registration
model_pcd = o3d.geometry.PointCloud()
model_pcd.points = o3d.utility.Vector3dVector(model.vertices)
model_downsampled = model_pcd.voxel_down_sample(voxel_size)
model_downsampled.estimate_normals(
search_param=o3d.geometry.KDTreeSearchParamHybrid(
radius=radius_normal, max_nn=30
)
)
# model_fpfh = pipreg.compute_fpfh_feature(
# model_downsampled,
# o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
# )
source_pcd = o3d.geometry.PointCloud()
source_pcd.points = o3d.utility.Vector3dVector(pcl)
source_downsampled = source_pcd.voxel_down_sample(voxel_size)
source_downsampled.estimate_normals(
search_param=o3d.geometry.KDTreeSearchParamHybrid(
radius=radius_normal, max_nn=30
)
)
# source_fpfh = pipreg.compute_fpfh_feature(
# source_downsampled,
# o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
# )
# reg_ransac = pipreg.registration_ransac_based_on_feature_matching(
# source_downsampled,
# model_downsampled,
# source_fpfh,
# model_fpfh,
# mutual_filter=True,
# max_correspondence_distance=voxel_size * 2,
# estimation_method=pipreg.TransformationEstimationPointToPoint(False),
# ransac_n=4,
# checkers=[pipreg.CorrespondenceCheckerBasedOnEdgeLength(0.9)],
# criteria=pipreg.RANSACConvergenceCriteria(4000000, 500),
# )
reg_icp2 = pipreg.registration_icp(
source_downsampled,
model_downsampled,
voxel_size*10,
np.eye(4),
pipreg.TransformationEstimationPointToPlane(),
pipreg.ICPConvergenceCriteria(max_iteration=2000),
)
reg_icp = pipreg.registration_icp(
source_downsampled,
model_downsampled,
voxel_size,
reg_icp2.transformation,
pipreg.TransformationEstimationPointToPlane(),
pipreg.ICPConvergenceCriteria(max_iteration=2000),
)
return reg_icp2.transformation, reg_icp.transformation
if __name__ == "__main__":
model_dir = "/home/yan20/Desktop/nbv_rec/data/models/bear"
model_path = os.path.join(model_dir,"mesh.ply")
temp_name = "cad_model_world"
cad_model = trimesh.load(model_path)
pts_path = "/home/yan20/nbv_rec/project/franka_control/first_real_pts_bear.txt"
pts = np.loadtxt(pts_path)
very_coarse_real_to_cad = np.eye(4)
cad_model_pts = cad_model.vertices
very_coarse_real_to_cad[:3,3] = np.mean(cad_model_pts, axis=0) - np.mean(pts, axis=0)
very_coarse_cad_pts = PtsUtil.transform_point_cloud(pts, very_coarse_real_to_cad)
target_point = np.array([-3.422540776542676300e-02, -2.412379452948226755e-02, 1.123609126159126337e-01])
# set a tolerance for matching the target point
tolerance = 1e-5
# compute the distance from each point to the target point
distances = np.linalg.norm(very_coarse_cad_pts - target_point, axis=1)
# count the points whose distance is below the tolerance
count = np.sum(distances < tolerance)
print(count)
print(very_coarse_cad_pts.shape)
print(np.mean(pts, axis=0), np.mean(cad_model_pts, axis=0), np.mean(very_coarse_cad_pts, axis=0))
pts = pts[distances > tolerance]
np.savetxt(os.path.join(temp_name + "_filtered.txt"), pts)
# very_coarse_real_to_cad[:3,3] = np.mean(cad_model_pts, axis=0) - np.mean(pts, axis=0)
# very_coarse_cad_pts = PtsUtil.transform_point_cloud(pts, very_coarse_real_to_cad)
# np.savetxt(os.path.join(temp_name + "_very_coarse_reg.txt"), very_coarse_cad_pts)
# real_to_cad = PtsUtil.register(very_coarse_cad_pts, cad_model)
# cad_pts = PtsUtil.transform_point_cloud(very_coarse_cad_pts, real_to_cad)
# np.savetxt(os.path.join(temp_name + "_reg.txt"), cad_pts)
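For reference, register() above is a coarse-to-fine point-to-plane ICP: the first pass uses a correspondence distance of 10x the voxel size from an identity initialization, and the second refines at the voxel size starting from the coarse transform. A hedged usage sketch (scan.txt and mesh.ply are placeholder paths, not files from this repo):

scan = np.loadtxt("scan.txt")                    # (N, 3) points in the scan frame
mesh = trimesh.load("mesh.ply")                  # CAD model to align against
coarse_T, fine_T = register(scan, mesh, voxel_size=0.002)
scan_in_model_frame = PtsUtil.transform_point_cloud(scan, fine_T)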


@@ -53,12 +53,13 @@ class CADStrategyRunner(Runner):
def load_experiment(self, backup_name=None):
super().load_experiment(backup_name)
def split_scan_pts_and_obj_pts(self, world_pts, scan_pts_z, z_threshold = 0.003):
scan_pts = world_pts[scan_pts_z < z_threshold]
obj_pts = world_pts[scan_pts_z >= z_threshold]
def split_scan_pts_and_obj_pts(self, world_pts, z_threshold = 0):
scan_pts = world_pts[world_pts[:,2] < z_threshold]
obj_pts = world_pts[world_pts[:,2] >= z_threshold]
return scan_pts, obj_pts
def run_one_model(self, model_name):
temp_dir = "/home/yan20/nbv_rec/project/franka_control/temp_output"
result = dict()
shot_pts_list = []
@@ -66,6 +67,7 @@ class CADStrategyRunner(Runner):
''' init robot '''
Log.info("[Part 1/5] start init and register")
ControlUtil.init()
''' load CAD model '''
model_path = os.path.join(self.model_dir, model_name,"mesh.ply")
temp_name = "cad_model_world"
@@ -75,21 +77,25 @@ class CADStrategyRunner(Runner):
view_data = CommunicateUtil.get_view_data(init=True)
first_cam_pts = ViewUtil.get_pts(view_data)
first_cam_to_real_world = ControlUtil.get_pose()
first_real_world_pts = PtsUtil.transform_point_cloud(first_cam_pts, first_cam_to_real_world)
_, first_splitted_real_world_pts = self.split_scan_pts_and_obj_pts(first_real_world_pts)
np.savetxt(f"first_real_pts_{model_name}.txt", first_splitted_real_world_pts)
''' register '''
Log.info("[Part 1/5] do registeration")
cam_to_cad = PtsUtil.register(first_cam_pts, cad_model)
cad_to_real_world = first_cam_to_real_world @ np.linalg.inv(cam_to_cad)
real_world_to_cad = PtsUtil.register(first_splitted_real_world_pts, cad_model)
cad_to_real_world = np.linalg.inv(real_world_to_cad)
Log.success("[Part 1/5] finish init and register")
real_world_to_blender_world = np.eye(4)
real_world_to_blender_world[:3, 3] = np.asarray([0, 0, 0.9215])
cad_to_blender_world = real_world_to_blender_world @ cad_to_real_world
cad_model_blender_world:trimesh.Trimesh = cad_model.apply_transform(cad_to_blender_world)
cad_model_real_world:trimesh.Trimesh = cad_model.apply_transform(cad_to_real_world)
cad_model_real_world.export(os.path.join(temp_dir, f"real_world_{temp_name}.obj"))
cad_model_blender_world:trimesh.Trimesh = cad_model.apply_transform(real_world_to_blender_world)
with tempfile.TemporaryDirectory() as temp_dir:
temp_dir = "/home/yan20/nbv_rec/project/franka_control/temp_output"
cad_model_blender_world.export(os.path.join(temp_dir, f"{temp_name}.obj"))
scene_dir = os.path.join(temp_dir, temp_name)
''' sample view '''
Log.info("[Part 2/5] start running renderer")
@@ -102,7 +108,7 @@ class CADStrategyRunner(Runner):
world_model_points = np.loadtxt(os.path.join(scene_dir, "points_and_normals.txt"))[:,:3]
''' preprocess '''
Log.info("[Part 3/5] start preprocessing data")
save_scene_data_multithread(temp_dir, temp_name)
save_scene_data(temp_dir, temp_name)
Log.success("[Part 3/5] finish preprocessing data")
pts_dir = os.path.join(temp_dir,temp_name,"pts")
@@ -154,16 +160,13 @@ class CADStrategyRunner(Runner):
Log.info("[Part 5/5] start running robot")
''' take best seq view '''
cad_model_real_world = cad_model_blender_world.apply_transform(np.linalg.inv(real_world_to_blender_world))
cad_model_real_world_pts = cad_model_real_world.vertices
cad_model_real_world.export(os.path.join(temp_dir, f"{temp_name}_real_world.obj"))
voxel_downsampled_cad_model_real_world_pts = PtsUtil.voxel_downsample_point_cloud(cad_model_real_world_pts, self.voxel_size)
#import ipdb; ipdb.set_trace()
target_scanned_pts = np.concatenate(sample_view_pts_list)
voxel_downsampled_target_scanned_pts = PtsUtil.voxel_downsample_point_cloud(target_scanned_pts, self.voxel_size)
result = dict()
gt_scanned_pts = np.concatenate(render_pts, axis=0)
voxel_down_sampled_gt_scanned_pts = PtsUtil.voxel_downsample_point_cloud(gt_scanned_pts, self.voxel_size)
result["gt_final_coverage_rate_cad"] = ReconstructionUtil.compute_coverage_rate(voxel_downsampled_cad_model_real_world_pts, voxel_down_sampled_gt_scanned_pts, self.voxel_size)
result["gt_final_coverage_rate_cad"] = ReconstructionUtil.compute_coverage_rate(voxel_downsampled_target_scanned_pts, voxel_down_sampled_gt_scanned_pts, self.voxel_size)
step = 1
result["real_coverage_rate_seq"] = []
for cam_to_world in cam_to_world_seq:
@@ -180,7 +183,7 @@ class CADStrategyRunner(Runner):
scanned_pts = np.concatenate(shot_pts_list, axis=0)
voxel_down_sampled_scanned_pts = PtsUtil.voxel_downsample_point_cloud(scanned_pts, self.voxel_size)
voxel_down_sampled_scanned_pts_world = PtsUtil.transform_point_cloud(voxel_down_sampled_scanned_pts, first_cam_to_real_world)
curr_CR = ReconstructionUtil.compute_coverage_rate(voxel_downsampled_cad_model_real_world_pts, voxel_down_sampled_scanned_pts_world, self.voxel_size)
curr_CR = ReconstructionUtil.compute_coverage_rate(voxel_downsampled_target_scanned_pts, voxel_down_sampled_scanned_pts_world, self.voxel_size)
Log.success(f"(step {step}/{len(cam_to_world_seq)}) current coverage: {curr_CR} | gt coverage: {result['gt_final_coverage_rate_cad']}")
result["real_final_coverage_rate"] = curr_CR
result["real_coverage_rate_seq"].append(curr_CR)
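ReconstructionUtil.compute_coverage_rate itself is not part of this diff; given that both arguments are voxel-downsampled with self.voxel_size before each call above, one plausible reading is the fraction of target points that have a scanned point within that distance. A sketch under that assumption, illustrative only:

import numpy as np
from scipy.spatial import cKDTree

def coverage_rate(target_pts: np.ndarray, scanned_pts: np.ndarray, threshold: float) -> float:
    # Fraction of target points with at least one scanned point closer than `threshold`.
    dists, _ = cKDTree(scanned_pts).query(target_pts, k=1)
    return float(np.mean(dists < threshold))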


@@ -11,23 +11,23 @@ class ControlUtil:
cnt_rotation = 0
BASE_TO_WORLD:np.ndarray = np.asarray([
[1, 0, 0, -0.7323],
[0, 1, 0, 0.05926],
[0, 0, 1, -0.21],
[1, 0, 0, -0.61091665],
[0, 1, 0, -0.00309726],
[0, 0, 1, -0.1136743],
[0, 0, 0, 1]
])
CAMERA_TO_GRIPPER:np.ndarray = np.asarray([
[0, -1, 0, 0.01],
[1, 0, 0, 0],
[0, 0, 1, 0.075],
[0, 0, 1, 0.08],
[0, 0, 0, 1]
])
INIT_GRIPPER_POSE:np.ndarray = np.asarray([
[ 0.44808722 , 0.61103352 , 0.65256787 , 0.36428118],
[ 0.51676868 , -0.77267257 , 0.36866054, -0.26519364],
[ 0.72948524 , 0.17203456 ,-0.66200043 , 0.60938969],
[ 0. , 0. , 0. , 1. ]
[ 0.46532393, 0.62171798, 0.63002284, 0.21230963],
[ 0.43205618, -0.78075723, 0.45136491, -0.25127173],
[ 0.77251656, 0.06217437, -0.63193429, 0.499957 ],
[ 0. , 0. , 0. , 1. ],
])
@@ -159,7 +159,7 @@ class ControlUtil:
if __name__ == "__main__":
ControlUtil.connect_robot()
#ControlUtil.franka_reset()
def main_test():
print(ControlUtil.get_curr_gripper_to_base_pose())
ControlUtil.init()
@@ -167,4 +167,7 @@ if __name__ == "__main__":
def rotate_back(rotation):
ControlUtil.rotate_display_table(-rotation)
rotate_back(122.0661478599)
#rotate_back(45.3125623)
ControlUtil.init()
#print(ControlUtil.get_curr_gripper_to_base_pose())
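The calibration constants updated above appear to follow a T_a_to_b naming convention (the transform taking frame-a coordinates into frame b). Under that assumption, the camera pose in the world frame would be chained from a measured gripper-to-base pose roughly as sketched below; this is an interpretation of the names, not code from the commit:

import numpy as np

def cam_to_world(gripper_to_base: np.ndarray,
                 base_to_world: np.ndarray,
                 camera_to_gripper: np.ndarray) -> np.ndarray:
    # Chain the transforms: camera -> gripper -> base -> world.
    return base_to_world @ gripper_to_base @ camera_to_gripper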


@@ -3,6 +3,7 @@ import open3d as o3d
import torch
import trimesh
from scipy.spatial import cKDTree
from utils.pose_util import PoseUtil
class PtsUtil:
@@ -117,8 +118,8 @@ class PtsUtil:
return filtered_sampled_points[:, :3]
@staticmethod
def register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.01):
radius_normal = voxel_size * 2
def old_register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.002):
radius_normal = voxel_size * 3
pipreg = o3d.pipelines.registration
model_pcd = o3d.geometry.PointCloud()
model_pcd.points = o3d.utility.Vector3dVector(model.vertices)
@@ -152,7 +153,7 @@ class PtsUtil:
source_fpfh,
model_fpfh,
mutual_filter=True,
max_correspondence_distance=voxel_size * 1.5,
max_correspondence_distance=voxel_size * 2,
estimation_method=pipreg.TransformationEstimationPointToPoint(False),
ransac_n=4,
checkers=[pipreg.CorrespondenceCheckerBasedOnEdgeLength(0.9)],
@@ -162,14 +163,59 @@ class PtsUtil:
reg_icp = pipreg.registration_icp(
source_downsampled,
model_downsampled,
voxel_size * 2,
voxel_size/2,
reg_ransac.transformation,
pipreg.TransformationEstimationPointToPlane(),
pipreg.ICPConvergenceCriteria(max_iteration=200),
pipreg.ICPConvergenceCriteria(max_iteration=2000),
)
return reg_icp.transformation
@staticmethod
def chamfer_distance(pcl_a, pcl_b):
distances = np.linalg.norm(pcl_a[:, None] - pcl_b, axis=2)
min_distances = np.min(distances, axis=1)
return np.sum(min_distances)
@staticmethod
def register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.008, max_iter=100000):
model_pts = model.vertices
sampled_world_pts = PtsUtil.voxel_downsample_point_cloud(pcl, voxel_size)
sampled_model_pts = PtsUtil.voxel_downsample_point_cloud(model_pts, voxel_size)
best_pose = np.eye(4)
best_pose[:3, 3] = np.mean(sampled_world_pts, axis=0) - np.mean(sampled_model_pts, axis=0)
best_distance = float('inf')
temperature = 1.0
cnt_unchange = 0
for _ in range(max_iter):
print(best_distance)
new_pose = best_pose.copy()
rotation_noise = np.random.randn(3)
rotation_noise /= np.linalg.norm(rotation_noise)
rotation_noise *= temperature
translation_noise = np.random.randn(3) * 0.1 * temperature
rotation_matrix = PoseUtil.get_uniform_rotation(0, 360)
new_pose[:3, :3] = rotation_matrix @ best_pose[:3, :3]
new_pose[:3, 3] += translation_noise
distance = PtsUtil.chamfer_distance(
PtsUtil.transform_point_cloud(sampled_world_pts, new_pose),
sampled_model_pts
)
if distance < best_distance:
best_pose, best_distance = new_pose, distance
cnt_unchange = 0
else:
cnt_unchange += 1
if cnt_unchange > 11110:
break
temperature *= 0.999
print(temperature)
return best_pose
@staticmethod
def get_pts_from_depth(depth, cam_intrinsic, cam_extrinsic):
h, w = depth.shape
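Two notes on the new PtsUtil methods above. chamfer_distance materialises a full N x M distance matrix, which can exhaust memory on large clouds; an equivalent one-sided Chamfer distance can be computed with the already-imported cKDTree, as sketched below (illustrative, not part of this commit). Also, inside register() the temperature-scaled rotation_noise is computed but never applied: each iteration applies a fresh uniform random rotation (PoseUtil.get_uniform_rotation) on top of the best orientation, so the temperature only shrinks the translation noise.

import numpy as np
from scipy.spatial import cKDTree

def chamfer_distance_kdtree(pcl_a: np.ndarray, pcl_b: np.ndarray) -> float:
    # One-sided Chamfer distance: sum over pcl_a of the distance to its nearest neighbour in pcl_b.
    dists, _ = cKDTree(pcl_b).query(pcl_a, k=1)
    return float(np.sum(dists))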