update

parent 3fe74eb6eb
commit 8d43d4de60
@@ -91,8 +91,6 @@ class CADStrategyRunner(Runner):
         cad_model_real_world.export(os.path.join(temp_dir, f"real_world_{temp_name}.obj"))
         cad_model_blender_world:trimesh.Trimesh = cad_model.apply_transform(real_world_to_blender_world)
-
-
 
         with tempfile.TemporaryDirectory() as temp_dir:
             temp_dir = "/home/yan20/nbv_rec/project/franka_control/temp_output"
             cad_model_blender_world.export(os.path.join(temp_dir, f"{temp_name}.obj"))
@@ -173,7 +171,7 @@ class CADStrategyRunner(Runner):
             try:
                 ControlUtil.move_to(cam_to_world)
                 ''' get world pts '''
-                time.sleep(1)
+                time.sleep(0.5)
                 view_data = CommunicateUtil.get_view_data()
                 if view_data is None:
                     Log.error("Failed to get view data")
@@ -224,23 +222,3 @@ if __name__ == "__main__":
     model_path = r"C:\Users\hofee\Downloads\mesh.obj"
     model = trimesh.load(model_path)
 
-    ''' test register '''
-    # test_pts_L = np.load(r"C:\Users\hofee\Downloads\0.npy")
-
-    # import open3d as o3d
-    # def add_noise(points, translation, rotation):
-    #     R = o3d.geometry.get_rotation_matrix_from_axis_angle(rotation)
-    #     noisy_points = points @ R.T + translation
-    #     return noisy_points
-
-    # translation_noise = np.random.uniform(-0.5, 0.5, size=3)
-    # rotation_noise = np.random.uniform(-np.pi/4, np.pi/4, size=3)
-    # noisy_pts_L = add_noise(test_pts_L, translation_noise, rotation_noise)
-
-    # cad_to_cam_L = PtsUtil.register(noisy_pts_L, model)
-
-    # cad_pts_L = PtsUtil.transform_point_cloud(noisy_pts_L, cad_to_cam_L)
-    # np.savetxt(r"test.txt", cad_pts_L)
-    # np.savetxt(r"src.txt", noisy_pts_L)
-
-
@@ -61,8 +61,8 @@ class ControlUtil:
     @staticmethod
     def rotate_display_table(degree):
         turn_directions = {
-            "left": 0,
-            "right": 1
+            "left": 1,
+            "right": 0
         }
         ControlUtil.cnt_rotation += degree
         print(f"Table rotated {ControlUtil.cnt_rotation} degree")
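Note: the hunk above only swaps the numeric codes assigned to "left" and "right"; how a code is chosen from the signed degree is outside the hunk. A minimal sketch, assuming (hypothetically) that the sign of `degree` selects the direction and only its magnitude is sent to the table controller:

def split_rotation_command(degree: float):
    # hypothetical helper, not part of the commit: direction code from the sign,
    # magnitude sent separately; codes follow the updated mapping above
    turn_directions = {"left": 1, "right": 0}
    direction = turn_directions["left"] if degree >= 0 else turn_directions["right"]
    return direction, abs(degree)

# split_rotation_command(-30) -> (0, 30)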
@@ -114,7 +114,7 @@ class ControlUtil:
         min_new_cam_to_world = None
         for display_table_rot in np.linspace(0.1,360, 1800):
             display_table_rot_z_pose = ControlUtil.get_z_axis_rot_mat(display_table_rot)
-            new_cam_to_world = display_table_rot_z_pose @ cam_to_world
+            new_cam_to_world = np.linalg.inv(display_table_rot_z_pose) @ cam_to_world
             if ControlUtil.check_limit(new_cam_to_world):
                 if display_table_rot < min_display_table_rot:
                     min_display_table_rot, min_new_cam_to_world = display_table_rot, new_cam_to_world
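Note: the fix above replaces `display_table_rot_z_pose @ cam_to_world` with its inverse. Turning the display table by +θ about z moves the object, so the equivalent camera pose relative to the table is obtained by rotating the camera by −θ, i.e. applying the inverse of the z-rotation. A self-contained numpy check of that identity (not project code):

import numpy as np

def rot_z(deg: float) -> np.ndarray:
    # homogeneous rotation about z by `deg` degrees
    t = np.deg2rad(deg)
    c, s = np.cos(t), np.sin(t)
    return np.array([[c, -s, 0, 0],
                     [s,  c, 0, 0],
                     [0,  0, 1, 0],
                     [0,  0, 0, 1.0]])

# the inverse of a z-rotation is the rotation by the opposite angle
assert np.allclose(np.linalg.inv(rot_z(30.0)), rot_z(-30.0))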
@@ -147,11 +147,12 @@ class ControlUtil:
     @staticmethod
     def move_to(pose: np.ndarray):
         rot_degree, cam_to_world = ControlUtil.solve_display_table_rot_and_cam_to_world(pose)
-        exec_time = rot_degree/9
+        exec_time = abs(rot_degree)/9
         start_time = time.time()
         ControlUtil.rotate_display_table(rot_degree)
         ControlUtil.set_pose(cam_to_world)
         end_time = time.time()
         print(f"Move to pose with rotation {rot_degree} degree, exec time: {end_time - start_time}|exec time: {exec_time}")
         if end_time - start_time < exec_time:
             time.sleep(exec_time - (end_time - start_time))
 
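Note: `exec_time = abs(rot_degree)/9` implies the table is assumed to turn at roughly 9 degrees per second, and `abs()` keeps the expected duration positive when the solver returns a negative rotation. A minimal sketch of the same wait logic, with the 9 deg/s figure taken from this hunk as an assumption:

import time

def wait_for_table(rot_degree: float, start_time: float, deg_per_sec: float = 9.0) -> None:
    # block until the table has had enough time to finish a signed rotation
    expected = abs(rot_degree) / deg_per_sec   # abs() so negative rotations still wait
    elapsed = time.time() - start_time
    if elapsed < expected:
        time.sleep(expected - elapsed)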
@@ -159,7 +160,7 @@ class ControlUtil:
 
 if __name__ == "__main__":
     ControlUtil.connect_robot()
-    #ControlUtil.franka_reset()
+    # ControlUtil.franka_reset()
     def main_test():
         print(ControlUtil.get_curr_gripper_to_base_pose())
         ControlUtil.init()
@@ -167,7 +168,59 @@ if __name__ == "__main__":
     def rotate_back(rotation):
         ControlUtil.rotate_display_table(-rotation)
 
     #rotate_back(45.3125623)
     #main_test()
+    import sys; sys.path.append("/home/yan20/nbv_rec/project/franka_control")
+    from utils.communicate_util import CommunicateUtil
+    import ipdb
+    ControlUtil.init()
+    #print(ControlUtil.get_curr_gripper_to_base_pose())
+    view_data_0 = CommunicateUtil.get_view_data(init=True)
+    depth_extrinsics_0 = view_data_0["depth_extrinsics"]
+    cam_to_world_0 = ControlUtil.get_pose()
+    print("cam_extrinsics_0")
+    print(depth_extrinsics_0)
+    print("cam_to_world_0")
+    print(cam_to_world_0)
+
+    ipdb.set_trace()
+    TEST_POSE:np.ndarray = np.asarray([
+        [ 0.46532393, 0.62171798, 0.63002284, 0.30230963],
+        [ 0.43205618, -0.78075723, 0.45136491, -0.29127173],
+        [ 0.77251656, 0.06217437, -0.63193429, 0.559957 ],
+        [ 0. , 0. , 0. , 1. ],
+    ])
+    TEST_POSE_CAM_TO_WORLD = ControlUtil.BASE_TO_WORLD @ TEST_POSE @ ControlUtil.CAMERA_TO_GRIPPER
+    ControlUtil.move_to(TEST_POSE_CAM_TO_WORLD)
+    view_data_1 = CommunicateUtil.get_view_data()
+    depth_extrinsics_1 = view_data_1["depth_extrinsics"]
+    depth_extrinsics_1[:3,3] = depth_extrinsics_1[:3,3] / 1000
+    cam_to_world_1 = ControlUtil.get_pose()
+    print("cam_extrinsics_1")
+    print(depth_extrinsics_1)
+    print("cam_to_world_1")
+    print(TEST_POSE_CAM_TO_WORLD)
+    actual_cam_to_world_1 = cam_to_world_0 @ depth_extrinsics_1
+    print("actual_cam_to_world_1")
+    print(actual_cam_to_world_1)
+    ipdb.set_trace()
+    TEST_POSE_2:np.ndarray = np.asarray(
+        [[ 0.74398544, -0.61922696, 0.251049, 0.47000935],
+         [-0.47287207, -0.75338888, -0.45692666, 0.20843903],
+         [ 0.47207883, 0.22123272, -0.85334192, 0.57863381],
+         [ 0. , 0. , 0. , 1. ]]
+    )
+    TEST_POSE_CAM_TO_WORLD_2 = ControlUtil.BASE_TO_WORLD @ TEST_POSE_2 @ ControlUtil.CAMERA_TO_GRIPPER
+
+    #ControlUtil.move_to(TEST_POSE_CAM_TO_WORLD_2)
+    ControlUtil.set_pose(TEST_POSE_CAM_TO_WORLD_2)
+    view_data_2 = CommunicateUtil.get_view_data()
+    depth_extrinsics_2 = view_data_2["depth_extrinsics"]
+    depth_extrinsics_2[:3,3] = depth_extrinsics_2[:3,3] / 1000
+    cam_to_world_2 = ControlUtil.get_pose()
+    print("cam_extrinsics_2")
+    print(depth_extrinsics_2)
+    print("cam_to_world_2")
+    print(TEST_POSE_CAM_TO_WORLD_2)
+    actual_cam_to_world_2 = cam_to_world_0 @ depth_extrinsics_2
+    print("actual_cam_to_world_2")
+    print(actual_cam_to_world_2)
+    ipdb.set_trace()
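Note: the added script checks the hand-eye chain by comparing the sensor-reported `depth_extrinsics` (translation converted from millimeters to meters) against poses built as `BASE_TO_WORLD @ gripper_pose @ CAMERA_TO_GRIPPER`. A small helper, not part of the commit, that quantifies the gap between two 4x4 poses during the ipdb sessions:

import numpy as np

def pose_error(T_a: np.ndarray, T_b: np.ndarray):
    # returns (rotation error in degrees, translation error in meters)
    dR = T_a[:3, :3] @ T_b[:3, :3].T
    ang = np.degrees(np.arccos(np.clip((np.trace(dR) - 1.0) / 2.0, -1.0, 1.0)))
    trans = float(np.linalg.norm(T_a[:3, 3] - T_b[:3, 3]))
    return ang, trans

# e.g. pose_error(TEST_POSE_CAM_TO_WORLD, actual_cam_to_world_1)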
@@ -178,43 +178,143 @@ class PtsUtil:
         return np.sum(min_distances)
 
     @staticmethod
-    def register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.008, max_iter=100000):
-        model_pts = model.vertices
-        sampled_world_pts = PtsUtil.voxel_downsample_point_cloud(pcl, voxel_size)
-        sampled_model_pts = PtsUtil.voxel_downsample_point_cloud(model_pts, voxel_size)
-        best_pose = np.eye(4)
-        best_pose[:3, 3] = np.mean(sampled_world_pts, axis=0) - np.mean(sampled_model_pts, axis=0)
-        best_distance = float('inf')
-        temperature = 1.0
-        cnt_unchange = 0
-        for _ in range(max_iter):
-            print(best_distance)
-            new_pose = best_pose.copy()
-            rotation_noise = np.random.randn(3)
-            rotation_noise /= np.linalg.norm(rotation_noise)
-            rotation_noise *= temperature
-            translation_noise = np.random.randn(3) * 0.1 * temperature
-            rotation_matrix = PoseUtil.get_uniform_rotation(0, 360)
-            new_pose[:3, :3] = rotation_matrix @ best_pose[:3, :3]
-            new_pose[:3, 3] += translation_noise
-
-            distance = PtsUtil.chamfer_distance(
-                PtsUtil.transform_point_cloud(sampled_world_pts, new_pose),
-                sampled_model_pts
-            )
-
-            if distance < best_distance:
-                best_pose, best_distance = new_pose, distance
-                cnt_unchange = 0
-            else:
-                cnt_unchange += 1
-                if cnt_unchange > 11110:
-                    break
-
-            temperature *= 0.999
-            print(temperature)
-
-        return best_pose
+    def multi_scale_icp(
+        source, target, voxel_size_range, init_transformation=None, steps=20
+    ):
+        pipreg = o3d.pipelines.registration
+
+        if init_transformation is not None:
+            current_transformation = init_transformation
+        else:
+            current_transformation = np.identity(4)
+        cnt = 0
+        best_score = 1e10
+        best_reg = None
+        voxel_sizes = []
+        for i in range(steps):
+            voxel_sizes.append(
+                voxel_size_range[0]
+                + i * (voxel_size_range[1] - voxel_size_range[0]) / steps
+            )
+
+        for voxel_size in voxel_sizes:
+            radius_normal = voxel_size * 2
+            source_downsampled = source.voxel_down_sample(voxel_size)
+            source_downsampled.estimate_normals(
+                search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                    radius=radius_normal, max_nn=30
+                )
+            )
+            target_downsampled = target.voxel_down_sample(voxel_size)
+            target_downsampled.estimate_normals(
+                search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                    radius=radius_normal, max_nn=30
+                )
+            )
+
+            reg_icp = pipreg.registration_icp(
+                source_downsampled,
+                target_downsampled,
+                voxel_size * 2,
+                current_transformation,
+                pipreg.TransformationEstimationPointToPlane(),
+                pipreg.ICPConvergenceCriteria(max_iteration=500),
+            )
+            cnt += 1
+            if reg_icp.fitness == 0:
+                score = 1e10
+            else:
+                score = reg_icp.inlier_rmse / reg_icp.fitness
+            if score < best_score:
+                best_score = score
+                best_reg = reg_icp
+
+        return best_reg, best_score
+
+    @staticmethod
+    def multi_scale_ransac(source_downsampled, target_downsampled, source_fpfh, model_fpfh, voxel_size_range, steps=20):
+        pipreg = o3d.pipelines.registration
+        cnt = 0
+        best_score = 1e10
+        best_reg = None
+        voxel_sizes = []
+        for i in range(steps):
+            voxel_sizes.append(
+                voxel_size_range[0]
+                + i * (voxel_size_range[1] - voxel_size_range[0]) / steps
+            )
+
+        for voxel_size in voxel_sizes:
+            reg_ransac = pipreg.registration_ransac_based_on_feature_matching(
+                source_downsampled,
+                target_downsampled,
+                source_fpfh,
+                model_fpfh,
+                mutual_filter=True,
+                max_correspondence_distance=voxel_size*2,
+                estimation_method=pipreg.TransformationEstimationPointToPoint(False),
+                ransac_n=4,
+                checkers=[pipreg.CorrespondenceCheckerBasedOnEdgeLength(0.9)],
+                criteria=pipreg.RANSACConvergenceCriteria(8000000, 500),
+            )
+            cnt += 1
+            if reg_ransac.fitness == 0:
+                score = 1e10
+            else:
+                score = reg_ransac.inlier_rmse / reg_ransac.fitness
+            if score < best_score:
+                best_score = score
+                best_reg = reg_ransac
+
+        return best_reg, best_score
+
+    @staticmethod
+    def register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.01):
+        radius_normal = voxel_size * 2
+        pipreg = o3d.pipelines.registration
+        model_pcd = o3d.geometry.PointCloud()
+        model_pcd.points = o3d.utility.Vector3dVector(model.vertices)
+        model_downsampled = model_pcd.voxel_down_sample(voxel_size)
+        model_downsampled.estimate_normals(
+            search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                radius=radius_normal, max_nn=30
+            )
+        )
+        model_fpfh = pipreg.compute_fpfh_feature(
+            model_downsampled,
+            o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
+        )
+
+        source_pcd = o3d.geometry.PointCloud()
+        source_pcd.points = o3d.utility.Vector3dVector(pcl)
+        source_downsampled = source_pcd.voxel_down_sample(voxel_size)
+        source_downsampled.estimate_normals(
+            search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                radius=radius_normal, max_nn=30
+            )
+        )
+        source_fpfh = pipreg.compute_fpfh_feature(
+            source_downsampled,
+            o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
+        )
+
+        reg_ransac, ransac_best_score = PtsUtil.multi_scale_ransac(
+            source_downsampled,
+            model_downsampled,
+            source_fpfh,
+            model_fpfh,
+            voxel_size_range=(0.03, 0.005),
+            steps=3,
+        )
+        reg_icp, icp_best_score = PtsUtil.multi_scale_icp(
+            source_downsampled,
+            model_downsampled,
+            voxel_size_range=(0.02, 0.001),
+            init_transformation=reg_ransac.transformation,
+            steps=50,
+        )
+        return reg_icp.transformation
 
     @staticmethod
     def get_pts_from_depth(depth, cam_intrinsic, cam_extrinsic):
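Note: the new `PtsUtil.register` runs a coarse FPFH + RANSAC alignment and refines it with multi-scale point-to-plane ICP, returning a single 4x4 transform that maps the input cloud toward the model (Open3D's source-to-target convention). A hedged usage sketch; the module path and file names below are placeholders, not from the commit:

import numpy as np
import trimesh
from utils.pts_util import PtsUtil   # hypothetical import path for the class above

model = trimesh.load("mesh.obj")           # CAD model to register against (placeholder)
scan_pts = np.load("scan_points.npy")      # Nx3 scanned points in meters (placeholder)

pcl_to_model = PtsUtil.register(scan_pts, model)             # 4x4 from the ICP stage
aligned = scan_pts @ pcl_to_model[:3, :3].T + pcl_to_model[:3, 3]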