Generate random object arrangements

Michel Breyer 2021-05-26 14:46:12 +02:00
parent 914b808179
commit 062c541b56
6 changed files with 161 additions and 104 deletions


@@ -1,8 +1,7 @@
#!/usr/bin/env python3
import argparse
import actionlib
import argparse
import cv_bridge
import numpy as np
import rospy
@@ -12,43 +11,81 @@ from geometry_msgs.msg import Pose
from sensor_msgs.msg import JointState, Image, CameraInfo
import std_srvs.srv
from robot_utils.controllers import CartesianPoseController
from robot_utils.ros.conversions import *
from robot_utils.ros.tf import TransformTree
from simulation import BtPandaSim
CONTROLLER_UPDATE_RATE = 60
JOINT_PUBLISH_RATE = 60
CAM_PUBLISH_RATE = 5
from robot_tools.controllers import CartesianPoseController
from robot_tools.ros.conversions import *
from robot_tools.ros.tf import TransformTree
from simulation import Simulation
class BtSimNode:
def __init__(self, gui):
self.sim = BtPandaSim(gui=gui, sleep=False)
self.sim = Simulation(gui=gui, sleep=False)
self.controller = CartesianPoseController(self.sim.arm)
self.controller_update_rate = 60
self.joint_state_publish_rate = 60
self.camera_publish_rate = 5
self.cv_bridge = cv_bridge.CvBridge()
self.tf_tree = TransformTree()
self.reset_server = rospy.Service("reset", std_srvs.srv.Trigger, self.reset)
self.setup_robot_topics()
self.setup_camera_topics()
self.setup_gripper_actions()
self.broadcast_transforms()
rospy.Service("reset", std_srvs.srv.Trigger, self.reset)
self.step_cnt = 0
self.reset_requested = False
def setup_robot_topics(self):
self.joint_pub = rospy.Publisher("joint_states", JointState, queue_size=10)
rospy.Subscriber("cmd", Pose, self.target_pose_cb)
def target_pose_cb(self, msg):
self.controller.set_target(from_pose_msg(msg))
def setup_camera_topics(self):
self.cam_info_msg = to_camera_info_msg(self.sim.camera.intrinsic)
self.cam_info_msg.header.frame_id = "cam_optical_frame"
self.cam_info_pub = rospy.Publisher(
"/cam/depth/camera_info",
CameraInfo,
queue_size=10,
)
self.depth_pub = rospy.Publisher("/cam/depth/image_raw", Image, queue_size=10)
def setup_gripper_actions(self):
self.move_server = actionlib.SimpleActionServer(
"move",
"/franka_gripper/move",
franka_gripper.msg.MoveAction,
self.move,
False,
)
self.joint_pub = rospy.Publisher("joint_states", JointState, queue_size=10)
self.cam_info_pub = rospy.Publisher(
"/cam/depth/camera_info", CameraInfo, queue_size=10
)
self.cam_info_msg = to_camera_info_msg(self.sim.camera.info)
self.cam_info_msg.header.frame_id = "cam_optical_frame"
self.depth_pub = rospy.Publisher("/cam/depth/image_raw", Image, queue_size=10)
rospy.Subscriber("target", Pose, self.target_pose_cb)
self.step_cnt = 0
self.reset_requested = False
self.move_server.start()
T_base_task = Transform(Rotation.identity(), np.r_[0.2, -0.15, 0.1])
self.tf_tree.broadcast_static(T_base_task, "panda_link0", "task")
def move(self, goal):
self.sim.gripper.move(goal.width)
self.move_server.set_succeeded()
def broadcast_transforms(self):
msgs = []
msg = geometry_msgs.msg.TransformStamped()
msg.header.stamp = rospy.Time.now()
msg.header.frame_id = "world"
msg.child_frame_id = "panda_link0"
msg.transform = to_transform_msg(self.sim.T_W_B)
msgs.append(msg)
msg = geometry_msgs.msg.TransformStamped()
msg.header.stamp = rospy.Time.now()
msg.header.frame_id = "world"
msg.child_frame_id = "task"
msg.transform = to_transform_msg(Transform.translation(self.sim.origin))
msgs.append(msg)
self.tf_tree.static_broadcaster.sendTransform(msgs)
def reset(self, req):
self.reset_requested = True
@@ -69,11 +106,11 @@ class BtSimNode:
rate.sleep()
def handle_updates(self):
if self.step_cnt % int(self.sim.rate / CONTROLLER_UPDATE_RATE) == 0:
if self.step_cnt % int(self.sim.rate / self.controller_update_rate) == 0:
self.controller.update()
if self.step_cnt % int(self.sim.rate / JOINT_PUBLISH_RATE) == 0:
if self.step_cnt % int(self.sim.rate / self.joint_state_publish_rate) == 0:
self.publish_joint_state()
if self.step_cnt % int(self.sim.rate / CAM_PUBLISH_RATE) == 0:
if self.step_cnt % int(self.sim.rate / self.camera_publish_rate) == 0:
self.publish_cam_info()
self.publish_cam_imgs()
@@ -95,18 +132,11 @@ class BtSimNode:
self.cam_info_pub.publish(self.cam_info_msg)
def publish_cam_imgs(self):
_, depth = self.sim.camera.update()
_, depth = self.sim.camera.get_image()
depth_msg = self.cv_bridge.cv2_to_imgmsg(depth)
depth_msg.header.stamp = rospy.Time.now()
self.depth_pub.publish(depth_msg)
def move(self, goal):
self.sim.gripper.move(goal.width)
self.move_server.set_succeeded()
def target_pose_cb(self, msg):
self.controller.set_target(from_pose_msg(msg))
def main(args):
rospy.init_node("bt_sim")


@@ -2,7 +2,11 @@ active_grasp:
frame_id: task
base_frame_id: panda_link0
ee_frame_id: panda_hand
ee_grasp_offset: [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.06]
camera_name: cam
ee_grasp_offset: [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.065]
camera:
frame_id: cam_optical_frame
info_topic: /cam/depth/camera_info
depth_topic: /cam/depth/image_raw
vgn:
model: $(find vgn)/data/models/vgn_conv.pth
finger_depth: 0.05


@@ -1,7 +1,7 @@
<?xml version="1.0" ?>
<launch>
<!-- Arguments -->
<arg name="rviz" default="true" />
<arg name="launch_rviz" default="true" />
<!-- Load parameters. -->
<rosparam command="load" file="$(find active_grasp)/config/active_grasp.yaml" subst_value="true" />
@@ -12,5 +12,5 @@
<node pkg="robot_state_publisher" type="robot_state_publisher" name="robot_state_publisher" />
<!-- Visualize the robot. -->
<node pkg="rviz" type="rviz" name="rviz" args="-d $(find active_grasp)/launch/panda_visualization.rviz" if="$(arg rviz)" />
<node pkg="rviz" type="rviz" name="rviz" args="-d $(find active_grasp)/launch/panda_visualization.rviz" if="$(arg launch_rviz)" />
</launch>


@@ -1,17 +1,16 @@
from pathlib import Path
import cv_bridge
import numpy as np
from pathlib import Path
import rospy
import scipy.interpolate
from geometry_msgs.msg import Pose
from sensor_msgs.msg import Image, CameraInfo
from robot_utils.spatial import Rotation, Transform
from robot_utils.ros.conversions import *
from robot_utils.ros.tf import TransformTree
from robot_utils.perception import *
from robot_tools.spatial import Rotation, Transform
from robot_tools.ros.conversions import *
from robot_tools.ros.tf import TransformTree
from robot_tools.perception import *
from vgn import vis
from vgn.detection import VGN, compute_grasps
@@ -21,6 +20,8 @@ def get_policy(name):
return SingleViewBaseline()
elif name == "fixed-trajectory":
return FixedTrajectoryBaseline()
elif name == "mvp":
return MultiViewPicking()
else:
raise ValueError("{} policy does not exist.".format(name))
@@ -36,13 +37,13 @@ class Policy:
self.ee_frame_id = params["ee_frame_id"]
self.tf = TransformTree()
self.H_EE_G = Transform.from_list(params["ee_grasp_offset"])
self.target_pose_pub = rospy.Publisher("/target", Pose, queue_size=10)
self.target_pose_pub = rospy.Publisher("/cmd", Pose, queue_size=10)
# Camera
camera_name = params["camera_name"]
self.cam_frame_id = camera_name + "_optical_frame"
depth_topic = camera_name + "/depth/image_raw"
msg = rospy.wait_for_message(camera_name + "/depth/camera_info", CameraInfo)
self.cam_frame_id = rospy.get_param("~camera/frame_id")
info_topic = rospy.get_param("~camera/info_topic")
depth_topic = rospy.get_param("~camera/depth_topic")
msg = rospy.wait_for_message(info_topic, CameraInfo, rospy.Duration(2.0))
self.intrinsic = from_camera_info_msg(msg)
self.cv_bridge = cv_bridge.CvBridge()
@@ -50,36 +51,29 @@ class Policy:
self.tsdf = UniformTSDFVolume(0.3, 40)
# VGN
params = rospy.get_param("vgn")
self.vgn = VGN(Path(params["model"]))
self.vgn = VGN(Path(rospy.get_param("vgn/model")))
rospy.sleep(1.0)
self.H_B_T = self.tf.lookup(self.base_frame_id, self.frame_id, rospy.Time.now())
rospy.Subscriber(depth_topic, Image, self.sensor_cb, queue_size=1)
vis.draw_workspace(0.3)
def sensor_cb(self, msg):
self.last_depth_img = self.cv_bridge.imgmsg_to_cv2(msg).astype(np.float32)
self.last_extrinsic = self.tf.lookup(
self.cam_frame_id, self.frame_id, msg.header.stamp, rospy.Duration(0.1)
)
def get_tsdf_grid(self):
map_cloud = self.tsdf.get_map_cloud()
points = np.asarray(map_cloud.points)
distances = np.asarray(map_cloud.colors)[:, 0]
return create_grid_from_map_cloud(points, distances, self.tsdf.voxel_size)
def plan_best_grasp(self):
tsdf_grid = self.get_tsdf_grid()
tsdf_grid = self.tsdf.get_grid()
out = self.vgn.predict(tsdf_grid)
grasps = compute_grasps(out, voxel_size=self.tsdf.voxel_size)
grasps = compute_grasps(self.tsdf.voxel_size, out)
vis.draw_tsdf(tsdf_grid, self.tsdf.voxel_size)
vis.draw_grasps(grasps, 0.05)
# Ensure that the camera is pointing forward.
if len(grasps) == 0:
return
grasp = grasps[0]
rot = grasp.pose.rotation
axis = rot.as_matrix()[:, 0]
@@ -156,3 +150,7 @@ class FixedTrajectoryBaseline(Policy):
self.origin + np.r_[self.radius * np.cos(t), self.radius * np.sin(t), 0.0]
)
self.target_pose_pub.publish(to_pose_msg(self.target))
class MultiViewPicking(Policy):
pass

run.py

@@ -1,23 +1,21 @@
import argparse
import numpy as np
import rospy
from geometry_msgs.msg import Pose
import std_srvs.srv
from policies import get_policy
from robot_utils.ros.conversions import *
from robot_utils.ros.panda import PandaGripperRosInterface
from robot_tools.ros.conversions import *
from robot_tools.ros.panda import PandaGripperClient
class GraspController:
def __init__(self, policy, rate):
self.policy = policy
self.rate = rate
self.reset_client = rospy.ServiceProxy("reset", std_srvs.srv.Trigger)
self.target_pose_pub = rospy.Publisher("target", Pose, queue_size=10)
self.gripper = PandaGripperRosInterface()
self.reset_client = rospy.ServiceProxy("/reset", std_srvs.srv.Trigger)
self.target_pose_pub = rospy.Publisher("/cmd", Pose, queue_size=10)
self.gripper = PandaGripperClient()
rospy.sleep(1.0)
def run(self):
@@ -28,6 +26,7 @@ class GraspController:
def reset(self):
req = std_srvs.srv.TriggerRequest()
self.reset_client(req)
rospy.sleep(1.0) # wait for states to be updated
def explore(self):
r = rospy.Rate(self.rate)
@@ -40,6 +39,10 @@ class GraspController:
self.gripper.move(0.08)
rospy.sleep(1.0)
target = self.policy.best_grasp
if not target:
return
self.target_pose_pub.publish(to_pose_msg(target))
rospy.sleep(2.0)
self.gripper.move(0.0)
@@ -51,18 +54,30 @@ class GraspController:
rospy.sleep(1.0)
def main(args):
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
"--policy",
type=str,
choices=[
"single-view",
"fixed-trajectory",
],
)
parser.add_argument("--rate", type=int, default=10)
return parser
def main():
rospy.init_node("active_grasp")
parser = create_parser()
args = parser.parse_args()
policy = get_policy(args.policy)
gc = GraspController(policy, args.rate)
gc.run()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--policy", type=str, choices=["single-view", "fixed-trajectory"]
)
parser.add_argument("--rate", type=int, default=10)
args = parser.parse_args()
main(args)
main()


@@ -1,39 +1,49 @@
from pathlib import Path
import pybullet as p
import rospkg
from robot_utils.btsim import *
from robot_utils.spatial import Rotation, Transform
from robot_tools.bullet import *
from robot_tools.spatial import Rotation, Transform
from robot_tools.utils import scan_dir_for_urdfs
class BtPandaSim(BtSim):
class Simulation(BtManipulationSim):
def __init__(self, gui=True, sleep=True):
super().__init__(gui, sleep)
p.configureDebugVisualizer(p.COV_ENABLE_GUI, 0)
p.resetDebugVisualizerCamera(1.4, 50, -35, [0.0, 0.0, 0.6])
self.arm = BtPandaArm()
self.find_object_urdfs()
self.add_table()
self.add_robot()
def find_object_urdfs(self):
rospack = rospkg.RosPack()
root = Path(rospack.get_path("vgn")) / "data/urdfs/packed/test"
self.urdfs = scan_dir_for_urdfs(root)
def add_table(self):
p.loadURDF("plane.urdf")
ori = Rotation.from_rotvec(np.array([0, 0, np.pi / 2])).as_quat()
p.loadURDF("table/table.urdf", baseOrientation=ori, useFixedBase=True)
self.length = 0.3
self.origin = [-0.2, -0.5 * self.length, 0.5]
def add_robot(self):
self.T_W_B = Transform(Rotation.identity(), np.r_[-0.6, 0.0, 0.5])
self.arm = BtPandaArm(self.T_W_B)
self.gripper = BtPandaGripper(self.arm)
self.camera = BtCamera(
self.arm, 11, 320, 240, 1.047, 0.1, 1.0
) # link 11 corresponds to cam_optical_frame
self.T_W_B = Transform(Rotation.identity(), np.r_[-0.6, 0.0, 0.4])
self.load_table()
self.load_robot()
self.load_objects()
self.camera = BtCamera(320, 240, 1.047, 0.1, 1.0, self.arm.uid, 11)
def reset(self):
self.remove_all_objects()
urdfs = np.random.choice(self.urdfs, 4)
self.add_random_arrangement(urdfs, np.r_[self.origin[:2], 0.625], self.length)
self.set_initial_arm_configuration()
def set_initial_arm_configuration(self):
q = self.arm.configurations["ready"]
q[0] = np.deg2rad(np.random.uniform(-10, 10))
q[5] = np.deg2rad(np.random.uniform(90, 105))
for i, q_i in enumerate(q):
p.resetJointState(self.arm.uid, i, q_i, 0)
def load_table(self):
p.loadURDF("plane.urdf")
p.loadURDF(
"table/table.urdf",
baseOrientation=Rotation.from_rotvec(np.array([0, 0, np.pi / 2])).as_quat(),
useFixedBase=True,
)
def load_robot(self):
self.arm.load(self.T_W_B)
def load_objects(self):
p.loadURDF("cube_small.urdf", [-0.2, 0.0, 0.8])