success
This commit is contained in:
22
baselines/grasping/GSNet/graspnetAPI/examples/exam_check_data.py
Executable file
22
baselines/grasping/GSNet/graspnetAPI/examples/exam_check_data.py
Executable file
@@ -0,0 +1,22 @@
|
||||
__author__ = 'mhgou'
__version__ = '1.0'

from graspnetAPI import GraspNet

# GraspNetAPI example: verify that the downloaded dataset is complete.
# Edit graspnet_root below to point at your local copy of the dataset.

if __name__ == '__main__':

    ####################################################################
    graspnet_root = '/home/gmh/graspnet' ### ROOT PATH FOR GRASPNET ###
    ####################################################################

    # Run the completeness check once for the kinect camera ...
    g = GraspNet(graspnet_root, 'kinect', 'all')
    if g.checkDataCompleteness():
        print('Check for kinect passed')

    # ... and once for the realsense camera.
    g = GraspNet(graspnet_root, 'realsense', 'all')
    if g.checkDataCompleteness():
        print('Check for realsense passed')
|
76
baselines/grasping/GSNet/graspnetAPI/examples/exam_convert.py
Executable file
76
baselines/grasping/GSNet/graspnetAPI/examples/exam_convert.py
Executable file
@@ -0,0 +1,76 @@
|
||||
__author__ = 'mhgou'
__version__ = '1.0'

from graspnetAPI import GraspNet
import cv2
import open3d as o3d

# GraspNetAPI example: convert between rectangle and 6d grasp formats.
# Edit graspnet_root below to point at your local copy of the dataset.

camera = 'kinect'
sceneId = 5
annId = 3

####################################################################
graspnet_root = '/home/gmh/graspnet' # ROOT PATH FOR GRASPNET
####################################################################

g = GraspNet(graspnet_root, camera=camera, split='all')

# Image and depth map of the chosen annotation.
bgr = g.loadBGR(sceneId=sceneId, camera=camera, annId=annId)
depth = g.loadDepth(sceneId=sceneId, camera=camera, annId=annId)

# ---- Rect to 6d ----
rect_grasp_group = g.loadGrasp(sceneId=sceneId, camera=camera, annId=annId, fric_coef_thresh=0.2, format='rect')

# Single RectGrasp -> Grasp.
rect_grasp = rect_grasp_group.random_sample(1)[0]
img = rect_grasp.to_opencv_image(bgr)

cv2.imshow('rect grasp', img)
cv2.waitKey(0)
cv2.destroyAllWindows()

# Conversion needs a valid depth at the grasp point, hence the None check.
grasp = rect_grasp.to_grasp(camera, depth)
if grasp is not None:
    geometry = [g.loadScenePointCloud(sceneId, camera, annId), grasp.to_open3d_geometry()]
    o3d.visualization.draw_geometries(geometry)
else:
    print('No result because the depth is invalid, please try again!')

# RectGraspGroup -> GraspGroup.
sample_rect_grasp_group = rect_grasp_group.random_sample(20)
img = sample_rect_grasp_group.to_opencv_image(bgr)
cv2.imshow('rect grasp', img)
cv2.waitKey(0)
cv2.destroyAllWindows()

grasp_group = sample_rect_grasp_group.to_grasp_group(camera, depth)
if grasp_group is not None:
    geometry = [g.loadScenePointCloud(sceneId, camera, annId)]
    geometry += grasp_group.to_open3d_geometry_list()
    o3d.visualization.draw_geometries(geometry)

# ---- 6d to Rect ----
_6d_grasp_group = g.loadGrasp(sceneId=sceneId, camera=camera, annId=annId, fric_coef_thresh=0.2, format='6d')

# NOTE: single Grasp -> RectGrasp is not provided, as only very few 6d
# grasps can be converted to a rectangle grasp.

# GraspGroup -> RectGraspGroup.
sample_6d_grasp_group = _6d_grasp_group.random_sample(20)
geometry = [g.loadScenePointCloud(sceneId, camera, annId)]
geometry += sample_6d_grasp_group.to_open3d_geometry_list()
o3d.visualization.draw_geometries(geometry)

rect_grasp_group = _6d_grasp_group.to_rect_grasp_group(camera)
img = rect_grasp_group.to_opencv_image(bgr)

cv2.imshow('rect grasps', img)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
||||
|
31
baselines/grasping/GSNet/graspnetAPI/examples/exam_eval.py
Executable file
31
baselines/grasping/GSNet/graspnetAPI/examples/exam_eval.py
Executable file
@@ -0,0 +1,31 @@
|
||||
__author__ = 'mhgou'
__version__ = '1.0'

# GraspNetAPI example: evaluate dumped grasp predictions for one scene.
# Edit graspnet_root and dump_folder below before running.

import numpy as np
from graspnetAPI import GraspNetEval

####################################################################
graspnet_root = '/home/gmh/graspnet' # ROOT PATH FOR GRASPNET
dump_folder = '/home/gmh/git/rgbd_graspnet/dump_affordance_iounan/' # ROOT PATH FOR DUMP
####################################################################

sceneId = 121
camera = 'kinect'
# One evaluator per camera; ge_r is only used by the commented-out runs below.
ge_k = GraspNetEval(root=graspnet_root, camera='kinect', split='test')
ge_r = GraspNetEval(root=graspnet_root, camera='realsense', split='test')

# Evaluate a single scene and report the mean accuracy.
print('Evaluating scene:{}, camera:{}'.format(sceneId, camera))
acc = ge_k.eval_scene(scene_id=sceneId, dump_folder=dump_folder)
np_acc = np.array(acc)
print('mean accuracy:{}'.format(np.mean(np_acc)))

# # eval all data for kinect
# print('Evaluating kinect')
# res, ap = ge_k.eval_all(dump_folder, proc = 24)

# # eval 'seen' split for realsense
# print('Evaluating realsense')
# res, ap = ge_r.eval_seen(dump_folder, proc = 24)
|
@@ -0,0 +1,52 @@
|
||||
__author__ = 'mhgou'
|
||||
__version__ = '1.0'
|
||||
|
||||
# GraspNetAPI example for generating rectangle grasp from 6d grasp.
|
||||
# change the graspnet_root path and NUM_PROCESS
|
||||
|
||||
from graspnetAPI import GraspNet
|
||||
from graspnetAPI.graspnet import TOTAL_SCENE_NUM
|
||||
import os
|
||||
import numpy as np
|
||||
from tqdm import tqdm
|
||||
|
||||
######################################################################
|
||||
NUM_PROCESS = 24 # change NUM_PROCESS to the number of cores to use. #
|
||||
######################################################################
|
||||
|
||||
def generate_scene_rectangle_grasp(sceneId, dump_folder, camera):
    """Convert the 6d grasp labels of one scene/camera to rectangle grasps.

    Writes one ``<annId>.npy`` file per annotation (256 in total) under
    ``<dump_folder>/scene_<sceneId>/<camera>/``.

    :param sceneId: int, index of the scene to process.
    :param dump_folder: str, root folder the rectangle labels are written to.
    :param camera: str, 'kinect' or 'realsense'.
    """
    # NOTE(review): relies on the module-level graspnet_root defined in __main__.
    g = GraspNet(graspnet_root, camera=camera, split='all')
    objIds = g.getObjIds(sceneIds = sceneId)
    grasp_labels = g.loadGraspLabels(objIds)
    collision_labels = g.loadCollisionLabels(sceneIds = sceneId)
    camera_dir = os.path.join(dump_folder, 'scene_%04d' % sceneId, camera)
    # makedirs(..., exist_ok=True) replaces the original check-then-mkdir,
    # which raced (FileExistsError) when several pool workers created the
    # same scene directory concurrently (both cameras share scene_dir).
    os.makedirs(camera_dir, exist_ok=True)
    for annId in tqdm(range(256), 'Scene:{}, Camera:{}'.format(sceneId, camera)):
        # fric_coef_thresh=1.0 keeps every labelled grasp for conversion.
        _6d_grasp = g.loadGrasp(sceneId = sceneId, annId = annId, format = '6d', camera = camera, grasp_labels = grasp_labels, collision_labels = collision_labels, fric_coef_thresh = 1.0)
        rect_grasp_group = _6d_grasp.to_rect_grasp_group(camera)
        rect_grasp_group.save_npy(os.path.join(camera_dir, '%04d.npy' % annId))
|
||||
|
||||
if __name__ == '__main__':
    ####################################################################
    graspnet_root = '/home/minghao/graspnet' # ROOT PATH FOR GRASPNET ##
    ####################################################################

    dump_folder = 'rect_labels'
    if not os.path.exists(dump_folder):
        os.mkdir(dump_folder)

    if NUM_PROCESS > 1:
        from multiprocessing import Pool
        # Size the pool from NUM_PROCESS; the original hard-coded Pool(24),
        # ignoring the configuration constant above.
        pool = Pool(NUM_PROCESS)
        for camera in ['realsense', 'kinect']:
            for sceneId in range(120):
                pool.apply_async(func = generate_scene_rectangle_grasp, args = (sceneId, dump_folder, camera))
        pool.close()
        pool.join()
    else:
        # Serial fallback. The original called the function once with
        # sceneId/camera undefined in this branch (NameError); iterate
        # over the same camera/scene grid as the parallel path instead.
        for camera in ['realsense', 'kinect']:
            for sceneId in range(120):
                generate_scene_rectangle_grasp(sceneId, dump_folder, camera)
|
95
baselines/grasping/GSNet/graspnetAPI/examples/exam_grasp_format.py
Executable file
95
baselines/grasping/GSNet/graspnetAPI/examples/exam_grasp_format.py
Executable file
@@ -0,0 +1,95 @@
|
||||
__author__ = 'mhgou'
__version__ = '1.0'

from graspnetAPI import GraspNet, Grasp, GraspGroup
import open3d as o3d
import cv2
import numpy as np

# GraspNetAPI example: Grasp / GraspGroup / RectGraspGroup basics.
# Edit graspnet_root below to point at your local copy of the dataset.

####################################################################
graspnet_root = '/disk1/graspnet' # ROOT PATH FOR GRASPNET
####################################################################

sceneId = 1
annId = 3

# Build a GraspNet instance on the kinect/train split.
g = GraspNet(graspnet_root, camera='kinect', split='train')

# 6d grasps of scene 1, annotation 3, friction threshold 0.2.
_6d_grasp = g.loadGrasp(sceneId=sceneId, annId=annId, format='6d', camera='kinect', fric_coef_thresh=0.2)
print('6d grasp:\n{}'.format(_6d_grasp))

# _6d_grasp is a GraspGroup instance defined in grasp.py.
print('_6d_grasp:\n{}'.format(_6d_grasp))

# Indexing with an int yields a single Grasp ...
grasp = _6d_grasp[0]
print('_6d_grasp[0](grasp):\n{}'.format(grasp))

# ... while slices and index lists yield GraspGroups.
print('_6d_grasp[0:2]:\n{}'.format(_6d_grasp[0:2]))
print('_6d_grasp[[0,1]]:\n{}'.format(_6d_grasp[[0,1]]))

# Grasp properties can be read and assigned.
print('grasp.translation={}'.format(grasp.translation))
grasp.translation = np.array([1.0, 2.0, 3.0])
print('After modification, grasp.translation={}'.format(grasp.translation))
print('grasp.rotation_matrix={}'.format(grasp.rotation_matrix))
grasp.rotation_matrix = np.eye(3).reshape((9))
print('After modification, grasp.rotation_matrix={}'.format(grasp.rotation_matrix))
print('grasp.width={}, height:{}, depth:{}, score:{}'.format(grasp.width, grasp.height, grasp.depth, grasp.score))
print('More operation on Grasp and GraspGroup can be seen in the API document')


# Rectangle grasps of the same annotation, realsense camera.
rect_grasp_group = g.loadGrasp(sceneId=sceneId, annId=annId, format='rect', camera='realsense', fric_coef_thresh=0.2)
print('rectangle grasp group:\n{}'.format(rect_grasp_group))

# rect_grasp_group is a RectGraspGroup instance defined in grasp.py.
print('rect_grasp_group:\n{}'.format(rect_grasp_group))

# Indexing and slicing mirror GraspGroup.
rect_grasp = rect_grasp_group[0]
print('rect_grasp_group[0](rect_grasp):\n{}'.format(rect_grasp))

print('rect_grasp_group[0:2]:\n{}'.format(rect_grasp_group[0:2]))
print('rect_grasp_group[[0,1]]:\n{}'.format(rect_grasp_group[[0,1]]))

# RectGrasp properties.
print('rect_grasp.center_point:{}, open_point:{}, height:{}, score:{}'.format(rect_grasp.center_point, rect_grasp.open_point, rect_grasp.height, rect_grasp.score))

# Transforming a single Grasp.
g = Grasp() # simple Grasp
frame = o3d.geometry.TriangleMesh.create_coordinate_frame(0.1)

# Grasp before transformation.
o3d.visualization.draw_geometries([g.to_open3d_geometry(), frame])
g.translation = np.array((0,0,0.01))

# A rigid transform: translate and permute the axes.
T = np.eye(4)
T[:3,3] = np.array((0.01, 0.02, 0.03))
T[:3,:3] = np.array([[0,0,1.0],[1,0,0],[0,1,0]])
g.transform(T)

# Grasp after transformation.
o3d.visualization.draw_geometries([g.to_open3d_geometry(), frame])

# Transforming a whole GraspGroup at once.
g1 = Grasp()
gg = GraspGroup()
gg.add(g)
gg.add(g1)

# GraspGroup before transformation.
o3d.visualization.draw_geometries([*gg.to_open3d_geometry_list(), frame])

gg.transform(T)

# GraspGroup after transformation.
o3d.visualization.draw_geometries([*gg.to_open3d_geometry_list(), frame])
|
40
baselines/grasping/GSNet/graspnetAPI/examples/exam_loadGrasp.py
Executable file
40
baselines/grasping/GSNet/graspnetAPI/examples/exam_loadGrasp.py
Executable file
@@ -0,0 +1,40 @@
|
||||
__author__ = 'mhgou'
__version__ = '1.0'

from graspnetAPI import GraspNet
import open3d as o3d
import cv2

# GraspNetAPI example: load and visualize grasp labels of one scene.
# Edit graspnet_root below to point at your local copy of the dataset.

####################################################################
graspnet_root = '/mnt/h/AI/Datasets/graspnet-1billion/test_seen' # ROOT PATH FOR GRASPNET
####################################################################

sceneId = 100
annId = 3

# Build a GraspNet instance.
g = GraspNet(graspnet_root, camera='kinect', split='test_seen')

# 6d grasps with friction threshold 0.2 (kinect camera).
_6d_grasp = g.loadGrasp(sceneId=sceneId, annId=annId, format='6d', camera='kinect', fric_coef_thresh=0.2)
print('6d grasp:\n{}'.format(_6d_grasp))

# Show 20 random 6d grasps on the scene point cloud with open3d.
geometries = [g.loadScenePointCloud(sceneId=sceneId, annId=annId, camera='kinect')]
geometries += _6d_grasp.random_sample(numGrasp=20).to_open3d_geometry_list()
o3d.visualization.draw_geometries(geometries)

# Rectangle grasps with friction threshold 0.2 (realsense camera).
rect_grasp = g.loadGrasp(sceneId=sceneId, annId=annId, format='rect', camera='realsense', fric_coef_thresh=0.2)
print('rectangle grasp:\n{}'.format(rect_grasp))

# Draw 20 random rectangle grasps on the BGR image with opencv.
bgr = g.loadBGR(sceneId=sceneId, annId=annId, camera='realsense')
img = rect_grasp.to_opencv_image(bgr, numGrasp=20)
cv2.imshow('rectangle grasps', img)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
38
baselines/grasping/GSNet/graspnetAPI/examples/exam_nms.py
Executable file
38
baselines/grasping/GSNet/graspnetAPI/examples/exam_nms.py
Executable file
@@ -0,0 +1,38 @@
|
||||
__author__ = 'mhgou'
__version__ = '1.0'

# GraspNetAPI example: non-maximum suppression on a GraspGroup.
# Edit graspnet_root below to point at your local copy of the dataset.

####################################################################
graspnet_root = '/home/gmh/graspnet' # ROOT PATH FOR GRASPNET
####################################################################

sceneId = 1
annId = 3

from graspnetAPI import GraspNet
import open3d as o3d
import cv2

# Build a GraspNet instance.
g = GraspNet(graspnet_root, camera='kinect', split='train')

# 6d grasps with friction threshold 0.2.
_6d_grasp = g.loadGrasp(sceneId=sceneId, annId=annId, format='6d', camera='kinect', fric_coef_thresh=0.2)
print('6d grasp:\n{}'.format(_6d_grasp))

# Visualize 1000 random grasps before NMS.
geometries = [g.loadScenePointCloud(sceneId=sceneId, annId=annId, camera='kinect')]
geometries += _6d_grasp.random_sample(numGrasp=1000).to_open3d_geometry_list()
o3d.visualization.draw_geometries(geometries)

# Suppress near-duplicates: 0.1 m translation, 30 degree rotation thresholds.
nms_grasp = _6d_grasp.nms(translation_thresh=0.1, rotation_thresh=30 / 180.0 * 3.1416)
print('grasp after nms:\n{}'.format(nms_grasp))

# Visualize the surviving grasps.
geometries = [g.loadScenePointCloud(sceneId=sceneId, annId=annId, camera='kinect')]
geometries += nms_grasp.to_open3d_geometry_list()
o3d.visualization.draw_geometries(geometries)
|
26
baselines/grasping/GSNet/graspnetAPI/examples/exam_vis.py
Executable file
26
baselines/grasping/GSNet/graspnetAPI/examples/exam_vis.py
Executable file
@@ -0,0 +1,26 @@
|
||||
__author__ = 'mhgou'
__version__ = '1.0'

# GraspNetAPI example: built-in visualization helpers.
# Edit graspnet_root below to point at your local copy of the dataset.

####################################################################
graspnet_root = '/mnt/h/AI/Datasets/graspnet-1billion/test_seen' # ROOT PATH FOR GRASPNET
####################################################################

from graspnetAPI import GraspNet

# Build a GraspNet instance.
g = GraspNet(graspnet_root, camera='kinect', split='test_seen')

# Grasps on a single object model.
g.showObjGrasp(objIds=0, show=True)

# 6d object poses of a scene.
g.show6DPose(sceneIds=0, show=True)

# Rectangle grasps of a scene.
g.showSceneGrasp(sceneId=0, camera='realsense', annId=0, format='rect', numGrasp=20)

# 6d grasps of a scene (you may need to wait several minutes).
g.showSceneGrasp(sceneId=4, camera='kinect', annId=2, format='6d')
|
Reference in New Issue
Block a user