test.py
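"""Inference and evaluation script for HGGQNet on the GraspNet test splits.

Runs the trained model over the chosen test split, dumps per-frame grasp
predictions as .npy files under ``dump_dir``, and scores them with the
GraspNet evaluation API.
"""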
import os
import sys
import numpy as np
import argparse
import torch
from torch.utils.data import DataLoader
# Add sggnet to path
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, ROOT_DIR)
from sggnet.models.hggqnet import HGGQNet
from sggnet.dataset.graspnet_dataset import GraspNetDataset, collate_fn
from sggnet.utils.collision_detector import ModelFreeCollisionDetector
from sggnet.graspnetAPI.grasp import GraspGroup
from sggnet.graspnetAPI.graspnet_eval import GraspNetEval

parser = argparse.ArgumentParser()
parser.add_argument('--dataset_root', required=True, help='Dataset root')
parser.add_argument('--checkpoint_path', required=True, help='Model checkpoint path')
parser.add_argument('--dump_dir', required=True, help='Dump dir to save outputs')
parser.add_argument('--camera', required=True, help='Camera split [realsense/kinect]')
parser.add_argument('--split', required=True, help='Dataset split [seen/similar/novel]')
parser.add_argument('--num_point', type=int, default=4096, help='Point Number [default: 4096]')
parser.add_argument('--batch_size', type=int, default=1, help='Batch Size during inference [default: 1]')
parser.add_argument('--collision_thresh', type=float, default=0.01, help='Collision Threshold [default: 0.01]')
parser.add_argument('--voxel_size', type=float, default=0.01, help='Voxel Size [default: 0.01]')
cfgs = parser.parse_args()
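
# Example invocation (all paths and filenames are placeholders):
#   python test.py --dataset_root <graspnet_root> --checkpoint_path <checkpoint.pth> \
#       --dump_dir <dump_dir> --camera realsense --split seen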
if not os.path.exists(cfgs.dump_dir):
    os.makedirs(cfgs.dump_dir)


def my_worker_init_fn(worker_id):
    # Give every DataLoader worker a distinct, reproducible numpy seed.
    np.random.seed(np.random.get_state()[1][0] + worker_id)


def inference():
    """Run inference on test dataset."""
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # Create dataset
    test_dataset = GraspNetDataset(
        cfgs.dataset_root,
        camera=cfgs.camera,
        split='test_' + cfgs.split,
        num_points=cfgs.num_point,
        remove_outlier=True,
        load_label=False
    )
    test_dataloader = DataLoader(
        test_dataset,
        batch_size=cfgs.batch_size,
        shuffle=False,
        num_workers=4,
        worker_init_fn=my_worker_init_fn,
        collate_fn=collate_fn
    )
    scene_list = test_dataset.scene_list()

    # Load model (map_location keeps a GPU-trained checkpoint loadable on CPU)
    model = HGGQNet()
    model.to(device)
    model.eval()
    checkpoint = torch.load(cfgs.checkpoint_path, map_location=device)
    model.load_state_dict(checkpoint['model_state_dict'])
    print(f"-> loaded checkpoint {cfgs.checkpoint_path} (epoch: {checkpoint.get('epoch', 'unknown')})")

    # Inference loop
    for batch_idx, batch_data in enumerate(test_dataloader):
        grasp_configs_tensor, grasp_config_scores_tensor, graph, graph_indices = batch_data
        grasp_configs_tensor = grasp_configs_tensor.to(device)
        graph = graph.to(device)
        graph_indices = graph_indices.to(device)

        with torch.no_grad():
            predicted_scores, _, _ = model(grasp_configs_tensor, graph, graph_indices)

        # Process and save results
        for i in range(cfgs.batch_size):
            data_idx = batch_idx * cfgs.batch_size + i
            if data_idx >= len(test_dataset):
                break

            # Get grasp configurations and scores
            grasps = grasp_configs_tensor[i].detach().cpu().numpy()
            scores = predicted_scores[i].detach().cpu().numpy()

            # Create GraspGroup
            gg = GraspGroup(grasps)
            gg.scores = scores

            # Collision detection
            if cfgs.collision_thresh > 0:
                cloud, _ = test_dataset.get_data(data_idx, return_raw_cloud=True)
                mfcdetector = ModelFreeCollisionDetector(cloud, voxel_size=cfgs.voxel_size)
                collision_mask = mfcdetector.detect(gg, approach_dist=0.05, collision_thresh=cfgs.collision_thresh)
                gg = gg[~collision_mask]

            # Save grasps as <dump_dir>/<scene>/<camera>/<ann_id>.npy, the layout
            # evaluate() reads back from dump_dir
            save_dir = os.path.join(cfgs.dump_dir, scene_list[data_idx], cfgs.camera)
            save_path = os.path.join(save_dir, str(data_idx % 256).zfill(4) + '.npy')
            if not os.path.exists(save_dir):
                os.makedirs(save_dir)
            gg.save_npy(save_path)

        if (batch_idx + 1) % 100 == 0:
            print(f'Processed {batch_idx + 1} batches')


def evaluate():
    """Evaluate the dumped grasps using the GraspNet API."""
    ge = GraspNetEval(root=cfgs.dataset_root, camera=cfgs.camera, split='test_' + cfgs.split)
    if cfgs.split == 'seen':
        res, ap = ge.eval_seen(cfgs.dump_dir, proc=4)
    elif cfgs.split == 'similar':
        res, ap = ge.eval_similar(cfgs.dump_dir, proc=4)
    else:
        res, ap = ge.eval_novel(cfgs.dump_dir, proc=4)
    save_path = os.path.join(cfgs.dump_dir, 'ap_{}.npy'.format(cfgs.camera))
    np.save(save_path, res)
    print(f"Evaluation results saved to {save_path}")


if __name__ == '__main__':
    print("Running inference...")
    inference()
    print("Running evaluation...")
    evaluate()