add global_feat
@@ -7,12 +7,11 @@ from PytorchBoot.utils.log_util import Log
 import torch
 import os
 import sys
-sys.path.append(r"/media/hofee/data/project/python/nbv_reconstruction/nbv_reconstruction")
+sys.path.append(r"/home/data/hofee/project/nbv_rec/nbv_reconstruction")
 
 from utils.data_load import DataLoadUtil
 from utils.pose import PoseUtil
 from utils.pts import PtsUtil
 from utils.reconstruction import ReconstructionUtil
 
 
 @stereotype.dataset("nbv_reconstruction_dataset")
@@ -35,7 +34,7 @@ class NBVReconstructionDataset(BaseDataset):
         self.model_dir = config["model_dir"]
         self.filter_degree = config["filter_degree"]
         if self.type == namespace.Mode.TRAIN:
-            scale_ratio = 1
+            scale_ratio = 10
             self.datalist = self.datalist*scale_ratio
         if self.cache:
             expr_root = ConfigManager.get("runner", "experiment", "root_dir")
@@ -56,20 +55,34 @@ class NBVReconstructionDataset(BaseDataset):
     def get_datalist(self):
         datalist = []
         for scene_name in self.scene_name_list:
-            label_path = DataLoadUtil.get_label_path_old(self.root_dir, scene_name)
-            label_data = DataLoadUtil.load_label(label_path)
-            for data_pair in label_data["data_pairs"]:
-                scanned_views = data_pair[0]
-                next_best_view = data_pair[1]
+            seq_num = DataLoadUtil.get_label_num(self.root_dir, scene_name)
+            scene_max_coverage_rate = 0
+            max_coverage_rate_list = []
+
+            for seq_idx in range(seq_num):
+                label_path = DataLoadUtil.get_label_path(self.root_dir, scene_name, seq_idx)
+                label_data = DataLoadUtil.load_label(label_path)
                 max_coverage_rate = label_data["max_coverage_rate"]
-                datalist.append(
-                    {
-                        "scanned_views": scanned_views,
-                        "next_best_view": next_best_view,
-                        "max_coverage_rate": max_coverage_rate,
-                        "scene_name": scene_name,
-                    }
-                )
+                if max_coverage_rate > scene_max_coverage_rate:
+                    scene_max_coverage_rate = max_coverage_rate
+                max_coverage_rate_list.append(max_coverage_rate)
+            mean_coverage_rate = np.mean(max_coverage_rate_list)
+
+            for seq_idx in range(seq_num):
+                label_path = DataLoadUtil.get_label_path(self.root_dir, scene_name, seq_idx)
+                label_data = DataLoadUtil.load_label(label_path)
+                if max_coverage_rate_list[seq_idx] > mean_coverage_rate - 0.1:
+                    for data_pair in label_data["data_pairs"]:
+                        scanned_views = data_pair[0]
+                        next_best_view = data_pair[1]
+                        datalist.append({
+                            "scanned_views": scanned_views,
+                            "next_best_view": next_best_view,
+                            "seq_max_coverage_rate": max_coverage_rate,
+                            "scene_name": scene_name,
+                            "label_idx": seq_idx,
+                            "scene_max_coverage_rate": scene_max_coverage_rate
+                        })
         return datalist
 
     def preprocess_cache(self):
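The rewritten get_datalist() now walks every label sequence of a scene, records each sequence's max_coverage_rate, and only keeps sequences whose rate is above the scene mean minus 0.1. A minimal standalone sketch of that filtering rule (the rates, the margin parameter name, and the helper function are illustrative, not part of the repo):

import numpy as np

def filter_sequences(max_coverage_rates, margin=0.1):
    # Keep indices of sequences whose max coverage rate exceeds (scene mean - margin),
    # mirroring the threshold used in the new get_datalist().
    rates = np.asarray(max_coverage_rates, dtype=np.float32)
    return [i for i, r in enumerate(rates) if r > rates.mean() - margin]

# Example: the weak middle sequence is dropped.
print(filter_sequences([0.92, 0.55, 0.88]))  # [0, 2]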
@@ -102,7 +115,7 @@ class NBVReconstructionDataset(BaseDataset):
         data_item_info = self.datalist[index]
         scanned_views = data_item_info["scanned_views"]
         nbv = data_item_info["next_best_view"]
-        max_coverage_rate = data_item_info["max_coverage_rate"]
+        max_coverage_rate = data_item_info["seq_max_coverage_rate"]
         scene_name = data_item_info["scene_name"]
         scanned_views_pts, scanned_coverages_rate, scanned_n_to_world_pose = [], [], []
 
@@ -151,13 +164,18 @@ class NBVReconstructionDataset(BaseDataset):
         best_to_world_6d = PoseUtil.matrix_to_rotation_6d_numpy(np.asarray(best_frame_to_world[:3,:3]))
         best_to_world_trans = best_frame_to_world[:3,3]
         best_to_world_9d = np.concatenate([best_to_world_6d, best_to_world_trans], axis=0)
+
+        combined_scanned_views_pts = np.concatenate(scanned_views_pts, axis=0)
+        voxel_downsampled_combined_scanned_pts_np = PtsUtil.voxel_downsample_point_cloud(combined_scanned_views_pts, 0.002)
+        random_downsampled_combined_scanned_pts_np = PtsUtil.random_downsample_point_cloud(voxel_downsampled_combined_scanned_pts_np, self.pts_num)
         data_item = {
             "scanned_pts": np.asarray(scanned_views_pts,dtype=np.float32),
+            "combined_scanned_pts": np.asarray(random_downsampled_combined_scanned_pts_np,dtype=np.float32),
             "scanned_coverage_rate": scanned_coverages_rate,
             "scanned_n_to_world_pose_9d": np.asarray(scanned_n_to_world_pose,dtype=np.float32),
             "best_coverage_rate": nbv_coverage_rate,
             "best_to_world_pose_9d": np.asarray(best_to_world_9d,dtype=np.float32),
-            "max_coverage_rate": max_coverage_rate,
+            "seq_max_coverage_rate": max_coverage_rate,
             "scene_name": scene_name
         }
 
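combined_scanned_pts is produced by merging all scanned views, voxel-downsampling the merged cloud with a 0.002 voxel size, and then randomly downsampling to self.pts_num points. A rough numpy-only sketch of that two-stage reduction, assuming PtsUtil's helpers behave roughly like this (this is not the repo's implementation):

import numpy as np

def voxel_then_random_downsample(pts, voxel_size=0.002, target_num=8192):
    # Stage 1: keep one point per occupied voxel of edge length voxel_size.
    voxel_idx = np.floor(pts / voxel_size).astype(np.int64)
    _, keep = np.unique(voxel_idx, axis=0, return_index=True)
    voxel_pts = pts[np.sort(keep)]
    # Stage 2: randomly sample exactly target_num points (with replacement if too few remain).
    replace = voxel_pts.shape[0] < target_num
    choice = np.random.choice(voxel_pts.shape[0], target_num, replace=replace)
    return voxel_pts[choice]

merged = np.random.rand(50000, 3).astype(np.float32)  # stand-in for the concatenated scans
print(voxel_then_random_downsample(merged).shape)      # (8192, 3)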
@@ -195,10 +213,11 @@ class NBVReconstructionDataset(BaseDataset):
             collate_data["scanned_pts"] = [torch.tensor(item['scanned_pts']) for item in batch]
             collate_data["scanned_n_to_world_pose_9d"] = [torch.tensor(item['scanned_n_to_world_pose_9d']) for item in batch]
             collate_data["best_to_world_pose_9d"] = torch.stack([torch.tensor(item['best_to_world_pose_9d']) for item in batch])
+            collate_data["combined_scanned_pts"] = torch.stack([torch.tensor(item['combined_scanned_pts']) for item in batch])
             if "first_frame_to_world" in batch[0]:
                 collate_data["first_frame_to_world"] = torch.stack([torch.tensor(item["first_frame_to_world"]) for item in batch])
             for key in batch[0].keys():
-                if key not in ["scanned_pts", "scanned_n_to_world_pose_9d", "best_to_world_pose_9d", "first_frame_to_world"]:
+                if key not in ["scanned_pts", "scanned_n_to_world_pose_9d", "best_to_world_pose_9d", "first_frame_to_world", "combined_scanned_pts"]:
                     collate_data[key] = [item[key] for item in batch]
             return collate_data
         return collate_fn
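The added collate line stacks combined_scanned_pts into a single batched tensor, while fields with a varying number of scanned views per item stay as Python lists. A small self-contained sketch of that pattern with dummy data (field names copied from the diff, shapes illustrative):

import torch

def collate_fn(batch):
    # Fixed-shape fields are stacked into a batch dimension; variable-length
    # fields (different view counts per item) are kept as plain lists.
    out = {}
    out["combined_scanned_pts"] = torch.stack(
        [torch.as_tensor(item["combined_scanned_pts"]) for item in batch])
    out["scanned_n_to_world_pose_9d"] = [
        torch.as_tensor(item["scanned_n_to_world_pose_9d"]) for item in batch]
    return out

batch = [
    {"combined_scanned_pts": torch.rand(1024, 3), "scanned_n_to_world_pose_9d": torch.rand(3, 9)},
    {"combined_scanned_pts": torch.rand(1024, 3), "scanned_n_to_world_pose_9d": torch.rand(5, 9)},
]
out = collate_fn(batch)
print(out["combined_scanned_pts"].shape)                            # torch.Size([2, 1024, 3])
print([tuple(p.shape) for p in out["scanned_n_to_world_pose_9d"]])  # [(3, 9), (5, 9)]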
@@ -211,11 +230,11 @@ if __name__ == "__main__":
     torch.manual_seed(seed)
     np.random.seed(seed)
     config = {
-        "root_dir": "/media/hofee/repository/nbv_reconstruction_data_512",
-        "model_dir": "/media/hofee/data/data/scaled_object_meshes",
+        "root_dir": "/home/data/hofee/project/nbv_rec/data/nbv_rec_data_512_preproc_npy",
+        "model_dir": "/home/data/hofee/project/nbv_rec/data/scaled_object_meshes",
         "source": "nbv_reconstruction_dataset",
-        "split_file": "/media/hofee/data/project/python/nbv_reconstruction/sample_for_training/OmniObject3d_train.txt",
-        "load_from_preprocess": False,
+        "split_file": "/home/data/hofee/project/nbv_rec/data/OmniObject3d_test.txt",
+        "load_from_preprocess": True,
         "ratio": 0.5,
         "batch_size": 2,
         "filter_degree": 75,