upd
@@ -48,14 +48,17 @@ class SeqReconstructionDataset(BaseDataset):
             for line in f:
                 scene_name = line.strip()
                 scene_name_list.append(scene_name)
-        return scene_name_list
+        return scene_name_list
 
     def get_scene_name_list(self):
         return self.scene_name_list
 
+
     def get_datalist(self):
-        datalist = []
-        for scene_name in self.scene_name_list:
+        datalist = []
+        total = len(self.scene_name_list)
+        for idx, scene_name in enumerate(self.scene_name_list):
+            print(f"processing {scene_name} ({idx}/{total})")
             seq_num = DataLoadUtil.get_label_num(self.root_dir, scene_name)
             scene_max_coverage_rate = 0
             max_coverage_rate_list = []
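Note on the get_datalist change above: the loop now logs per-scene progress. A minimal, self-contained sketch of the same pattern, where scene_name_list stands in for self.scene_name_list and the coverage-rate bookkeeping is elided:

scene_name_list = ["scene_0001", "scene_0002", "scene_0003"]
total = len(scene_name_list)
for idx, scene_name in enumerate(scene_name_list):  # idx counts from 0
    print(f"processing {scene_name} ({idx}/{total})")

Since enumerate starts at 0, the first scene logs as 0/3 and the last as 2/3; enumerate(scene_name_list, start=1) would give 1-based progress instead.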
@@ -178,23 +181,41 @@ class SeqReconstructionDataset(BaseDataset):
 # -------------- Debug ---------------- #
 if __name__ == "__main__":
     import torch
+    from tqdm import tqdm
+    import pickle
+    import os
 
     seed = 0
     torch.manual_seed(seed)
     np.random.seed(seed)
 
     config = {
         "root_dir": "/data/hofee/data/new_full_data",
         "source": "seq_reconstruction_dataset",
-        "split_file": "/data/hofee/data/sample.txt",
+        "split_file": "/data/hofee/data/new_full_data_list/OmniObject3d_test.txt",
         "load_from_preprocess": True,
         "ratio": 0.5,
         "batch_size": 2,
         "filter_degree": 75,
         "num_workers": 0,
-        "pts_num": 4096,
-        "type": namespace.Mode.TRAIN,
+        "pts_num": 8192,
+        "type": namespace.Mode.TEST,
     }
-    ds = SeqReconstructionDataset(config)
-    print(len(ds))
-    print(ds.__getitem__(10))
+    output_dir = "/data/hofee/trash_can/output_inference_test"
+    new_output_dir = "/data/hofee/inference_test"
+    os.makedirs(output_dir, exist_ok=True)
+    os.makedirs(new_output_dir, exist_ok=True)
+
+    ds = SeqReconstructionDataset(config)
+    for i in tqdm(range(len(ds)), desc="processing dataset"):
+        output_path = os.path.join(output_dir, f"item_{i}.pkl")
+        if os.path.exists(output_path):
+            item = pickle.load(open(output_path, "rb"))
+        else:
+            item = ds.__getitem__(i)
+        for key, value in item.items():
+            if isinstance(value, np.ndarray):
+                item[key] = value.tolist()
+        new_output_path = os.path.join(new_output_dir, f"item_{i}.pkl")
+        with open(new_output_path, "wb") as f:
+            pickle.dump(item, f)
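Note on the new debug block: it follows a cache-or-compute pattern, reusing a previously pickled item from output_dir when present and regenerating it otherwise, then replacing numpy arrays with plain lists before re-pickling into new_output_dir. A minimal, self-contained sketch under assumed placeholder paths (./cache, ./export) and a dummy dataset rather than the repo's real SeqReconstructionDataset:

import os
import pickle

import numpy as np

cache_dir = "./cache"    # placeholder for output_dir
export_dir = "./export"  # placeholder for new_output_dir
os.makedirs(cache_dir, exist_ok=True)
os.makedirs(export_dir, exist_ok=True)

# dummy items; the real script pulls these from SeqReconstructionDataset
dataset = [{"pts": np.zeros((4, 3))} for _ in range(3)]

for i, fresh_item in enumerate(dataset):
    cache_path = os.path.join(cache_dir, f"item_{i}.pkl")
    if os.path.exists(cache_path):
        # a with-block (unlike the bare open() in the diff) guarantees
        # the cached file handle is closed promptly
        with open(cache_path, "rb") as f:
            item = pickle.load(f)
    else:
        item = fresh_item
    # make the item portable by converting numpy arrays to lists
    for key, value in item.items():
        if isinstance(value, np.ndarray):
            item[key] = value.tolist()
    with open(os.path.join(export_dir, f"item_{i}.pkl"), "wb") as f:
        pickle.dump(item, f)

Two observations on the script as committed: freshly computed items are never written back to output_dir, so the cache only pays off if an earlier run populated it, and pickle.load(open(output_path, "rb")) leaves the file handle open until garbage collection, which the with-block in the sketch avoids.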