"""Merge per-episode HDF5 demonstration files, together with their
per-episode metainfo JSON files, into one merged HDF5 file per task plus
a single aggregate metainfo.json."""

import argparse
import json
import os

import h5py
import tqdm


def sort_key_hdf5(name):
    """Return the trailing episode index of an HDF5 name like 'demo_3.hdf5'."""
    number = int(name.split('_')[-1].split('.')[0])
    return number


def sort_key_metainfo(name):
    """Return the episode index of a name like 'demo_3_metainfo.json'."""
    number = int(name.split('_')[-2].split('.')[0])
    return number


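# For example (filenames invented for illustration), the numeric key sorts
# by episode index rather than lexicographically:
#
#   sorted(['demo_10.hdf5', 'demo_2.hdf5'], key=sort_key_hdf5)
#   # -> ['demo_2.hdf5', 'demo_10.hdf5']

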
def recursive_merge(dest, src):
    """Merge the nested dict `src` into `dest` in place.

    Sub-dicts present in both are merged recursively; any other key from
    `src` overwrites (or creates) the corresponding entry in `dest`.
    """
    for key, value in src.items():
        if key in dest and isinstance(dest[key], dict) and isinstance(value, dict):
            recursive_merge(dest[key], value)
        else:
            dest[key] = value


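# A quick sketch of how recursive_merge behaves (dict contents invented):
#
#   dest = {"task1": {"demo_0": {"length": 10}}}
#   src  = {"task1": {"demo_1": {"length": 12}}}
#   recursive_merge(dest, src)
#   # dest == {"task1": {"demo_0": {"length": 10}, "demo_1": {"length": 12}}}

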
def recursive_copy(src, dest):
    """Deep-copy the HDF5 tree under group `src` into group `dest`.

    Groups are recreated and recursed into, datasets are copied with h5py's
    built-in Group.copy, and the attributes on `src` itself are mirrored
    onto `dest` (child-group attributes are handled by the recursion).
    """
    for key in src.keys():
        if isinstance(src[key], h5py.Group):
            new_grp = dest.create_group(key)
            recursive_copy(src[key], new_grp)
        elif isinstance(src[key], h5py.Dataset):
            src.copy(key, dest)
    # Copy the group's own attributes after its children.
    for attr_key in src.attrs:
        dest.attrs[attr_key] = src.attrs[attr_key]


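# Hypothetical standalone usage of recursive_copy (file names are made up):
#
#   with h5py.File("episode.hdf5", "r") as fin, \
#        h5py.File("copy.hdf5", "w") as fout:
#       recursive_copy(fin["data"], fout.create_group("data"))

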
def main(args):
    # Write the aggregate metainfo once up front so the output path is
    # created even if the task directories turn out to be empty.
    metainfo_json_dict = {}
    metainfo_json_out_path = os.path.join(args.out_dir, "metainfo.json")
    with open(metainfo_json_out_path, "w") as f:
        json.dump(metainfo_json_dict, f)

    task_suite = ['task1', 'task2', 'task3']
    num_tasks_in_suite = len(task_suite)

    for task_id in tqdm.tqdm(range(num_tasks_in_suite)):
        task = task_suite[task_id]
        data_dir = os.path.join(args.in_dir, task)
        data_files = os.listdir(data_dir)

        # Episode files and their metainfo files, sorted by episode index
        # so the two lists line up pairwise.
        hdf5_files = [_file for _file in data_files if _file.endswith('.hdf5')]
        hdf5_files = sorted(hdf5_files, key=sort_key_hdf5)
        meta_files = [_file for _file in data_files if _file.endswith('_metainfo.json')]
        meta_files = sorted(meta_files, key=sort_key_metainfo)

        # One merged output file per task, with all episodes under 'data/'.
        new_data_path = os.path.join(args.out_dir, f"{task}_demo.hdf5")
        new_data_file = h5py.File(new_data_path, "w")
        grp = new_data_file.create_group("data")

        for idx, hdf5_name in tqdm.tqdm(enumerate(hdf5_files), total=len(hdf5_files)):
            hdf5_path = os.path.join(data_dir, hdf5_name)
            with h5py.File(hdf5_path, "r") as traj_data_file:
                traj_data = traj_data_file["data"]

                # Copy every episode group into the merged file. Episode keys
                # (e.g. 'demo_0') are assumed to be unique across files;
                # create_group raises on duplicates.
                for ep_key in traj_data.keys():
                    src_grp = traj_data[ep_key]
                    dest_grp = grp.create_group(ep_key)
                    recursive_copy(src_grp, dest_grp)

            # Fold this episode's metainfo into the aggregate dict.
            meta_name = os.path.join(data_dir, meta_files[idx])
            with open(meta_name, "r") as f:
                meta_data = json.load(f)
            recursive_merge(metainfo_json_dict, meta_data)

            # Rewrite the aggregate metainfo after every file so partial
            # progress survives an interruption.
            with open(metainfo_json_out_path, "w") as f:
                json.dump(metainfo_json_dict, f, indent=2)

        new_data_file.close()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("--in_dir", default='./')
    parser.add_argument("--out_dir", default='./')
    args = parser.parse_args()

    main(args)
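
# Example invocation, assuming inputs are laid out as
# <in_dir>/task1/demo_0.hdf5, <in_dir>/task1/demo_0_metainfo.json, ...
# (the script filename below is illustrative):
#
#   python merge_demos.py --in_dir ./raw_demos --out_dir ./merged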