eztao committed
Commit 1ff4ba6 · verified · 1 Parent(s): e3e0441

Update RefRef_test.py

Files changed (1): RefRef_test.py +38 -32
RefRef_test.py CHANGED
@@ -51,40 +51,46 @@ class RefRef_test(datasets.GeneratorBasedBuilder):
         )
 
     def _split_generators(self, dl_manager):
+        # Automatically find all JSON files matching the split patterns
+        data_files = {
+            "train": "*/transforms_train.json",
+            "validation": "*/transforms_val.json",
+            "test": "*/transforms_test.json",
+        }
+
+        downloaded_files = dl_manager.download_and_extract({
+            split: dl_manager.glob(os.path.join(self.config.data_dir, pattern))
+            for split, pattern in data_files.items()
+        })
+
         return [
             datasets.SplitGenerator(
-                name=datasets.Split.TRAIN,
-                gen_kwargs={"split": "train"}
-            ),
-            datasets.SplitGenerator(
-                name=datasets.Split.VALIDATION,
-                gen_kwargs={"split": "val"}
-            ),
-            datasets.SplitGenerator(
-                name=datasets.Split.TEST,
-                gen_kwargs={"split": "test"}
-            ),
+                name=split,
+                gen_kwargs={
+                    "filepaths": downloaded_files[split],
+                    "split": split
+                },
+            ) for split in [datasets.Split.TRAIN, datasets.Split.VALIDATION, datasets.Split.TEST]
         ]
 
-    def _generate_examples(self, split):
-        base_path = os.path.join("RefRef_test/") # Update this path
-        print(os.getcwd())
-        # Assuming your directory structure has scene folders (ball, ampoule)
-        # with transforms_{split}.json files
-        for scene in ["ball", "ampoule"]: # Add all your scene names here
-            json_path = os.path.join(base_path, scene, f"transforms_{split}.json")
-
-            with open(json_path, "r") as f:
+    def _generate_examples(self, filepaths, split):
+        # Iterate through all JSON files for this split
+        for scene_idx, filepath in enumerate(filepaths):
+            with open(filepath, "r", encoding="utf-8") as f:
                 data = json.load(f)
-
-            for idx, frame in enumerate(data["frames"]):
-                # Construct full paths relative to JSON file location
-                base_dir = os.path.dirname(json_path)
-
-                yield f"{scene}_{split}_{idx}", {
-                    "image": os.path.join(base_dir, frame["file_path"]),
-                    "depth": os.path.join(base_dir, frame["depth_file_path"]),
-                    "mask": os.path.join(base_dir, frame["mask_file_path"]),
-                    "transform_matrix": frame["transform_matrix"],
-                    "rotation": frame.get("rotation", 0.0)
-                }
+            scene_name = os.path.basename(os.path.dirname(filepath))
+
+            for frame_idx, frame in enumerate(data["frames"]):
+                # Build absolute paths relative to JSON file location
+                base_dir = os.path.dirname(filepath)
+
+                # Generate unique key using scene and frame indices
+                unique_key = f"{scene_name}_{split}_{scene_idx}_{frame_idx}"
+
+                yield unique_key, {
+                    "image": os.path.join(base_dir, frame["file_path"]),
+                    "depth": os.path.join(base_dir, frame["depth_file_path"]),
+                    "mask": os.path.join(base_dir, frame["mask_file_path"]),
+                    "transform_matrix": frame["transform_matrix"],
+                    "rotation": frame.get("rotation", 0.0)
+                }
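
For reference, a minimal sketch of how the updated loader might be exercised locally. It assumes the script sits at ./RefRef_test.py, the scene folders (each containing transforms_{train,val,test}.json) sit under ./RefRef_test, and the example fields match those yielded by _generate_examples above; the paths and the data_dir value are illustrative, not part of this commit.

import datasets

# Hypothetical local paths; data_dir is forwarded to self.config.data_dir,
# which _split_generators globs for */transforms_{train,val,test}.json.
ds = datasets.load_dataset(
    "./RefRef_test.py",
    data_dir="./RefRef_test",
)

# Each example carries the fields yielded by _generate_examples:
# image, depth, mask, transform_matrix, rotation.
sample = ds["train"][0]
print(sample.keys())
print(sample["transform_matrix"])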