Datasets:
pierreguillou
committed on
Commit
•
5d79ee8
1
Parent(s):
67a59e0
Update DocLayNet-large.py
Browse files- DocLayNet-large.py +4 -4
DocLayNet-large.py
CHANGED
@@ -176,7 +176,7 @@ class DocLayNet(datasets.GeneratorBasedBuilder):
|
|
176 |
"filepath_1": os.path.join(archive_path["part_dataset_1"], "part_dataset_1/train/"),
|
177 |
"filepath_2": os.path.join(archive_path["part_dataset_2"], "part_dataset_2/train/"),
|
178 |
"filepath_3": os.path.join(archive_path["part_dataset_3"], "part_dataset_3/train/"),
|
179 |
-
|
180 |
},
|
181 |
),
|
182 |
datasets.SplitGenerator(
|
@@ -187,7 +187,7 @@ class DocLayNet(datasets.GeneratorBasedBuilder):
|
|
187 |
"filepath_1": os.path.join(archive_path["part_dataset_1"], "part_dataset_1/val/"),
|
188 |
"filepath_2": os.path.join(archive_path["part_dataset_2"], "part_dataset_2/val/"),
|
189 |
"filepath_3": os.path.join(archive_path["part_dataset_3"], "part_dataset_3/val/"),
|
190 |
-
|
191 |
},
|
192 |
),
|
193 |
datasets.SplitGenerator(
|
@@ -198,13 +198,13 @@ class DocLayNet(datasets.GeneratorBasedBuilder):
|
|
198 |
"filepath_1": os.path.join(archive_path["part_dataset_1"], "part_dataset_1/test/"),
|
199 |
"filepath_2": os.path.join(archive_path["part_dataset_2"], "part_dataset_2/test/"),
|
200 |
"filepath_3": os.path.join(archive_path["part_dataset_3"], "part_dataset_3/test/"),
|
201 |
-
|
202 |
},
|
203 |
),
|
204 |
]
|
205 |
|
206 |
|
207 |
-
def _generate_examples(self, filepath_0, filepath_1, filepath_2, filepath_3):
|
208 |
filepath = (filepath_0, filepath_1, filepath_2, filepath_3)
|
209 |
logger.info("⏳ Generating examples from = %s", filepath)
|
210 |
ann_dirs = [os.path.join(filepath_0, "annotations"), os.path.join(filepath_1, "annotations"), os.path.join(filepath_2, "annotations"), os.path.join(filepath_3, "annotations")]
|
|
|
176 |
"filepath_1": os.path.join(archive_path["part_dataset_1"], "part_dataset_1/train/"),
|
177 |
"filepath_2": os.path.join(archive_path["part_dataset_2"], "part_dataset_2/train/"),
|
178 |
"filepath_3": os.path.join(archive_path["part_dataset_3"], "part_dataset_3/train/"),
|
179 |
+
"split": "train",
|
180 |
},
|
181 |
),
|
182 |
datasets.SplitGenerator(
|
|
|
187 |
"filepath_1": os.path.join(archive_path["part_dataset_1"], "part_dataset_1/val/"),
|
188 |
"filepath_2": os.path.join(archive_path["part_dataset_2"], "part_dataset_2/val/"),
|
189 |
"filepath_3": os.path.join(archive_path["part_dataset_3"], "part_dataset_3/val/"),
|
190 |
+
"split": "validation",
|
191 |
},
|
192 |
),
|
193 |
datasets.SplitGenerator(
|
|
|
198 |
"filepath_1": os.path.join(archive_path["part_dataset_1"], "part_dataset_1/test/"),
|
199 |
"filepath_2": os.path.join(archive_path["part_dataset_2"], "part_dataset_2/test/"),
|
200 |
"filepath_3": os.path.join(archive_path["part_dataset_3"], "part_dataset_3/test/"),
|
201 |
+
"split": "test"
|
202 |
},
|
203 |
),
|
204 |
]
|
205 |
|
206 |
|
207 |
+
def _generate_examples(self, filepath_0, filepath_1, filepath_2, filepath_3, split):
|
208 |
filepath = (filepath_0, filepath_1, filepath_2, filepath_3)
|
209 |
logger.info("⏳ Generating examples from = %s", filepath)
|
210 |
ann_dirs = [os.path.join(filepath_0, "annotations"), os.path.join(filepath_1, "annotations"), os.path.join(filepath_2, "annotations"), os.path.join(filepath_3, "annotations")]
|