lisawen committed
Commit e026edc
1 Parent(s): 1d26131

Update soybean_dataset.py

Files changed (1)
  1. soybean_dataset.py +24 -27
soybean_dataset.py CHANGED
@@ -250,55 +250,55 @@ class SoybeanDataset(datasets.GeneratorBasedBuilder):
             logging.error(f"Error downloading {image_url}: {e}")
             return None
 
-    def download_images_concurrently(self, image_urls):
+    def download_images_in_batch(self, image_urls):
         images = {}
-        with ThreadPoolExecutor(max_workers=5) as executor:
+        with ThreadPoolExecutor() as executor:
             future_to_url = {executor.submit(self.download_image, url): url for url in image_urls}
             for future in as_completed(future_to_url):
                 url = future_to_url[future]
                 try:
                     image = future.result()
-                    images[url] = image
-                except Exception as exc:
-                    logging.error(f'{url} generated an exception: {exc}')
+                    if image:
+                        images[url] = image
+                except Exception as e:
+                    logging.error(f"Error processing {url}: {e}")
         return images
 
     def _generate_examples(self, filepath):
-        logging.info("generating examples from = %s", filepath)
+        logging.info(f"Generating examples from = {filepath}")
 
-        image_urls = []
         with open(filepath, encoding="utf-8") as f:
             data = csv.DictReader(f)
-            for row in data:
-                image_urls.append(row['original_image'])
-                image_urls.append(row['segmentation_image'])
+            image_urls = [url for row in data for url in (row['original_image'], row['segmentation_image'])]
+            # Remove duplicates and None values
+            image_urls = list(set(filter(None, image_urls)))
 
-        # Download all images concurrently
-        downloaded_images = self.download_images_concurrently(set(image_urls))  # Use set to avoid duplicate downloads
+            # Download images in batch
+            images = self.download_images_in_batch(image_urls)
 
-        with open(filepath, encoding="utf-8") as f:
+            # Reset file pointer to the beginning to iterate again
+            f.seek(0)
             data = csv.DictReader(f)
+
             for row in data:
                 unique_id = row['unique_id']
-                original_image_path = row['original_image']
-                segmentation_image_path = row['segmentation_image']
-                sets = row['sets']
+                original_image_url = row['original_image']
+                segmentation_image_url = row['segmentation_image']
 
-                original_image = downloaded_images.get(original_image_path)
-                segmentation_image = downloaded_images.get(segmentation_image_path)
+                original_image = images.get(original_image_url)
+                segmentation_image = images.get(segmentation_image_url)
 
-                if original_image is None or segmentation_image is None:
-                    logging.error(f"Missing image for {unique_id}")
+                if not original_image or not segmentation_image:
+                    logging.warning(f"Missing image for {unique_id}, skipping example.")
                     continue
 
                 yield unique_id, {
                     "unique_id": unique_id,
-                    "sets": sets,
+                    "sets": row['sets'],
                     "original_image": original_image,
                     "segmentation_image": segmentation_image,
-                }
-
-
+                    # ... add other features if necessary
+                }
 
 
 
@@ -310,7 +310,4 @@ class SoybeanDataset(datasets.GeneratorBasedBuilder):
 
 
 
-
-
-
 
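The context lines at the top of the hunk (the logging.error call and return None) belong to download_image, which this commit leaves untouched and the diff never shows in full. A minimal sketch of such a helper, consistent with that visible error branch; the use of requests and PIL here is an assumption for illustration, not something the diff confirms:

import logging
from io import BytesIO

import requests        # assumed dependency; not shown in the diff
from PIL import Image  # assumed dependency; not shown in the diff

def download_image(image_url):
    # Hypothetical stand-in for the SoybeanDataset.download_image method:
    # fetch one image over HTTP and decode it, returning None on failure
    # so download_images_in_batch can drop it via its `if image:` guard.
    try:
        response = requests.get(image_url, timeout=30)
        response.raise_for_status()
        return Image.open(BytesIO(response.content))
    except Exception as e:
        logging.error(f"Error downloading {image_url}: {e}")
        return None

The revised _generate_examples also reads the CSV twice through a single file handle. A self-contained demonstration of the f.seek(0) reset pattern it relies on: a fresh csv.DictReader consumes the header row again after the rewind, so the second pass yields the same rows as the first.

import csv
import io

buf = io.StringIO(
    "unique_id,original_image,segmentation_image,sets\n"
    "001,a.jpg,a_seg.jpg,train\n"
)
first = [row["unique_id"] for row in csv.DictReader(buf)]   # exhausts the handle
buf.seek(0)                                                 # rewind to the header
second = [row["unique_id"] for row in csv.DictReader(buf)]  # header consumed again
assert first == second == ["001"]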