Getting an error in load_dataset for the fancyzhx/amazon_polarity dataset

With this call:

dataset = load_dataset("fancyzhx/amazon_polarity")

I am getting the error below.
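For completeness, a minimal, self-contained reproduction (the import line is assumed, since the failing cell only shows the call itself):

from datasets import load_dataset

# Load the Amazon Polarity dataset from the Hugging Face Hub
dataset = load_dataset("fancyzhx/amazon_polarity")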
ValueError Traceback (most recent call last)
Cell In[32], line 5
1 # import the amazon polarity dataset
----> 5 dataset = load_dataset("fancyzhx/amazon_polarity")
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/datasets/load.py:1670, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, **config_kwargs)
1667 ignore_verifications = ignore_verifications or save_infos
1669 # Create a dataset builder
-> 1670 builder_instance = load_dataset_builder(
1671 path=path,
1672 name=name,
1673 data_dir=data_dir,
1674 data_files=data_files,
1675 cache_dir=cache_dir,
1676 features=features,
1677 download_config=download_config,
1678 download_mode=download_mode,
1679 revision=revision,
1680 use_auth_token=use_auth_token,
1681 **config_kwargs,
1682 )
1684 # Return iterable dataset in case of streaming
1685 if streaming:
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/datasets/load.py:1447, in load_dataset_builder(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, use_auth_token, **config_kwargs)
1445 download_config = download_config.copy() if download_config else DownloadConfig()
1446 download_config.use_auth_token = use_auth_token
-> 1447 dataset_module = dataset_module_factory(
1448 path,
1449 revision=revision,
1450 download_config=download_config,
1451 download_mode=download_mode,
1452 data_dir=data_dir,
1453 data_files=data_files,
1454 )
1456 # Get dataset builder class from the processing script
1457 builder_cls = import_main_class(dataset_module.module_path)
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/datasets/load.py:1172, in dataset_module_factory(path, revision, download_config, download_mode, dynamic_modules_path, data_dir, data_files, **download_kwargs)
1167 if isinstance(e1, FileNotFoundError):
1168 raise FileNotFoundError(
1169 f"Couldn't find a dataset script at {relative_to_absolute_path(combined_path)} or any data file in the same directory. "
1170 f"Couldn't find '{path}' on the Hugging Face Hub either: {type(e1).name}: {e1}"
1171 ) from None
-> 1172 raise e1 from None
1173 else:
1174 raise FileNotFoundError(
1175 f"Couldn't find a dataset script at {relative_to_absolute_path(combined_path)} or any data file in the same directory."
1176 )
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/datasets/load.py:1158, in dataset_module_factory(path, revision, download_config, download_mode, dynamic_modules_path, data_dir, data_files, **download_kwargs)
1143 return HubDatasetModuleFactoryWithScript(
1144 path,
1145 revision=revision,
(...)
1148 dynamic_modules_path=dynamic_modules_path,
1149 ).get_module()
1150 else:
1151 return HubDatasetModuleFactoryWithoutScript(
1152 path,
1153 revision=revision,
1154 data_dir=data_dir,
1155 data_files=data_files,
1156 download_config=download_config,
1157 download_mode=download_mode,
-> 1158 ).get_module()
1159 except Exception as e1: # noqa: all the attempts failed, before raising the error we should check if the module is already cached.
1160 try:
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/datasets/load.py:758, in HubDatasetModuleFactoryWithoutScript.get_module(self)
747 def get_module(self) -> DatasetModule:
748 hfh_dataset_info = hf_api_dataset_info(
749 HfApi(config.HF_ENDPOINT),
750 self.name,
(...)
753 timeout=100.0,
754 )
755 patterns = (
756 sanitize_patterns(self.data_files)
757 if self.data_files is not None
--> 758 else get_data_patterns_in_dataset_repository(hfh_dataset_info, self.data_dir)
759 )
760 data_files = DataFilesDict.from_hf_repo(
761 patterns,
762 dataset_info=hfh_dataset_info,
763 base_path=self.data_dir,
764 allowed_extensions=ALL_ALLOWED_EXTENSIONS,
765 )
766 module_names = {
767 key: infer_module_for_data_files(data_files_list, use_auth_token=self.download_config.use_auth_token)
768 for key, data_files_list in data_files.items()
769 }
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/datasets/data_files.py:674, in get_data_patterns_in_dataset_repository(dataset_info, base_path)
672 resolver = partial(_resolve_single_pattern_in_dataset_repository, dataset_info, base_path=base_path)
673 try:
--> 674 return _get_data_files_patterns(resolver)
675 except FileNotFoundError:
676 raise EmptyDatasetError(
677 f"The dataset repository at '{dataset_info.id}' doesn't contain any data files"
678 ) from None
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/datasets/data_files.py:235, in _get_data_files_patterns(pattern_resolver)
233 try:
234 for pattern in patterns:
--> 235 data_files = pattern_resolver(pattern)
236 if len(data_files) > 0:
237 non_empty_splits.append(split)
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/datasets/data_files.py:485, in _resolve_single_pattern_in_dataset_repository(dataset_info, pattern, base_path, allowed_extensions)
483 else:
484 base_path = "/"
--> 485 glob_iter = [PurePath(filepath) for filepath in fs.glob(PurePath(pattern).as_posix()) if fs.isfile(filepath)]
486 matched_paths = [
487 filepath
488 for filepath in glob_iter
(...)
495 )
496 ] # ignore .ipynb and __pycache__, but keep /../
497 if allowed_extensions is not None:
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/fsspec/spec.py:613, in AbstractFileSystem.glob(self, path, maxdepth, **kwargs)
609 depth = None
611 allpaths = self.find(root, maxdepth=depth, withdirs=True, detail=True, **kwargs)
--> 613 pattern = glob_translate(path + ("/" if ends_with_sep else ""))
614 pattern = re.compile(pattern)
616 out = {
617 p: info
618 for p, info in sorted(allpaths.items())
(...)
625 )
626 }
File ~/anaconda3/envs/pytorch_p310/lib/python3.10/site-packages/fsspec/utils.py:732, in glob_translate(pat)
730 continue
731 elif "**" in part:
--> 732 raise ValueError(
733 "Invalid pattern: '' can only be an entire path component"
734 )
735 if part:
736 results.extend(_translate(part, f"{not_sep}*", not_sep))
ValueError: Invalid pattern: '**' can only be an entire path component
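For reference, this error usually points to a datasets / fsspec version mismatch rather than a problem with the dataset itself: fsspec 2023.12.0 introduced glob_translate, which rejects the glob patterns that older datasets releases generate (they embed ** inside a path component, e.g. **train*). A quick way to check, as a sketch assuming a standard pip environment:

import datasets
import fsspec

# The traceback above is expected when an older datasets release
# (roughly pre-2.15) is combined with fsspec >= 2023.12.0.
print("datasets:", datasets.__version__)
print("fsspec:", fsspec.__version__)

If that is the combination installed, upgrading datasets (pip install -U datasets) or pinning fsspec below 2023.12.0 are the usual workarounds.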