Error when loading mistralai/Mistral-7B-Instruct-v0.2

Instantiating SelfCheckLLMPrompt with this model fails with a 403 Forbidden error from the Hugging Face Hub. Full traceback:
HTTPError Traceback (most recent call last)
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\utils\_errors.py:304, in hf_raise_for_status(response, endpoint_name)
303 try:
--> 304 response.raise_for_status()
305 except HTTPError as e:
File C:\ProgramData\anaconda3\Lib\site-packages\requests\models.py:1024, in Response.raise_for_status(self)
1023 if http_error_msg:
-> 1024 raise HTTPError(http_error_msg, response=self)
HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2/resolve/main/config.json
The above exception was the direct cause of the following exception:
HfHubHTTPError Traceback (most recent call last)
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\file_download.py:1751, in _get_metadata_or_catch_error(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, token, local_files_only, relative_filename, storage_folder)
1750 try:
-> 1751 metadata = get_hf_file_metadata(
1752 url=url, proxies=proxies, timeout=etag_timeout, headers=headers, token=token
1753 )
1754 except EntryNotFoundError as http_error:
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\utils\_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\file_download.py:1673, in get_hf_file_metadata(url, token, proxies, timeout, library_name, library_version, user_agent, headers)
1672 # Retrieve metadata
-> 1673 r = _request_wrapper(
1674 method="HEAD",
1675 url=url,
1676 headers=headers,
1677 allow_redirects=False,
1678 follow_relative_redirects=True,
1679 proxies=proxies,
1680 timeout=timeout,
1681 )
1682 hf_raise_for_status(r)
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\file_download.py:376, in _request_wrapper(method, url, follow_relative_redirects, **params)
375 if follow_relative_redirects:
--> 376 response = _request_wrapper(
377 method=method,
378 url=url,
379 follow_relative_redirects=False,
380 **params,
381 )
383 # If redirection, we redirect only relative paths.
384 # This is useful in case of a renamed repository.
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\file_download.py:400, in _request_wrapper(method, url, follow_relative_redirects, **params)
399 response = get_session().request(method=method, url=url, **params)
--> 400 hf_raise_for_status(response)
401 return response
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\utils\_errors.py:367, in hf_raise_for_status(response, endpoint_name)
361 message = (
362 f"\n\n{response.status_code} Forbidden: {error_message}."
363 + f"\nCannot access content at: {response.url}."
364 + "\nIf you are trying to create or update content, "
365 + "make sure you have a token with the write
role."
366 )
--> 367 raise HfHubHTTPError(message, response=response) from e
369 # Convert `HTTPError` into a `HfHubHTTPError` to display request information
370 # as well (request id and/or server error message)
HfHubHTTPError: (Request ID: Root=1-66ce704a-0bd122623565ef552e5af495;89bb2cad-1413-4d10-b464-4a6ed0cf406d)
403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
Cannot access content at: https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2/resolve/main/config.json.
If you are trying to create or update content, make sure you have a token with the `write` role.
The above exception was the direct cause of the following exception:
LocalEntryNotFoundError Traceback (most recent call last)
File C:\ProgramData\anaconda3\Lib\site-packages\transformers\utils\hub.py:402, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
400 try:
401 # Load from URL or cache if already cached
--> 402 resolved_file = hf_hub_download(
403 path_or_repo_id,
404 filename,
405 subfolder=None if len(subfolder) == 0 else subfolder,
406 repo_type=repo_type,
407 revision=revision,
408 cache_dir=cache_dir,
409 user_agent=user_agent,
410 force_download=force_download,
411 proxies=proxies,
412 resume_download=resume_download,
413 token=token,
414 local_files_only=local_files_only,
415 )
416 except GatedRepoError as e:
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\utils\_deprecation.py:101, in _deprecate_arguments.<locals>._inner_deprecate_positional_args.<locals>.inner_f(*args, **kwargs)
100 warnings.warn(message, FutureWarning)
--> 101 return f(*args, **kwargs)
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\utils\_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\file_download.py:1240, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, legacy_cache_layout, resume_download, force_filename, local_dir_use_symlinks)
1239 else:
-> 1240 return _hf_hub_download_to_cache_dir(
1241 # Destination
1242 cache_dir=cache_dir,
1243 # File info
1244 repo_id=repo_id,
1245 filename=filename,
1246 repo_type=repo_type,
1247 revision=revision,
1248 # HTTP info
1249 endpoint=endpoint,
1250 etag_timeout=etag_timeout,
1251 headers=headers,
1252 proxies=proxies,
1253 token=token,
1254 # Additional options
1255 local_files_only=local_files_only,
1256 force_download=force_download,
1257 )
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\file_download.py:1347, in _hf_hub_download_to_cache_dir(cache_dir, repo_id, filename, repo_type, revision, endpoint, etag_timeout, headers, proxies, token, local_files_only, force_download)
1346 # Otherwise, raise appropriate error
-> 1347 _raise_on_head_call_error(head_call_error, force_download, local_files_only)
1349 # From now on, etag, commit_hash, url and size are not None.
File C:\ProgramData\anaconda3\Lib\site-packages\huggingface_hub\file_download.py:1857, in _raise_on_head_call_error(head_call_error, force_download, local_files_only)
1855 else:
1856 # Otherwise: most likely a connection issue or Hub downtime => let's warn the user
-> 1857 raise LocalEntryNotFoundError(
1858 "An error happened while trying to locate the file on the Hub and we cannot find the requested files"
1859 " in the local cache. Please check your connection and try again or make sure your Internet connection"
1860 " is on."
1861 ) from head_call_error
LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on.
The above exception was the direct cause of the following exception:
OSError Traceback (most recent call last)
Cell In[47], line 3
1 # We use the Mistral 7B LLM to detect whether the response generated with the Phi-2 LM is hallucinated or not, using the LLM Prompting technique.
2 llm_model = "mistralai/Mistral-7B-Instruct-v0.2"
----> 3 selfcheck_prompt = SelfCheckLLMPrompt(llm_model, device)
File C:\ProgramData\anaconda3\Lib\site-packages\selfcheckgpt\modeling_selfcheck.py:391, in SelfCheckLLMPrompt.__init__(self, model, device)
385 def __init__(
386 self,
387 model: str = None,
388 device = None
389 ):
390 model = model if model is not None else LLMPromptConfig.model
--> 391 self.tokenizer = AutoTokenizer.from_pretrained(model)
392 self.model = AutoModelForCausalLM.from_pretrained(model, torch_dtype="auto")
393 self.model.eval()
File C:\ProgramData\anaconda3\Lib\site-packages\transformers\models\auto\tokenization_auto.py:854, in AutoTokenizer.from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs)
852 config = AutoConfig.for_model(**config_dict)
853 else:
--> 854 config = AutoConfig.from_pretrained(
855 pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
856 )
857 config_tokenizer_class = config.tokenizer_class
858 if hasattr(config, "auto_map") and "AutoTokenizer" in config.auto_map:
File C:\ProgramData\anaconda3\Lib\site-packages\transformers\models\auto\configuration_auto.py:976, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
973 trust_remote_code = kwargs.pop("trust_remote_code", None)
974 code_revision = kwargs.pop("code_revision", None)
--> 976 config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
977 has_remote_code = "auto_map" in config_dict and "AutoConfig" in config_dict["auto_map"]
978 has_local_code = "model_type" in config_dict and config_dict["model_type"] in CONFIG_MAPPING
File C:\ProgramData\anaconda3\Lib\site-packages\transformers\configuration_utils.py:632, in PretrainedConfig.get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
630 original_kwargs = copy.deepcopy(kwargs)
631 # Get config dict associated with the base config file
--> 632 config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
633 if "_commit_hash" in config_dict:
634 original_kwargs["_commit_hash"] = config_dict["_commit_hash"]
File C:\ProgramData\anaconda3\Lib\site-packages\transformers\configuration_utils.py:689, in PretrainedConfig._get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
685 configuration_file = kwargs.pop("_configuration_file", CONFIG_NAME) if gguf_file is None else gguf_file
687 try:
688 # Load from local folder or from cache or download from model Hub and cache
--> 689 resolved_config_file = cached_file(
690 pretrained_model_name_or_path,
691 configuration_file,
692 cache_dir=cache_dir,
693 force_download=force_download,
694 proxies=proxies,
695 resume_download=resume_download,
696 local_files_only=local_files_only,
697 token=token,
698 user_agent=user_agent,
699 revision=revision,
700 subfolder=subfolder,
701 _commit_hash=commit_hash,
702 )
703 commit_hash = extract_commit_hash(resolved_config_file, commit_hash)
704 except EnvironmentError:
705 # Raise any environment error raise by `cached_file`. It will have a helpful error message adapted to
706 # the original exception.
File C:\ProgramData\anaconda3\Lib\site-packages\transformers\utils\hub.py:445, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
439 if (
440 resolved_file is not None
441 or not _raise_exceptions_for_missing_entries
442 or not _raise_exceptions_for_connection_errors
443 ):
444 return resolved_file
--> 445 raise EnvironmentError(
446 f"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load this file, couldn't find it in the"
447 f" cached files and it looks like {path_or_repo_id} is not the path to a directory containing a file named"
448 f" {full_filename}.\nCheckout your internet connection or see how to run the library in offline mode at"
449 " 'https://huggingface.co/docs/transformers/installation#offline-mode'."
450 ) from e
451 except EntryNotFoundError as e:
452 if not _raise_exceptions_for_missing_entries:
OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like mistralai/Mistral-7B-Instruct-v0.2 is not the path to a directory containing a file named config.json.
Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'.
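
The root cause is the 403 shown above: the Hub rejected the HEAD request because the access token in use does not allow reading public gated repositories, so transformers fell back to the local cache, found nothing, and raised the final OSError. Below is a minimal sketch of one way to resolve it, assuming the conditions for mistralai/Mistral-7B-Instruct-v0.2 have already been accepted on its model page; "hf_xxx" is a placeholder, not a real token, and the device value is an assumption that should match whatever the notebook already defines.

from huggingface_hub import login
from selfcheckgpt.modeling_selfcheck import SelfCheckLLMPrompt

# Authenticate before anything attempts a download from the Hub.
# The token must be a classic "read" token, or a fine-grained token with
# access to public gated repositories enabled in its settings (as the
# error message above suggests).
login(token="hf_xxx")  # placeholder, not a real credential

llm_model = "mistralai/Mistral-7B-Instruct-v0.2"
device = "cuda"  # assumed; reuse the device variable the notebook already defines
selfcheck_prompt = SelfCheckLLMPrompt(llm_model, device)

Equivalently, running huggingface-cli login in a terminal, or setting the HF_TOKEN environment variable before launching the notebook, makes the token available to huggingface_hub without calling login() in code.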