runtime error

  ... line 9, in _inner_fn
    return fn(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 1674, in get_hf_file_metadata
    r = _request_wrapper(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 369, in _request_wrapper
    response = _request_wrapper(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 393, in _request_wrapper
    hf_raise_for_status(response)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_errors.py", line 352, in hf_raise_for_status
    raise RepositoryNotFoundError(message, response) from e
huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6618ebbe-691366382aaf1f4a41d1dc07;96f37721-501c-4d06-9b0e-d70d2bd914d1)

Repository Not Found for url: https://huggingface.co/treadon/prompt-fungineer-355M/resolve/main/config.json.
Please make sure you specified the correct `repo_id` and `repo_type`.
If you are trying to access a private or gated repo, make sure you are authenticated.

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/home/user/app/app.py", line 152, in <module>
    init_model()
  File "/home/user/app/app.py", line 25, in init_model
    model = transformers.AutoModelForCausalLM.from_pretrained(model_id, low_cpu_mem_usage=True, use_auth_token=auth_token)
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 484, in from_pretrained
    resolved_config_file = cached_file(
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 421, in cached_file
    raise EnvironmentError(
OSError: treadon/prompt-fungineer-355M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models'
If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=<your_token>`
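The root cause is the RepositoryNotFoundError: the Hub returned a 404 for treadon/prompt-fungineer-355M, which usually means the repo id is misspelled, the repository was removed or renamed, or it is private/gated and the request was not authenticated. If the repo is private or gated, the loading call in app.py needs a token with read access. Below is a minimal sketch of that fix, assuming the model_id and auth_token names from the traceback; the token value is a placeholder you must supply yourself. Note that recent transformers releases accept token=, while use_auth_token is deprecated.

import transformers

model_id = "treadon/prompt-fungineer-355M"  # verify this repo id still resolves on the Hub
auth_token = "hf_..."  # placeholder: a read token from your Hugging Face account settings

# Pass the token explicitly; `use_auth_token` still works in older releases
# but has been replaced by `token` in recent transformers versions.
model = transformers.AutoModelForCausalLM.from_pretrained(
    model_id,
    low_cpu_mem_usage=True,
    token=auth_token,
)

Alternatively, authenticate the environment once with `huggingface-cli login` (or, on a Space, set a secret such as HF_TOKEN, which huggingface_hub reads automatically) and drop the token argument. If the config.json URL above still 404s while logged in with an account that has access, the repository itself is gone and the repo id must be updated.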
