Runtime error
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 402, in _request_wrapper
    response = _request_wrapper(
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 426, in _request_wrapper
    hf_raise_for_status(response)
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/huggingface_hub/utils/_errors.py", line 320, in hf_raise_for_status
    raise RepositoryNotFoundError(message, response) from e
huggingface_hub.utils._errors.RepositoryNotFoundError: 401 Client Error. (Request ID: Root=1-65781469-1a4e8e166242866507fbb4be;e17bd3ba-4502-43ae-8d5b-4fc306bd61fd)

Repository Not Found for url: https://huggingface.co/decapoda-research/llama-7b-hf/resolve/main/tokenizer.model.
Please make sure you specified the correct `repo_id` and `repo_type`.
If you are trying to access a private or gated repo, make sure you are authenticated.
Invalid username or password.

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/home/user/app/app.py", line 24, in <module>
    global_vars.initialize_globals()
  File "/home/user/app/global_vars.py", line 102, in initialize_globals
    model, tokenizer = load_model(
  File "/home/user/app/models/guanaco.py", line 16, in load_model
    tokenizer = LlamaTokenizer.from_pretrained(base)
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 1813, in from_pretrained
    resolved_vocab_files[file_id] = cached_file(
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/utils/hub.py", line 450, in cached_file
    raise EnvironmentError(
OSError: decapoda-research/llama-7b-hf is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models'
If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=<your_token>`
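What the traceback means: the Space calls `LlamaTokenizer.from_pretrained(base)` in `models/guanaco.py` with a `base` that resolves to `decapoda-research/llama-7b-hf`, and that repository has been removed from the Hugging Face Hub, so the download of `tokenizer.model` fails (the Hub answers 401/`RepositoryNotFoundError` for a repo it can't find on an unauthenticated request). The fix is to point `load_model` at a LLaMA checkpoint that still exists. A minimal sketch, assuming `load_model` roughly matches the frames above; `huggyllama/llama-7b` is an assumption, one commonly used re-upload of the LLaMA-7B weights, so substitute whatever repo you actually have access to:

```python
# models/guanaco.py -- minimal sketch of a fixed load_model, assuming the
# structure implied by the traceback. "huggyllama/llama-7b" is an assumption:
# one commonly used re-upload of LLaMA-7B; use any repo you have access to.
from transformers import LlamaForCausalLM, LlamaTokenizer

def load_model(base: str = "huggyllama/llama-7b"):
    # The old id, "decapoda-research/llama-7b-hf", no longer exists on the Hub,
    # which is exactly what raised RepositoryNotFoundError above.
    tokenizer = LlamaTokenizer.from_pretrained(base)
    model = LlamaForCausalLM.from_pretrained(base)
    return model, tokenizer
```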
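If instead the target repo is private or gated (rather than deleted), the same 401 appears when no valid token reaches the Hub. As the error text itself suggests, either log in once with `huggingface-cli login` or pass a token explicitly. A sketch, assuming the token is stored in an `HF_TOKEN` environment variable (a conventional name, e.g. for a Space secret, not a requirement):

```python
# Sketch of an authenticated load, assuming the repo is private/gated.
# HF_TOKEN and "your-org/your-private-llama" are hypothetical placeholders.
import os
from huggingface_hub import login
from transformers import LlamaTokenizer

login(token=os.environ["HF_TOKEN"])  # programmatic equivalent of `huggingface-cli login`
tokenizer = LlamaTokenizer.from_pretrained(
    "your-org/your-private-llama",   # replace with the actual private repo id
    token=os.environ["HF_TOKEN"],    # or rely on the login() above
)
```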