runtime error
Exit code: 1. Reason: File "/usr/local/lib/python3.10/site-packages/gradio_client/utils.py", line 517, in synchronize_async return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) # type: ignore File "/usr/local/lib/python3.10/site-packages/fsspec/asyn.py", line 103, in sync raise return_result File "/usr/local/lib/python3.10/site-packages/fsspec/asyn.py", line 56, in _runner result[0] = await coro File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 277, in create await self.cache() File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 333, in cache prediction = await Context.root_block.process_api( File "/usr/local/lib/python3.10/site-packages/gradio/blocks.py", line 1432, in process_api result = await self.call_function( File "/usr/local/lib/python3.10/site-packages/gradio/blocks.py", line 1107, in call_function prediction = await anyio.to_thread.run_sync( File "/usr/local/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync return await get_async_backend().run_sync_in_worker_thread( File "/usr/local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2441, in run_sync_in_worker_thread return await future File "/usr/local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 943, in run result = context.run(func, *args) File "/usr/local/lib/python3.10/site-packages/gradio/utils.py", line 707, in wrapper response = f(*args, **kwargs) File "/home/user/app/app_dialogue.py", line 632, in process_example generated_text = client.generate(prompt=query, **generation_args).generated_text File "/usr/local/lib/python3.10/site-packages/text_generation/client.py", line 284, in generate raise parse_error(resp.status_code, payload) text_generation.errors.ShardNotReadyError: The model HuggingFaceM4/idefics-80b-instruct is too large to be loaded automatically (159GB > 10GB). Please use Spaces (https://huggingface.co/spaces) or Inference Endpoints (https://huggingface.co/inference-endpoints).
Container logs:
Fetching error logs...