Yhhxhfh committed on
Commit
ab812c5
1 Parent(s): 33c778e

Update app.py

Files changed (1)
  1. app.py +15 -8
app.py CHANGED
@@ -12,7 +12,7 @@ import psutil
 import asyncio
 import torch
 from tenacity import retry, stop_after_attempt, wait_fixed
- from huggingface_hub import HfApi
+ from huggingface_hub import HfApi, RepositoryNotFoundError
 from dotenv import load_dotenv

 load_dotenv()
@@ -37,7 +37,11 @@ def initialize_model():
     try:
         logger.info("Initializing the base model and tokenizer.")
         base_model_repo = "meta-llama/Llama-3.2-1B"
-         model = AutoModelForCausalLM.from_pretrained(base_model_repo, cache_dir=CACHE_DIR)
+         model = AutoModelForCausalLM.from_pretrained(
+             base_model_repo,
+             cache_dir=CACHE_DIR,
+             ignore_mismatched_sizes=True  # add this parameter
+         )
         tokenizer = AutoTokenizer.from_pretrained(base_model_repo, cache_dir=CACHE_DIR)
         if tokenizer.pad_token is None:
             tokenizer.pad_token = tokenizer.eos_token
@@ -52,8 +56,6 @@ def initialize_model():
         logger.error(f"Error initializing model and tokenizer: {e}", exc_info=True)
         sys.exit(1)

- initialize_model()
-
 @retry(stop=stop_after_attempt(3), wait=wait_fixed(5))
 def download_dataset(dataset_name):
     try:
@@ -139,10 +141,7 @@ async def download_and_process_datasets():
     unify_datasets()
     upload_model_to_hub()

- async def main():
-     await download_and_process_datasets()
-
- asyncio.run(main())
+ # Remove the asyncio.run(main()) call and move initialization to the FastAPI startup event

 app = FastAPI()

@@ -156,6 +155,14 @@ app.add_middleware(

 message_history = []

+ @app.on_event("startup")
+ async def startup_event():
+     logger.info("Application startup initiated.")
+     loop = asyncio.get_event_loop()
+     await loop.run_in_executor(None, initialize_model)
+     await download_and_process_datasets()
+     logger.info("Application startup completed.")
+
 @app.get('/')
 async def index():
     html_code = """