KingNish committed on
Commit
397998c
1 Parent(s): 15584ad

Update chatbot.py

Files changed (1)
  1. chatbot.py +30 -13
chatbot.py CHANGED
@@ -430,8 +430,8 @@ def model_inference( user_prompt, chat_history):
             message_groq.append({"role": "user", "content": f"{str(msg[0])}"})
             message_groq.append({"role": "assistant", "content": f"{str(msg[1])}"})
         message_groq.append({"role": "user", "content": f"{str(message_text)}"})
-        # its meta-llama/Meta-Llama-3-8B-Instruct
-        stream = client_groq.chat.completions.create(model="llama3-8b-8192", messages=message_groq, max_tokens=4096, stream=True)
+        # its meta-llama/Meta-Llama-3-70B-Instruct
+        stream = client_groq.chat.completions.create(model="llama3-70b-8192", messages=message_groq, max_tokens=4096, stream=True)
         output = ""
         for chunk in stream:
             content = chunk.choices[0].delta.content
@@ -440,17 +440,34 @@ def model_inference( user_prompt, chat_history):
                 yield output
     except Exception as e:
         print(e)
-        messages = f"<|im_start|>system\nYou are OpenGPT 4o a helpful and powerful assistant made by KingNish. You answers users query in detail and structured format and style like human. You are also Expert in every field and also learn and try to answer from contexts related to previous question. You also try to show emotions using Emojis and reply like human, use short forms, structured manner, detailed explaination, friendly tone and emotions.<|im_end|>"
-        for msg in chat_history:
-            messages += f"\n<|im_start|>user\n{str(msg[0])}<|im_end|>"
-            messages += f"\n<|im_start|>assistant\n{str(msg[1])}<|im_end|>"
-        messages+=f"\n<|im_start|>user\n{message_text}<|im_end|>\n<|im_start|>assistant\n"
-        stream = client_mixtral.text_generation(messages, max_new_tokens=4000, do_sample=True, stream=True, details=True, return_full_text=False)
-        output = ""
-        for response in stream:
-            if not response.token.text == "<|im_end|>":
-                output += response.token.text
-                yield output
+        try:
+            message_groq = []
+            message_groq.append({"role":"system", "content": "You are OpenGPT 4o a helpful and powerful assistant made by KingNish. You answers users query in detail and structured format and style like human. You are also Expert in every field and also learn and try to answer from contexts related to previous question. You also try to show emotions using Emojis and reply like human, use short forms, structured manner, detailed explaination, friendly tone and emotions."})
+            for msg in chat_history:
+                message_groq.append({"role": "user", "content": f"{str(msg[0])}"})
+                message_groq.append({"role": "assistant", "content": f"{str(msg[1])}"})
+            message_groq.append({"role": "user", "content": f"{str(message_text)}"})
+            # its meta-llama/Meta-Llama-3-8B-Instruct
+            stream = client_groq.chat.completions.create(model="llama3-8b-8192", messages=message_groq, max_tokens=4096, stream=True)
+            output = ""
+            for chunk in stream:
+                content = chunk.choices[0].delta.content
+                if content:
+                    output += chunk.choices[0].delta.content
+                    yield output
+        except Exception as e:
+            print(e)
+            messages = f"<|im_start|>system\nYou are OpenGPT 4o a helpful and powerful assistant made by KingNish. You answers users query in detail and structured format and style like human. You are also Expert in every field and also learn and try to answer from contexts related to previous question. You also try to show emotions using Emojis and reply like human, use short forms, structured manner, detailed explaination, friendly tone and emotions.<|im_end|>"
+            for msg in chat_history:
+                messages += f"\n<|im_start|>user\n{str(msg[0])}<|im_end|>"
+                messages += f"\n<|im_start|>assistant\n{str(msg[1])}<|im_end|>"
+            messages+=f"\n<|im_start|>user\n{message_text}<|im_end|>\n<|im_start|>assistant\n"
+            stream = client_mixtral.text_generation(messages, max_new_tokens=4000, do_sample=True, stream=True, details=True, return_full_text=False)
+            output = ""
+            for response in stream:
+                if not response.token.text == "<|im_end|>":
+                    output += response.token.text
+                    yield output
 
 # Create a chatbot interface
 chatbot = gr.Chatbot(
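
Taken as a whole, the commit promotes the primary Groq call from llama3-8b-8192 to llama3-70b-8192 and turns the old single fallback into a three-step cascade: try the 70B model, fall back to the 8B model on Groq, and only then fall back to Mixtral via client_mixtral.text_generation. The sketch below is a minimal, generic version of that cascade pattern, not the project's code; cascade_stream and the (name, make_stream) backend factories are hypothetical stand-ins for the real client_groq / client_mixtral calls.

from typing import Callable, Iterator, List, Tuple

# Hypothetical helper illustrating the fallback cascade this commit builds:
# each backend is tried in order, and the first one that streams without
# raising is the one whose output the caller sees.
def cascade_stream(backends: List[Tuple[str, Callable[[], Iterator[str]]]]) -> Iterator[str]:
    last_error = None
    for name, make_stream in backends:
        try:
            output = ""
            for piece in make_stream():   # may raise (rate limit, auth error, timeout, ...)
                output += piece
                yield output              # yield the accumulated text, as the Gradio handler does
            return                        # success: stop after the first backend that works
        except Exception as e:            # mirrors the commit's broad except / print(e)
            print(f"{name} failed: {e}")
            last_error = e
    if last_error is not None:
        yield f"All backends failed: {last_error}"

In chatbot.py the equivalent ordering would be llama3-70b-8192, then llama3-8b-8192 (both via client_groq), with the 70B and 8B attempts each guarded by a try/except and the Mixtral text_generation call as the last resort inside the innermost handler, as the diff above shows.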