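"""Gradio app that answers user questions with Groq's Mixtral 8x7B model.

The app streams a chat completion from the Groq API and displays the
assembled response in a textbox.
"""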
import os

import gradio as gr
from groq import Groq


def generate_response(input_text):
    # Create a Groq client using the API key stored in the
    # Groq_Api_Key environment variable.
    client = Groq(
        api_key=os.environ.get("Groq_Api_Key"),
    )

    # Request a streamed chat completion from the Mixtral model.
    stream = client.chat.completions.create(
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": input_text},
        ],
        model="mixtral-8x7b-32768",
        temperature=0.5,
        max_tokens=1024,
        top_p=1,
        stop=None,
        stream=True,
    )

    response = ""
    for chunk in stream:
        delta_content = chunk.choices[0].delta.content
        if delta_content is not None:
            response += delta_content

    return response

# Define the Gradio UI
inputs = gr.Textbox(label="Enter your question")
outputs = gr.Textbox(label="Model Response")

gr.Interface(
    fn=generate_response,
    inputs=inputs,
    outputs=outputs,
    title="Language Model Assistant",
    description="Ask questions and get responses from a language model.",
).launch(show_api=False, share=True)