import gradio as gr
import json, os, time

from openai import OpenAI

# Lazily-initialized globals, created on the first chat turn and reused afterwards.
_client, _assistant, _thread = None, None, None

def show_json(label, obj):
    # Pretty-print an API object to the console for debugging.
    print(f"=> {label}\n{json.loads(obj.model_dump_json())}")

def init_client(openai_api_key):
    # Prefer the key supplied in the UI; fall back to the OPENAI_API_KEY environment variable.
    return OpenAI(api_key=openai_api_key or os.environ.get("OPENAI_API_KEY"))

def init_assistant(client):
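    # Create the "Math Tutor" assistant that answers math questions in a sentence or less.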
    assistant = client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
        model="gpt-4-1106-preview",
    )
    show_json("assistant", assistant)
    return assistant
    
def init_thread(client):
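    # Create the conversation thread that every chat turn appends to.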
    thread = client.beta.threads.create()
    show_json("thread", thread)
    return thread
    
def wait_on_run(client, thread, run):
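    # Poll the run until it leaves the "queued"/"in_progress" states, sleeping 0.25 s between checks.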
    while run.status == "queued" or run.status == "in_progress":
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
        time.sleep(0.25)
    return run

def extract_content_values(data):
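    # Collect the text value of every text-type content block across the listed messages.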
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == 'text':
                content_values.append(content.text.value)
    return content_values

def chat(message, history, openai_api_key):
    global _client, _assistant, _thread

    # Create the client, assistant, and thread on the first call and reuse them afterwards.
    if _client is None:
        _client = init_client(openai_api_key)

    if _assistant is None:
        _assistant = init_assistant(_client)

    if _thread is None:
        _thread = init_thread(_client)
        
    message = _client.beta.threads.messages.create(
        role="user",
        thread_id=_thread.id,
        content=message,
    )
    show_json("message", message)
    
    run = _client.beta.threads.runs.create(
        assistant_id=_assistant.id,
        thread_id=_thread.id,
    )
    show_json("run", run)
    
    run = wait_on_run(_client, _thread, run)
    show_json("run", run)
    
    messages = _client.beta.threads.messages.list(
        thread_id=_thread.id
    )
    show_json("messages", messages)

    # messages.list returns newest first, so index 0 is the assistant's latest reply.
    return extract_content_values(messages)[0]
        
# Build the Gradio chat UI; the extra textbox passes the user's OpenAI API key into chat().
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Question", container=False, scale=7),
    title="Multi-Assistant Demo",
    description="Ask AAA Assistant, BBB Assistant, and CCC Assistant any question",
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    examples=[["I need to solve the equation '2x + 10 = 20'. Can you help me?", "sk-<BringYourOwn>"]],
    cache_examples=False,
    additional_inputs=[
        gr.Textbox("sk-", label="OpenAI API Key", type = "password"),
    ],
).launch()