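"""Gradio chat demo built on the OpenAI Assistants API.

Creates triage, sales, and issues & repairs agents at startup (from the local
`agents` module) and serves a ChatInterface backed by a code-interpreter assistant.
"""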
import gradio as gr
import json, os, time

from agents import create_triage_agent, create_sales_agent, create_issues_repairs_agent
from openai import OpenAI

_client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

_assistant, _thread = None, None

# Log an API object as JSON for tracing; defined here so the module-level calls below can use it.
def show_json(label, obj):
    print(f"=> {label}\n{json.loads(obj.model_dump_json())}")

### Create the three agents up front and log their definitions
triage_agent = create_triage_agent(_client)
show_json("triage_agent", triage_agent)
sales_agent = create_sales_agent(_client)
show_json("sales_agent", sales_agent)
issues_repairs_agent = create_issues_repairs_agent(_client)
show_json("issues_repairs_agent", issues_repairs_agent)
###

def create_assistant(client):
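    """Create the assistant used by the chat UI (a code-interpreter Math Tutor)."""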
    assistant = client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
        model="gpt-4-1106-preview",
        tools=[{"type": "code_interpreter"}],
    )
    show_json("assistant", assistant)
    return assistant
    
def create_thread(client):
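    """Create a new conversation thread; one thread is reused for the whole session."""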
    thread = client.beta.threads.create()
    show_json("thread", thread)
    return thread

def create_message(client, thread, msg):
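    """Append a user message to the thread."""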
    message = client.beta.threads.messages.create(
        role="user",
        thread_id=thread.id,
        content=msg,
    )
    show_json("message", message)
    return message

def create_run(client, assistant, thread):
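    """Start a run of the assistant on the thread (the run executes asynchronously on the API side)."""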
    run = client.beta.threads.runs.create(
        assistant_id=assistant.id,
        thread_id=thread.id,
    )
    show_json("run", run)
    return run

def wait_on_run(client, thread, run):
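    """Poll the run until it is no longer queued or in_progress, sleeping 0.25s between checks."""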
    while run.status in ("queued", "in_progress"):
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
        time.sleep(0.25)
    show_json("run", run)
    return run

def list_run_steps(client, thread, run):
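    """List the run's steps (oldest first) and log each step's details."""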
    run_steps = client.beta.threads.runs.steps.list(
        thread_id=thread.id,
        run_id=run.id,
        order="asc",
    )
    for step in run_steps.data:
        step_details = step.step_details
        show_json("step_details", step_details)
    return run_steps
    
def list_messages(client, thread):
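    """List the thread's messages (newest first by default)."""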
    messages = client.beta.threads.messages.list(
        thread_id=thread.id
    )
    show_json("messages", messages)
    return messages
    
def extract_content_values(data):
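    """Collect the plain-text content values from a messages list; index 0 is the newest message."""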
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == 'text':
                content_values.append(content.text.value)
    return content_values

def chat(message, history, openai_api_key):
    global _client, _assistant, _thread

    # If a key was supplied in the UI, use it; otherwise keep the client built from OPENAI_API_KEY.
    if openai_api_key and openai_api_key != "sk-":
        _client = OpenAI(api_key=openai_api_key)

    # Lazily create the assistant and thread on the first turn, then reuse them.
    if _assistant is None:
        _assistant = create_assistant(_client)

    if _thread is None:
        _thread = create_thread(_client)

    create_message(_client, _thread, message)

    # Start the run and block until it finishes.
    run = create_run(_client, _assistant, _thread)
    run = wait_on_run(_client, _thread, run)

    list_run_steps(_client, _thread, run)
    
    messages = list_messages(_client, _thread)

    return extract_content_values(messages)[0]
        
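# Chat UI: the additional API-key textbox is passed to chat() as its third argument.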
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Question", container=False, scale=7),
    title="Multi-Agent Demo",
    description="Triage agent, Sales agent, and Issues & Repairs agent",
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    examples=[["Generate the first 10 Fibbonaci numbers with code.", "sk-<BringYourOwn>"]],
    cache_examples=False,
    additional_inputs=[
        gr.Textbox("sk-", label="OpenAI API Key", type = "password"),
    ],
).launch()