import gradio as gr
import os
import time

from agents import create_triage_agent, create_sales_agent, create_issues_repairs_agent
from agents import get_current_agent, get_current_thread, set_current_agent, set_current_thread
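# Assumed interface of the local `agents` module (not shown in this file):
# each create_*_agent(client) presumably returns an Assistants API assistant
# configured for its role, while get/set_current_agent and get/set_current_thread
# track which agent and thread should handle the next user message (the
# hand-off state).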
from openai import OpenAI
from utils import show_json
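# `show_json` also comes from a local module that is not shown here; a minimal
# stand-in, assuming it just pretty-prints an API object, could be:
#
#   import json
#   def show_json(label, obj):
#       print(label, json.dumps(json.loads(obj.model_dump_json()), indent=2))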

#def create_assistant(client):
#    assistant = client.beta.assistants.create(
#        name="Math Tutor",
#        instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
#        model="gpt-4-1106-preview",
#        tools=[{"type": "code_interpreter"}],
#    )
#    show_json("assistant", assistant)
#    return assistant
    
def create_thread(client):
    thread = client.beta.threads.create()
    #show_json("thread", thread)
    return thread

def create_message(client, thread, msg):
    message = client.beta.threads.messages.create(
        role="user",
        thread_id=thread.id,
        content=msg,
    )
    #show_json("message", message)
    return message

def create_run(client, assistant, thread):
    run = client.beta.threads.runs.create(
        assistant_id=assistant.id,
        thread_id=thread.id,
    )
    #show_json("run", run)
    return run

def wait_on_run(client, thread, run):
    # Poll every 250 ms until the run leaves the queued/in_progress states.
    while run.status in ("queued", "in_progress"):
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
        time.sleep(0.25)
    #show_json("run", run)
    return run
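
# Note: wait_on_run only polls past the queued/in_progress states. If a run
# stops in "requires_action" (the agent requested a tool/function call), a
# fuller handler would submit tool outputs, roughly along these lines
# (illustrative sketch, not used in this demo):
#
#   client.beta.threads.runs.submit_tool_outputs(
#       thread_id=thread.id,
#       run_id=run.id,
#       tool_outputs=[{"tool_call_id": call.id, "output": "..."}],
#   )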

def list_run_steps(client, thread, run):
    run_steps = client.beta.threads.runs.steps.list(
        thread_id=thread.id,
        run_id=run.id,
        order="asc",
    )
    for step in run_steps.data:
        step_details = step.step_details
        show_json("step_details", step_details)
    return run_steps
    
def list_messages(client, thread):
    messages = client.beta.threads.messages.list(
        thread_id=thread.id
    )
    #show_json("messages", messages)
    return messages
    
def extract_content_values(data):
    # Collect the plain-text value of every text content block in the messages.
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == 'text':
                content_values.append(content.text.value)
    return content_values

_client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

#_assistant, _thread = None, None

###
triage_agent = create_triage_agent(_client)
sales_agent = create_sales_agent(_client)
issues_repairs_agent = create_issues_repairs_agent(_client)

set_current_agent(triage_agent)

triage_thread = create_thread(_client)
sales_thread = create_thread(_client)
issues_repairs_thread = create_thread(_client)

set_current_thread(triage_thread)
###
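# A hand-off between agents is presumably just a matter of swapping the active
# agent and its thread via the setters imported above, e.g. when triage decides
# the request is a sales question (illustrative sketch, not wired into chat()):
#
#   set_current_agent(sales_agent)
#   set_current_thread(sales_thread)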

def chat(message, history, openai_api_key):
    # The client is created once at startup from OPENAI_API_KEY; the API-key
    # textbox value passed in here is not used to re-create it.
    agent = get_current_agent()
    thread = get_current_thread()

    # Post the user message, start a run for the active agent, and poll until it completes.
    create_message(_client, thread, message)

    run = create_run(_client, agent, thread)
    run = wait_on_run(_client, thread, run)

    list_run_steps(_client, thread, run)

    messages = list_messages(_client, thread)

    # Messages come back newest-first, so the first text value is the latest reply.
    return extract_content_values(messages)[0]
        
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Question", container=False, scale=7),
    title="Multi-Agent Orchestration",
    description="Demo using hand-off pattern: triage agent, sales agent, and issues & repairs agent",
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    #examples=[["Generate the first 10 Fibonacci numbers with code.", "sk-<BringYourOwn>"]],
    #cache_examples=False,
    additional_inputs=[
        gr.Textbox("sk-", label="OpenAI API Key", type = "password"),
    ],
).launch()