import os
import time

import gradio as gr
from openai import OpenAI

from agents import create_triage_agent, create_sales_agent, create_issues_repairs_agent
from utils import show_json
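# Gradio demo of multi-agent orchestration with the OpenAI Assistants API,
# using a hand-off pattern across a triage agent, a sales agent, and an
# issues & repairs agent (see the ChatInterface title and description below).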
#def create_assistant(client):
# assistant = client.beta.assistants.create(
# name="Math Tutor",
# instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
# model="gpt-4-1106-preview",
# tools=[{"type": "code_interpreter"}],
# )
# show_json("assistant", assistant)
# return assistant
def create_thread(client):
    thread = client.beta.threads.create()
    #show_json("thread", thread)
    return thread
def create_message(client, thread, msg):
    message = client.beta.threads.messages.create(
        role="user",
        thread_id=thread.id,
        content=msg,
    )
    #show_json("message", message)
    return message
def create_run(client, assistant, thread):
    run = client.beta.threads.runs.create(
        assistant_id=assistant.id,
        thread_id=thread.id,
    )
    #show_json("run", run)
    return run
def wait_on_run(client, thread, run):
    # Poll until the run is no longer queued or in progress.
    while run.status in ("queued", "in_progress"):
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
        time.sleep(0.25)
    #show_json("run", run)
    return run
def list_run_steps(client, thread, run):
    run_steps = client.beta.threads.runs.steps.list(
        thread_id=thread.id,
        run_id=run.id,
        order="asc",
    )
    # Log each step's details for debugging.
    for step in run_steps.data:
        step_details = step.step_details
        show_json("step_details", step_details)
    return run_steps
def list_messages(client, thread):
    messages = client.beta.threads.messages.list(
        thread_id=thread.id
    )
    #show_json("messages", messages)
    return messages
def extract_content_values(data):
    # Collect the text value of every text content part across all messages.
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == "text":
                content_values.append(content.text.value)
    return content_values
_client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
_assistant, _thread = None, None
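# set_current_agent and set_current_thread are called below but are neither
# defined here nor imported from agents/utils. A minimal sketch, assuming they
# are meant to record the currently active assistant and thread for the
# hand-off pattern by updating the module-level globals that chat() uses:
def set_current_agent(agent):
    global _assistant
    _assistant = agent

def set_current_thread(thread):
    global _thread
    _thread = thread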
###
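# Create one assistant and one thread per agent; the triage agent and its
# thread handle the conversation first in the hand-off pattern.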
triage_agent = create_triage_agent(_client)
sales_agent = create_sales_agent(_client)
issues_repairs_agent = create_issues_repairs_agent(_client)
set_current_agent(triage_agent)
triage_thread = create_thread(_client)
sales_thread = create_thread(_client)
issues_repairs_thread = create_thread(_client)
set_current_thread(triage_thread)
###
def chat(message, history, openai_api_key):
    # Note: the OpenAI client above is created from the OPENAI_API_KEY
    # environment variable; the openai_api_key textbox value is currently unused.
    global _client, _assistant, _thread
    #if _assistant == None:
    #    _assistant = create_assistant(_client)
    #if _thread == None:
    #    _thread = create_thread(_client)
    create_message(_client, _thread, message)
    # async
    run = create_run(_client, _assistant, _thread)
    run = wait_on_run(_client, _thread, run)
    list_run_steps(_client, _thread, run)
    messages = list_messages(_client, _thread)
    # Messages are listed newest first, so index 0 is the latest assistant reply.
    return extract_content_values(messages)[0]
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Question", container=False, scale=7),
    title="Multi-Agent Orchestration",
    description="Demo using the hand-off pattern: triage agent, sales agent, and issues & repairs agent",
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    #examples=[["Generate the first 10 Fibonacci numbers with code.", "sk-<BringYourOwn>"]],
    #cache_examples=False,
    additional_inputs=[
        gr.Textbox("sk-", label="OpenAI API Key", type="password"),
    ],
).launch()
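# Note (assumption): retry_btn, undo_btn, and clear_btn are ChatInterface
# parameters from Gradio 4.x; newer Gradio releases removed them, so this demo
# is assumed to run against a compatible, pinned Gradio version.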