import json
import os
import time

import gradio as gr
from openai import OpenAI
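# Module-level state: OpenAI client, assistant, and conversation thread,
# created lazily on the first chat message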
_client, _assistant, _thread = None, None, None
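# Debug helper: print a labeled JSON dump of an OpenAI API object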
def show_json(label, obj):
    print(f"=> {label}\n{json.loads(obj.model_dump_json())}")
def init_client(openai_api_key=None):
    global _client, _assistant, _thread
    # Prefer the key supplied in the UI; fall back to the OPENAI_API_KEY environment variable
    _client = OpenAI(api_key=openai_api_key or os.environ.get("OPENAI_API_KEY"))
    _assistant = _client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
        model="gpt-4-1106-preview",
    )
    # show_json("assistant", _assistant)
    _thread = _client.beta.threads.create()
    # show_json("thread", _thread)
def wait_on_run(run):
    global _client, _thread
    while run.status in ("queued", "in_progress"):
        run = _client.beta.threads.runs.retrieve(
            thread_id=_thread.id,
            run_id=run.id,
        )
        time.sleep(0.25)
    return run
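# Collect the plain-text values from every message in a thread's message list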
def extract_content_values(data):
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == "text":
                content_values.append(content.text.value)
    return content_values
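# Gradio chat handler: post the user message to the thread, run the assistant, and return its reply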
def chat(message, history, openai_api_key):
    global _client, _assistant, _thread
    if _client is None:
        init_client(openai_api_key)
    user_message = _client.beta.threads.messages.create(
        thread_id=_thread.id,
        role="user",
        content=message,
    )
    # show_json("message", user_message)
    run = _client.beta.threads.runs.create(
        assistant_id=_assistant.id,
        thread_id=_thread.id,
    )
    # show_json("run", run)
    run = wait_on_run(run)
    show_json("run", run)
    messages = _client.beta.threads.messages.list(thread_id=_thread.id)
    show_json("messages", messages)
    # Messages are listed newest first, so index 0 is the latest assistant reply
    return extract_content_values(messages)[0]
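# Build the Gradio chat UI; the API key textbox is passed to chat() as an additional input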
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Question", container=False, scale=7),
    title="Multi-Assistant Demo",
    description="Ask the Coding Assistant or the Math Assistant any question.",
    theme="soft",
    cache_examples=False,
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    additional_inputs=[
        gr.Textbox("sk-", label="OpenAI API Key", type="password"),
    ],
    examples=[
        ["Write a 'Hello World' Python program", "sk-<BringYourOwn>"],
        ["I need to solve the equation '2x + 10 = 20'. Can you help me?", "sk-<BringYourOwn>"],
    ],
).launch()