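# Gradio chat UI for a "Math Tutor" assistant built on the OpenAI Assistants API (beta).
# Expects the OPENAI_API_KEY environment variable to be set.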
import gradio as gr
import json, os, time
from openai import OpenAI
def show_json(label, obj):
    # Dump an API object as a plain dict for debugging.
    print(f"### {label}")
    print(json.loads(obj.model_dump_json()))
def init_assistant():
    # Create a fresh client, assistant, and thread; called once per chat turn.
    client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
    assistant = client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
        model="gpt-4-1106-preview",
    )
    show_json("assistant", assistant)
    thread = client.beta.threads.create()
    show_json("thread", thread)
    return client, assistant, thread
def wait_on_run(client, run, thread):
    # Poll the run until it leaves the queued/in_progress states.
    while run.status in ("queued", "in_progress"):
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
        time.sleep(0.25)
    return run
def extract_content_value(data):
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == 'text':
                content_values.append(content.text.value)
    return content_values
def chat(message, history):
    client, assistant, thread = init_assistant()
    # The Gradio history is only logged for debugging; each turn starts a new
    # thread, so previous turns are not sent to the Assistants API.
    history_openai_format = []
    for human, assistant_reply in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant_reply})
    history_openai_format.append({"role": "user", "content": message})
    print("### history")
    print(history_openai_format)
    messages = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content=message,
    )
    show_json("messages", messages)
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant.id,
    )
    show_json("run", run)
    run = wait_on_run(client, run, thread)
    show_json("run", run)
    # messages.list returns newest first, so index 0 is the assistant's reply.
    messages = client.beta.threads.messages.list(thread_id=thread.id)
    show_json("messages", messages)
    return extract_content_value(messages)[0]
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Ask Math Tutor any question", container=False, scale=7),
    title="Math Tutor",
    description="Ask a math question and get a brief answer.",
    theme="soft",
    examples=["I need to solve the equation `3x + 12 = 14`. Can you help me?"],
    cache_examples=False,
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    # multimodal=True,
    # additional_inputs=[
    #     gr.Textbox("You are a personal math tutor. Answer questions briefly, in a sentence or less.", label="System Prompt"),
    # ],
).launch()