import gradio as gr
import json, os, time

from openai import OpenAI

# One OpenAI client shared by every helper below; the API key is read from the environment.
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))


def init_assistant():
    # Create a fresh Math Tutor assistant and an empty conversation thread.
    assistant = client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
        model="gpt-4-1106-preview",
    )
    show_json(assistant)
    thread = client.beta.threads.create()
    show_json(thread)
    return assistant, thread
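

# Debug helper: dump any API object to the logs as a plain dict.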
def show_json(obj):
    print("###")
    print(json.loads(obj.model_dump_json()))
    print("###")
def wait_on_run(run, thread):
    while run.status in ("queued", "in_progress"):
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
        time.sleep(0.5)
    return run
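

# Collect the text value of every message in a message-list response.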
def extract_content_value(data):
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == "text":
                content_values.append(content.text.value)
    return content_values
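

# Gradio callback: create a fresh assistant and thread, send the user's
# question, and return the tutor's reply.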
def chat(message, history):
    assistant, thread = init_assistant()
    # Mirror the Gradio history in the OpenAI chat format, purely for logging;
    # the Assistants thread created above starts empty on every call. The loop
    # variable is named "ai" so it does not shadow the assistant object, which
    # is still needed for assistant.id below.
    history_openai_format = []
    for human, ai in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": ai})
    history_openai_format.append({"role": "user", "content": message})
    print("###")
    print(history_openai_format)
    print("###")
    message = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content=message,
    )
    show_json(message)
    print("###")
    print(thread.id)
    print("###")
    print(assistant.id)
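
    # Start a run for this thread and block until it finishes.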
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant.id,
    )
    show_json(run)
    run = wait_on_run(run, thread)
    show_json(run)

    # The list is returned newest first by default, so index 0 is the tutor's reply.
    messages = client.beta.threads.messages.list(thread_id=thread.id)
    show_json(messages)
    return extract_content_value(messages)[0]
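

# Build the Gradio chat UI around the callback and launch the app.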
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Ask Math Tutor any question", container=False, scale=7),
    title="Math Tutor",
    description="Question",
    theme="soft",
    examples=["I need to solve the equation `3x + 12 = 14`. Can you help me?"],
    cache_examples=False,
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    # multimodal=True,
    # additional_inputs=[
    #     gr.Textbox("You are a personal math tutor. Answer questions briefly, in a sentence or less.", label="System Prompt"),
    # ],
).launch()