import gradio as gr
import json, os, time
from openai import OpenAI

# Client, assistant, and thread are created lazily on the first chat turn.
_client = None
_assistant = None
_thread = None

def show_json(label, obj):
    # Debug helper: print an API object as labeled JSON.
    print(f"===> {label}\n{json.loads(obj.model_dump_json())}")
def init_assistant():
    global _client, _assistant, _thread
    _client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
    _assistant = _client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
        model="gpt-4-1106-preview",
    )
    _thread = _client.beta.threads.create()
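
# Runs are asynchronous: poll every 250 ms until the run leaves the
# "queued"/"in_progress" states, then return the final run object.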
def wait_on_run(run, thread):
    global _client
    while run.status == "queued" or run.status == "in_progress":
        run = _client.beta.threads.runs.retrieve(
            run_id=run.id,
            thread_id=thread.id,
        )
        time.sleep(0.25)
    return run
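
# Collect the text of every text-type content block from a message list.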
def extract_content_values(data):
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == 'text':
                content_values.append(content.text.value)
    return content_values
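
# Gradio calls chat(message, history) for every user turn; the returned string
# is shown as the bot's reply.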
def chat(message, history):
    global _client, _assistant, _thread
    # Rebuild the history in OpenAI chat format; here it is only used to
    # detect the first turn, since the thread itself stores the conversation.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})
    if len(history_openai_format) == 1:
        init_assistant()
        show_json("assistant", _assistant)
        show_json("thread", _thread)
    #print("### history")
    #print(len(history_openai_format))
    #print(history_openai_format)
    # Add only the new user message to the thread (content must be a string,
    # not a list of chat-format dicts); prior turns are already on the thread.
    user_message = _client.beta.threads.messages.create(
        role="user",
        thread_id=_thread.id,
        content=message,
    )
    #show_json("message", user_message)
    run = _client.beta.threads.runs.create(
        assistant_id=_assistant.id,
        thread_id=_thread.id,
    )
    run = wait_on_run(run, _thread)
    #show_json("run", run)
    messages = _client.beta.threads.messages.list(thread_id=_thread.id)
    #show_json("messages", messages)
    # messages.list returns newest first, so index 0 is the assistant's reply.
    return extract_content_values(messages)[0]
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Ask Math Tutor any question", container=False, scale=7),
    title="Math Tutor",
    description="Question",
    theme="soft",
    examples=["I need to solve the equation `3x + 13 = 11`. Can you help me?"],
    cache_examples=False,
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    #multimodal=True,
    #additional_inputs=[
    #    gr.Textbox("You are a personal math tutor. Answer questions briefly, in a sentence or less.", label="System Prompt"),
    #],
).launch()
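
# To run this locally (assuming the file is saved as app.py, the usual Spaces
# entry point), set the API key and start the app:
#   export OPENAI_API_KEY=...   # value elided; use your own key
#   python app.py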