Update app.py
app.py
CHANGED
@@ -7,9 +7,6 @@ _client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
 
 _assistant, _thread = None, None
 
-def show_json(str, obj):
-    print(f"=> {str}\n{json.loads(obj.model_dump_json())}")
-
 def create_assistant(client):
     assistant = client.beta.assistants.create(
         name="Math Tutor",
@@ -51,6 +48,13 @@ def wait_on_run(client, thread, run):
     show_json("run", run)
     return run
 
+def list_messages(client, thread):
+    messages = client.beta.threads.messages.list(
+        thread_id=thread.id
+    )
+    show_json("messages", messages)
+    return messages
+
 def extract_content_values(data):
     content_values = []
     for item in data.data:
@@ -59,6 +63,9 @@ def extract_content_values(data):
             content_values.append(content.text.value)
     return content_values
 
+def show_json(str, obj):
+    print(f"=> {str}\n{json.loads(obj.model_dump_json())}")
+
 def chat(message, history, openai_api_key):
     global _client, _assistant, _thread
 
@@ -68,15 +75,13 @@ def chat(message, history, openai_api_key):
     if _thread == None:
         _thread = create_thread(_client)
 
-
-
+    create_message(_client, _thread, message)
+
+    # async
     run = create_run(_client, _assistant, _thread)
     run = wait_on_run(_client, _thread, run)
 
-    messages = _client.beta.threads.messages.list(
-        thread_id=_thread.id
-    )
-    show_json("messages", messages)
+    messages = list_messages(client, thread)
 
     return extract_content_values(messages)[0]
 
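For reference, the chat(message, history, openai_api_key) signature in this commit matches what Gradio's gr.ChatInterface passes to its callback when one extra input is configured. The sketch below shows one plausible way the Space could wire it up; the widget, labels, and launch call are assumptions for illustration and are not part of this commit.

import gradio as gr

# Hypothetical wiring of the chat() function above into a Gradio UI.
# Assumption: the actual app.py may name and configure these differently.
demo = gr.ChatInterface(
    fn=chat,  # receives (message, history, openai_api_key)
    additional_inputs=[
        gr.Textbox(label="OpenAI API key", type="password"),
    ],
    title="Math Tutor",
)

if __name__ == "__main__":
    demo.launch()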