Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -2,6 +2,8 @@ import gradio as gr
|
|
2 |
import json, openai, os, time
|
3 |
from openai import OpenAI
|
4 |
|
|
|
|
|
5 |
def show_json(str, obj):
|
6 |
print(f"### {str}")
|
7 |
print(json.loads(obj.model_dump_json()))
|
@@ -14,10 +16,8 @@ def init_assistant():
|
|
14 |
instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
|
15 |
model="gpt-4-1106-preview",
|
16 |
)
|
17 |
-
show_json("assistant", assistant)
|
18 |
|
19 |
thread = client.beta.threads.create()
|
20 |
-
show_json("thread", thread)
|
21 |
|
22 |
return client, assistant, thread
|
23 |
|
@@ -38,10 +38,6 @@ def extract_content_value(data):
|
|
38 |
content_values.append(content.text.value)
|
39 |
return content_values
|
40 |
|
41 |
-
client = None
|
42 |
-
assistant = None
|
43 |
-
thread = None
|
44 |
-
|
45 |
def chat(message, history):
|
46 |
global client
|
47 |
global assistant
|
@@ -57,10 +53,13 @@ def chat(message, history):
|
|
57 |
|
58 |
if len(history_openai_format) == 1:
|
59 |
client, assistant, thread = init_assistant()
|
|
|
|
|
|
|
60 |
|
61 |
-
print("### history")
|
62 |
-
print(len(history_openai_format))
|
63 |
-
print(history_openai_format)
|
64 |
|
65 |
message = client.beta.threads.messages.create(
|
66 |
thread_id=thread.id,
|
@@ -68,22 +67,22 @@ def chat(message, history):
|
|
68 |
content=message,
|
69 |
)
|
70 |
|
71 |
-
show_json("message", message)
|
72 |
|
73 |
run = client.beta.threads.runs.create(
|
74 |
thread_id=thread.id,
|
75 |
assistant_id=assistant.id,
|
76 |
)
|
77 |
|
78 |
-
show_json("
|
79 |
|
80 |
run = wait_on_run(client, run, thread)
|
81 |
|
82 |
-
show_json("
|
83 |
|
84 |
messages = client.beta.threads.messages.list(thread_id=thread.id)
|
85 |
|
86 |
-
show_json("messages", messages)
|
87 |
|
88 |
return extract_content_value(messages)[0]
|
89 |
|
|
|
import json, openai, os, time
from openai import OpenAI

# Module-level handles shared with chat() via `global`; they are created
# lazily by init_assistant() on the first chat turn.
# NOTE: these must be *assigned* to None here — the bare expression
# `client, assistant, thread` from the original commit raises NameError
# at import time because none of the names exist yet.
client = None
assistant = None
thread = None
def show_json(label, obj):
    """Print an OpenAI SDK object to stdout as a plain dict under a heading.

    Args:
        label: Heading text; printed as "### {label}".
        obj: Any pydantic-backed SDK object exposing model_dump_json().

    Renamed the first parameter from ``str`` (which shadowed the builtin);
    every call site in this file passes it positionally, so callers are
    unaffected.
    """
    print(f"### {label}")
    # Round-trip through JSON so nested SDK models render as plain dicts.
    print(json.loads(obj.model_dump_json()))
|
|
16 |
instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
|
17 |
model="gpt-4-1106-preview",
|
18 |
)
|
|
|
19 |
|
20 |
thread = client.beta.threads.create()
|
|
|
21 |
|
22 |
return client, assistant, thread
|
23 |
|
|
|
38 |
content_values.append(content.text.value)
|
39 |
return content_values
|
40 |
|
|
|
|
|
|
|
|
|
41 |
def chat(message, history):
|
42 |
global client
|
43 |
global assistant
|
|
|
53 |
|
54 |
if len(history_openai_format) == 1:
|
55 |
client, assistant, thread = init_assistant()
|
56 |
+
|
57 |
+
show_json("assistant", assistant)
|
58 |
+
show_json("thread", thread)
|
59 |
|
60 |
+
#print("### history")
|
61 |
+
#print(len(history_openai_format))
|
62 |
+
#print(history_openai_format)
|
63 |
|
64 |
message = client.beta.threads.messages.create(
|
65 |
thread_id=thread.id,
|
|
|
67 |
content=message,
|
68 |
)
|
69 |
|
70 |
+
#show_json("message", message)
|
71 |
|
72 |
run = client.beta.threads.runs.create(
|
73 |
thread_id=thread.id,
|
74 |
assistant_id=assistant.id,
|
75 |
)
|
76 |
|
77 |
+
#show_json("run", run)
|
78 |
|
79 |
run = wait_on_run(client, run, thread)
|
80 |
|
81 |
+
#show_json("run", run)
|
82 |
|
83 |
messages = client.beta.threads.messages.list(thread_id=thread.id)
|
84 |
|
85 |
+
#show_json("messages", messages)
|
86 |
|
87 |
return extract_content_value(messages)[0]
|
88 |
|