Update app.py
app.py CHANGED
@@ -1,6 +1,4 @@
-# TODO:
-#
-# 1. Gradio session / multi-user thread
+# TODO: Gradio session / multi-user thread
 
 # Reference:
 #
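The consolidated TODO above concerns giving each Gradio user their own Assistants API thread instead of sharing one global thread. A minimal sketch of one possible approach, assuming Gradio's `gr.Request` injection also applies to the `ChatInterface` callback; the helper and dict below are illustrative and not part of app.py:

```python
import gradio as gr
from openai import OpenAI

client = OpenAI()        # assumes OPENAI_API_KEY is set in the Space secrets
session_threads = {}     # session_hash -> Assistants API thread id (illustrative only)

def get_session_thread(request: gr.Request):
    # Create one Assistants API thread per browser session instead of a global one.
    key = request.session_hash
    if key not in session_threads:
        session_threads[key] = client.beta.threads.create().id
    return session_threads[key]

def chat(message, history, request: gr.Request):
    thread_id = get_session_thread(request)
    # ... create the user message and run the assistant against thread_id ...
    return f"(would answer using thread {thread_id})"

gr.ChatInterface(fn=chat).launch()
```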
@@ -78,16 +76,12 @@ def create_assistant(openai_client):
 
 def load_assistant(openai_client):
     assistant = openai_client.beta.assistants.retrieve(assistant_id)
-
     show_json("assistant", assistant)
-
     return assistant
 
 def create_thread(openai_client):
     thread = openai_client.beta.threads.create()
-
     show_json("thread", thread)
-
     return thread
 
 def create_message(openai_client, thread, msg):
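For context, the helpers touched in this hunk are thin wrappers over the (beta) Assistants API. A minimal sketch of the underlying calls, assuming an `assistant_id` defined elsewhere in app.py and treating `show_json` as a simple logging helper:

```python
import json
from openai import OpenAI

client = OpenAI()             # assumes OPENAI_API_KEY is set
assistant_id = "asst_..."     # placeholder; the real id comes from the Space configuration

def show_json(label, obj):
    # Assumed behaviour: pretty-print the API object for debugging.
    print(label, json.loads(obj.model_dump_json()))

assistant = client.beta.assistants.retrieve(assistant_id)   # what load_assistant wraps
thread = client.beta.threads.create()                        # what create_thread wraps
show_json("assistant", assistant)
show_json("thread", thread)
```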
@@ -98,7 +92,6 @@ def create_message(openai_client, thread, msg):
     )
 
     show_json("message", message)
-
     return message
 
 def create_run(openai_client, assistant, thread):
@@ -109,7 +102,6 @@ def create_run(openai_client, assistant, thread):
     )
 
     show_json("run", run)
-
     return run
 
 def wait_on_run(openai_client, thread, run):
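`wait_on_run`, named in the context line above, presumably polls the run until it leaves its transient states. A sketch under that assumption (not necessarily the file's actual implementation):

```python
import time

def wait_on_run(openai_client, thread, run):
    # Poll until the run is no longer queued or in progress; a run that ends up
    # in "requires_action" is handled by the tool-call logic elsewhere in app.py.
    while run.status in ("queued", "in_progress"):
        run = openai_client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
        time.sleep(0.5)
    return run
```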
@@ -136,7 +128,6 @@ def get_run_steps(openai_client, thread, run):
     )
 
     show_json("run_steps", run_steps)
-
     return run_steps
 
 def execute_tool_call(tool_call):
@@ -175,7 +166,6 @@ def get_messages(openai_client, thread):
     )
 
     show_json("messages", messages)
-
     return messages
 
 def extract_content_values(data):
@@ -204,21 +194,11 @@ def generate_tool_outputs(tool_call_ids, tool_call_results):
                 "tool_call_id": tool_call_id,
                 "output": tool_call_result.to_json()
             }
-
-            print("###")
-            print(tool_call_id)
-            print(tool_call_result.to_json())
-            print("###")
         except AttributeError:
             tool_output = {
                 "tool_call_id": tool_call_id,
                 "output": tool_call_result
            }
-
-            print("###")
-            print(tool_call_id)
-            print(tool_call_result)
-            print("###")
 
         tool_outputs.append(tool_output)
 
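After this hunk, `generate_tool_outputs` is reduced to building the `tool_outputs` payload without the debug prints. A sketch of the resulting helper, assuming the loop and try/except structure implied by the context lines:

```python
def generate_tool_outputs(tool_call_ids, tool_call_results):
    tool_outputs = []

    for tool_call_id, tool_call_result in zip(tool_call_ids, tool_call_results):
        try:
            # Results with a to_json() method (e.g. pandas objects from yfinance)
            # are serialized before being returned to the assistant.
            tool_output = {
                "tool_call_id": tool_call_id,
                "output": tool_call_result.to_json()
            }
        except AttributeError:
            # Plain string results (e.g. the today tool) are passed through as-is.
            tool_output = {
                "tool_call_id": tool_call_id,
                "output": tool_call_result
            }

        tool_outputs.append(tool_output)

    return tool_outputs
```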
@@ -250,14 +230,6 @@ def chat(message, history):
     tool_call_ids, tool_call_results = execute_tool_calls(run_steps)
 
     if len(tool_call_ids) > 0:
-        print("###")
-        print(len(tool_call_ids))
-        print(tool_call_ids)
-        print(tool_call_ids[0])
-        print(tool_call_results)
-        print(tool_call_results[0])
-        print("###")
-
         # https://platform.openai.com/docs/api-reference/runs/submitToolOutputs
         tool_output = {}
 
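The `submitToolOutputs` reference kept in this hunk corresponds to the following call in the Python client; a minimal sketch, assuming `run` is currently in the `requires_action` state:

```python
run = openai_client.beta.threads.runs.submit_tool_outputs(
    thread_id=thread.id,
    run_id=run.id,
    tool_outputs=[
        {"tool_call_id": tool_call_ids[0], "output": tool_call_results[0]},
    ],
)
```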
@@ -284,14 +256,6 @@ def chat(message, history):
     tool_call_ids, tool_call_results = execute_tool_calls(run_steps)
 
     if len(tool_call_ids) > 1:
-        print("###")
-        print(len(tool_call_ids))
-        print(tool_call_ids)
-        print(tool_call_ids[1])
-        print(tool_call_results)
-        print(tool_call_results[1])
-        print("###")
-
         # https://platform.openai.com/docs/api-reference/runs/submitToolOutputs
         tool_output = {}
 
@@ -325,7 +289,6 @@ def chat(message, history):
     if len(image_values) > 0:
         download_link = f"<p>Download: https://platform.openai.com/storage/files/{image_values[0]}</p>"
 
-    #return f"{text_values[0]}{download_link}"
     return f"{'<hr>'.join(list(reversed(text_values))[1:])}{download_link}"
 
 gr.ChatInterface(
@@ -334,8 +297,8 @@ gr.ChatInterface(
     textbox=gr.Textbox(placeholder="Ask anything", container=False, scale=7),
     title="Python Coding Assistant",
     description=(
-        "The assistant can **generate, explain, fix, optimize
-        "create unit test cases
+        "The assistant can **generate, explain, fix, optimize,** and **document Python code, "
+        "create unit test cases,** and **answer general coding-related questions.** "
         "It can also **execute code**. "
         "The assistant has access to a <b>today tool</b> (get current date), to a "
         "**yfinance download tool** (get stock data), and to a "
@@ -353,5 +316,5 @@ gr.ChatInterface(
         ["Execute with tools: Create a plot showing stock gain QTD for NVDA and AMD, x-axis is \"Day\" and y-axis is \"Gain %\""],
         ["Execute with tools: Get key announcements from the latest OpenAI Dev Day"]
     ],
-    cache_examples=
+    cache_examples=False,
 ).launch()
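The final hunk sets `cache_examples=False`, which stops Gradio from executing every example at startup; with caching enabled, each example would trigger assistant runs and tool calls on launch. A stripped-down sketch of the pattern, with a placeholder chat function standing in for the real one:

```python
import gradio as gr

def chat(message, history):
    # Placeholder; the real app.py runs the OpenAI assistant and its tools here.
    return f"You said: {message}"

gr.ChatInterface(
    fn=chat,
    textbox=gr.Textbox(placeholder="Ask anything", container=False, scale=7),
    title="Python Coding Assistant",
    examples=[["Execute with tools: Get key announcements from the latest OpenAI Dev Day"]],
    cache_examples=False,   # examples are not pre-executed when the Space starts
).launch()
```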