Update app.py
app.py CHANGED
@@ -136,8 +136,8 @@ def execute_tool_call(tool_call):
 def execute_tool_calls(run_steps):
     run_step_details = []
 
-
-
+    tool_call_ids = []
+    tool_call_results = []
 
     for step in run_steps.data:
         step_details = step.step_details
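The two new lists collect, for every run step, the ID of each function tool call and the value returned by execute_tool_call, which is defined earlier in app.py and not shown in this diff. The sketch below is only an assumption of what that helper looks like; the get_current_weather tool and the TOOL_FUNCTIONS registry are made-up illustrations, not part of this commit.

import json

# Assumed example tool; the real app registers its own functions.
def get_current_weather(location, unit="celsius"):
    return json.dumps({"location": location, "temperature": "22", "unit": unit})

# Hypothetical name -> callable registry for the functions the assistant may request.
TOOL_FUNCTIONS = {"get_current_weather": get_current_weather}

def execute_tool_call(tool_call):
    # Dispatch the requested function with its JSON-encoded arguments and
    # return the result as a string, ready for submit_tool_outputs.
    name = tool_call.function.name
    args = json.loads(tool_call.function.arguments or "{}")
    return TOOL_FUNCTIONS[name](**args)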
@@ -149,10 +149,10 @@ def execute_tool_calls(run_steps):
                 show_json("tool_call", tool_call)
 
                 if hasattr(tool_call, "function"):
-
-
+                    tool_call_ids.append(tool_call.id)
+                    tool_call_results.append(execute_tool_call(tool_call))
 
-    return
+    return tool_call_ids, tool_call_results
 
 def get_messages(client, thread):
     messages = client.beta.threads.messages.list(
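execute_tool_calls walks the steps returned by get_run_steps in chat() further down. That helper is defined elsewhere in app.py; a minimal sketch of what it presumably wraps, using the documented runs.steps.list endpoint, is:

def get_run_steps(client, thread, run):
    # List the steps recorded for this run, oldest first, so tool calls
    # are visited in the order the assistant issued them.
    return client.beta.threads.runs.steps.list(
        thread_id=thread.id,
        run_id=run.id,
        order="asc",
    )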
@@ -177,6 +177,28 @@ def extract_content_values(data):
 
     return text_values, image_values
 
+###
+def generate_tool_outputs(tool_call_ids, tool_call_results):
+    """
+    Generate tool outputs from tool call IDs and results.
+
+    Args:
+        tool_call_ids (list): List of tool call IDs.
+        tool_call_results (list): List of tool call results.
+
+    Returns:
+        list: List of tool outputs.
+    """
+    tool_outputs = []
+    for tool_call_id, tool_call_result in zip(tool_call_ids, tool_call_results):
+        tool_output = {
+            "tool_call_id": tool_call_id,
+            "output": tool_call_result
+        }
+        tool_outputs.append(tool_output)
+    return tool_outputs
+###
+
 def chat(message, history):
     if not message:
         raise gr.Error("Message is required.")
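The new generate_tool_outputs helper zips the two parallel lists into the list of dicts that submit_tool_outputs expects. A quick illustration with made-up values:

ids = ["call_abc123", "call_def456"]
results = ['{"temperature": "22"}', '{"rate": "1.08"}']

print(generate_tool_outputs(ids, results))
# [{'tool_call_id': 'call_abc123', 'output': '{"temperature": "22"}'},
#  {'tool_call_id': 'call_def456', 'output': '{"rate": "1.08"}'}]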
@@ -196,23 +218,15 @@ def chat(message, history):
 
     run_steps = get_run_steps(client, thread, run)
 
-
+    tool_call_ids, tool_call_results = execute_tool_calls(run_steps)
 
     ### TODO
-    if
-        print(tool_call_id)
-        print(tool_call_result.to_json())
-
+    if tool_call_ids[0]:
         # https://platform.openai.com/docs/api-reference/runs/submitToolOutputs
         run = client.beta.threads.runs.submit_tool_outputs(
             thread_id=thread.id,
             run_id=run.id,
-            tool_outputs=
-            {
-                "tool_call_id": tool_call_id,
-                "output": tool_call_result
-            }
-            ]
+            tool_outputs=generate_tool_outputs(tool_call_ids, tool_call_results)
         )
 
     run = wait_on_run(client, thread, run)
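Taken together, chat() now pauses when the assistant requests tool output, runs the requested functions locally, and resubmits the results before waiting for the run to finish. Below is a condensed sketch of that flow, not a verbatim copy of the file; note that checking if tool_call_ids: would also cover the empty case, whereas the committed if tool_call_ids[0]: assumes at least one function call was made.

run = wait_on_run(client, thread, run)             # run may stop in requires_action
run_steps = get_run_steps(client, thread, run)

tool_call_ids, tool_call_results = execute_tool_calls(run_steps)

if tool_call_ids:                                  # only submit when tools actually ran
    run = client.beta.threads.runs.submit_tool_outputs(
        thread_id=thread.id,
        run_id=run.id,
        tool_outputs=generate_tool_outputs(tool_call_ids, tool_call_results),
    )
    run = wait_on_run(client, thread, run)         # then wait for the final answer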