# Reference:
#
# https://vimeo.com/990334325/56b552bc7a
# https://platform.openai.com/playground/assistants
# https://cookbook.openai.com/examples/assistants_api_overview_python
# https://platform.openai.com/docs/api-reference/assistants/createAssistant
# https://platform.openai.com/docs/assistants/tools
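# Gradio front end for a "Python Coding Assistant" built on the OpenAI Assistants API;
# all API helpers used below are imported from the assistants module.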
import gradio as gr
import os
from assistants import (
    openai_client,
    assistant,
    thread,
    create_assistant,
    load_assistant,
    create_thread,
    create_message,
    create_run,
    wait_on_run,
    get_run_steps,
    execute_tool_calls,
    get_messages,
    recurse_execute_tool_calls,
    extract_content_values,
)
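# assistant and thread are module-level globals from the assistants module; they start
# out as None and are lazily initialized inside chat(). Because they are shared by every
# visitor, true multi-user support needs per-session state (see the TODO in chat()).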
def chat(message, history):
    if not message:
        raise gr.Error("Message is required.")
    #raise gr.Error("Please clone and bring your own OpenAI and Tavily credentials.")
    global assistant, thread
    # On first run, create the assistant and update assistant_id,
    # see https://platform.openai.com/playground/assistants.
    # On subsequent runs, load the existing assistant.
    if assistant is None:
        #assistant = create_assistant(openai_client)
        assistant = load_assistant(openai_client)
    # TODO: Use Gradio session state to support multiple users
    if thread is None or len(history) == 0:
        thread = create_thread(openai_client)
    create_message(openai_client, thread, message)
    run = create_run(openai_client, assistant, thread)
    run = wait_on_run(openai_client, thread, run)
    run_steps = get_run_steps(openai_client, thread, run)
    ### TODO
    recurse_execute_tool_calls(thread, run, run_steps, 0)
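    # recurse_execute_tool_calls (defined in the assistants module) appears to run any
    # pending tool calls, submit their outputs, and repeat until the run needs no more
    # tool output; the triple-quoted blocks below are the superseded manual version,
    # kept for reference.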
"""
tool_call_ids, tool_call_results = execute_tool_calls(run_steps)
if len(tool_call_ids) > 0:
# https://platform.openai.com/docs/api-reference/runs/submitToolOutputs
tool_output = {}
try:
tool_output = {
"tool_call_id": tool_call_ids[0],
"output": tool_call_results[0].to_json()
}
except AttributeError:
tool_output = {
"tool_call_id": tool_call_ids[0],
"output": tool_call_results[0]
}
run = openai_client.beta.threads.runs.submit_tool_outputs(
thread_id=thread.id,
run_id=run.id,
tool_outputs=[tool_output]
)
run = wait_on_run(openai_client, thread, run)
run_steps = get_run_steps(openai_client, thread, run)
"""
###
"""
tool_call_ids, tool_call_results = execute_tool_calls(run_steps)
if len(tool_call_ids) > 1:
# https://platform.openai.com/docs/api-reference/runs/submitToolOutputs
tool_output = {}
try:
tool_output = {
"tool_call_id": tool_call_ids[1],
"output": tool_call_results[1].to_json()
}
except AttributeError:
tool_output = {
"tool_call_id": tool_call_ids[1],
"output": tool_call_results[1]
}
run = openai_client.beta.threads.runs.submit_tool_outputs(
thread_id=thread.id,
run_id=run.id,
tool_outputs=[tool_output]
)
run = wait_on_run(openai_client, thread, run)
run_steps = get_run_steps(openai_client, thread, run)
"""
###
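    # Hedged sketch (an assumption, not the actual implementation in the assistants
    # module): recurse_execute_tool_calls could look roughly like this, folding the
    # two manual rounds above into one recursive helper where depth doubles as the
    # index into the accumulated tool call list.
    """
    def recurse_execute_tool_calls(thread, run, run_steps, depth):
        tool_call_ids, tool_call_results = execute_tool_calls(run_steps)
        if len(tool_call_ids) <= depth:
            return  # no unhandled tool call at this depth
        result = tool_call_results[depth]
        output = result.to_json() if hasattr(result, "to_json") else result
        run = openai_client.beta.threads.runs.submit_tool_outputs(
            thread_id=thread.id,
            run_id=run.id,
            tool_outputs=[{"tool_call_id": tool_call_ids[depth], "output": output}]
        )
        run = wait_on_run(openai_client, thread, run)
        run_steps = get_run_steps(openai_client, thread, run)
        recurse_execute_tool_calls(thread, run, run_steps, depth + 1)
    """
    # Collect every message on the thread and split the content into plain text
    # and generated file IDs (used below to build a download link).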
    messages = get_messages(openai_client, thread)
    text_values, image_values = extract_content_values(messages)
    download_link = ""
    # TODO: Handle multiple images and other file types
    if len(image_values) > 0:
        download_link = f"<p>Download: https://platform.openai.com/storage/files/{image_values[0]}</p>"
    return f"{'<hr>'.join(list(reversed(text_values))[1:])}{download_link}"
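# Gradio chat UI: wires chat() into a ChatInterface with example prompts covering
# generate / explain / fix / optimize / execute tasks.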
gr.ChatInterface(
    fn=chat,
    chatbot=gr.Chatbot(height=350),
    textbox=gr.Textbox(placeholder="Ask anything", container=False, scale=7),
    title="Python Coding Assistant",
    description=os.environ.get("DESCRIPTION"),
    clear_btn="Clear",
    retry_btn=None,
    undo_btn=None,
    examples=[
        ["Generate: Code to fine-tune model meta-llama/Meta-Llama-3.1-8B on dataset gretelai/synthetic_text_to_sql using QLoRA"],
        ["Explain: r\"^(?=.*[A-Z])(?=.*[a-z])(?=.*[0-9])(?=.*[\\W]).{8,}$\""],
        ["Fix: x = [5, 2, 1, 3, 4]; print(x.sort())"],
        ["Optimize: x = []; for i in range(0, 10000): x.append(i)"],
        ["Execute: First 25 Fibonacci numbers"],
        ["Execute with tools: Create a plot showing stock gain QTD for NVDA and AMD, x-axis is \"Day\" and y-axis is \"Gain %\""],
        ["Execute with tools: Get key announcements from latest OpenAI Dev Day"]
    ],
    cache_examples=False,
).launch()