# TODO:
#
# 1. Function calling - https://platform.openai.com/docs/assistants/tools/function-calling
# 2. Gradio session / multi-user thread

# Reference:
#
# https://vimeo.com/990334325/56b552bc7a
# https://platform.openai.com/playground/assistants
# https://cookbook.openai.com/examples/assistants_api_overview_python
# https://platform.openai.com/docs/api-reference/assistants/createAssistant
# https://platform.openai.com/docs/assistants/tools

import gradio as gr
import pandas as pd
import yfinance as yf

import json, openai, os, time

from datetime import date
from openai import OpenAI
from typing import List
from utils import function_to_schema, show_json

client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

assistant_id = "asst_ypbcWnilAd60bc2DQ8haDL5P"

assistant, thread = None, None

def today_tool() -> str:
    """Returns today's date. Use this function for any questions related to knowing today's date. 
       There is no input. This function always returns today's date."""
    return str(date.today())

def yf_download_tool(tickers: List[str], start_date: date, end_date: date) -> pd.DataFrame:
    """Returns historical stock data for given tickers from a start date to an end date 
       using the yfinance library download function. 
       Use this function for any questions related to getting historical stock data. 
       The input should be the tickers as a List of strings, a start date, and an end date. 
       This function always returns a pandas DataFrame."""
    return yf.download(tickers, start=start_date, end=end_date)

tools = {
    "today_tool": today_tool,
    "yf_download_tool": yf_download_tool,
}
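
# Illustrative only -- nothing below is used by the app. It shows how the
# registered tools can be invoked directly, and the JSON-schema shape that
# utils.function_to_schema is assumed to produce for a zero-argument function
# such as today_tool (the "function" tool format expected by the Assistants API).
def _tool_examples():
    print(tools["today_tool"]())  # e.g. "2025-01-31"
    print(tools["yf_download_tool"](["NVDA", "MSFT"], date(2024, 7, 1), date(2024, 9, 30)))

_assumed_today_tool_schema = {
    "name": "today_tool",
    "description": "Returns today's date. Use this function for any questions "
                   "related to knowing today's date. There is no input. "
                   "This function always returns today's date.",
    "parameters": {"type": "object", "properties": {}, "required": []},
}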

def create_assistant(client):
    assistant = client.beta.assistants.create(
        name="Python Code Generator",
        instructions=(
             "You are a Python programming language expert that "
             "generates Pylint-compliant code and explains it. "
             "Execute code when explicitly asked to."
        ),
        model="gpt-4o",
        tools=[
            {"type": "code_interpreter"},
            {"type": "function", "function": function_to_schema(today_tool)},
            {"type": "function", "function": function_to_schema(yf_download_tool)},
        ],
    )
    
    show_json("assistant", assistant)
    
    return assistant

def load_assistant(client):   
    assistant = client.beta.assistants.retrieve(assistant_id)

    show_json("assistant", assistant)
    
    return assistant

def create_thread(client):
    thread = client.beta.threads.create()
    
    show_json("thread", thread)
    
    return thread

def create_message(client, thread, msg):        
    message = client.beta.threads.messages.create(
        role="user",
        thread_id=thread.id,
        content=msg,
    )
    
    show_json("message", message)
    
    return message

def create_run(client, assistant, thread):
    run = client.beta.threads.runs.create(
        assistant_id=assistant.id,
        thread_id=thread.id,
        parallel_tool_calls=False,
    )
    
    show_json("run", run)
    
    return run

def wait_on_run(client, thread, run):
    while run.status == "queued" or run.status == "in_progress":
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )

        print(run.status)

        time.sleep(0.5)
    
    show_json("run", run)

    if hasattr(run, "last_error") and run.last_error:
        raise gr.Error(run.last_error.message)

    return run

def get_run_steps(client, thread, run):
    run_steps = client.beta.threads.runs.steps.list(
        thread_id=thread.id,
        run_id=run.id,
        order="asc",
    )

    show_json("run_steps", run_steps)
    
    return run_steps

def execute_tool_call(tool_call):
    name = tool_call.function.name
    args = json.loads(tool_call.function.arguments)

    return tools[name](**args)

def execute_tool_calls(run_steps):
    run_step_details = []

    tool_call_ids = []
    tool_call_results = []
    
    for step in run_steps.data:
        step_details = step.step_details
        run_step_details.append(step_details)
        show_json("step_details", step_details)
        
        if hasattr(step_details, "tool_calls"):
            for tool_call in step_details.tool_calls:
                show_json("tool_call", tool_call)
                
                if hasattr(tool_call, "function"):
                    tool_call_ids.append(tool_call.id)
                    tool_call_results.append(execute_tool_call(tool_call))

    return tool_call_ids, tool_call_results

def get_messages(client, thread):
    messages = client.beta.threads.messages.list(
        thread_id=thread.id
    )
    
    show_json("messages", messages)
    
    return messages
                        
def extract_content_values(data):
    text_values, image_values = [], []
    
    for item in data.data:
        for content in item.content:
            if content.type == "text":
                text_value = content.text.value
                text_values.append(text_value)
            if content.type == "image_file":
                image_value = content.image_file.file_id
                image_values.append(image_value)
    
    return text_values, image_values

###
def generate_tool_outputs(tool_call_ids, tool_call_results):
    tool_outputs = []
    
    for tool_call_id, tool_call_result in zip(tool_call_ids, tool_call_results):
        tool_output = {}
        
        try:
            tool_output = {
                "tool_call_id": tool_call_id,
                "output": tool_call_result.to_json()
            }

            print("###")
            print(tool_call_id)
            print(tool_call_result.to_json())
            print("###")
        except AttributeError:
            tool_output = {
                "tool_call_id": tool_call_id,
                "output": tool_call_result
            }

            print("###")
            print(tool_call_id)
            print(tool_call_result)
            print("###")
            
        tool_outputs.append(tool_output)
    
    return tool_outputs
###

def chat(message, history):
    if not message:
        raise gr.Error("Message is required.")
    
    global assistant, thread     
    
    if assistant is None:
        assistant = load_assistant(client)
    
    if thread is None or len(history) == 0:
        thread = create_thread(client)
        
    create_message(client, thread, message)

    run = create_run(client, assistant, thread)

    run = wait_on_run(client, thread, run)
    run_steps = get_run_steps(client, thread, run)

    # Resolve function tool calls until the run no longer requires action. The run
    # pauses with status "requires_action" whenever the assistant requests one of
    # the registered function tools; execute the call and submit its output back.
    handled_tool_call_ids = set()

    while run.status == "requires_action":
        tool_call_ids, tool_call_results = execute_tool_calls(run_steps)

        # execute_tool_calls walks every run step, so skip calls whose output
        # was already submitted in an earlier iteration.
        new_ids, new_results = [], []

        for tool_call_id, tool_call_result in zip(tool_call_ids, tool_call_results):
            if tool_call_id not in handled_tool_call_ids:
                handled_tool_call_ids.add(tool_call_id)
                new_ids.append(tool_call_id)
                new_results.append(tool_call_result)

        # https://platform.openai.com/docs/api-reference/runs/submitToolOutputs
        run = client.beta.threads.runs.submit_tool_outputs(
            thread_id=thread.id,
            run_id=run.id,
            tool_outputs=generate_tool_outputs(new_ids, new_results),
        )

        run = wait_on_run(client, thread, run)
        run_steps = get_run_steps(client, thread, run)
    
    messages = get_messages(client, thread)

    text_values, image_values = extract_content_values(messages)

    download_link = ""
    
    if len(image_values) > 0:
        download_link = f"<p>Download: https://platform.openai.com/storage/files/{image_values[0]}</p>"
    
    #return f"{text_values[0]}{download_link}"
    return f"{'<hr>'.join(list(reversed(text_values))[1:])}{download_link}"

gr.ChatInterface(
        fn=chat,
        chatbot=gr.Chatbot(height=350),
        textbox=gr.Textbox(placeholder="Ask anything", container=False, scale=7),
        title="Python Code Generator",
        description=(
            "The assistant can **generate, explain, fix, optimize, document, and test code**. "
            "It can also **execute code**. "
            "It has access to <b>today tool</b> (get current date) and "
            "to **yfinance download tool** (get stock data)."
        ),
        clear_btn="Clear",
        retry_btn=None,
        undo_btn=None,
        examples=[
                  ["Generate: Python code to fine-tune model meta-llama/Meta-Llama-3.1-8B on dataset gretelai/synthetic_text_to_sql using QLoRA"],
                  ["Explain: r\"^(?=.*[A-Z])(?=.*[a-z])(?=.*[0-9])(?=.*[\\W]).{8,}$\""],
                  ["Fix: x = [5, 2, 1, 3, 4]; print(x.sort())"],
                  ["Optimize: x = []; for i in range(0, 10000): x.append(i)"],
                  ["Execute: First 25 Fibbonaci numbers"],
                  ["Create a plot showing stock gain QTD for NVDA and MSFT, x-axis is 'Day' and y-axis is 'QTD Gain %'"]
                 ],
        cache_examples=False,
    ).launch()