File size: 2,912 Bytes
aa0eed8
12338d7
92fb7b8
031211a
aa0eed8
4d58b67
ee66ad7
d06678c
92fb7b8
d06678c
4d58b67
12c2b66
273f79a
12c2b66
5295650
bd96781
12c2b66
 
92fb7b8
12c2b66
b2173c3
5295650
c2a459e
b2173c3
5295650
df144ba
 
 
453ee12
bd96781
df144ba
9ea93d2
453ee12
4d58b67
d06678c
df144ba
453ee12
 
12c2b66
24755bb
df144ba
38dfd80
 
 
 
df144ba
24755bb
3017744
0ba3638
df144ba
7fd13ba
0231016
4d58b67
 
bd96781
06d9591
12c2b66
4117a83
06d9591
b2173c3
06cc452
bd96781
12c2b66
 
06d9591
b2173c3
06d9591
df144ba
a030fa6
 
4d58b67
 
 
4514b12
24755bb
12c2b66
aa0eed8
805ff2f
3267c67
805ff2f
 
 
 
 
 
19797f3
 
 
431abc0
8fae4d3
431abc0
19797f3
9d3d926
8c69d06
19797f3
8fae4d3
19797f3
f4e2bc7
0ba3638
 
 
805ff2f
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
import gradio as gr
import json, openai, os, time

from openai import OpenAI

# Lazily-initialized module state: OpenAI client, the "Math Tutor" assistant,
# and the single conversation thread. All three are populated by init_client()
# on the first chat() call and reused for every subsequent turn.
_client, _assistant, _thread = None, None, None

def show_json(label, obj):
    """Print a labelled JSON dump of an OpenAI SDK model for debugging.

    Args:
        label: Short tag identifying what is being dumped (e.g. "run").
        obj: Any object exposing ``model_dump_json()`` (Pydantic models
            returned by the OpenAI SDK do).
    """
    # Fix: the first parameter was named `str`, shadowing the builtin.
    # Every call site in this file passes it positionally, so renaming
    # it is backward-compatible.
    print(f"=> {label}\n{json.loads(obj.model_dump_json())}")

def init_client():
    """Create the OpenAI client, the Math Tutor assistant, and a thread.

    The three objects are stored in module-level globals so that chat()
    can reuse a single assistant/thread pair across calls.
    """
    global _client, _assistant, _thread

    client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

    # One assistant and one conversation thread are shared by every turn.
    assistant = client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
        model="gpt-4-1106-preview",
    )
    thread = client.beta.threads.create()

    _client, _assistant, _thread = client, assistant, thread

def wait_on_run(run):
    """Poll an Assistants API run until it leaves a non-terminal state.

    Args:
        run: A run object (must expose ``.status`` and ``.id``) as returned
            by ``runs.create()`` or ``runs.retrieve()``.

    Returns:
        The run object once its status is neither "queued" nor
        "in_progress".
    """
    global _client, _thread

    # Membership test replaces the original chained `or` comparison.
    while run.status in ("queued", "in_progress"):
        # Fix: sleep BEFORE re-fetching so we no longer waste a 0.25 s
        # delay after the run has already reached a terminal status.
        time.sleep(0.25)
        run = _client.beta.threads.runs.retrieve(
            thread_id=_thread.id,
            run_id=run.id,
        )

    return run

def extract_content_values(data):
    """Collect the plain-text bodies from a Messages list response.

    Args:
        data: An object whose ``.data`` is a list of messages, each with a
            ``.content`` list of typed content parts.

    Returns:
        list[str]: ``part.text.value`` for every part whose ``type`` is
        'text', in the order the parts are iterated.
    """
    # Idiom: flatten the nested message/content structure with a single
    # comprehension instead of the manual append loop.
    return [
        part.text.value
        for message in data.data
        for part in message.content
        if part.type == 'text'
    ]

def chat(message, msg2, history):
    """Gradio handler: relay *message* to the assistant, return its reply.

    Args:
        message: The user's question (text).
        msg2: Unused second positional slot. NOTE(review): gr.ChatInterface
            calls fn(message, history) with two arguments, so this extra
            parameter looks like it will raise a TypeError at runtime —
            confirm against the Gradio wiring before removing it.
        history: Chat history supplied by Gradio (unused here; the OpenAI
            thread keeps its own state server-side).

    Returns:
        str: The first text value from the messages listing — presumably
        the assistant's newest reply; verify the API's list ordering.
    """
    global _client, _assistant, _thread

    # Lazily create the client/assistant/thread on the first call.
    # Fix: `is None` replaces the `== None` comparison.
    if _client is None:
        init_client()

    # Append the user's message to the shared thread (rebinds `message`
    # to the created message object, as in the original).
    message = _client.beta.threads.messages.create(
        role="user",
        thread_id=_thread.id,
        content=message,
    )

    # Start a run of the assistant over the thread and block until done.
    run = _client.beta.threads.runs.create(
        assistant_id=_assistant.id,
        thread_id=_thread.id,
    )

    run = wait_on_run(run)
    show_json("run", run)

    messages = _client.beta.threads.messages.list(
        thread_id=_thread.id
    )
    show_json("messages", messages)

    return extract_content_values(messages)[0]

def vote(data: gr.LikeData):
    """Log a like/dislike event emitted by the Gradio chatbot."""
    print(data)
    # Same two output strings as before, built via a conditional expression
    # instead of duplicated print statements.
    direction = "upvoted" if data.liked else "downvoted"
    print("You " + direction + " this response: " + data.value["value"])
        
# NOTE(review): superseded prototype kept inert as a module-level string.
# It shows an earlier all-in-one gr.ChatInterface configuration; the live UI
# is built in the gr.Blocks section below. Consider deleting this dead code.
"""
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Question", container=False, scale=7),
    title="Math Tutor",
    description="Ask Math Tutor any question",
    theme="soft",
    examples=["I need to solve the equation '3x + 13 = 11'. Can you help me?"],
    cache_examples=True,
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
    #multimodal=True,
    #additional_inputs=[
    #    gr.Textbox("sk-", label="OpenAI API Key", type = "password"),
    #],
).launch()
"""

# Build the UI: a chatbot whose like/dislike events are routed to vote(),
# wrapped in a ChatInterface that sends user messages through chat().
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(placeholder="<strong>Math Tutor</strong><br>Ask Me Anything")
    chatbot.like(vote, None, None)
    gr.ChatInterface(fn=chat, chatbot=chatbot)
    
demo.launch()