Spaces:
Runtime error
Runtime error
abnerguzman
committed on
Commit
•
1c72597
1
Parent(s):
7cf1a8c
Update app.py
Browse files
app.py
CHANGED
@@ -38,16 +38,16 @@ octoai = OctoAI(
|
|
38 |
|
39 |
from llama_index.core.memory import ChatMemoryBuffer
|
40 |
|
41 |
-
memory = ChatMemoryBuffer.from_defaults(token_limit=5000)
|
42 |
-
|
43 |
-
chat_engine = vindex.as_chat_engine(
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
)
|
51 |
|
52 |
|
53 |
|
@@ -68,25 +68,29 @@ with gr.Blocks() as demo:
|
|
68 |
msg = gr.Textbox()
|
69 |
clear = gr.Button("Clear")
|
70 |
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
|
|
|
|
|
|
|
|
77 |
|
78 |
def user(user_message, history):
|
79 |
return "", history + [[user_message, None]]
|
80 |
|
81 |
-
def bot(history):
|
82 |
response = chat_engine.stream_chat(history[-1][0])
|
83 |
history[-1][1] = ""
|
84 |
for token in response.response_gen:
|
85 |
history[-1][1] += token
|
86 |
yield history
|
87 |
|
88 |
-
msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(bot, chatbot, chatbot)
|
89 |
-
clear.click(lambda:
|
90 |
|
91 |
if __name__ == "__main__":
|
92 |
demo.queue()
|
|
|
38 |
|
39 |
from llama_index.core.memory import ChatMemoryBuffer
|
40 |
|
41 |
+
# memory = ChatMemoryBuffer.from_defaults(token_limit=5000)
|
42 |
+
|
43 |
+
# chat_engine = vindex.as_chat_engine(
|
44 |
+
# chat_mode="context",
|
45 |
+
# llm=octoai,
|
46 |
+
# memory=memory,
|
47 |
+
# system_prompt=(
|
48 |
+
# "You are a chatbot, able to have normal interactions, as well as talk about news events."
|
49 |
+
# ),
|
50 |
+
# )
|
51 |
|
52 |
|
53 |
|
|
|
68 |
msg = gr.Textbox()
|
69 |
clear = gr.Button("Clear")
|
70 |
|
71 |
+
def get_chat_engine():
|
72 |
+
memory = ChatMemoryBuffer.from_defaults(token_limit=5000)
|
73 |
+
return vindex.as_chat_engine(
|
74 |
+
chat_mode="context",
|
75 |
+
llm=octoai,
|
76 |
+
memory=memory,
|
77 |
+
system_prompt="You are a chatbot, able to have normal interactions, as well as talk about news events.",
|
78 |
+
)
|
79 |
+
|
80 |
+
chat_engine_var = gr.State(get_chat_engine)
|
81 |
|
82 |
def user(user_message, history):
|
83 |
return "", history + [[user_message, None]]
|
84 |
|
85 |
+
def bot(history, chat_engine):
|
86 |
response = chat_engine.stream_chat(history[-1][0])
|
87 |
history[-1][1] = ""
|
88 |
for token in response.response_gen:
|
89 |
history[-1][1] += token
|
90 |
yield history
|
91 |
|
92 |
+
msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(bot, [chatbot, chat_engine_var], chatbot)
|
93 |
+
clear.click(lambda: chat_engine_var.reset(), None, chatbot, queue=False)
|
94 |
|
95 |
if __name__ == "__main__":
|
96 |
demo.queue()
|