Kushwanth Chowday Kandala committed · Commit 8ce3d9b
1 Parent(s): 8eb3e51
update app.py
app.py CHANGED
@@ -23,10 +23,34 @@ import torch
 device = 'cuda' if torch.cuda.is_available() else 'cpu'

 if device != 'cuda':
-    st.
+    st.text(f"you are using {device}. This is much slower than using "
            "a CUDA-enabled GPU. If on colab you can chnage this by "
            "clicking Runtime > change runtime type > GPU.")

 model = SentenceTransformer("all-MiniLM-L6-v2", device=device)
+st.divider()

 # Creating a Index(Pinecone Vector Database)
+def chat_actions():
+    st.session_state["chat_history"].append(
+        {"role": "user", "content": st.session_state["chat_input"]},
+    )
+
+    response = model.generate_content(st.session_state["chat_input"])
+    st.session_state["chat_history"].append(
+        {
+            "role": "assistant",
+            "content": response.text,
+        }, # This can be replaced with your chat response logic
+    )
+
+
+if "chat_history" not in st.session_state:
+    st.session_state["chat_history"] = []
+
+
+st.chat_input("Enter your message", on_submit=chat_actions, key="chat_input")
+
+for i in st.session_state["chat_history"]:
+    with st.chat_message(name=i["role"]):
+        st.write(i["content"])