abnerguzman committed on
Commit
f8fe9c9
1 Parent(s): ec353c3

Create app_old.py

Browse files
Files changed (1) hide show
  1. app_old.py +74 -0
app_old.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os

import octoai
from pinecone import Pinecone, ServerlessSpec

# Service clients are built once at import time; credentials are read from
# the environment (OCTOML_KEY for OctoAI, PINECONE_API_KEY for Pinecone).
octoai_client = octoai.client.Client(token=os.getenv('OCTOML_KEY'))
pc = Pinecone(api_key=os.getenv('PINECONE_API_KEY'))
8
+
9
+
10
+ from llama_index.vector_stores.pinecone import PineconeVectorStore
11
+ from llama_index.core import VectorStoreIndex
12
+ from llama_index.core.response.pprint_utils import pprint_source_node
13
+ from llama_index.llms.octoai import OctoAI
14
+
15
+ octoai = OctoAI(
16
+ token=os.getenv('OCTOML_KEY'),
17
+ model="meta-llama-3-70b-instruct",
18
+ max_tokens=512,
19
+ temperature=0.1,
20
+ )
21
+
22
+
23
+ from llama_index.core.memory import ChatMemoryBuffer
24
+
25
+ import gradio as gr
26
+ from io import StringIO
27
+
def get_credit_dist(history):
    """Return the credit distribution for *history* as display text.

    The computation is temporarily disabled: a fixed placeholder message
    (newline-terminated, as `print` would emit it) is returned regardless
    of the chat history.
    """
    return "Disabled momentarily...\n"
33
+
34
+
35
+ with gr.Blocks() as demo:
36
+ chatbot = gr.Chatbot(height=800)
37
+ msg = gr.Textbox()
38
+ clear = gr.Button("Clear")
39
+
40
+ credit_box = gr.Textbox(label="Credit distribution", lines=20, autoscroll=False)
41
+ credit_btn = gr.Button("Credit response")
42
+
43
+ def get_chat_engine():
44
+ vector_store = PineconeVectorStore(pinecone_index=pc.Index('prorata-postman-ds-256'))
45
+ vindex = VectorStoreIndex.from_vector_store(vector_store)
46
+
47
+ memory = ChatMemoryBuffer.from_defaults(token_limit=5000)
48
+ return vindex.as_chat_engine(
49
+ chat_mode="context",
50
+ llm=octoai,
51
+ memory=memory,
52
+ system_prompt="You are a chatbot, able to have normal interactions, as well as talk about news events provided in the context of the conversation.",
53
+ )
54
+
55
+ chat_engine_var = gr.State(get_chat_engine)
56
+
57
+ def user(user_message, history):
58
+ return "", history + [[user_message, None]]
59
+
60
+ def bot(history, chat_engine):
61
+ response = chat_engine.stream_chat(history[-1][0])
62
+ history[-1][1] = ""
63
+ for token in response.response_gen:
64
+ history[-1][1] += token
65
+ yield history
66
+
67
+ msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(bot, [chatbot, chat_engine_var], chatbot)
68
+ clear.click(lambda x: x.reset(), chat_engine_var, chatbot, queue=False)
69
+
70
+ credit_btn.click(get_credit_dist, chatbot, credit_box)
71
+
72
+ if __name__ == "__main__":
73
+ demo.queue()
74
+ demo.launch()