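"""Gradio chat demo: a LlamaIndex context chat engine over a Pinecone vector
index, answering with an OctoAI-hosted Llama 3 model and streaming replies."""
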
import os
from io import StringIO

import gradio as gr
import octoai
from llama_index.core import VectorStoreIndex
from llama_index.core.memory import ChatMemoryBuffer
from llama_index.llms.octoai import OctoAI
from llama_index.vector_stores.pinecone import PineconeVectorStore
from pinecone import Pinecone

# Clients configured from environment variables.
octoai_client = octoai.client.Client(token=os.getenv('OCTOML_KEY'))  # direct SDK client (unused by the chat flow below)
pc = Pinecone(api_key=os.getenv('PINECONE_API_KEY'))

# Chat LLM; named octoai_llm so it does not shadow the `octoai` module imported above.
octoai_llm = OctoAI(
    token=os.getenv('OCTOML_KEY'),
    model="meta-llama-3-70b-instruct",
    max_tokens=512,
    temperature=0.1,
)

def get_credit_dist(history):
    """Placeholder for the credit-distribution report (temporarily disabled)."""
    _out = StringIO()
    print("Disabled momentarily...", file=_out)
    return _out.getvalue()


with gr.Blocks() as demo:
    # Chat UI: transcript, input box, and a clear button.
    chatbot = gr.Chatbot(height=800)
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    # Panel for the credit-distribution report produced by get_credit_dist.
    credit_box = gr.Textbox(label="Credit distribution", lines=20, autoscroll=False)
    credit_btn = gr.Button("Credit response")

    def get_chat_engine():
        """Build a context chat engine over the existing Pinecone index."""
        vector_store = PineconeVectorStore(pinecone_index=pc.Index('prorata-postman-ds-256'))
        vindex = VectorStoreIndex.from_vector_store(vector_store)

        # Rolling buffer so the conversation history stays within the model context.
        memory = ChatMemoryBuffer.from_defaults(token_limit=5000)
        return vindex.as_chat_engine(
            chat_mode="context",
            llm=octoai_llm,
            memory=memory,
            system_prompt=(
                "You are a chatbot, able to have normal interactions, as well as "
                "talk about news events provided in the context of the conversation."
            ),
        )

    # Per-session chat engine state, initialized from the factory above.
    chat_engine_var = gr.State(get_chat_engine)

    def user(user_message, history):
        # Append the user turn to the transcript and clear the input box.
        return "", history + [[user_message, None]]

    def bot(history, chat_engine):
        # Stream the assistant reply token by token into the last history entry.
        response = chat_engine.stream_chat(history[-1][0])
        history[-1][1] = ""
        for token in response.response_gen:
            history[-1][1] += token
            yield history

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, [chatbot, chat_engine_var], chatbot
    )
    # reset() clears the engine's memory and returns None, which also empties the chatbot.
    clear.click(lambda engine: engine.reset(), chat_engine_var, chatbot, queue=False)

    credit_btn.click(get_credit_dist, chatbot, credit_box)

if __name__ == "__main__":
    demo.queue()  # queuing is required for the streaming (generator) bot handler
    demo.launch()