Update app.py
app.py
CHANGED
@@ -1,63 +1,59 @@
-import gradio as gr
(removed lines 2-61 of the previous 63-line app.py are not recoverable from this view)
-if __name__ == "__main__":
-    demo.launch()
+import os
+import gradio as gr
+from langchain.prompts import ChatPromptTemplate
+from langchain.chains import RetrievalQA, ConversationalRetrievalChain
+from langchain.memory import ConversationBufferMemory
+from langchain_groq import ChatGroq  # ChatGroq client for the Groq API, used below
+
+
+def rag_retriever(message, history, system_prompt, num_sources=4, temperature=0):
+    # Groq-hosted Llama 3 70B; the API key is read from the GROQ_API_KEY environment variable.
+    chat = ChatGroq(temperature=temperature, model_name="llama3-70b-8192", api_key=os.getenv("GROQ_API_KEY"))
+
+    # The user-supplied system prompt is extended with the retrieved context.
+    prompt_template = ChatPromptTemplate.from_messages([
+        ("system", system_prompt + """
+
+Use the following pieces of context to answer the user's question.
+----------------
+{context}"""),
+        ("human", "{question}")
+    ])
+
+    # Memory is created per call, so multi-turn history is not carried between
+    # requests; the Gradio `history` argument is unused.
+    memory = ConversationBufferMemory(memory_key="chat_history", output_key="answer", return_messages=True)
+
+    # NOTE: `store` must be a vector store built over the AI Act; it is not
+    # defined anywhere in this file (see the sketch after the diff).
+    retriever = store.as_retriever(search_type="similarity", search_kwargs={'k': num_sources})
+
+    chain = ConversationalRetrievalChain.from_llm(llm=chat,
+                                                  retriever=retriever,
+                                                  return_source_documents=True,
+                                                  memory=memory,
+                                                  combine_docs_chain_kwargs={"prompt": prompt_template})
+
+    output = chain.invoke({"question": message})
+
+    # Format each retrieved chunk as "Страница: N" ("Page: N") followed by the chunk text.
+    sources = ""
+    for doc in output['source_documents']:
+        source_content = doc.page_content.strip().replace("\r\n", " ").replace("\r", " ").replace("\n", " ")
+        sources += f'<span style="color:green">Страница: {doc.metadata["page"]+1}</span><br><span style="color:gray">{source_content}</span><br><br>'
+
+    # "Отговор" = "Answer", "Източници" = "Sources".
+    response = f"""<h5>Отговор:</h5>{output['answer']}<br><h5>Източници:</h5>{sources}"""
+    return response
+
+
+# Gradio chat UI; the labels are Bulgarian (title: "Chat with the AI Act document").
+rag = gr.ChatInterface(rag_retriever,
+                       examples=[["Каква е целта на настоящия регламент", "You are an expert assistant in Bulgarian regulations. Provide precise and clear answers. Provide a detailed and comprehensive answer, incorporating as much relevant information as possible. Always respond in Bulgarian, regardless of the language used in the question."],
+                                 ["Какво са Системите с ИИ", "You are an expert assistant in Bulgarian regulations. Provide precise and clear answers. Always respond in Bulgarian, regardless of the language used in the question."],
+                                 ["Какво е равнище на технологично развитие", "You are an expert assistant in Bulgarian regulations. Provide precise and clear answers. Always respond in Bulgarian, regardless of the language used in the question."]],
+                       title="Чатене с документа AI Act",
+                       description="Питайте каквото пожелаете, но пишете на български.",
+                       chatbot=gr.Chatbot(placeholder="<strong>Вашият личен AI Act помощник</strong><br>Питайте каквото пожелаете, но пишете на български."),
+                       textbox=gr.Textbox(placeholder="Задайте своя въпрос...", container=False, scale=7),
+                       retry_btn="Отново",
+                       undo_btn="Назад",
+                       clear_btn="Изчистете",
+                       submit_btn="Изпрати",
+                       additional_inputs=[gr.components.Textbox("You are an expert assistant in Bulgarian regulations. Provide precise and clear answers. Always respond in Bulgarian, regardless of the language used in the question.", label="System Prompt"),
+                                          gr.components.Slider(minimum=1, maximum=10, value=4, step=1, label="Брой препратки"),
+                                          gr.components.Slider(minimum=0, maximum=2, value=0, label="Креативност на модела", info="Ако е много високо моделът си измисля, но може да напише интересни неща.")],
+                       additional_inputs_accordion=gr.Accordion("Допълнителни настройки", open=False),
+                       )
+
+rag.launch()
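The new `rag_retriever` queries `store.as_retriever(...)`, but `store` is never defined in app.py, so the Space must build or load a vector store over the AI Act somewhere else. Below is a minimal sketch of one way it could be constructed; the file name `ai_act.pdf`, the chunking parameters, the `intfloat/multilingual-e5-base` embedding model, and the FAISS index are all illustrative assumptions, not details taken from this Space.

# Sketch only: one plausible way to build the `store` used by rag_retriever.
# Every concrete choice below (PDF path, splitter settings, embedding model) is assumed.
from langchain_community.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

docs = PyPDFLoader("ai_act.pdf").load()  # hypothetical local copy of the AI Act; keeps "page" metadata
chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200).split_documents(docs)
embeddings = HuggingFaceEmbeddings(model_name="intfloat/multilingual-e5-base")  # multilingual, so Bulgarian queries work
store = FAISS.from_documents(chunks, embeddings)

Whatever loader is actually used, it has to populate `doc.metadata["page"]`, since `rag_retriever` renders "Страница: {page+1}" for each source, and `GROQ_API_KEY` has to be present in the environment (for a Space, typically as a secret) for `ChatGroq` to authenticate.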