moriire committed on
Commit
1a4faaf
1 Parent(s): 97b1337

Update app/llm.py

Browse files
Files changed (1) hide show
  1. app/llm.py +17 -1
app/llm.py CHANGED
@@ -15,7 +15,14 @@ from langchain_community.document_loaders import WebBaseLoader
15
  from langchain_text_splitters import RecursiveCharacterTextSplitter
16
  from langchain_chroma import Chroma
17
  from langchain_community.embeddings import GPT4AllEmbeddings
 
 
18
 
 
 
 
 
 
19
 
20
  class RagChat:
21
  def agent(self):
@@ -28,10 +35,19 @@ class RagChat:
28
  def download_embedding(self):
29
  vectorstore = Chroma.from_documents(documents=self.agent, embedding=GPT4AllEmbeddings())
30
  return vectorstore
 
 
 
 
 
 
 
 
 
31
 
32
  def search(self, question):
33
  docs = vectorstore.similarity_search(question)
34
- return len(docs)
35
 
36
 
37
 
 
15
  from langchain_text_splitters import RecursiveCharacterTextSplitter
16
  from langchain_chroma import Chroma
17
  from langchain_community.embeddings import GPT4AllEmbeddings
18
+ from langchain_core.output_parsers import StrOutputParser
19
+ from langchain_core.prompts import PromptTemplate
20
 
21
+ from langchain import hub
22
+ from langchain_core.runnables import RunnablePassthrough, RunnablePick
23
# Pull the community RAG prompt from the LangChain hub once at import time;
# it is reused by every chain built in RagChat.prompt_template.
rag_prompt = hub.pull("rlm/rag-prompt")
# NOTE(review): removed the bare `rag_prompt.messages` expression — it was a
# no-op statement (notebook-style inspection residue) with no runtime effect.
 
27
  class RagChat:
28
  def agent(self):
 
35
  def download_embedding(self):
36
  vectorstore = Chroma.from_documents(documents=self.agent, embedding=GPT4AllEmbeddings())
37
  return vectorstore
38
+
39
+ def prompt_template(self, question):
40
+ chain = (
41
+ RunnablePassthrough.assign(context=RunnablePick("context") | format_docs)
42
+ | rag_prompt
43
+ | llm
44
+ | StrOutputParser()
45
+ )
46
+ chain.invoke({"context": self.search, "question": question})
47
 
48
  def search(self, question):
49
  docs = vectorstore.similarity_search(question)
50
+ return docs
51
 
52
 
53