Sbnos committed on
Commit 4cc5acb
1 Parent(s): 5817080

mainfile cgpt 3

Files changed (1): app.py (+23 -14)
app.py CHANGED
@@ -57,7 +57,29 @@ def _combine_documents(docs, document_prompt=PromptTemplate.from_template("{page
     doc_strings = [format_document(doc, document_prompt) for doc in docs]
     return document_separator.join(doc_strings)
 
+# Function to store chat history
+chistory = []
 
+def store_chat_history(role: str, content: str):
+    chistory.append({"role": role, "content": content})
+
+# Define the chain using LCEL
+def create_conversational_qa_chain(retriever, condense_llm, answer_llm):
+    condense_question_chain = RunnableLambda(
+        lambda x: {"chat_history": chistory, "question": x['question']}
+    ) | CONDENSE_QUESTION_PROMPT | condense_llm
+
+    retrieval_chain = RunnableLambda(
+        lambda x: {"standalone_question": x}
+    ) | retriever | _combine_documents
+
+    answer_chain = ANSWER_PROMPT | answer_llm
+
+    return RunnableParallel(
+        condense_question=condense_question_chain,
+        retrieve=retrieval_chain,
+        generate_answer=answer_chain
+    )
 
 # Define the Streamlit app
 def app():
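Note on the new factory: RunnableParallel runs its branches independently on the same input, so as written the retrieve branch never sees the condensed question and generate_answer never sees the retrieved context. A sequential LCEL composition that threads the condensed question into retrieval, and the combined documents into the answer prompt, might look like the sketch below. This is not part of the commit; the name sequential_qa_chain is hypothetical, and it assumes ANSWER_PROMPT expects context and question variables and that chistory, retriever, llmc and llm are built as in app.py.

from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnableLambda

# Condense the follow-up question into a standalone question (string output).
condense = (
    RunnableLambda(lambda x: {"chat_history": chistory, "question": x["question"]})
    | CONDENSE_QUESTION_PROMPT
    | llmc
    | StrOutputParser()
)

# Retrieve with the standalone question, then answer over the combined
# documents; LCEL coerces the dict literal into a RunnableParallel.
# (Note: condense runs once per branch here; cache it if that matters.)
sequential_qa_chain = (
    {
        "context": condense | retriever | _combine_documents,
        "question": condense,
    }
    | ANSWER_PROMPT
    | llm
)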
@@ -67,8 +89,6 @@ def app():
         'Which retriever would you like to use?',
         ('General Medicine', 'RespiratoryFishman', 'RespiratoryMurray', 'MedMRCP2', 'OldMedicine')
     )
-
-
 
     # Define retrievers based on option
     persist_directory = {
@@ -90,18 +110,6 @@ def app():
     vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding_function, collection_name=collection_name)
     retriever = vectordb.as_retriever(search_kwargs={"k": 5})
 
-
-    # Define the chain using LCEL
-    condense_question_chain = RunnableLambda(lambda x: {"chat_history": chistory, "question": x}) | CONDENSE_QUESTION_PROMPT | llmc
-    retriever_chain = RunnableLambda(lambda x: {"standalone_question": x}) | retriever | _combine_documents
-    answer_chain = ANSWER_PROMPT | llm
-
-    conversational_qa_chain = RunnableParallel(
-        condense_question=condense_question_chain,
-        retrieve=retriever_chain,
-        generate_answer=answer_chain
-    )
-
     if "messages" not in st.session_state:
         st.session_state.messages = [{"role": "assistant", "content": "How may I help you?"}]
 
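The inline chain deleted in this hunk was rebuilt on every Streamlit rerun, and its condense step passed the whole input mapping as the question ("question": x). The create_conversational_qa_chain factory from the first hunk reads x['question'] instead, and the final hunk below constructs the chain only when an assistant reply is about to be generated.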
@@ -119,6 +127,7 @@ def app():
             st.write(prompts2)
 
     if st.session_state.messages[-1]["role"] != "assistant":
+        conversational_qa_chain = create_conversational_qa_chain(retriever, llmc, llm)
         with st.chat_message("assistant"):
             with st.spinner("Thinking..."):
                 response = conversational_qa_chain.invoke(
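One caveat on this call site: the retrieve branch pipes the dict {"standalone_question": x} directly into the retriever, and a Chroma retriever expects a plain string query, so the parallel chain may raise at that step. The sequential sketch after the first hunk sidesteps this by passing the condensed question through as a string; its hypothetical call site, assuming prompts2 holds the latest user message as in app(), would be:

# Build once per reply, mirroring the pattern in the final hunk above.
result = sequential_qa_chain.invoke({"question": prompts2})

# Record both turns with the new helper (assumes a chat model response
# object exposing a .content attribute).
store_chat_history("user", prompts2)
store_chat_history("assistant", result.content)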