petrojm committed on
Commit e443083
1 Parent(s): aa94ed8

changes to app

Files changed (1)
  1. app.py +8 -24
app.py CHANGED
@@ -4,7 +4,6 @@ import yaml
 import gradio as gr
 
 current_dir = os.path.dirname(os.path.abspath(__file__))
-print(current_dir)
 
 from src.document_retrieval import DocumentRetrieval
 from utils.visual.env_utils import env_input_fields, initialize_env_variables, are_credentials_set, save_credentials
@@ -14,25 +13,20 @@ from utils.vectordb.vector_db import VectorDb
 CONFIG_PATH = os.path.join(current_dir,'config.yaml')
 PERSIST_DIRECTORY = os.path.join(current_dir,f"data/my-vector-db") # changed to current_dir
 
-chat_history = gr.State()
-chat_history = []
-
-def handle_userinput(user_question, conversation):
+def handle_userinput(user_question, conversation, history):
     if user_question:
         try:
+            # Generate response
             response = conversation.invoke({"question": user_question})
-            chat_history.append((user_question, response["answer"]))
 
-            #sources = set([f'{sd.metadata["filename"]}' for sd in response["source_documents"]])
-            #sources_text = "\n".join([f"{i+1}. {source}" for i, source in enumerate(sources)])
-            #state.sources_history.append(sources_text)
+            # Append user message and response to chat history
+            history = history + [(user_question, response["answer"])]
 
-            return chat_history, "" #, state.sources_history
+            return history, ""
         except Exception as e:
-            return f"An error occurred: {str(e)}", "" #, state.sources_history
+            return f"An error occurred: {str(e)}", ""
     else:
-        return "An error occurred", ""
-        #return chat_history, "" #, state.sources_history
+        return history, ""
 
 def process_documents(files, document_retrieval, vectorstore, conversation, save_location=None):
     try:
@@ -49,10 +43,6 @@ def process_documents(files, document_retrieval, vectorstore, conversation, save
     except Exception as e:
         return conversation, vectorstore, document_retrieval, f"An error occurred while processing: {str(e)}"
 
-def reset_conversation(chat_history):
-    chat_history = []
-    return chat_history, ""
-
 # Read config file
 with open(CONFIG_PATH, 'r') as yaml_file:
     config = yaml.safe_load(yaml_file)
@@ -91,23 +81,17 @@ with gr.Blocks() as demo:
 
     # Preprocessing events
     process_btn.click(process_documents, inputs=[docs, document_retrieval, vectorstore, conversation], outputs=[conversation, vectorstore, document_retrieval, setup_output], concurrency_limit=10)
-    #process_save_btn.click(process_documents, inputs=[file_upload, save_location], outputs=setup_output)
-    #load_db_btn.click(load_existing_db, inputs=[db_path], outputs=setup_output)
 
     # Step 3: Chat with your data
     gr.Markdown("## 3️⃣ Chat with your document")
     chatbot = gr.Chatbot(label="Chatbot", show_label=True, show_share_button=False, show_copy_button=True, likeable=True)
     msg = gr.Textbox(label="Ask questions about your data", show_label=True, placeholder="Enter your message...")
     clear_btn = gr.Button("Clear chat")
-    #show_sources = gr.Checkbox(label="Show sources", value=True)
     sources_output = gr.Textbox(label="Sources", visible=False)
 
     # Chatbot events
-    #msg.submit(handle_userinput, inputs=[msg], outputs=[chatbot, sources_output])
-    msg.submit(handle_userinput, inputs=[msg, conversation], outputs=[chatbot, msg], queue=False)
+    msg.submit(handle_userinput, inputs=[msg, conversation, chatbot], outputs=[chatbot, msg], queue=False)
     clear_btn.click(lambda: [None, ""], inputs=None, outputs=[chatbot, msg], queue=False)
-    #clear_btn.click(reset_conversation, inputs=[], outputs=[chatbot,msg])
-    #show_sources.change(lambda x: gr.update(visible=x), show_sources, sources_output)
 
 if __name__ == "__main__":
     demo.launch()
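Note on the pattern above: the commit drops the module-level chat_history global and threads the history through the Gradio event instead. Listing the gr.Chatbot component as an extra input makes its current value (a list of (user, assistant) tuples) arrive as the history argument, and returning the extended list as the first output re-renders the conversation. A minimal runnable sketch of that pattern, with a hypothetical respond() handler standing in for conversation.invoke() and the gr.State-held conversation object (defined outside these hunks) omitted:

    import gradio as gr

    def respond(user_question, history):
        # Hypothetical stand-in for conversation.invoke({"question": ...})
        answer = f"You asked: {user_question}"
        history = history or []  # an empty chat may arrive as None or []
        # Append the new (user, assistant) pair instead of mutating a global
        return history + [(user_question, answer)], ""

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot(label="Chatbot")  # tuple-style history, as in app.py
        msg = gr.Textbox(label="Ask questions about your data")
        # Passing chatbot in inputs supplies its current value as history;
        # returning the extended list updates the display, and "" clears the textbox.
        msg.submit(respond, inputs=[msg, chatbot], outputs=[chatbot, msg], queue=False)

    if __name__ == "__main__":
        demo.launch()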
 
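With no global chat_history left to reset, the removed reset_conversation() helper is no longer needed: clearing the chat only has to empty the two components, which the lambda kept in the diff already does (returning None for a Chatbot output renders it empty):

    # Reset both the chat display and the input box in a single event
    clear_btn.click(lambda: [None, ""], inputs=None, outputs=[chatbot, msg], queue=False)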