KushwanthK committed on
Commit
a695e9a
β€’
1 Parent(s): 0f3245e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -4
app.py CHANGED
@@ -53,6 +53,10 @@ PINECONE_ENVIRONMENT=os.getenv("PINECONE_ENVIRONMENT")
53
 
54
  # pc = PineconeGRPC( api_key=os.environ.get("PINECONE_API_KEY") ) # Now do stuff if 'my_index' not in pc.list_indexes().names(): pc.create_index( name='my_index', dimension=1536, metric='euclidean', spec=ServerlessSpec( cloud='aws', region='us-west-2' ) )
55
 
 
 
 
 
56
  def connect_pinecone():
57
  pinecone = PineconeGRPC(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
58
  # st.code(pinecone)
@@ -81,7 +85,7 @@ def get_pinecone_semantic_index(pinecone):
81
 
82
 
83
 
84
- def prompt_engineer(text, query):
85
  summary_prompt_template = """
86
  write a concise summary of the following text delimited by triple backquotes.
87
  return your response in bullet points which convers the key points of the text.
@@ -121,7 +125,7 @@ def prompt_engineer(text, query):
121
  """
122
 
123
  prompt_template = ChatPromptTemplate.from_template(GENERATION_PROMPT_TEMPLATE)
124
- prompt = prompt_template.format(context=text, question=query)
125
  response_text = ""
126
  result = ""
127
 
@@ -132,7 +136,8 @@ def prompt_engineer(text, query):
132
  response_text = llm.invoke(prompt)
133
  escaped_query = re.escape(query)
134
  result = re.split(f'Answer the question based on the above context: {escaped_query}\n',response_text)[-1]
135
- st.write(result)
 
136
  except Exception as e:
137
  st.error(f"Error invoke: {e}")
138
 
@@ -173,7 +178,7 @@ def chat_actions():
173
  p = math.pow(1024, 2)
174
  mbsize = round(len(bytesize) / p, 2)
175
  st.write(f"Text length of {len(consolidated_text)} characters with {mbsize}MB size")
176
- summary, response = prompt_engineer(consolidated_text[:1024], query)
177
 
178
  for res in result['matches']:
179
  st.session_state["chat_history"].append(
 
53
 
54
  # pc = PineconeGRPC( api_key=os.environ.get("PINECONE_API_KEY") ) # Now do stuff if 'my_index' not in pc.list_indexes().names(): pc.create_index( name='my_index', dimension=1536, metric='euclidean', spec=ServerlessSpec( cloud='aws', region='us-west-2' ) )
55
 
56
+ # Load environment variables from .env file
57
+ load_dotenv()
58
+ HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
59
+
60
  def connect_pinecone():
61
  pinecone = PineconeGRPC(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
62
  # st.code(pinecone)
 
85
 
86
 
87
 
88
+ def prompt_engineer(text, longtext, query):
89
  summary_prompt_template = """
90
  write a concise summary of the following text delimited by triple backquotes.
91
  return your response in bullet points which convers the key points of the text.
 
125
  """
126
 
127
  prompt_template = ChatPromptTemplate.from_template(GENERATION_PROMPT_TEMPLATE)
128
+ prompt = prompt_template.format(context=longtext, question=query)
129
  response_text = ""
130
  result = ""
131
 
 
136
  response_text = llm.invoke(prompt)
137
  escaped_query = re.escape(query)
138
  result = re.split(f'Answer the question based on the above context: {escaped_query}\n',response_text)[-1]
139
+ st.write("reponse generated see chat window πŸ‘‰πŸ»")
140
+ st.divider()
141
  except Exception as e:
142
  st.error(f"Error invoke: {e}")
143
 
 
178
  p = math.pow(1024, 2)
179
  mbsize = round(len(bytesize) / p, 2)
180
  st.write(f"Text length of {len(consolidated_text)} characters with {mbsize}MB size")
181
+ summary, response = prompt_engineer(consolidated_text[:1024], consolidated_text, query)
182
 
183
  for res in result['matches']:
184
  st.session_state["chat_history"].append(