import os

import streamlit as st
from langchain.schema import HumanMessage, SystemMessage, AIMessage
from langchain_groq import ChatGroq

# Local module that holds the Groq API key
from constant import Groq_Api_Key

# Streamlit page setup
st.set_page_config(page_title="Conversational Q&A Chatbot")
st.header("Hey, Let's Chat")

# Expose the Groq API key to the ChatGroq client via the environment
os.environ["GROQ_API_KEY"] = Groq_Api_Key

chat = ChatGroq(temperature=0.5, model="llama3-8b-8192")

# Keep the conversation history in session state so it survives Streamlit reruns
if 'flowmessages' not in st.session_state:
    st.session_state['flowmessages'] = [
        SystemMessage(
            content=(
                "You are an AI assistant. For every question, look up the "
                "person's name and context on LinkedIn, answer based on that "
                "LinkedIn information, and include the link under a heading."
            )
        )
    ]

def get_chatmodel_response(question):
    """Append the user question, query the model, and store its reply."""
    st.session_state['flowmessages'].append(HumanMessage(content=question))
    answer = chat.invoke(st.session_state['flowmessages'])
    st.session_state['flowmessages'].append(AIMessage(content=answer.content))
    return answer.content

# Simple question/answer UI
input_text = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")

if submit and input_text:
    response = get_chatmodel_response(input_text)
    st.subheader("The Response is")
    st.write(response)