|
import os |
|
import streamlit as st |
|
|
|
|
|
from dotenv import load_dotenv |
|
from langchain.llms import HuggingFaceEndpoint |
|
|
|
# Load variables from a local .env file (expects HF_TOKEN=<your token>).
load_dotenv()

# Fail fast with a clear message if the token is missing — otherwise the
# original `os.environ[...] = None` assignment dies with an opaque
# "TypeError: str expected, not NoneType".
_hf_token = os.getenv("HF_TOKEN")
if not _hf_token:
    raise RuntimeError(
        "HF_TOKEN environment variable is not set. "
        "Add HF_TOKEN=<your HuggingFace token> to your .env file."
    )

# LangChain's HuggingFaceEndpoint reads this variable for authentication.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = _hf_token

# Kept for backward compatibility with any code referencing this name.
huggingface_token = _hf_token
|
|
|
|
|
def load_answer(question):
    """Send *question* to the Mistral-7B-Instruct endpoint and return its reply.

    Args:
        question: The user's prompt. Falsy values ("" or None) are not sent.

    Returns:
        The model's text response, or None when *question* is empty.
        (The original version raised UnboundLocalError on empty input
        because `answer` was only assigned inside the `if` branch.)
    """
    if not question:
        return None

    # NOTE(review): a fresh endpoint client is built per call; acceptable for
    # a demo, but could be cached at module level to avoid re-construction.
    llm = HuggingFaceEndpoint(repo_id="mistralai/Mistral-7B-Instruct-v0.2")
    return llm.invoke(question)
|
|
|
|
|
|
|
# Configure the Streamlit page; the same title is reused for the header.
_APP_TITLE = "LangChain Demo - Mistral"
st.set_page_config(page_title=_APP_TITLE, page_icon=":robot:")
st.header(_APP_TITLE)
|
|
|
|
|
def get_text():
    """Render the chat input box and return the text the user typed."""
    return st.text_input("You: ", key="input")
|
|
|
|
|
# Read the user's question and render the trigger button.
user_input = get_text()
submit = st.button('Generate')

# Only call the remote LLM after the user clicks Generate. The original
# code ran load_answer() unconditionally on every Streamlit rerun, hitting
# the HuggingFace endpoint even when the button was never pressed.
if submit:
    response = load_answer(user_input)

    st.subheader("Answer:")
    st.write(response)
|
|
|
|