# app.py — Streamlit mental-health chatbot (PEFT-finetuned Llama-3 adapter).
import streamlit as st
from peft import PeftModel, PeftConfig
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# Load the chatbot model with PEFT
@st.cache_resource
def load_chatbot_model():
    """Load the PEFT-adapted causal LM and return a text-generation pipeline.

    Cached with ``@st.cache_resource`` so the (large) model is downloaded and
    loaded only once per Streamlit server process.

    Returns:
        A ``transformers`` ``text-generation`` pipeline wrapping the base
        model with the fine-tuned adapter applied.
    """
    # Hoist the repeated repo ids into named locals so each appears once.
    adapter_id = "langtest/falcon-llama3-finetuned-mental-health-hf-plus-dsm5-new-mistral"
    base_id = "meta-llama/Meta-Llama-3-8B-Instruct"
    # NOTE: the original also called PeftConfig.from_pretrained(adapter_id)
    # and discarded the result — a dead network round-trip; PeftModel loads
    # the adapter config itself, so that call is removed here.
    base_model = AutoModelForCausalLM.from_pretrained(base_id)
    peft_model = PeftModel.from_pretrained(base_model, adapter_id)
    # Tokenizer comes from the same base model the adapter was trained on.
    tokenizer = AutoTokenizer.from_pretrained(base_id)
    # Create a text generation pipeline using the model and tokenizer.
    return pipeline("text-generation", model=peft_model, tokenizer=tokenizer)
# Initialize the chatbot
chatbot = load_chatbot_model()  # module-level singleton; cached by @st.cache_resource, so re-runs reuse it
# Function to generate a response from the chatbot
def generate_response(user_input):
    """Generate a single chatbot reply for *user_input*.

    Fixes vs. the original:
      * ``max_new_tokens=100`` budgets the *reply* alone — ``max_length``
        counts prompt tokens too, so long inputs could leave no room for
        any generated text.
      * ``return_full_text=False`` strips the echoed prompt from the
        pipeline output, so only the model's reply is shown to the user.

    Args:
        user_input: The raw text the user typed.

    Returns:
        The generated reply text (without the prompt prefix).
    """
    response = chatbot(
        user_input,
        max_new_tokens=100,
        num_return_sequences=1,
        return_full_text=False,
    )
    return response[0]['generated_text']
# ---- Streamlit UI --------------------------------------------------------
st.title("Mental Health Chatbot")
st.write("""
This chatbot is designed to provide empathetic responses to mental health issues.
It is not a replacement for professional help, but it aims to offer support.
""")

# Collect the user's message.
message = st.text_input("You: ", placeholder="How are you feeling today?")

# When a message was entered, run the model and show its reply.
if message:
    with st.spinner("The chatbot is thinking..."):
        reply = generate_response(message)
    st.text_area("Chatbot:", value=reply, height=200)

# Static list of support resources shown under the chat.
st.markdown("""
### Mental Health Resources:
- [National Alliance on Mental Illness (NAMI)](https://www.nami.org/Home)
- [Mental Health America](https://www.mhanational.org/)
- [Crisis Text Line](https://www.crisistextline.org/)
""")