"""app.ipynb

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1qIFntwH-_zF7GkQbgjKoXMXnQpZ4HVse
"""
|
|
|
import gradio as gr
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
|
|
|
|
# ---------------------------------------------------------------------------
# Base model + tokenizer
# ---------------------------------------------------------------------------
base_model_name = "Preetham04/Preetham04-sentiment-analysis"


@st.cache_resource
def _load_base_model():
    """Download and cache the tokenizer and sequence-classification model.

    ``st.cache_resource`` keeps a single shared copy across Streamlit reruns,
    so the model is not re-downloaded and re-initialised on every user
    interaction (the original code reloaded it on each rerun).

    Returns:
        tuple: ``(tokenizer, model)`` ready for inference.
    """
    tok = AutoTokenizer.from_pretrained(base_model_name)
    mdl = AutoModelForSequenceClassification.from_pretrained(base_model_name)
    return tok, mdl


tokenizer, model = _load_base_model()

# ---------------------------------------------------------------------------
# Adapter (local files shipped next to the app)
# ---------------------------------------------------------------------------
adapter_config_path = "config.json"
adapter_model_path = "model.safetensors"

adapter_name = "custom_adapter"
# NOTE(review): `load_adapter`/`set_active_adapters` with these keyword
# arguments come from the `adapters` (formerly adapter-transformers) package,
# not vanilla `transformers` — confirm that package is installed, otherwise
# these calls raise AttributeError/TypeError.
model.load_adapter(adapter_config_path, model_file=adapter_model_path, load_as=adapter_name)
model.set_active_adapters(adapter_name)

# Inference only: disable dropout and other training-time behaviour.
model.eval()
|
|
|
# ---------------------------------------------------------------------------
# Streamlit page chrome
# ---------------------------------------------------------------------------
st.title("🤖 Chatbot with Adapter-Enhanced Model")
st.write("Interact with your custom adapter-enhanced model. Type a message and get responses!")

# Initialise the per-session chat history exactly once; later reruns of the
# script keep whatever the session has already accumulated.
if "history" not in st.session_state:
    st.session_state["history"] = []
|
|
|
|
|
def welcome_handler(payload=None):
    """Return the greeting text shown when the chat UI loads.

    Args:
        payload: Unused; kept for backward compatibility with the original
            event-handler signature.
    """
    return "Welcome! Type a message and get responses from the chatbot."


def message_handler(message, history=None):
    """Classify *message* with the adapter-enhanced model and describe the result.

    Args:
        message: The user's input text.
        history: Prior chat turns supplied by ``gr.ChatInterface``; unused,
            since each message is classified independently.

    Returns:
        str: A human-readable sentiment prediction.
    """
    inputs = tokenizer(message, return_tensors="pt", truncation=True)
    with torch.no_grad():  # inference only — no gradients needed
        logits = model(**inputs).logits
    predicted_id = int(logits.argmax(dim=-1).item())
    # Map the class index to a readable label when the model config has one.
    label = model.config.id2label.get(predicted_id, str(predicted_id))
    return f"Predicted sentiment: {label}"


# The original code instantiated an undefined `Gradio` class and used
# non-existent `on_event`/`on_message` decorators and `generate_response`;
# `gr.ChatInterface` is the working equivalent: it wires `message_handler`
# into a ready-made chat UI.
chatbot = gr.ChatInterface(
    fn=message_handler,
    title="Chatbot with Adapter-Enhanced Model",
    description=welcome_handler(),
)

if __name__ == "__main__":
    # `launch()` starts the Gradio server (the original `run()` does not exist).
    chatbot.launch()
|
|
|
|