# app.py — Gradio demo for hosseinhimself/tara-roberta-base-fa-qa
# (Persian extractive question answering; Hugging Face Spaces entry point)
from transformers import AutoTokenizer, AutoModelForQuestionAnswering, pipeline
import gradio as gr
# Hugging Face Hub repo id of the fine-tuned Persian QA model.
# NOTE: the original code bound this string to the name `model` and then
# rebound `model` to the loaded model object — a constant avoids that
# confusing double use of one name.
MODEL_NAME = "hosseinhimself/tara-roberta-base-fa-qa"

# Load the tokenizer and model from the Hub (downloads on first run).
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForQuestionAnswering.from_pretrained(MODEL_NAME)

# Create an extractive question-answering pipeline around them.
qa_pipeline = pipeline("question-answering", model=model, tokenizer=tokenizer)
def answer_question(context, question):
    """Run the QA pipeline on *context* and return the extracted answer text.

    Parameters
    ----------
    context : str
        The passage to search for an answer.
    question : str
        The question to answer from the passage.
    """
    result = qa_pipeline(question=question, context=context)
    return result["answer"]
# Define the Gradio interface.
# FIX: `gr.inputs.Textbox` is the pre-3.0 Gradio API and was removed in
# Gradio 3.x/4.x — components now live directly on the `gr` namespace.
interface = gr.Interface(
    fn=answer_question,
    inputs=[
        gr.Textbox(lines=10, label="Context", placeholder="Enter context here..."),
        gr.Textbox(lines=1, label="Question", placeholder="Enter question here..."),
    ],
    outputs="text",
    title="Tara Question Answering Model",
    description="This model answers questions based on the provided context.",
)

# Launch the web app (blocks; serves the UI).
interface.launch()