quiz_gen / app.py
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load the GGUF model and tokenizer.
# Note: the original `model_file`/`model_type` keywords belong to ctransformers; with
# transformers, a GGUF checkpoint is loaded via `gguf_file` (requires transformers >= 4.41
# and the `gguf` package installed).
model = AutoModelForCausalLM.from_pretrained("TheBloke/dolphin-2.2.1-mistral-7B-GGUF", gguf_file="dolphin-2.2.1-mistral-7b.Q4_K_M.gguf")
tokenizer = AutoTokenizer.from_pretrained("TheBloke/dolphin-2.2.1-mistral-7B-GGUF", gguf_file="dolphin-2.2.1-mistral-7b.Q4_K_M.gguf")
def generate_mcqs(paragraph):
    # Split the paragraph into rough sentences and build one MCQ per sentence.
    sentences = paragraph.split('. ')
    mcqs = []
    for sentence in sentences:
        if sentence:
            prompt = f"Generate a multiple-choice question based on the following sentence: {sentence}"
            inputs = tokenizer.encode(prompt, return_tensors="pt")
            # Generate up to 50 new tokens, then drop the prompt tokens from the decoded output.
            outputs = model.generate(inputs, max_new_tokens=50, num_return_sequences=1)
            question = tokenizer.decode(outputs[0][inputs.shape[1]:], skip_special_tokens=True)
            # Generate options (this is a simplified example with placeholder answers)
            options = ["Option A", "Option B", "Option C", "Option D"]
            mcqs.append({
                "mcq": question,
                "options": {
                    "a": options[0],
                    "b": options[1],
                    "c": options[2],
                    "d": options[3]
                },
                "correct": "a"
            })
    return mcqs
# Streamlit UI
st.title("MCQ Generator")
st.write("Enter a paragraph to generate multiple-choice questions.")
paragraph = st.text_area("Paragraph", height=200)
if st.button("Generate MCQs"):
    if paragraph:
        mcqs = generate_mcqs(paragraph)
        for i, mcq in enumerate(mcqs):
            st.write(f"**Question {i+1}:** {mcq['mcq']}")
            st.write(f"a. {mcq['options']['a']}")
            st.write(f"b. {mcq['options']['b']}")
            st.write(f"c. {mcq['options']['c']}")
            st.write(f"d. {mcq['options']['d']}")
            st.write(f"**Correct Answer:** {mcq['correct']}")
    else:
        st.write("Please enter a paragraph.")