|
import gradio as gr |
|
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline |
|
import torch |
|
|
|
# Load the fine-tuned NLLB-200 (distilled, 600M) Dzongkha->English model and its
# tokenizer once at import time. NOTE: from_pretrained downloads weights on first
# run, so module import requires network access (or a warm HF cache).
model = AutoModelForSeq2SeqLM.from_pretrained("KarmaCST/nllb-200-distilled-600M-dz-to-en")

tokenizer = AutoTokenizer.from_pretrained("KarmaCST/nllb-200-distilled-600M-dz-to-en")




# NLLB FLORES-200 language codes: source is Dzongkha (Tibetan script),
# target is English (Latin script).
src_lang = "dzo_Tibt"

tgt_lang = "eng_Latn"
|
|
|
|
|
# Cached translation pipeline; built lazily on first call so we don't pay the
# (expensive) pipeline construction cost on every request.
_translation_pipeline = None


def translate(text):
    """Translate a Dzongkha sentence into English.

    Parameters
    ----------
    text : str
        Input sentence in Dzongkha (Tibetan script).

    Returns
    -------
    str
        The English translation produced by the NLLB-200 model.
    """
    global _translation_pipeline
    if _translation_pipeline is None:
        # BUG FIX: the original rebuilt the pipeline on every call, re-wrapping
        # the model and tokenizer each time. Build it once and reuse it.
        _translation_pipeline = pipeline("translation",
                                         model=model,
                                         tokenizer=tokenizer,
                                         src_lang=src_lang,
                                         tgt_lang=tgt_lang)

    result = _translation_pipeline(text)
    return result[0]['translation_text']
|
|
|
|
|
# Build the Gradio UI: one Dzongkha text input, one plain-text output,
# wired to the translate() function, then start the local web server.
demo = gr.Interface(
    fn=translate,
    inputs=[
        gr.components.Textbox(
            label="Input Sentence",
            placeholder=" Enter Dzongkha sentence here ...",
        ),
    ],
    outputs=["text"],
)
demo.launch()