import gradio as gr
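# UI metadata shown at the top (title, description) and bottom (article) of the demo page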
title = "XLM-RoBERTa"
description = "Gradio Demo for XLM-RoBERTa. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."
article = "<p style='text-align: center'><a href='https://arxiv.org/abs/1911.02116' target='_blank'>Unsupervised Cross-lingual Representation Learning at Scale</a></p>"
examples = [
["Hello I'm a model.","xlm-roberta-base"]
]
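# Load hosted inference demos for both model sizes from the Hugging Face Hub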
io1 = gr.Interface.load("huggingface/xlm-roberta-base")
io2 = gr.Interface.load("huggingface/xlm-roberta-large")
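# Route the input text to whichever model the user selected in the dropdown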
def inference(inputtext, model):
    if model == "xlm-roberta-base":
        outlabel = io1(inputtext)
    else:
        outlabel = io2(inputtext)
    return outlabel
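# Assemble the UI: a context textbox plus a model dropdown as inputs, a Label with the mask-fill predictions as output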
gr.Interface(
    inference,
    [
        gr.inputs.Textbox(label="Context", lines=10),
        gr.inputs.Dropdown(choices=["xlm-roberta-base", "xlm-roberta-large"], type="value", default="xlm-roberta-base", label="model"),
    ],
    [gr.outputs.Label(label="Output")],
    examples=examples,
    article=article,
    title=title,
    description=description,
).launch(enable_queue=True)