from transformers import pipeline
import gradio as gr
from huggingface_hub import login
import os

# Authenticate with the Hugging Face Hub using the HF_Token environment variable.
login(os.environ["HF_Token"])

# Token-classification pipeline fine-tuned on the ai4privacy dataset for PII detection.
masker = pipeline(
    task="token-classification",
    model="Isotonic/distilbert_finetuned_ai4privacy",
    device="cpu",
)


def ai4p_gradio(text):
    # Run the PII detector and return the dict format expected by gr.HighlightedText.
    entities = masker(text)
    return {"text": text, "entities": entities}


demo = gr.Interface(
    fn=ai4p_gradio,
    inputs=gr.Textbox(lines=5, label="Input"),
    outputs=gr.HighlightedText(label="Output"),
)

demo.launch()
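
# Illustrative sketch only: the rough shape of what the token-classification
# pipeline returns and what gr.HighlightedText consumes. The example sentence and
# the label name ("B-FIRSTNAME") are assumptions; the actual labels come from the
# Isotonic/distilbert_finetuned_ai4privacy model's configuration.
#
# masker("My name is John and I live in Berlin.")
# -> [{"entity": "B-FIRSTNAME", "score": 0.99, "index": 4, "word": "john",
#      "start": 11, "end": 15}, ...]
#
# Returning {"text": ..., "entities": [...]} lets HighlightedText use the
# "start"/"end" offsets to highlight each detected span in the original input.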