"""Gradio demo: classify text as machine-generated vs. human-written
using a fine-tuned Longformer sequence classifier."""

import gradio as gr
import torch
import os
import transformers
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
)
# NOTE(review): the local `detect` defined below shadows this imported `detect`;
# `preprocess` is unused here — confirm whether the deployment helpers are needed.
from deployment import preprocess, detect

device = 'cpu'
model_dir = "nealcly/detection-longformer"
# load the Longformer detector
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForSequenceClassification.from_pretrained(model_dir).to(device)


def detect(input_text, device=device, th=-3.08583984375):
    """Classify *input_text* as machine-generated or human-written.

    Args:
        input_text: raw text to classify.
        device: torch device for the input tensor. Defaults to the
            module-level ``device`` so Gradio can invoke this with a
            single positional argument (backward-compatible change:
            the parameter was previously required).
        th: decision threshold applied to the negated class-0 logit;
            scores below it are labelled machine-generated.

    Returns:
        str: ``"machine-generated"`` or ``"human-written"``.
    """
    label2decisions = {
        0: "machine-generated",
        1: "human-written",
    }
    tokenize_input = tokenizer(input_text)
    tensor_input = torch.tensor([tokenize_input["input_ids"]]).to(device)
    outputs = model(tensor_input)
    # Negated logit of class 0: lower values look more machine-like.
    is_machine = -outputs.logits[0][0].item()
    decision = 0 if is_machine < th else 1
    return label2decisions[decision]


# BUG FIX: the original passed fn=greet, a name that is never defined
# (NameError at startup). The intended handler is `detect`, which is now
# callable with the single text argument Gradio supplies.
iface = gr.Interface(fn=detect, inputs="text", outputs="text")
iface.launch()