bart_scitldr / README.md
sobamchan's picture
Create README.md
47eaafa
|
raw
history blame
410 Bytes
# Usage example: generate a TLDR summary of a paper abstract with the
# vadis/bart_scitldr seq2seq model (fine-tuned from facebook/bart-large,
# hence the base model's tokenizer is used).
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("facebook/bart-large")
# NOTE: `use_auth_token` is deprecated in recent transformers releases in
# favor of `token=True`; kept here for backward compatibility with older versions.
model = AutoModelForSeq2SeqLM.from_pretrained("vadis/bart_scitldr", use_auth_token=True)

text = "Abstract of a paper."
# Fix: the original snippet called an undefined name `tok`; the tokenizer
# was bound to `tokenizer` above, so use that name consistently.
batch = tokenizer(text, return_tensors="pt")
generated_ids = model.generate(batch["input_ids"])
print(tokenizer.batch_decode(generated_ids, skip_special_tokens=True))