toncho11 committed on
Commit
9fcdd11
1 Parent(s): a0e5677

Demo code for GPT-NEO 2.7B

Files changed (1)
  1. demo.py +39 -0
demo.py ADDED
@@ -0,0 +1,39 @@
+ # -*- coding: utf-8 -*-
+ """
+ Created on Wed Mar 29 16:01:44 2023
+
+ Source: https://huggingface.co/EleutherAI/gpt-neo-2.7B
+
+ GPT-Neo 2.7B is a transformer model designed using EleutherAI's replication of the
+ GPT-3 architecture. The model is available on Hugging Face. Although it can be used
+ for other tasks, the model is best at what it was pretrained for, which is
+ generating text from a prompt.
+
+ The task in this script is text generation.
+
+ There are also 125M and 1.3B versions of GPT-Neo (EleutherAI's 6B model is GPT-J).
+
+ """
+
+ import torch
+ from transformers import AutoTokenizer, GPTNeoForCausalLM
+
+ # Load the tokenizer and the 2.7B-parameter model (weights are downloaded on first use).
+ tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")
+ model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-2.7B")
+
+ # Tokenize the prompt.
+ inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
+
+ # Optional forward pass: passing labels also returns the language-modeling loss;
+ # it is not required for the generation step below.
+ outputs = model(**inputs, labels=inputs["input_ids"])
+
+ input_ids = inputs["input_ids"]
+
+ # Sample a continuation of up to 100 tokens (prompt included).
+ gen_tokens = model.generate(
+     input_ids,
+     do_sample=True,
+     temperature=0.9,
+     max_length=100,
+ )
+
+ # Decode the sampled token ids back to a string.
+ gen_text = tokenizer.batch_decode(gen_tokens)[0]
+
+ print("=========================================================")
+ print(gen_text)
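
For reference, the same generation can also be run through the transformers pipeline API. This is not part of the commit; it is a minimal sketch assuming the standard "text-generation" pipeline and the same sampling settings as demo.py:

# Sketch (not in the commit): equivalent usage via the high-level pipeline API.
from transformers import pipeline

generator = pipeline("text-generation", model="EleutherAI/gpt-neo-2.7B")
result = generator("Hello, my dog is cute", do_sample=True, temperature=0.9, max_length=100)
print(result[0]["generated_text"])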