Isotonic committed on
Commit
3a036da
1 Parent(s): bc8ae33

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +10 -14
config.json CHANGED
@@ -9,7 +9,7 @@
9
  "embd_pdrop": 0.1,
10
  "eos_token_id": 50256,
11
  "initializer_range": 0.02,
12
- "layer_norm_epsilon": 1e-05,
13
  "model_type": "gpt2",
14
  "n_ctx": 1024,
15
  "n_embd": 768,
@@ -28,20 +28,16 @@
28
  "summary_use_proj": true,
29
  "task_specific_params": {
30
  "text-generation": {
31
- "do_sample": true,
32
- "max_length": 50
33
- },
34
- "c2gen": {
35
- "prompt_template" : "You are a context generator that generates 'CONTEXT' based on the given 'KEYWORDS'.\n\nKEYWORDS: \n{keywords}\n\nCONTEXT: \n{context}\n\n:END".format(keywords="{keywords}", context="{context}"),
36
  "do_sample": true,
37
  "max_length": 256,
38
  "temperature": 0.7,
39
  "top_k": 32,
40
- "top_p": 0.88,
41
- }
42
- },
43
- "torch_dtype": "float32",
44
- "transformers_version": "4.27.3",
45
- "use_cache": true,
46
- "vocab_size": 50257
47
- }
 
9
  "embd_pdrop": 0.1,
10
  "eos_token_id": 50256,
11
  "initializer_range": 0.02,
12
+ "layer_norm_epsilon": 0.00001,
13
  "model_type": "gpt2",
14
  "n_ctx": 1024,
15
  "n_embd": 768,
 
28
  "summary_use_proj": true,
29
  "task_specific_params": {
30
  "text-generation": {
31
+ "c2gen": "You are a context generator that generates 'CONTEXT' based on the given 'KEYWORDS'.\n\nKEYWORDS: \n[keywords]\n\nCONTEXT: \n[context]\n\n:END:",
 
 
 
 
32
  "do_sample": true,
33
  "max_length": 256,
34
  "temperature": 0.7,
35
  "top_k": 32,
36
+ "top_p": 0.88
37
+ },
38
+ "torch_dtype": "float32",
39
+ "transformers_version": "4.27.3",
40
+ "use_cache": true,
41
+ "vocab_size": 50257
42
+ }
43
+ }