Alignment-Lab-AI committed on
Commit c81ef53
1 Parent(s): 6a4431d

Update config.json

Files changed (1)
  1. config.json +6 -9
config.json CHANGED
@@ -1,7 +1,5 @@
 {
- "architectures": [
- "Qwen2ForCausalLM"
- ],
+ "architectures": ["Qwen2ForCausalLM"],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
@@ -17,11 +15,11 @@
  "num_key_value_heads": 8,
  "rms_norm_eps": 1e-06,
  "rope_theta": 1000000.0,
- "rope_scaling": {
+ "rope_scaling": {
+ "type": "yarn",
  "factor": 4.0,
- "original_max_position_embeddings": 32768,
- "type": "yarn"
- }
+ "original_max_position_embeddings": 32768
+ },
  "sliding_window": 131072,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
@@ -29,5 +27,4 @@
  "use_cache": true,
  "use_sliding_window": false,
  "vocab_size": 152064
-
- }
+ }
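
The net effect is small but meaningful: "architectures" is collapsed onto a single line, the keys inside "rope_scaling" are reordered with "type" first, and the block's closing brace now carries the comma the previous version appears to have been missing, so the file parses as valid JSON again. With a YaRN factor of 4.0 applied to original_max_position_embeddings of 32768, the scaled context works out to 32768 × 4 = 131072 tokens, which lines up with the sliding_window value. Below is a minimal sketch of a local sanity check for the updated file; the path and the assertions are illustrative, not part of the commit.

import json

# Minimal sanity check for the updated config.json.
# "config.json" is assumed to be the file from a checkout of this repository.
with open("config.json") as f:
    cfg = json.load(f)  # the pre-commit file, with its missing comma, would fail here

# Fields touched by this commit.
assert cfg["architectures"] == ["Qwen2ForCausalLM"]
assert cfg["rope_scaling"] == {
    "type": "yarn",
    "factor": 4.0,
    "original_max_position_embeddings": 32768,
}
print("config.json parses and rope_scaling is configured for YaRN x4")

Loading the repository through transformers.AutoConfig.from_pretrained would surface the same fields, but the plain json.load check above is enough to confirm the syntax fix this commit makes.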