phoebeklett committed
Commit 3cc4f4a
1 Parent(s): f73b01f

Update config.json

Files changed (1): config.json (+5 -5)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "mosaicml/mpt-7b",
+  "_name_or_path": "normalcomputing/extended-mind-mpt-7b",
   "architectures": [
     "ExtendedMPTForCausalLM"
   ],
@@ -21,8 +21,8 @@
     "use_active_externalism": true
   },
   "auto_map": {
-    "AutoConfig": "configuration.ExtendedMPTConfig",
-    "AutoModelForCausalLM": "modeling_mpt.ExtendedMPTForCausalLM"
+    "AutoConfig": "configuration.ExtendedMptConfig",
+    "AutoModelForCausalLM": "modeling.ExtendedMptForCausalLM"
   },
   "d_model": 4096,
   "emb_pdrop": 0,
@@ -52,7 +52,7 @@
   "resid_pdrop": 0,
   "tokenizer_name": "EleutherAI/gpt-neox-20b",
   "torch_dtype": "float32",
-  "transformers_version": "4.33.3",
+  "transformers_version": "4.33.0",
   "use_active_externalism_by_layer": [
     true,
     true,
@@ -87,7 +87,7 @@
     true,
     true
   ],
-  "use_cache": false,
+  "use_cache": true,
   "verbose": 0,
   "vocab_size": 50432
 }
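After this change, `auto_map` resolves the `Auto*` classes to `configuration.ExtendedMptConfig` and `modeling.ExtendedMptForCausalLM`, the remote-code modules bundled with the repo, and `use_cache` is enabled. A minimal loading sketch under those assumptions (the prompt and `max_new_tokens` are illustrative; the repo id and tokenizer name are taken from the config above):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# trust_remote_code=True lets transformers follow auto_map and import
# ExtendedMptConfig / ExtendedMptForCausalLM from the repo's own files.
model = AutoModelForCausalLM.from_pretrained(
    "normalcomputing/extended-mind-mpt-7b",
    trust_remote_code=True,
)

# The config's tokenizer_name points at EleutherAI/gpt-neox-20b.
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")

inputs = tokenizer("Extended-mind transformers can", return_tensors="pt")
# With use_cache now true, generate() reuses the KV cache across decoding steps.
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```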