TroyDoesAI committed on
Commit
b6ee697
1 Parent(s): 325b93e

Really interesting personality that, strangely, seems to scale as the model gets larger

config.json ADDED
@@ -0,0 +1,27 @@
+ {
+   "_name_or_path": "DigitalSouls/BlackSheep-DigitalSoul",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 6144,
+   "initializer_range": 0.02,
+   "intermediate_size": 16384,
+   "max_position_embeddings": 131072,
+   "model_type": "mistral",
+   "num_attention_heads": 48,
+   "num_hidden_layers": 70,
+   "num_key_value_heads": 8,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.40.2",
+   "use_cache": false,
+   "vocab_size": 32768
+ }
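The committed config describes the merged model as a Mistral-architecture decoder: 70 hidden layers, hidden_size 6144 (= 48 attention heads × head_dim 128), grouped-query attention with 8 KV heads (so 6 query heads share each KV head), and a 131072-token context (rope_theta 1e6, no sliding window). A minimal loading sketch; MODEL_DIR is a hypothetical placeholder for a local checkout of this repo, since the published model id does not appear in the diff:

```python
# Minimal sketch: load the architecture above and sanity-check derived sizes.
# MODEL_DIR is a hypothetical placeholder, not a published model id.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

MODEL_DIR = "path/to/this-repo"

config = AutoConfig.from_pretrained(MODEL_DIR)
assert config.head_dim * config.num_attention_heads == config.hidden_size  # 128 * 48 == 6144
assert config.num_attention_heads % config.num_key_value_heads == 0        # 48 query / 8 KV heads

model = AutoModelForCausalLM.from_pretrained(
    MODEL_DIR,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
    device_map="auto",           # needs accelerate; spreads ~55 GB of weights across devices
)
```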
mergekit_config.yml ADDED
@@ -0,0 +1,254 @@
+ # slices:
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [0, 1]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [0, 1]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [1, 2]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [1, 2]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [2, 3]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [2, 3]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [3, 4]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [3, 4]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [4, 5]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [4, 5]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [5, 6]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [5, 6]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [6, 7]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [6, 7]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [7, 8]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [7, 8]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [8, 9]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [8, 9]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [9, 10]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [9, 10]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [10, 11]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [10, 11]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [11, 12]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [11, 12]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [12, 13]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [12, 13]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [13, 14]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [13, 14]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [14, 15]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [14, 15]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [15, 16]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [15, 16]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [16, 17]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [16, 17]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [17, 18]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [17, 18]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [18, 19]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [18, 19]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [19, 20]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [19, 20]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [20, 21]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [20, 21]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [21, 22]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [21, 22]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [22, 23]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [22, 23]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [23, 24]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [23, 24]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [24, 25]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [24, 25]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [25, 26]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [25, 26]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [26, 27]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [26, 27]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [27, 28]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [27, 28]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [28, 29]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [28, 29]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [29, 30]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [29, 30]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [30, 31]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [30, 31]
+
+ #   - sources:
+ #       - model: failspy/kappa-3-phi-abliterated
+ #         layer_range: [31, 32]
+ #   - sources:
+ #       - model: TroyDoesAI/Phi-3-Context-Obedient-RAG
+ #         layer_range: [31, 32]
+
+ slices:
+   - sources:
+       - model: DigitalSouls/BlackSheep-DigitalSoul
+         layer_range: [0, 32]
+   - sources:
+       - model: TroyDoesAI/BlackSheep-MermaidMistral-22B
+         layer_range: [24, 42]
+   - sources:
+       - model: DigitalSouls/BlackSheep-DigitalSoul
+         layer_range: [32, 56]
+
+ merge_method: passthrough
+ dtype: bfloat16
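The commented block appears to be an earlier per-layer Phi-3 interleave experiment left in place; the active recipe at the bottom is a passthrough merge that stacks layers 0-31 of DigitalSouls/BlackSheep-DigitalSoul, layers 24-41 of TroyDoesAI/BlackSheep-MermaidMistral-22B, then layers 32-55 of DigitalSouls/BlackSheep-DigitalSoul. As written that is 32 + 18 + 24 = 74 decoder layers, while the committed config.json records num_hidden_layers: 70, so this yml may be a near-final draft rather than the exact recipe behind the shipped shards. A hedged sketch of re-running it with mergekit's Python entry points, as documented around the recorded 0.0.4.x version (the CLI equivalent is `mergekit-yaml mergekit_config.yml ./merged-model`):

```python
# Hedged sketch: re-running the active recipe with mergekit's Python API
# (entry points as documented for mergekit; they may differ across versions).
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("mergekit_config.yml") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

run_merge(
    merge_config,
    "./merged-model",                           # output directory for shards + index
    options=MergeOptions(copy_tokenizer=True),  # also copy tokenizer files across
)
```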
model-00001-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0280fc856fd8aeddd7b769900cf904dc50b14ae21b47c3045df7a1e002318d3
+ size 4907476544
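Each shard is committed as a Git LFS pointer rather than raw weights: the three lines record the pointer spec version, the SHA-256 of the payload, and its size in bytes. A small standard-library sketch verifying a downloaded shard against the pointer above:

```python
# Sketch: verify a downloaded shard against the LFS pointer's oid and size.
import hashlib

EXPECTED_OID = "c0280fc856fd8aeddd7b769900cf904dc50b14ae21b47c3045df7a1e002318d3"
EXPECTED_SIZE = 4907476544

digest = hashlib.sha256()
size = 0
with open("model-00001-of-00012.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        digest.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size} != {EXPECTED_SIZE}"
assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch"
```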
model-00002-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c084c8c971ba45cedc9d3c16d91500bc4e474d208983502b1fb74b10b1d41fb
+ size 4882323752
model-00003-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b69c9584183ec944ea03f46556379bb9e979dcb558dffcd053499a6cc6d60a1
+ size 4970416760
model-00004-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:edce3204e5ace361b95787ab0edf468fab63ce85d8fd6413d22a2606d448dae9
+ size 4995569816
model-00005-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57f809776e64426998e3227cc084f136357be234f31606f92fd7fd5aa35d6a4a
+ size 4857183080
model-00006-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dca7c03a6e395d862d7e9bf0ce85d5325b1283a386b87940c0ba20a445a52849
+ size 4882323752
model-00007-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68264ef0e715451fe7176313ea1042ec1fa3ef669c88d90e98e59b74fdf95709
+ size 4882323752
model-00008-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:105f12bd51688db3bde86284db93564c1c17ad8e1934c9d72fa7ee12fc277260
+ size 4857183064
model-00009-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3044086425dbfaca1b77f4a19d99c9e5adebfabf1320e722a05495096669d678
+ size 4882311352
model-00010-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f042f9895009ee9e94ac037bd9d3ab9b4b1ea668c1a908191efa7238bd37839
+ size 4970416752
model-00011-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e1a564c52767764b92d0b42243f8596e04824de6f8afd9b4894fe915da11de5
+ size 4970416736
model-00012-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a99fa77340b53a55ef55094fc13b4818cec1b5b70003c2828d681a41ed4ae25c
+ size 1359005608
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"mergekit_version": "0.0.4.4", "total_size": 55416877056}, "weight_map": {"lm_head.weight": "model-00001-of-00012.safetensors", "model.embed_tokens.weight": "model-00001-of-00012.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00012.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00012.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00001-of-00012.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00012.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00012.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00012.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00012.safetensors", "model.layers.1.input_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00001-of-00012.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00012.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00001-of-00012.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00012.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00012.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00012.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00012.safetensors", "model.layers.10.input_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00001-of-00012.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00012.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00001-of-00012.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00012.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00012.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00012.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00012.safetensors", "model.layers.11.input_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00001-of-00012.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00012.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00001-of-00012.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00012.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00012.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00012.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00012.safetensors", "model.layers.12.input_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00001-of-00012.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00012.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00001-of-00012.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00012.safetensors", "model.layers.12.self_attn.o_proj.weight": 
"model-00001-of-00012.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00012.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00012.safetensors", "model.layers.13.input_layernorm.weight": "model-00001-of-00012.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00001-of-00012.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00012.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00002-of-00012.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00012.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00012.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00012.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00012.safetensors", "model.layers.14.input_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00002-of-00012.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00012.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00002-of-00012.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00012.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00012.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00012.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00012.safetensors", "model.layers.15.input_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00002-of-00012.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00012.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00002-of-00012.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00012.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00012.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00012.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00012.safetensors", "model.layers.16.input_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00002-of-00012.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00012.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00002-of-00012.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00012.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00012.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00012.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00012.safetensors", "model.layers.17.input_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00002-of-00012.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00012.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00002-of-00012.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00012.safetensors", "model.layers.17.self_attn.o_proj.weight": 
"model-00002-of-00012.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00012.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00012.safetensors", "model.layers.18.input_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00002-of-00012.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00012.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00002-of-00012.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00012.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00012.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00012.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00012.safetensors", "model.layers.19.input_layernorm.weight": "model-00002-of-00012.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00002-of-00012.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00012.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00003-of-00012.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00012.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00012.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00012.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00012.safetensors", "model.layers.2.input_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00003-of-00012.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00003-of-00012.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00003-of-00012.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00003-of-00012.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00003-of-00012.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00003-of-00012.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00003-of-00012.safetensors", "model.layers.20.input_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00003-of-00012.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00012.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00003-of-00012.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00012.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00012.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00012.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00012.safetensors", "model.layers.21.input_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00003-of-00012.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00012.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00003-of-00012.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00012.safetensors", "model.layers.21.self_attn.o_proj.weight": 
"model-00003-of-00012.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00012.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00012.safetensors", "model.layers.22.input_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00003-of-00012.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00012.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00003-of-00012.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00012.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00012.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00012.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00012.safetensors", "model.layers.23.input_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00003-of-00012.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00012.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00003-of-00012.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00012.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00012.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00012.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00012.safetensors", "model.layers.24.input_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00003-of-00012.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00012.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00003-of-00012.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00012.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00012.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00012.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00012.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00012.safetensors", "model.layers.25.input_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00004-of-00012.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00012.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00004-of-00012.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00012.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00012.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00012.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00012.safetensors", "model.layers.26.input_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00004-of-00012.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00012.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00004-of-00012.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00012.safetensors", "model.layers.26.self_attn.o_proj.weight": 
"model-00004-of-00012.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00012.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00012.safetensors", "model.layers.27.input_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00004-of-00012.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00012.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00004-of-00012.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00012.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00012.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00012.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00012.safetensors", "model.layers.32.input_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.28.input_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00004-of-00012.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00004-of-00012.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00012.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00012.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00004-of-00012.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00004-of-00012.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00012.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00012.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00012.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00012.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00012.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00012.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00012.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00012.safetensors", "model.layers.33.input_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.29.input_layernorm.weight": "model-00004-of-00012.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00004-of-00012.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00004-of-00012.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00012.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00012.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00004-of-00012.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00005-of-00012.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00012.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00005-of-00012.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00012.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00005-of-00012.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00005-of-00012.safetensors", "model.layers.29.self_attn.q_proj.weight": 
"model-00005-of-00012.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00012.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00005-of-00012.safetensors", "model.layers.3.input_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00005-of-00012.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00005-of-00012.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00005-of-00012.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00005-of-00012.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00005-of-00012.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00005-of-00012.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00005-of-00012.safetensors", "model.layers.34.input_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.30.input_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00005-of-00012.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00005-of-00012.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00012.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00005-of-00012.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00005-of-00012.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00005-of-00012.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00012.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00005-of-00012.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00012.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00005-of-00012.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00012.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00005-of-00012.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00012.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00005-of-00012.safetensors", "model.layers.35.input_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.31.input_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00005-of-00012.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00005-of-00012.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00012.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00012.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00005-of-00012.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00005-of-00012.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00012.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00012.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00012.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00012.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00012.safetensors", "model.layers.31.self_attn.q_proj.weight": 
"model-00005-of-00012.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00012.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00012.safetensors", "model.layers.46.input_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.36.input_layernorm.weight": "model-00005-of-00012.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00005-of-00012.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00005-of-00012.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00006-of-00012.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00006-of-00012.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00006-of-00012.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00006-of-00012.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00006-of-00012.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00006-of-00012.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00006-of-00012.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00006-of-00012.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00006-of-00012.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00006-of-00012.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00006-of-00012.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00006-of-00012.safetensors", "model.layers.47.input_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.37.input_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00006-of-00012.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00006-of-00012.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00006-of-00012.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00006-of-00012.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00006-of-00012.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00006-of-00012.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00006-of-00012.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00006-of-00012.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00006-of-00012.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00006-of-00012.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00006-of-00012.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00006-of-00012.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00006-of-00012.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00006-of-00012.safetensors", "model.layers.48.input_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.38.input_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00006-of-00012.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00006-of-00012.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00006-of-00012.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00006-of-00012.safetensors", "model.layers.48.mlp.up_proj.weight": 
"model-00006-of-00012.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00006-of-00012.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00006-of-00012.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00006-of-00012.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00006-of-00012.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00006-of-00012.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00006-of-00012.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00006-of-00012.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00006-of-00012.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00006-of-00012.safetensors", "model.layers.49.input_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.39.input_layernorm.weight": "model-00006-of-00012.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00006-of-00012.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00006-of-00012.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00006-of-00012.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00007-of-00012.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00007-of-00012.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00007-of-00012.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.49.self_attn.k_proj.weight": "model-00007-of-00012.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00007-of-00012.safetensors", "model.layers.49.self_attn.o_proj.weight": "model-00007-of-00012.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00007-of-00012.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00007-of-00012.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00007-of-00012.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00007-of-00012.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00007-of-00012.safetensors", "model.layers.50.input_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.40.input_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00007-of-00012.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00007-of-00012.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00007-of-00012.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00007-of-00012.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00007-of-00012.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00007-of-00012.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00007-of-00012.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00007-of-00012.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00007-of-00012.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00007-of-00012.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00007-of-00012.safetensors", "model.layers.40.self_attn.q_proj.weight": 
"model-00007-of-00012.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00007-of-00012.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00007-of-00012.safetensors", "model.layers.51.input_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.41.input_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00007-of-00012.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00007-of-00012.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00007-of-00012.safetensors", "model.layers.41.mlp.gate_proj.weight": "model-00007-of-00012.safetensors", "model.layers.51.mlp.up_proj.weight": "model-00007-of-00012.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00007-of-00012.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00007-of-00012.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00007-of-00012.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00007-of-00012.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00007-of-00012.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00007-of-00012.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00007-of-00012.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00007-of-00012.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00007-of-00012.safetensors", "model.layers.52.input_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.42.input_layernorm.weight": "model-00007-of-00012.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00007-of-00012.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00007-of-00012.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00007-of-00012.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00007-of-00012.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00008-of-00012.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00008-of-00012.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00008-of-00012.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00008-of-00012.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00008-of-00012.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00008-of-00012.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00008-of-00012.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00008-of-00012.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00008-of-00012.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00008-of-00012.safetensors", "model.layers.53.input_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.43.input_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00008-of-00012.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00008-of-00012.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00008-of-00012.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00008-of-00012.safetensors", "model.layers.53.mlp.up_proj.weight": 
"model-00008-of-00012.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00008-of-00012.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00008-of-00012.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00008-of-00012.safetensors", "model.layers.53.self_attn.o_proj.weight": "model-00008-of-00012.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00008-of-00012.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00008-of-00012.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00008-of-00012.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00008-of-00012.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00008-of-00012.safetensors", "model.layers.4.input_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00008-of-00012.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00008-of-00012.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00008-of-00012.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00008-of-00012.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00008-of-00012.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00008-of-00012.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00008-of-00012.safetensors", "model.layers.54.input_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.44.input_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.54.mlp.down_proj.weight": "model-00008-of-00012.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00008-of-00012.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00008-of-00012.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00008-of-00012.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00008-of-00012.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00008-of-00012.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00008-of-00012.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00008-of-00012.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00008-of-00012.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00008-of-00012.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00008-of-00012.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00008-of-00012.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00008-of-00012.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00008-of-00012.safetensors", "model.layers.55.input_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.45.input_layernorm.weight": "model-00008-of-00012.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00008-of-00012.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00009-of-00012.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00009-of-00012.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00009-of-00012.safetensors", "model.layers.55.mlp.up_proj.weight": 
"model-00009-of-00012.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00009-of-00012.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00009-of-00012.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00009-of-00012.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00009-of-00012.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00009-of-00012.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00009-of-00012.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00009-of-00012.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00009-of-00012.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00009-of-00012.safetensors", "model.layers.56.input_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00009-of-00012.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00009-of-00012.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00009-of-00012.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00009-of-00012.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00009-of-00012.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00009-of-00012.safetensors", "model.layers.56.self_attn.v_proj.weight": "model-00009-of-00012.safetensors", "model.layers.57.input_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00009-of-00012.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00009-of-00012.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00009-of-00012.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00009-of-00012.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00009-of-00012.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00009-of-00012.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00009-of-00012.safetensors", "model.layers.58.input_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00009-of-00012.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00009-of-00012.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00009-of-00012.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00009-of-00012.safetensors", "model.layers.58.self_attn.o_proj.weight": "model-00009-of-00012.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00009-of-00012.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00009-of-00012.safetensors", "model.layers.59.input_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00009-of-00012.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00009-of-00012.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00009-of-00012.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00009-of-00012.safetensors", "model.layers.59.self_attn.o_proj.weight": 
"model-00009-of-00012.safetensors", "model.layers.59.self_attn.q_proj.weight": "model-00009-of-00012.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00009-of-00012.safetensors", "model.layers.60.input_layernorm.weight": "model-00009-of-00012.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00009-of-00012.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00009-of-00012.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00010-of-00012.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00010-of-00012.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00010-of-00012.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00010-of-00012.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00010-of-00012.safetensors", "model.layers.61.input_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00010-of-00012.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00010-of-00012.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00010-of-00012.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.61.self_attn.k_proj.weight": "model-00010-of-00012.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00010-of-00012.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00010-of-00012.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00010-of-00012.safetensors", "model.layers.62.input_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00010-of-00012.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00010-of-00012.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00010-of-00012.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00010-of-00012.safetensors", "model.layers.62.self_attn.o_proj.weight": "model-00010-of-00012.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00010-of-00012.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00010-of-00012.safetensors", "model.layers.63.input_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00010-of-00012.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00010-of-00012.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00010-of-00012.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00010-of-00012.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00010-of-00012.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00010-of-00012.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00010-of-00012.safetensors", "model.layers.5.input_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00010-of-00012.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00010-of-00012.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00010-of-00012.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00010-of-00012.safetensors", "model.layers.5.self_attn.o_proj.weight": 
"model-00010-of-00012.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00010-of-00012.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00010-of-00012.safetensors", "model.layers.64.input_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00010-of-00012.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00010-of-00012.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00010-of-00012.safetensors", "model.layers.64.post_attention_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00010-of-00012.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00010-of-00012.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00010-of-00012.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00010-of-00012.safetensors", "model.layers.65.input_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00010-of-00012.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00010-of-00012.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00010-of-00012.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00010-of-00012.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00010-of-00012.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00010-of-00012.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00011-of-00012.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00011-of-00012.safetensors", "model.layers.66.input_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00011-of-00012.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00011-of-00012.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00011-of-00012.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00011-of-00012.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00011-of-00012.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00011-of-00012.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00011-of-00012.safetensors", "model.layers.67.input_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00011-of-00012.safetensors", "model.layers.67.mlp.gate_proj.weight": "model-00011-of-00012.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00011-of-00012.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00011-of-00012.safetensors", "model.layers.67.self_attn.o_proj.weight": "model-00011-of-00012.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00011-of-00012.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00011-of-00012.safetensors", "model.layers.68.input_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00011-of-00012.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00011-of-00012.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00011-of-00012.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00011-of-00012.safetensors", "model.layers.68.self_attn.o_proj.weight": 
"model-00011-of-00012.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00011-of-00012.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00011-of-00012.safetensors", "model.layers.69.input_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00011-of-00012.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00011-of-00012.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00011-of-00012.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00011-of-00012.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00011-of-00012.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00011-of-00012.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00011-of-00012.safetensors", "model.layers.6.input_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00011-of-00012.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00011-of-00012.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00011-of-00012.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00011-of-00012.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00011-of-00012.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00011-of-00012.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00011-of-00012.safetensors", "model.layers.7.input_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00011-of-00012.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00011-of-00012.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00011-of-00012.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00011-of-00012.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00011-of-00012.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00011-of-00012.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00011-of-00012.safetensors", "model.layers.8.input_layernorm.weight": "model-00011-of-00012.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00011-of-00012.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00012-of-00012.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00012-of-00012.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00012-of-00012.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00012-of-00012.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00012-of-00012.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00012-of-00012.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00012-of-00012.safetensors", "model.layers.9.input_layernorm.weight": "model-00012-of-00012.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00012-of-00012.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00012-of-00012.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00012-of-00012.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00012-of-00012.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00012-of-00012.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00012-of-00012.safetensors", 
"model.layers.9.self_attn.q_proj.weight": "model-00012-of-00012.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00012-of-00012.safetensors", "model.norm.weight": "model-00012-of-00012.safetensors"}}
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
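No dedicated padding token is defined; pad_token reuses "</s>", the eos token, as is common for Mistral-family tokenizers. A quick hedged check of how transformers resolves these declarations (the path is a hypothetical placeholder for a local checkout):

```python
# Sketch: confirm the special-token mapping above as transformers resolves it.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this-repo")  # hypothetical local path

print(tok.bos_token, tok.bos_token_id)  # "<s>", 1 per config.json
print(tok.eos_token, tok.eos_token_id)  # "</s>", 2 per config.json
assert tok.pad_token == tok.eos_token   # pad reuses "</s>" as declared above
```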
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59f95e28944c062244741268596badc900df86c7f5ded05088d2da22a7379e06
+ size 587583
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff