Fix config for checkpoint
- config.json +4 -4
- flax_model.msgpack +2 -2
- pytorch_model.bin +1 -1
config.json CHANGED
@@ -8,14 +8,14 @@
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
-  "hidden_size":
+  "hidden_size": 768,
   "initializer_range": 0.02,
-  "intermediate_size":
+  "intermediate_size": 3072,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
   "model_type": "roberta",
-  "num_attention_heads":
-  "num_hidden_layers":
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "transformers_version": "4.9.0.dev0",
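The restored values are the standard RoBERTa-base dimensions. One way to confirm the fix took effect is to reload the config and check the fields that were previously empty; a minimal sketch, where `your-org/your-model` is a placeholder for the actual repo this commit lives in:

from transformers import AutoConfig, AutoModel

repo_id = "your-org/your-model"  # placeholder; substitute the checkpoint this commit fixes

config = AutoConfig.from_pretrained(repo_id)

# Fields restored by this commit (RoBERTa-base dimensions).
assert config.hidden_size == 768
assert config.intermediate_size == 3072
assert config.num_attention_heads == 12
assert config.num_hidden_layers == 12

# Loading the weights would fail with shape mismatches if these
# values disagreed with the tensors in pytorch_model.bin.
model = AutoModel.from_pretrained(repo_id)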
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:db53f60af5170d39983a3940d68ca0fff2114957df8cb8235803f8af68a5467b
+size 498796983
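Since the Flax weights were re-uploaded alongside the config fix, one way to confirm they deserialize cleanly is to load them through the Flax model class. A minimal sketch, assuming transformers is installed with Flax support (`your-org/your-model` is again a placeholder repo id):

from transformers import FlaxRobertaModel

# Placeholder repo id; substitute the checkpoint this commit fixes.
model = FlaxRobertaModel.from_pretrained("your-org/your-model")
print(model.config.hidden_size)  # 768 after this fix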
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:43f38b577722a8e7bad5c4598e36c4e3c2ac8a0843d75008f7597e706eedfd2c
 size 498858859
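Both weight files are tracked via Git LFS, so the repository itself only stores these small pointer files: the `oid sha256:` line is the digest of the real payload and `size` is its byte count. A sketch of verifying a downloaded weight file against the pointer committed above, in plain Python (the local path is an assumption):

import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in chunks so a ~500 MB checkpoint never sits fully in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# Digest and size from the pytorch_model.bin pointer in this commit.
expected = "43f38b577722a8e7bad5c4598e36c4e3c2ac8a0843d75008f7597e706eedfd2c"
assert os.path.getsize("pytorch_model.bin") == 498858859
assert sha256_of("pytorch_model.bin") == expected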