Update model
- config.json +3 -5
- pytorch_model.bin +2 -2
config.json CHANGED
@@ -79,7 +79,7 @@
   "top_p": 1.0,
   "torch_dtype": null,
   "torchscript": false,
-  "transformers_version": "4.
+  "transformers_version": "4.22.0.dev0",
   "typical_p": 1.0,
   "use_bfloat16": false,
   "use_cache": true,
@@ -88,7 +88,6 @@
   "encoder": {
     "_name_or_path": "",
     "add_cross_attention": false,
-    "add_final_layer_norm": false,
     "architectures": null,
     "attention_probs_dropout_prob": 0.0,
     "bad_words_ids": null,
@@ -108,7 +107,6 @@
     "early_stopping": false,
     "embed_dim": 128,
     "encoder_no_repeat_ngram_size": 0,
-    "encoder_stride": 32,
     "eos_token_id": null,
     "exponential_decay_length_penalty": null,
     "finetuning_task": null,
@@ -137,7 +135,7 @@
     "max_length": 20,
     "min_length": 0,
     "mlp_ratio": 4.0,
-    "model_type": "
+    "model_type": "donut",
     "no_repeat_ngram_size": 0,
     "num_beam_groups": 1,
     "num_beams": 1,
@@ -175,7 +173,7 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.22.0.dev0",
     "typical_p": 1.0,
     "use_absolute_embeddings": false,
     "use_bfloat16": false,
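The config change bumps the serialized "transformers_version" to 4.22.0.dev0, sets the encoder sub-config's "model_type" to "donut", and drops the "add_final_layer_norm" and "encoder_stride" keys from that sub-config (the context lines such as "add_cross_attention", "embed_dim", "mlp_ratio" and "use_absolute_embeddings" all sit inside the "encoder" block). A minimal sketch for sanity-checking those fields against a locally downloaded copy of the updated config.json; the local path is an assumption, not part of this commit:

```python
import json

# Minimal sketch: inspect the fields this commit touches, assuming a copy of
# the updated config.json has been downloaded to the working directory
# (the local path is an assumption, not part of the commit itself).
with open("config.json") as f:
    config = json.load(f)

encoder = config["encoder"]

print(encoder["model_type"])              # now "donut"
print(encoder["transformers_version"])    # now "4.22.0.dev0"
print("add_final_layer_norm" in encoder)  # False: key removed by this commit
print("encoder_stride" in encoder)        # False: key removed by this commit
```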
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:e0e37d027ce4efbb6a868c6d64622301f67f045e31c92b0b86fd085b41146176
+size 802987835
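The weight file itself is stored through Git LFS, so the diff only touches the pointer: a new SHA-256 object id and a size of 802987835 bytes (roughly 803 MB). A minimal sketch for verifying a downloaded pytorch_model.bin against that pointer, assuming the file sits in the working directory:

```python
import hashlib
import os

# Expected values come straight from the LFS pointer added in this commit;
# the local file path is an assumption.
EXPECTED_SHA256 = "e0e37d027ce4efbb6a868c6d64622301f67f045e31c92b0b86fd085b41146176"
EXPECTED_SIZE = 802987835

path = "pytorch_model.bin"

# The size check is cheap, so do it first.
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

# Hash the file in 1 MiB chunks to keep memory flat for an ~800 MB blob.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```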