turn on use_cache for fast inference #5
by loubnabnl (HF staff) - opened

config.json CHANGED (+1 -1)
@@ -33,7 +33,7 @@
     "summary_use_proj": true,
     "torch_dtype": "float16",
     "transformers_version": "4.30.0.dev0",
-    "use_cache": false,
+    "use_cache": true,
     "validate_runner_input": true,
     "vocab_size": 49153
 }
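With "use_cache": true, transformers keeps the attention key/value states from previous decoding steps, so each generated token only attends over cached states instead of recomputing attention for the whole prefix at every step, which is what makes autoregressive generation fast. A minimal sketch of the effect, assuming a hypothetical repo id "org/model" (the checkpoint this PR targets is not named in the diff); use_cache can also be overridden per call:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# "org/model" is a placeholder for the repo this config.json belongs to.
tokenizer = AutoTokenizer.from_pretrained("org/model")
model = AutoModelForCausalLM.from_pretrained("org/model", torch_dtype=torch.float16)

inputs = tokenizer("def fibonacci(n):", return_tensors="pt")
# use_cache defaults to the value in config.json; passing it explicitly
# here shows the per-call override. With caching on, each decoding step
# reuses past key/value tensors rather than re-encoding the full prompt.
outputs = model.generate(**inputs, max_new_tokens=32, use_cache=True)
print(tokenizer.decode(outputs[0]))
```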