Turn on `use_cache` for faster inference

#5
by loubnabnl (HF staff) — opened
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -33,7 +33,7 @@
33
  "summary_use_proj": true,
34
  "torch_dtype": "float16",
35
  "transformers_version": "4.30.0.dev0",
36
- "use_cache": false,
37
  "validate_runner_input": true,
38
  "vocab_size": 49153
39
  }
 
33
  "summary_use_proj": true,
34
  "torch_dtype": "float16",
35
  "transformers_version": "4.30.0.dev0",
36
+ "use_cache": true,
37
  "validate_runner_input": true,
38
  "vocab_size": 49153
39
  }