fix: update tokenizer
* tokenizer_config
* return only 0 for token_type_id
- tokenizer.json +0 -0
- tokenizer_config.json +2 -1
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
CHANGED
@@ -11,5 +11,6 @@
   "eos_token": "[SEP]",
   "tokenize_chinese_chars": true,
   "strip_accents": null,
-  "model_max_length": 512
+  "model_max_length": 512,
+  "tokenizer_class": "BertTokenizer"
 }
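Below is a minimal sketch of how the updated files are expected to behave, assuming the Hugging Face Transformers AutoTokenizer API; the repository id "your-org/your-model" is a placeholder, not the actual repo. The added "tokenizer_class": "BertTokenizer" entry tells AutoTokenizer which class to instantiate, "model_max_length" caps inputs at 512 tokens, and, per the commit message, token_type_ids should now contain only zeros.

from transformers import AutoTokenizer

# "your-org/your-model" is a placeholder; substitute the repository this commit belongs to.
REPO_ID = "your-org/your-model"

# The new "tokenizer_class": "BertTokenizer" entry lets AutoTokenizer resolve the class;
# use_fast=True picks up the newly added tokenizer.json.
tokenizer = AutoTokenizer.from_pretrained(REPO_ID, use_fast=True)

print(tokenizer.model_max_length)  # expected: 512, from "model_max_length" in the config

# Per the commit message, token_type_ids should be all zeros, even for sentence pairs.
enc = tokenizer("first sentence", "second sentence")
print(enc["token_type_ids"])       # expected: a list containing only 0s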