Upload tokenizer.json
Browse files — tokenizer.json: +2 −1
tokenizer.json
CHANGED
@@ -50,7 +50,7 @@
|
|
50 |
},
|
51 |
{
|
52 |
"id": 128005,
|
53 |
-
"content": "[original value lost in page extraction]",
|
54 |
"single_word": false,
|
55 |
"lstrip": false,
|
56 |
"rstrip": false,
|
@@ -2407,6 +2407,7 @@
|
|
2407 |
"end_of_word_suffix": null,
|
2408 |
"fuse_unk": false,
|
2409 |
"byte_fallback": false,
|
|
|
2410 |
"vocab": {
|
2411 |
"!": 0,
|
2412 |
"\"": 1,
|
|
|
50 |
},
|
51 |
{
|
52 |
"id": 128005,
|
53 |
+
"content": "<|reserved_special_token_3|>",
|
54 |
"single_word": false,
|
55 |
"lstrip": false,
|
56 |
"rstrip": false,
|
|
|
2407 |
"end_of_word_suffix": null,
|
2408 |
"fuse_unk": false,
|
2409 |
"byte_fallback": false,
|
2410 |
+
"ignore_merges": true,
|
2411 |
"vocab": {
|
2412 |
"!": 0,
|
2413 |
"\"": 1,
|