This is just the Llama 3.2 3B tokenizer, modified to use two new special tokens that take over two of the reserved special token slots:
[
{
"id": 128002,
"content": "<|reference_text|>",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false,
"special": true
},
{
"id": 128003,
"content": "<|end_reference_text|>",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false,
"special": true
}
]
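
A minimal sketch of how these tokens can be checked and used, assuming the modified tokenizer is loaded from a Hugging Face repo (the repo name below is a hypothetical placeholder) with the `transformers` library:

```python
from transformers import AutoTokenizer

# Hypothetical repo name; replace with the actual location of this tokenizer.
tokenizer = AutoTokenizer.from_pretrained("your-org/llama-3.2-3b-reference-tokenizer")

# The repurposed reserved slots should now map to the new special tokens.
assert tokenizer.convert_tokens_to_ids("<|reference_text|>") == 128002
assert tokenizer.convert_tokens_to_ids("<|end_reference_text|>") == 128003

# Wrap a reference passage in the new markers before tokenizing.
text = "<|reference_text|>The quick brown fox.<|end_reference_text|>"
ids = tokenizer(text, add_special_tokens=False)["input_ids"]
print(ids[0], ids[-1])  # expected: 128002 ... 128003
```

Because the tokens are registered with `"special": true` and `"normalized": false`, the tokenizer treats each marker as a single atomic token rather than splitting it into subwords.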