Jamba-tiny-dev / tokenizer_config.json
{
  "add_bos_token": true,
  "add_eos_token": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<|pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<|startoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<|unk|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|startoftext|>",
  "chat_template": "{% set bom_token='<|bom|>' %}{% set eom_token='<|eom|>' %}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% set system_message = 'You are a task following system, respond to the user\\'s input precisely. Do not add any additional information.' %}{% endif %}{{ bom_token + 'System: ' + system_message + eom_token + '' }}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ bom_token + 'User: ' + content.strip() + eom_token }}{% elif message['role'] == 'system' %}{{ bom_token + 'System: ' + content.strip() + eom_token }}{% elif message['role'] == 'assistant' %}{{ bom_token + 'Assistant: ' + content.strip() + eom_token }}{% endif %}{% if message['role'] == 'assistant' and loop.index0 == loop.length - 1 %}{{ 'User: ' }}{% elif message['role'] == 'user' and loop.index0 == loop.length - 1 %}{{ '' + bom_token + 'Assistant:' }}{% else %}{{ '' }}{% endif %}{% endfor %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<|pad|>",
  "spaces_between_special_tokens": false,
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<|unk|>",
  "use_default_system_prompt": false
}
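
The config above is consumed by the Hugging Face transformers library. What follows is a minimal sketch of how the special tokens and the "chat_template" entry come into play, assuming this file belongs to the ai21labs/Jamba-tiny-dev repository (inferred from the page title) and that a recent transformers release is installed; the messages are illustrative only.

from transformers import AutoTokenizer

# Load the tokenizer described by the config above.
# The repo id is an assumption based on this page's title.
tokenizer = AutoTokenizer.from_pretrained("ai21labs/Jamba-tiny-dev")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# apply_chat_template renders the Jinja string stored under "chat_template":
# each message becomes a <|bom|>-prefixed, <|eom|>-terminated segment, and
# because the last message is from the user, the template itself appends
# "<|bom|>Assistant:" as the generation prompt.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# <|bom|>System: You are a helpful assistant.<|eom|><|bom|>User: Hello!<|eom|><|bom|>Assistant:

Two details worth noting: "add_bos_token": true means <|startoftext|> is prepended when text is encoded directly with the tokenizer, and the enormous "model_max_length" value is the transformers sentinel for "no explicit limit configured" (int(1e30)), not a real context length.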