Upload folder using huggingface_hub
- config.json +1 -1
- tokenizer_config.json +1 -1
config.json CHANGED
@@ -7,7 +7,7 @@
   "attention_dropout": 0.0,
   "bos_token_id": 1,
   "embd_pdrop": 0.0,
-  "eos_token_id":
+  "eos_token_id": 32007,
   "hidden_act": "silu",
   "hidden_size": 3072,
   "initializer_range": 0.02,
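For reference, a minimal consistency check, not part of this commit: it verifies that the new eos_token_id in config.json points at the same special token that the tokenizer_config.json change below sets as eos_token. The local path "./checkpoint" is hypothetical, and it assumes tokenizer_config.json carries the usual "added_tokens_decoder" mapping of token ids to token contents.

import json
from pathlib import Path

checkpoint = Path("./checkpoint")  # hypothetical local checkpoint directory

config = json.loads((checkpoint / "config.json").read_text())
tok_cfg = json.loads((checkpoint / "tokenizer_config.json").read_text())

eos_id = config["eos_token_id"]            # 32007 after this commit
eos_token = tok_cfg["eos_token"]           # "<|end|>" after this commit
decoder = tok_cfg.get("added_tokens_decoder", {})  # assumed: id -> token metadata

# The id in config.json should resolve to the token string in tokenizer_config.json.
assert decoder.get(str(eos_id), {}).get("content") == eos_token, (
    f"config.json eos_token_id {eos_id} does not map to {eos_token!r}"
)
print(f"OK: eos_token_id {eos_id} resolves to eos_token {eos_token!r}")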
tokenizer_config.json CHANGED
@@ -118,7 +118,7 @@
   "bos_token": "<s>",
   "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'system') %}{{'<|system|>' + '\n' + message['content'] + '<|end|>' + '\n'}}{% elif (message['role'] == 'user') %}{{'<|user|>' + '\n' + message['content'] + '<|end|>' + '\n' + '<|assistant|>' + '\n'}}{% elif message['role'] == 'assistant' %}{{message['content'] + '<|end|>' + '\n'}}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|
+  "eos_token": "<|end|>",
   "legacy": false,
   "model_max_length": 4096,
   "pad_token": "<|endoftext|>",
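A minimal sketch, also not part of this commit, that renders the chat_template above with plain jinja2 to preview the prompt format this tokenizer produces; the example messages are made up, and this bypasses transformers' own template handling.

from jinja2 import Template

# Same template string as in tokenizer_config.json ("\n" kept escaped so the
# Jinja expression, not Python, turns it into a newline).
CHAT_TEMPLATE = (
    "{{ bos_token }}{% for message in messages %}"
    "{% if (message['role'] == 'system') %}"
    "{{'<|system|>' + '\\n' + message['content'] + '<|end|>' + '\\n'}}"
    "{% elif (message['role'] == 'user') %}"
    "{{'<|user|>' + '\\n' + message['content'] + '<|end|>' + '\\n' + '<|assistant|>' + '\\n'}}"
    "{% elif message['role'] == 'assistant' %}"
    "{{message['content'] + '<|end|>' + '\\n'}}"
    "{% endif %}{% endfor %}"
)

messages = [
    {"role": "user", "content": "What is 2 + 2?"},
    {"role": "assistant", "content": "4"},
]

prompt = Template(CHAT_TEMPLATE).render(bos_token="<s>", messages=messages)
print(prompt)
# Prints roughly:
# <s><|user|>
# What is 2 + 2?<|end|>
# <|assistant|>
# 4<|end|>

Every assistant turn ends with <|end|>, which is why setting eos_token to "<|end|>" (and eos_token_id to 32007) lets generation stop at the end of the assistant's reply instead of running on.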