mferraretto committed
Commit • 9da888d
Parent(s): 0530f30

adding tokenizer

Files changed:
- README.md (+5, -6)
- tokenizer.json (+0, -0)
- tokenizer_config.json (+2, -1)
README.md CHANGED

@@ -1,19 +1,18 @@
 ---
+language:
+- it
+license: cc-by-nc-4.0
 tags:
 - sft
 - it
 - mistral
 - chatml
 - axolotl
+prompt_template: <|im_start|>system {system_message}<|im_end|> <|im_start|>user {prompt}<|im_end|>
+  <|im_start|>assistant
 model-index:
 - name: maestrale-chat-v0.3-beta
   results: []
-license: cc-by-nc-4.0
-language:
-- it
-prompt_template: >-
-  <|im_start|>system {system_message}<|im_end|> <|im_start|>user
-  {prompt}<|im_end|> <|im_start|>assistant
 ---

 <div style="width: auto; margin-left: auto; margin-right: auto">
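As a rough illustration of the prompt_template declared above, a single-turn ChatML prompt can be assembled by hand as in the sketch below. The system and user strings are placeholders, not part of the commit, and the newline placement follows the '\n' separators used by the chat_template added to tokenizer_config.json in this same commit.

```python
# Hypothetical values standing in for {system_message} and {prompt}.
system_message = "Sei un assistente utile."
prompt = "Chi era Dante Alighieri?"

# ChatML prompt in the shape declared by prompt_template; newlines between
# turns match the '\n' separators in the tokenizer's chat_template.
text = (
    f"<|im_start|>system\n{system_message}<|im_end|>\n"
    f"<|im_start|>user\n{prompt}<|im_end|>\n"
    "<|im_start|>assistant\n"
)
print(text)
```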
tokenizer.json ADDED

The diff for this file is too large to render. See raw diff.
tokenizer_config.json CHANGED

@@ -32,7 +32,7 @@
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special":
+      "special": true
     },
     "32001": {
       "content": "<|im_start|>",
@@ -45,6 +45,7 @@
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "legacy": true,
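A minimal sketch of how the chat_template added above is consumed via transformers. The repository id is an assumption based on the committer and the model name in the README and may differ; the messages are placeholders.

```python
from transformers import AutoTokenizer

# Assumed repo id (committer + model name from the README); adjust to the
# actual repository path.
tokenizer = AutoTokenizer.from_pretrained("mferraretto/maestrale-chat-v0.3-beta")

messages = [
    {"role": "system", "content": "Sei un assistente utile."},  # placeholder
    {"role": "user", "content": "Chi era Dante Alighieri?"},    # placeholder
]

# apply_chat_template renders the Jinja chat_template from tokenizer_config.json:
# each turn becomes "<|im_start|>{role}\n{content}<|im_end|>\n", and
# add_generation_prompt=True appends the trailing "<|im_start|>assistant\n".
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```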