Upload 11 files

- config.json +28 -0
- generation_config.json +10 -0
- openvino_detokenizer.bin +3 -0
- openvino_detokenizer.xml +100 -0
- openvino_model.xml +0 -0
- openvino_tokenizer.bin +3 -0
- openvino_tokenizer.xml +290 -0
- special_tokens_map.json +23 -0
- tokenizer.json +0 -0
- tokenizer.model +3 -0
- tokenizer_config.json +43 -0
config.json
ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "pucpr-br/Clinical-BR-LlaMA-2-7B",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 11008,
+  "max_position_embeddings": 4096,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 32,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.43.4",
+  "use_cache": true,
+  "vocab_size": 32000
+}
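Taken together, these fields describe a stock Llama-2-7B layout: 32 decoder layers, 32 attention heads over a 4096-wide hidden state (so 128-dimensional heads, with no grouped-query attention since num_key_value_heads also equals 32), an 11008-wide MLP, RoPE with theta 10000, and a 32000-token SentencePiece vocabulary. A minimal loading sketch, assuming this repository has been downloaded to a local folder and that `optimum-intel` (with its OpenVINO extras) and `transformers` are installed; the directory name and prompt are placeholders:

```python
from optimum.intel import OVModelForCausalLM
from transformers import AutoTokenizer

model_dir = "./Clinical-BR-LlaMA-2-7B-ov"  # placeholder: local checkout of this repo

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = OVModelForCausalLM.from_pretrained(model_dir)  # reads config.json + openvino_model.xml

inputs = tokenizer("O paciente apresenta quadro de", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```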
generation_config.json
ADDED
@@ -0,0 +1,10 @@
+{
+  "bos_token_id": 1,
+  "do_sample": true,
+  "eos_token_id": 2,
+  "max_length": 4096,
+  "pad_token_id": 0,
+  "temperature": 0.6,
+  "top_p": 0.9,
+  "transformers_version": "4.43.4"
+}
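These defaults turn sampling on (temperature 0.6, nucleus top-p 0.9) and cap the total sequence at the model's 4096-token context, with pad id 0 (the `<unk>` slot) and the usual Llama BOS/EOS ids. A sketch of how they map onto a `transformers` GenerationConfig, which `generate()` would otherwise pick up automatically from this file:

```python
from transformers import GenerationConfig

gen_config = GenerationConfig(
    bos_token_id=1,
    eos_token_id=2,
    pad_token_id=0,
    do_sample=True,   # sample instead of greedy decoding
    temperature=0.6,
    top_p=0.9,
    max_length=4096,  # prompt + generated tokens
)
# outputs = model.generate(**inputs, generation_config=gen_config)
```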
openvino_detokenizer.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9556d0a1f310629e217450ac4198c49f5457f1a69e22ce7c9f8e81fab4d530a7
+size 499723
openvino_detokenizer.xml
ADDED
@@ -0,0 +1,100 @@
+<?xml version="1.0"?>
+<net name="detokenizer" version="11">
+    <layers>
+        <layer id="0" name="Parameter_168192" type="Parameter" version="opset1">
+            <data shape="?,?" element_type="i64" />
+            <output>
+                <port id="0" precision="I64" names="Parameter_168192">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="1" name="Constant_168168" type="Const" version="opset1">
+            <data element_type="u8" shape="499723" offset="0" size="499723" />
+            <output>
+                <port id="0" precision="U8">
+                    <dim>499723</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="2" name="Convert_168202" type="Convert" version="opset1">
+            <data destination_type="i32" />
+            <input>
+                <port id="0" precision="I64">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="1" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="3" name="SentencepieceDetokenizer_168193" type="SentencepieceDetokenizer" version="extension">
+            <input>
+                <port id="0" precision="U8">
+                    <dim>499723</dim>
+                </port>
+                <port id="1" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="I32">
+                    <dim>-1</dim>
+                </port>
+                <port id="3" precision="I32">
+                    <dim>-1</dim>
+                </port>
+                <port id="4" precision="U8">
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="4" name="StringTensorPack_168194" type="StringTensorPack" version="extension">
+            <data mode="begins_ends" />
+            <input>
+                <port id="0" precision="I32">
+                    <dim>-1</dim>
+                </port>
+                <port id="1" precision="I32">
+                    <dim>-1</dim>
+                </port>
+                <port id="2" precision="U8">
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="3" precision="STRING" names="string_output">
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="5" name="Result_168195" type="Result" version="opset1">
+            <input>
+                <port id="0" precision="STRING">
+                    <dim>-1</dim>
+                </port>
+            </input>
+        </layer>
+    </layers>
+    <edges>
+        <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+        <edge from-layer="1" from-port="0" to-layer="3" to-port="0" />
+        <edge from-layer="2" from-port="1" to-layer="3" to-port="1" />
+        <edge from-layer="3" from-port="2" to-layer="4" to-port="0" />
+        <edge from-layer="3" from-port="3" to-layer="4" to-port="1" />
+        <edge from-layer="3" from-port="4" to-layer="4" to-port="2" />
+        <edge from-layer="4" from-port="3" to-layer="5" to-port="0" />
+    </edges>
+    <rt_info>
+        <bos_token_id value="1" />
+        <chat_template value="{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif false == true and not '<<SYS>>' in messages[0]['content'] %}{% set loop_messages = messages %}{% set system_message = 'You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don\'t know the answer to a question, please don\'t share false information.' %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<<SYS>>\n' + system_message + '\n<</SYS>>\n\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ bos_token + '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'system' %}{{ '<<SYS>>\n' + content.strip() + '\n<</SYS>>\n\n' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}" />
+        <eos_token_id value="2" />
+        <original_tokenizer_class value="<class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
+    </rt_info>
+</net>
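The graph is small: a 499,723-byte constant holding the SentencePiece model (the same payload tokenizer.model points to), a SentencepieceDetokenizer that turns an int64 [batch, sequence] id matrix back into byte spans, and a StringTensorPack that assembles those spans into the "string_output" string tensor. A sketch of running it, assuming the `openvino` and `openvino-tokenizers` packages are installed and that importing `openvino_tokenizers` registers the "extension" ops used above, as its documentation describes; the ids below are arbitrary examples:

```python
import numpy as np
import openvino as ov
import openvino_tokenizers  # noqa: F401  (side effect: registers the extension ops)

core = ov.Core()
detokenizer = core.compile_model("openvino_detokenizer.xml", "CPU")

ids = np.array([[1, 20255, 29889]], dtype=np.int64)  # arbitrary example ids; 1 is <s>
result = detokenizer(ids)
print(result[0])  # the single "string_output" port
```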
openvino_model.xml
ADDED
The diff for this file is too large to render.
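The main IR is too large to render here, but together with the tokenizer and detokenizer graphs it is what OpenVINO GenAI consumes directly. A minimal end-to-end sketch, assuming the `openvino-genai` package is installed and the whole repository sits in a local folder; the path and prompt are placeholders:

```python
import openvino_genai

# LLMPipeline picks up openvino_model.xml together with the
# openvino_tokenizer/detokenizer graphs in the same directory.
pipe = openvino_genai.LLMPipeline("./Clinical-BR-LlaMA-2-7B-ov", "CPU")
print(pipe.generate("O paciente relata dor torácica há dois dias.", max_new_tokens=64))
```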
openvino_tokenizer.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:369ba2ee8df48ddf624f97a6a689b5453eddb26f79476fa104324dbe813908e6
+size 499739
openvino_tokenizer.xml
ADDED
@@ -0,0 +1,290 @@
+<?xml version="1.0"?>
+<net name="tokenizer" version="11">
+    <layers>
+        <layer id="0" name="string_input" type="Parameter" version="opset1">
+            <data shape="?" element_type="string" />
+            <output>
+                <port id="0" precision="STRING" names="string_input">
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="1" name="Constant_168171" type="Const" version="opset1">
+            <data element_type="i32" shape="" offset="0" size="4" />
+            <output>
+                <port id="0" precision="I32" />
+            </output>
+        </layer>
+        <layer id="2" name="Constant_168167" type="Const" version="opset1">
+            <data element_type="u8" shape="499723" offset="4" size="499723" />
+            <output>
+                <port id="0" precision="U8">
+                    <dim>499723</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="3" name="SentencepieceTokenizer_168170" type="SentencepieceTokenizer" version="extension">
+            <data nbest_size="0" alpha="0" add_bos="true" add_eos="false" reverse="true" />
+            <input>
+                <port id="0" precision="U8">
+                    <dim>499723</dim>
+                </port>
+                <port id="1" precision="STRING">
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="I64">
+                    <dim>-1</dim>
+                    <dim>2</dim>
+                </port>
+                <port id="3" precision="I32">
+                    <dim>-1</dim>
+                </port>
+                <port id="4" precision="I64">
+                    <dim>2</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="4" name="Broadcast_168172" type="Broadcast" version="opset3">
+            <data mode="numpy" />
+            <input>
+                <port id="0" precision="I32" />
+                <port id="1" precision="I64">
+                    <dim>2</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="5" name="Constant_168173" type="Const" version="opset1">
+            <data element_type="i32" shape="" offset="499727" size="4" />
+            <output>
+                <port id="0" precision="I32" />
+            </output>
+        </layer>
+        <layer id="6" name="ShapeOf_168174" type="ShapeOf" version="opset3">
+            <data output_type="i64" />
+            <input>
+                <port id="0" precision="I32">
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="1" precision="I64">
+                    <dim>1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="7" name="Broadcast_168175" type="Broadcast" version="opset3">
+            <data mode="numpy" />
+            <input>
+                <port id="0" precision="I32" />
+                <port id="1" precision="I64">
+                    <dim>1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="I32">
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="8" name="ScatterNDUpdate_168179" type="ScatterNDUpdate" version="opset4">
+            <input>
+                <port id="0" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+                <port id="1" precision="I64">
+                    <dim>-1</dim>
+                    <dim>2</dim>
+                </port>
+                <port id="2" precision="I32">
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="3" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="9" name="Constant_168183" type="Const" version="opset1">
+            <data element_type="i64" shape="1" offset="499731" size="8" />
+            <output>
+                <port id="0" precision="I64">
+                    <dim>1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="10" name="Reverse_168184" type="Reverse" version="opset1">
+            <data mode="index" />
+            <input>
+                <port id="0" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+                <port id="1" precision="I64">
+                    <dim>1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="11" name="Reverse_168184" type="Convert" version="opset1">
+            <data destination_type="i64" />
+            <input>
+                <port id="0" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="1" precision="I64" names="attention_mask">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="13" name="Constant_168180" type="Const" version="opset1">
+            <data element_type="i32" shape="" offset="0" size="4" />
+            <output>
+                <port id="0" precision="I32" />
+            </output>
+        </layer>
+        <layer id="14" name="Broadcast_168181" type="Broadcast" version="opset3">
+            <data mode="bidirectional" />
+            <input>
+                <port id="0" precision="I32" />
+                <port id="1" precision="I64">
+                    <dim>2</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="15" name="ScatterNDUpdate_168182" type="ScatterNDUpdate" version="opset4">
+            <input>
+                <port id="0" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+                <port id="1" precision="I64">
+                    <dim>-1</dim>
+                    <dim>2</dim>
+                </port>
+                <port id="2" precision="I32">
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="3" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="16" name="Constant_168185" type="Const" version="opset1">
+            <data element_type="i64" shape="1" offset="499731" size="8" />
+            <output>
+                <port id="0" precision="I64">
+                    <dim>1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="17" name="Reverse_168186" type="Reverse" version="opset1">
+            <data mode="index" />
+            <input>
+                <port id="0" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+                <port id="1" precision="I64">
+                    <dim>1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="18" name="Reverse_168186" type="Convert" version="opset1">
+            <data destination_type="i64" />
+            <input>
+                <port id="0" precision="I32">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </input>
+            <output>
+                <port id="1" precision="I64" names="input_ids">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </output>
+        </layer>
+        <layer id="19" name="Result_168187" type="Result" version="opset1">
+            <input>
+                <port id="0" precision="I64">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </input>
+        </layer>
+        <layer id="12" name="Result_168188" type="Result" version="opset1">
+            <input>
+                <port id="0" precision="I64">
+                    <dim>-1</dim>
+                    <dim>-1</dim>
+                </port>
+            </input>
+        </layer>
+    </layers>
+    <edges>
+        <edge from-layer="0" from-port="0" to-layer="3" to-port="1" />
+        <edge from-layer="1" from-port="0" to-layer="4" to-port="0" />
+        <edge from-layer="2" from-port="0" to-layer="3" to-port="0" />
+        <edge from-layer="3" from-port="4" to-layer="4" to-port="1" />
+        <edge from-layer="3" from-port="3" to-layer="6" to-port="0" />
+        <edge from-layer="3" from-port="2" to-layer="8" to-port="1" />
+        <edge from-layer="3" from-port="3" to-layer="15" to-port="2" />
+        <edge from-layer="3" from-port="2" to-layer="15" to-port="1" />
+        <edge from-layer="3" from-port="4" to-layer="14" to-port="1" />
+        <edge from-layer="4" from-port="2" to-layer="8" to-port="0" />
+        <edge from-layer="5" from-port="0" to-layer="7" to-port="0" />
+        <edge from-layer="6" from-port="1" to-layer="7" to-port="1" />
+        <edge from-layer="7" from-port="2" to-layer="8" to-port="2" />
+        <edge from-layer="8" from-port="3" to-layer="10" to-port="0" />
+        <edge from-layer="9" from-port="0" to-layer="10" to-port="1" />
+        <edge from-layer="10" from-port="2" to-layer="11" to-port="0" />
+        <edge from-layer="11" from-port="1" to-layer="12" to-port="0" />
+        <edge from-layer="13" from-port="0" to-layer="14" to-port="0" />
+        <edge from-layer="14" from-port="2" to-layer="15" to-port="0" />
+        <edge from-layer="15" from-port="3" to-layer="17" to-port="0" />
+        <edge from-layer="16" from-port="0" to-layer="17" to-port="1" />
+        <edge from-layer="17" from-port="2" to-layer="18" to-port="0" />
+        <edge from-layer="18" from-port="1" to-layer="19" to-port="0" />
+    </edges>
+    <rt_info>
+        <bos_token_id value="1" />
+        <chat_template value="{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif false == true and not '<<SYS>>' in messages[0]['content'] %}{% set loop_messages = messages %}{% set system_message = 'You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don\'t know the answer to a question, please don\'t share false information.' %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<<SYS>>\n' + system_message + '\n<</SYS>>\n\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ bos_token + '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'system' %}{{ '<<SYS>>\n' + content.strip() + '\n<</SYS>>\n\n' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}" />
+        <eos_token_id value="2" />
+        <original_tokenizer_class value="<class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
+    </rt_info>
+</net>
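This graph embeds the same SentencePiece payload and is configured with add_bos="true", add_eos="false" and reverse="true"; the ScatterNDUpdate/Reverse pairs densify the ragged token output and flip it, which amounts to left-aligning the batch, consistent with the left padding declared in tokenizer_config.json below, and the two Convert layers expose the "input_ids" and "attention_mask" outputs. A sketch of running it, under the same assumptions as the detokenizer example above and additionally assuming an OpenVINO build recent enough to accept string tensors as input:

```python
import numpy as np
import openvino as ov
import openvino_tokenizers  # noqa: F401  (registers SentencepieceTokenizer and friends)

core = ov.Core()
tokenizer = core.compile_model("openvino_tokenizer.xml", "CPU")

texts = np.array(["história clínica", "paciente em acompanhamento ambulatorial"])
result = tokenizer(texts)
for out in tokenizer.outputs:  # the "input_ids" and "attention_mask" ports above
    print(out.get_any_name(), result[out])
```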
special_tokens_map.json
ADDED
@@ -0,0 +1,23 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
tokenizer_config.json
ADDED
@@ -0,0 +1,43 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": null,
+  "padding_side": "left",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
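In plain terms: the Hugging Face-side tokenizer prepends `<s>` (id 1) but never appends `</s>` (id 2), treats ids 0-2 as the only special tokens, defines no pad token, and pads on the left when one is supplied, which is the usual setup for decoder-only generation. A short sketch of what that looks like through `transformers` (assumes `transformers` and `sentencepiece` are installed; the path is a placeholder):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./Clinical-BR-LlaMA-2-7B-ov")

ids = tok("exame físico sem alterações").input_ids
assert ids[0] == tok.bos_token_id == 1   # add_bos_token: true
assert ids[-1] != tok.eos_token_id       # add_eos_token: false

# pad_token is null above, so choose one before batching; padding lands on the left.
tok.pad_token = tok.unk_token
batch = tok(["dor", "dor torácica em aperto"], padding=True)
print(batch["input_ids"][0])
```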