doberst commited on
Commit
2e76519
·
verified ·
1 Parent(s): 46fdb0f

Upload 11 files

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,3 +1,18 @@
1
- ---
2
- license: apache-2.0
3
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ This is the [Deepseek-R1-Distill-Qwen-7B](https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B) model, converted to OpenVINO with symmetric channel-wise INT4 weight compression.
2
+
3
+ To run inference on this model, install openvino-genai (`pip install openvino-genai`) and run [llm_chat_deepseek.py](https://gist.github.com/helena-intel/554fba91f380df590ecc9245abdad33f)
4
+
5
+ Step-by-step instructions for best results:
6
+
7
+ ```
8
+ pip install --pre --upgrade openvino openvino-genai openvino-tokenizers --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
9
+ pip install huggingface-hub
10
+ huggingface-cli download helenai/DeepSeek-R1-Distill-Qwen-7B-ov-int4 --local-dir DeepSeek-R1-Distill-Qwen-7B-ov-int4
11
+ curl -O https://gist.githubusercontent.com/helena-intel/554fba91f380df590ecc9245abdad33f/raw/04f495164482823aa7e6ba1119a5c43e275d08f5/llm_chat_deepseek.py
12
+ python llm_chat_deepseek.py DeepSeek-R1-Distill-Qwen-7B-ov-int4 GPU
13
+ ```
14
+
15
+ > [!NOTE]
16
+ > The last line specifies the device to run inference on. GPU is recommended for recent Intel laptops with integrated graphics, or for Intel discrete graphics. Change to CPU if you do not have an Intel GPU, or to NPU if you have a system with an Intel NPU.
17
+
18
+ Gradio chatbot notebook using this model: https://gist.github.com/helena-intel/69e1c2921a2bcb618fdd7cdfb0bd0202
config.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
3
+ "architectures": [
4
+ "Qwen2ForCausalLM"
5
+ ],
6
+ "attention_dropout": 0.0,
7
+ "bos_token_id": 151643,
8
+ "eos_token_id": 151643,
9
+ "hidden_act": "silu",
10
+ "hidden_size": 3584,
11
+ "initializer_range": 0.02,
12
+ "intermediate_size": 18944,
13
+ "max_position_embeddings": 131072,
14
+ "max_window_layers": 28,
15
+ "model_type": "qwen2",
16
+ "num_attention_heads": 28,
17
+ "num_hidden_layers": 28,
18
+ "num_key_value_heads": 4,
19
+ "rms_norm_eps": 1e-06,
20
+ "rope_scaling": null,
21
+ "rope_theta": 10000,
22
+ "sliding_window": null,
23
+ "tie_word_embeddings": false,
24
+ "torch_dtype": "bfloat16",
25
+ "transformers_version": "4.45.0",
26
+ "use_cache": true,
27
+ "use_mrope": false,
28
+ "use_sliding_window": false,
29
+ "vocab_size": 152064
30
+ }
generation_config.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 151646,
4
+ "do_sample": true,
5
+ "eos_token_id": 151643,
6
+ "temperature": 0.6,
7
+ "top_p": 0.95,
8
+ "transformers_version": "4.45.0"
9
+ }
openvino_detokenizer.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ef1d389f62ba5765eee09bdf191f498e1f1a4390c1aae06939298f0c84dba539
3
+ size 2189649
openvino_detokenizer.xml ADDED
@@ -0,0 +1,281 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0"?>
2
+ <net name="detokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_68548" type="Parameter" version="opset1">
5
+ <data shape="?,?" element_type="i64" />
6
+ <output>
7
+ <port id="0" precision="I64" names="Parameter_68548">
8
+ <dim>-1</dim>
9
+ <dim>-1</dim>
10
+ </port>
11
+ </output>
12
+ </layer>
13
+ <layer id="1" name="Convert_68570" type="Convert" version="opset1">
14
+ <data destination_type="i32" />
15
+ <input>
16
+ <port id="0" precision="I64">
17
+ <dim>-1</dim>
18
+ <dim>-1</dim>
19
+ </port>
20
+ </input>
21
+ <output>
22
+ <port id="1" precision="I32">
23
+ <dim>-1</dim>
24
+ <dim>-1</dim>
25
+ </port>
26
+ </output>
27
+ </layer>
28
+ <layer id="2" name="Constant_68550" type="Const" version="opset1">
29
+ <data element_type="i32" shape="151665" offset="0" size="606660" />
30
+ <output>
31
+ <port id="0" precision="I32">
32
+ <dim>151665</dim>
33
+ </port>
34
+ </output>
35
+ </layer>
36
+ <layer id="3" name="Constant_68552" type="Const" version="opset1">
37
+ <data element_type="i32" shape="151665" offset="606660" size="606660" />
38
+ <output>
39
+ <port id="0" precision="I32">
40
+ <dim>151665</dim>
41
+ </port>
42
+ </output>
43
+ </layer>
44
+ <layer id="4" name="Constant_68554" type="Const" version="opset1">
45
+ <data element_type="u8" shape="976273" offset="1213320" size="976273" />
46
+ <output>
47
+ <port id="0" precision="U8">
48
+ <dim>976273</dim>
49
+ </port>
50
+ </output>
51
+ </layer>
52
+ <layer id="5" name="Constant_68558" type="Const" version="opset1">
53
+ <data element_type="i32" shape="9" offset="2189593" size="36" />
54
+ <output>
55
+ <port id="0" precision="I32">
56
+ <dim>9</dim>
57
+ </port>
58
+ </output>
59
+ </layer>
60
+ <layer id="6" name="Constant_68556" type="Const" version="opset1">
61
+ <data element_type="i32" shape="1" offset="2189629" size="4" />
62
+ <output>
63
+ <port id="0" precision="I32">
64
+ <dim>1</dim>
65
+ </port>
66
+ </output>
67
+ </layer>
68
+ <layer id="7" name="Constant_68555" type="Const" version="opset1">
69
+ <data element_type="i32" shape="1" offset="2189633" size="4" />
70
+ <output>
71
+ <port id="0" precision="I32">
72
+ <dim>1</dim>
73
+ </port>
74
+ </output>
75
+ </layer>
76
+ <layer id="8" name="Constant_68557" type="Const" version="opset1">
77
+ <data element_type="i32" shape="1" offset="2189637" size="4" />
78
+ <output>
79
+ <port id="0" precision="I32">
80
+ <dim>1</dim>
81
+ </port>
82
+ </output>
83
+ </layer>
84
+ <layer id="9" name="Constant_68560" type="Const" version="opset1">
85
+ <data element_type="i64" shape="1" offset="2189641" size="8" />
86
+ <output>
87
+ <port id="0" precision="I64">
88
+ <dim>1</dim>
89
+ </port>
90
+ </output>
91
+ </layer>
92
+ <layer id="10" name="Slice_68559" type="Slice" version="opset8">
93
+ <input>
94
+ <port id="0" precision="I32">
95
+ <dim>9</dim>
96
+ </port>
97
+ <port id="1" precision="I32">
98
+ <dim>1</dim>
99
+ </port>
100
+ <port id="2" precision="I32">
101
+ <dim>1</dim>
102
+ </port>
103
+ <port id="3" precision="I32">
104
+ <dim>1</dim>
105
+ </port>
106
+ <port id="4" precision="I64">
107
+ <dim>1</dim>
108
+ </port>
109
+ </input>
110
+ <output>
111
+ <port id="5" precision="I32">
112
+ <dim>9</dim>
113
+ </port>
114
+ </output>
115
+ </layer>
116
+ <layer id="11" name="VocabDecoder_68561" type="VocabDecoder" version="extension">
117
+ <data skip_tokens="" />
118
+ <input>
119
+ <port id="0" precision="I32">
120
+ <dim>-1</dim>
121
+ <dim>-1</dim>
122
+ </port>
123
+ <port id="1" precision="I32">
124
+ <dim>151665</dim>
125
+ </port>
126
+ <port id="2" precision="I32">
127
+ <dim>151665</dim>
128
+ </port>
129
+ <port id="3" precision="U8">
130
+ <dim>976273</dim>
131
+ </port>
132
+ <port id="4" precision="I32">
133
+ <dim>9</dim>
134
+ </port>
135
+ </input>
136
+ <output>
137
+ <port id="5" precision="I32">
138
+ <dim>-1</dim>
139
+ </port>
140
+ <port id="6" precision="I32">
141
+ <dim>-1</dim>
142
+ </port>
143
+ <port id="7" precision="I32">
144
+ <dim>-1</dim>
145
+ </port>
146
+ <port id="8" precision="I32">
147
+ <dim>-1</dim>
148
+ </port>
149
+ <port id="9" precision="U8">
150
+ <dim>-1</dim>
151
+ </port>
152
+ </output>
153
+ </layer>
154
+ <layer id="12" name="FuzeRagged_68562" type="FuzeRagged" version="extension">
155
+ <input>
156
+ <port id="0" precision="I32">
157
+ <dim>-1</dim>
158
+ </port>
159
+ <port id="1" precision="I32">
160
+ <dim>-1</dim>
161
+ </port>
162
+ <port id="2" precision="I32">
163
+ <dim>-1</dim>
164
+ </port>
165
+ <port id="3" precision="I32">
166
+ <dim>-1</dim>
167
+ </port>
168
+ </input>
169
+ <output>
170
+ <port id="4" precision="I32">
171
+ <dim>-1</dim>
172
+ </port>
173
+ <port id="5" precision="I32">
174
+ <dim>-1</dim>
175
+ </port>
176
+ </output>
177
+ </layer>
178
+ <layer id="13" name="UTF8Validate_68563" type="UTF8Validate" version="extension">
179
+ <data replace_mode="true" />
180
+ <input>
181
+ <port id="0" precision="I32">
182
+ <dim>-1</dim>
183
+ </port>
184
+ <port id="1" precision="I32">
185
+ <dim>-1</dim>
186
+ </port>
187
+ <port id="2" precision="U8">
188
+ <dim>-1</dim>
189
+ </port>
190
+ </input>
191
+ <output>
192
+ <port id="3" precision="I32">
193
+ <dim>-1</dim>
194
+ </port>
195
+ <port id="4" precision="I32">
196
+ <dim>-1</dim>
197
+ </port>
198
+ <port id="5" precision="U8">
199
+ <dim>-1</dim>
200
+ </port>
201
+ </output>
202
+ </layer>
203
+ <layer id="14" name="StringTensorPack_68564" type="StringTensorPack" version="opset15">
204
+ <input>
205
+ <port id="0" precision="I32">
206
+ <dim>-1</dim>
207
+ </port>
208
+ <port id="1" precision="I32">
209
+ <dim>-1</dim>
210
+ </port>
211
+ <port id="2" precision="U8">
212
+ <dim>-1</dim>
213
+ </port>
214
+ </input>
215
+ <output>
216
+ <port id="3" precision="STRING" names="string_output">
217
+ <dim>-1</dim>
218
+ </port>
219
+ </output>
220
+ </layer>
221
+ <layer id="15" name="Result_68565" type="Result" version="opset1">
222
+ <input>
223
+ <port id="0" precision="STRING">
224
+ <dim>-1</dim>
225
+ </port>
226
+ </input>
227
+ </layer>
228
+ </layers>
229
+ <edges>
230
+ <edge from-layer="0" from-port="0" to-layer="1" to-port="0" />
231
+ <edge from-layer="1" from-port="1" to-layer="11" to-port="0" />
232
+ <edge from-layer="2" from-port="0" to-layer="11" to-port="1" />
233
+ <edge from-layer="3" from-port="0" to-layer="11" to-port="2" />
234
+ <edge from-layer="4" from-port="0" to-layer="11" to-port="3" />
235
+ <edge from-layer="5" from-port="0" to-layer="10" to-port="0" />
236
+ <edge from-layer="6" from-port="0" to-layer="10" to-port="1" />
237
+ <edge from-layer="7" from-port="0" to-layer="10" to-port="2" />
238
+ <edge from-layer="8" from-port="0" to-layer="10" to-port="3" />
239
+ <edge from-layer="9" from-port="0" to-layer="10" to-port="4" />
240
+ <edge from-layer="10" from-port="5" to-layer="11" to-port="4" />
241
+ <edge from-layer="11" from-port="5" to-layer="12" to-port="0" />
242
+ <edge from-layer="11" from-port="9" to-layer="13" to-port="2" />
243
+ <edge from-layer="11" from-port="8" to-layer="12" to-port="3" />
244
+ <edge from-layer="11" from-port="7" to-layer="12" to-port="2" />
245
+ <edge from-layer="11" from-port="6" to-layer="12" to-port="1" />
246
+ <edge from-layer="12" from-port="4" to-layer="13" to-port="0" />
247
+ <edge from-layer="12" from-port="5" to-layer="13" to-port="1" />
248
+ <edge from-layer="13" from-port="3" to-layer="14" to-port="0" />
249
+ <edge from-layer="13" from-port="4" to-layer="14" to-port="1" />
250
+ <edge from-layer="13" from-port="5" to-layer="14" to-port="2" />
251
+ <edge from-layer="14" from-port="3" to-layer="15" to-port="0" />
252
+ </edges>
253
+ <rt_info>
254
+ <add_attention_mask value="True" />
255
+ <add_prefix_space />
256
+ <add_special_tokens value="True" />
257
+ <bos_token_id value="151646" />
258
+ <chat_template value="{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'&lt;|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'&lt;|Assistant|>&lt;|tool▁calls▁begin|>&lt;|tool▁call▁begin|>' + tool['type'] + '&lt;|tool▁sep|>' + tool['function']['name'] + '\n' + '```json' + '\n' + tool['function']['arguments'] + '\n' + '```' + '&lt;|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\n' + '&lt;|tool▁call▁begin|>' + tool['type'] + '&lt;|tool▁sep|>' + tool['function']['name'] + '\n' + '```json' + '\n' + tool['function']['arguments'] + '\n' + '```' + '&lt;|tool▁call▁end|>'}}{{'&lt;|tool▁calls▁end|>&lt;|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'&lt;|tool▁outputs▁end|>' + message['content'] + '&lt;|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '&lt;/think>' in content %}{% set content = content.split('&lt;/think>')[-1] %}{% endif %}{{'&lt;|Assistant|>' + content + '&lt;|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'&lt;|tool▁outputs▁begin|>&lt;|tool▁output▁begin|>' + message['content'] + '&lt;|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\n&lt;|tool▁output▁begin|>' + message['content'] + 
'&lt;|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'&lt;|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'&lt;|Assistant|>&lt;think>\n'}}{% endif %}" />
259
+ <clean_up_tokenization_spaces />
260
+ <detokenizer_input_type value="i64" />
261
+ <eos_token_id value="151643" />
262
+ <handle_special_tokens_with_re />
263
+ <max_length />
264
+ <number_of_inputs value="1" />
265
+ <openvino_tokenizers_version value="2025.1.0.0-501-330cb574aec" />
266
+ <openvino_version value="2025.1.0-18119-f124b593312" />
267
+ <original_tokenizer_class value="&lt;class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
268
+ <pad_token_id value="151643" />
269
+ <sentencepiece_version value="0.2.0" />
270
+ <skip_special_tokens value="True" />
271
+ <streaming_detokenizer value="False" />
272
+ <tiktoken_version value="0.8.0" />
273
+ <tokenizer_output_type value="i64" />
274
+ <tokenizers_version value="0.20.1" />
275
+ <transformers_version value="4.45.0" />
276
+ <use_max_padding value="False" />
277
+ <use_sentencepiece_backend value="False" />
278
+ <utf8_replace_mode value="replace" />
279
+ <with_detokenizer value="True" />
280
+ </rt_info>
281
+ </net>
openvino_model.xml ADDED
The diff for this file is too large to render. See raw diff
 
openvino_tokenizer.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b8a11e94ca0a95c1ccdd78453dd85d806ad0cdaf404ede9885e606bee1a613c
3
+ size 5588655
openvino_tokenizer.xml ADDED
@@ -0,0 +1,797 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0"?>
2
+ <net name="tokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_68421" type="Parameter" version="opset1">
5
+ <data shape="?" element_type="string" />
6
+ <output>
7
+ <port id="0" precision="STRING" names="Parameter_68421">
8
+ <dim>-1</dim>
9
+ </port>
10
+ </output>
11
+ </layer>
12
+ <layer id="1" name="Constant_68532" type="Const" version="opset1">
13
+ <data element_type="i32" shape="" offset="0" size="4" />
14
+ <output>
15
+ <port id="0" precision="I32" />
16
+ </output>
17
+ </layer>
18
+ <layer id="2" name="Constant_68533" type="Const" version="opset1">
19
+ <data element_type="i32" shape="" offset="4" size="4" />
20
+ <output>
21
+ <port id="0" precision="I32" />
22
+ </output>
23
+ </layer>
24
+ <layer id="3" name="Constant_68534" type="Const" version="opset1">
25
+ <data element_type="i32" shape="1" offset="8" size="4" />
26
+ <output>
27
+ <port id="0" precision="I32">
28
+ <dim>1</dim>
29
+ </port>
30
+ </output>
31
+ </layer>
32
+ <layer id="4" name="Constant_68427" type="Const" version="opset1">
33
+ <data element_type="i64" shape="" offset="12" size="8" />
34
+ <output>
35
+ <port id="0" precision="I64" />
36
+ </output>
37
+ </layer>
38
+ <layer id="5" name="StringTensorUnpack_68422" type="StringTensorUnpack" version="opset15">
39
+ <input>
40
+ <port id="0" precision="STRING">
41
+ <dim>-1</dim>
42
+ </port>
43
+ </input>
44
+ <output>
45
+ <port id="1" precision="I32">
46
+ <dim>-1</dim>
47
+ </port>
48
+ <port id="2" precision="I32">
49
+ <dim>-1</dim>
50
+ </port>
51
+ <port id="3" precision="U8">
52
+ <dim>-1</dim>
53
+ </port>
54
+ </output>
55
+ </layer>
56
+ <layer id="6" name="ShapeOf_68423" type="ShapeOf" version="opset3">
57
+ <data output_type="i64" />
58
+ <input>
59
+ <port id="0" precision="I32">
60
+ <dim>-1</dim>
61
+ </port>
62
+ </input>
63
+ <output>
64
+ <port id="1" precision="I64">
65
+ <dim>1</dim>
66
+ </port>
67
+ </output>
68
+ </layer>
69
+ <layer id="7" name="Constant_68424" type="Const" version="opset1">
70
+ <data element_type="i64" shape="" offset="12" size="8" />
71
+ <output>
72
+ <port id="0" precision="I64" />
73
+ </output>
74
+ </layer>
75
+ <layer id="8" name="Constant_68425" type="Const" version="opset1">
76
+ <data element_type="i64" shape="" offset="12" size="8" />
77
+ <output>
78
+ <port id="0" precision="I64" />
79
+ </output>
80
+ </layer>
81
+ <layer id="9" name="Gather_68426" type="Gather" version="opset8">
82
+ <data batch_dims="0" />
83
+ <input>
84
+ <port id="0" precision="I64">
85
+ <dim>1</dim>
86
+ </port>
87
+ <port id="1" precision="I64" />
88
+ <port id="2" precision="I64" />
89
+ </input>
90
+ <output>
91
+ <port id="3" precision="I64" />
92
+ </output>
93
+ </layer>
94
+ <layer id="10" name="Constant_68428" type="Const" version="opset1">
95
+ <data element_type="i64" shape="" offset="20" size="8" />
96
+ <output>
97
+ <port id="0" precision="I64" />
98
+ </output>
99
+ </layer>
100
+ <layer id="11" name="Range_68429" type="Range" version="opset4">
101
+ <data output_type="i32" />
102
+ <input>
103
+ <port id="0" precision="I64" />
104
+ <port id="1" precision="I64" />
105
+ <port id="2" precision="I64" />
106
+ </input>
107
+ <output>
108
+ <port id="3" precision="I32">
109
+ <dim>-1</dim>
110
+ </port>
111
+ </output>
112
+ </layer>
113
+ <layer id="12" name="Constant_68430" type="Const" version="opset1">
114
+ <data element_type="i64" shape="" offset="20" size="8" />
115
+ <output>
116
+ <port id="0" precision="I64" />
117
+ </output>
118
+ </layer>
119
+ <layer id="13" name="Constant_68431" type="Const" version="opset1">
120
+ <data element_type="i64" shape="" offset="20" size="8" />
121
+ <output>
122
+ <port id="0" precision="I64" />
123
+ </output>
124
+ </layer>
125
+ <layer id="14" name="Add_68432" type="Add" version="opset1">
126
+ <data auto_broadcast="numpy" />
127
+ <input>
128
+ <port id="0" precision="I64" />
129
+ <port id="1" precision="I64" />
130
+ </input>
131
+ <output>
132
+ <port id="2" precision="I64" />
133
+ </output>
134
+ </layer>
135
+ <layer id="15" name="Constant_68433" type="Const" version="opset1">
136
+ <data element_type="i64" shape="" offset="20" size="8" />
137
+ <output>
138
+ <port id="0" precision="I64" />
139
+ </output>
140
+ </layer>
141
+ <layer id="16" name="Range_68434" type="Range" version="opset4">
142
+ <data output_type="i32" />
143
+ <input>
144
+ <port id="0" precision="I64" />
145
+ <port id="1" precision="I64" />
146
+ <port id="2" precision="I64" />
147
+ </input>
148
+ <output>
149
+ <port id="3" precision="I32">
150
+ <dim>-1</dim>
151
+ </port>
152
+ </output>
153
+ </layer>
154
+ <layer id="17" name="Constant_68496" type="Const" version="opset1">
155
+ <data element_type="u8" shape="443" offset="28" size="443" />
156
+ <output>
157
+ <port id="0" precision="U8">
158
+ <dim>443</dim>
159
+ </port>
160
+ </output>
161
+ </layer>
162
+ <layer id="18" name="SpecialTokensSplit_68497" type="SpecialTokensSplit" version="extension">
163
+ <input>
164
+ <port id="0" precision="I32">
165
+ <dim>-1</dim>
166
+ </port>
167
+ <port id="1" precision="I32">
168
+ <dim>-1</dim>
169
+ </port>
170
+ <port id="2" precision="I32">
171
+ <dim>-1</dim>
172
+ </port>
173
+ <port id="3" precision="I32">
174
+ <dim>-1</dim>
175
+ </port>
176
+ <port id="4" precision="U8">
177
+ <dim>-1</dim>
178
+ </port>
179
+ <port id="5" precision="U8">
180
+ <dim>443</dim>
181
+ </port>
182
+ </input>
183
+ <output>
184
+ <port id="6" precision="I32">
185
+ <dim>-1</dim>
186
+ </port>
187
+ <port id="7" precision="I32">
188
+ <dim>-1</dim>
189
+ </port>
190
+ <port id="8" precision="I32">
191
+ <dim>-1</dim>
192
+ </port>
193
+ <port id="9" precision="I32">
194
+ <dim>-1</dim>
195
+ </port>
196
+ <port id="10" precision="U8">
197
+ <dim>-1</dim>
198
+ </port>
199
+ <port id="11" precision="BOOL">
200
+ <dim>-1</dim>
201
+ </port>
202
+ </output>
203
+ </layer>
204
+ <layer id="19" name="CharsMapNormalization_68498" type="CharsMapNormalization" version="extension">
205
+ <data add_dummy_prefix="false" remove_extra_whitespaces="false" escape_whitespaces="false" normalization_form="nfc" case_fold="false" nmt="false" />
206
+ <input>
207
+ <port id="0" precision="I32">
208
+ <dim>-1</dim>
209
+ </port>
210
+ <port id="1" precision="I32">
211
+ <dim>-1</dim>
212
+ </port>
213
+ <port id="2" precision="U8">
214
+ <dim>-1</dim>
215
+ </port>
216
+ <port id="3" precision="BOOL">
217
+ <dim>-1</dim>
218
+ </port>
219
+ </input>
220
+ <output>
221
+ <port id="4" precision="I32">
222
+ <dim>-1</dim>
223
+ </port>
224
+ <port id="5" precision="I32">
225
+ <dim>-1</dim>
226
+ </port>
227
+ <port id="6" precision="U8">
228
+ <dim>-1</dim>
229
+ </port>
230
+ <port id="7" precision="BOOL">
231
+ <dim>-1</dim>
232
+ </port>
233
+ </output>
234
+ </layer>
235
+ <layer id="20" name="Constant_68500" type="Const" version="opset1">
236
+ <data element_type="u8" shape="110" offset="471" size="110" />
237
+ <output>
238
+ <port id="0" precision="U8">
239
+ <dim>110</dim>
240
+ </port>
241
+ </output>
242
+ </layer>
243
+ <layer id="21" name="RegexSplit_68501" type="RegexSplit" version="extension">
244
+ <data behaviour="isolate" invert="false" max_splits="-1" />
245
+ <input>
246
+ <port id="0" precision="I32">
247
+ <dim>-1</dim>
248
+ </port>
249
+ <port id="1" precision="I32">
250
+ <dim>-1</dim>
251
+ </port>
252
+ <port id="2" precision="I32">
253
+ <dim>-1</dim>
254
+ </port>
255
+ <port id="3" precision="I32">
256
+ <dim>-1</dim>
257
+ </port>
258
+ <port id="4" precision="U8">
259
+ <dim>-1</dim>
260
+ </port>
261
+ <port id="5" precision="BOOL">
262
+ <dim>-1</dim>
263
+ </port>
264
+ <port id="6" precision="U8">
265
+ <dim>110</dim>
266
+ </port>
267
+ </input>
268
+ <output>
269
+ <port id="7" precision="I32">
270
+ <dim>-1</dim>
271
+ </port>
272
+ <port id="8" precision="I32">
273
+ <dim>-1</dim>
274
+ </port>
275
+ <port id="9" precision="I32">
276
+ <dim>-1</dim>
277
+ </port>
278
+ <port id="10" precision="I32">
279
+ <dim>-1</dim>
280
+ </port>
281
+ <port id="11" precision="U8">
282
+ <dim>-1</dim>
283
+ </port>
284
+ <port id="12" precision="BOOL">
285
+ <dim>-1</dim>
286
+ </port>
287
+ </output>
288
+ </layer>
289
+ <layer id="22" name="Constant_68503" type="Const" version="opset1">
290
+ <data element_type="i32" shape="151665" offset="581" size="606660" />
291
+ <output>
292
+ <port id="0" precision="I32">
293
+ <dim>151665</dim>
294
+ </port>
295
+ </output>
296
+ </layer>
297
+ <layer id="23" name="Constant_68505" type="Const" version="opset1">
298
+ <data element_type="i32" shape="151665" offset="607241" size="606660" />
299
+ <output>
300
+ <port id="0" precision="I32">
301
+ <dim>151665</dim>
302
+ </port>
303
+ </output>
304
+ </layer>
305
+ <layer id="24" name="Constant_68507" type="Const" version="opset1">
306
+ <data element_type="u8" shape="976273" offset="1213901" size="976273" />
307
+ <output>
308
+ <port id="0" precision="U8">
309
+ <dim>976273</dim>
310
+ </port>
311
+ </output>
312
+ </layer>
313
+ <layer id="25" name="Constant_68515" type="Const" version="opset1">
314
+ <data element_type="i32" shape="151387" offset="2190174" size="605548" />
315
+ <output>
316
+ <port id="0" precision="I32">
317
+ <dim>151387</dim>
318
+ </port>
319
+ </output>
320
+ </layer>
321
+ <layer id="26" name="Constant_68517" type="Const" version="opset1">
322
+ <data element_type="i32" shape="151387" offset="2795722" size="605548" />
323
+ <output>
324
+ <port id="0" precision="I32">
325
+ <dim>151387</dim>
326
+ </port>
327
+ </output>
328
+ </layer>
329
+ <layer id="27" name="Constant_68519" type="Const" version="opset1">
330
+ <data element_type="u8" shape="491359" offset="3401270" size="491359" />
331
+ <output>
332
+ <port id="0" precision="U8">
333
+ <dim>491359</dim>
334
+ </port>
335
+ </output>
336
+ </layer>
337
+ <layer id="28" name="Constant_68521" type="Const" version="opset1">
338
+ <data element_type="i32" shape="151387" offset="3892629" size="605548" />
339
+ <output>
340
+ <port id="0" precision="I32">
341
+ <dim>151387</dim>
342
+ </port>
343
+ </output>
344
+ </layer>
345
+ <layer id="29" name="Constant_68523" type="Const" version="opset1">
346
+ <data element_type="i32" shape="151387" offset="4498177" size="605548" />
347
+ <output>
348
+ <port id="0" precision="I32">
349
+ <dim>151387</dim>
350
+ </port>
351
+ </output>
352
+ </layer>
353
+ <layer id="30" name="Constant_68525" type="Const" version="opset1">
354
+ <data element_type="u8" shape="484354" offset="5103725" size="484354" />
355
+ <output>
356
+ <port id="0" precision="U8">
357
+ <dim>484354</dim>
358
+ </port>
359
+ </output>
360
+ </layer>
361
+ <layer id="31" name="Constant_68509" type="Const" version="opset1">
362
+ <data element_type="i32" shape="22" offset="5588079" size="88" />
363
+ <output>
364
+ <port id="0" precision="I32">
365
+ <dim>22</dim>
366
+ </port>
367
+ </output>
368
+ </layer>
369
+ <layer id="32" name="Constant_68511" type="Const" version="opset1">
370
+ <data element_type="i32" shape="22" offset="5588167" size="88" />
371
+ <output>
372
+ <port id="0" precision="I32">
373
+ <dim>22</dim>
374
+ </port>
375
+ </output>
376
+ </layer>
377
+ <layer id="33" name="Constant_68513" type="Const" version="opset1">
378
+ <data element_type="u8" shape="304" offset="5588255" size="304" />
379
+ <output>
380
+ <port id="0" precision="U8">
381
+ <dim>304</dim>
382
+ </port>
383
+ </output>
384
+ </layer>
385
+ <layer id="34" name="Constant_68526" type="Const" version="opset1">
386
+ <data element_type="i32" shape="22" offset="5588559" size="88" />
387
+ <output>
388
+ <port id="0" precision="I32">
389
+ <dim>22</dim>
390
+ </port>
391
+ </output>
392
+ </layer>
393
+ <layer id="35" name="BPETokenizer_68527" type="BPETokenizer" version="extension">
394
+ <data unk_token="" fuse_unk="false" suffix_indicator="" end_suffix="" byte_fallback="false" cache_capacity="30328" />
395
+ <input>
396
+ <port id="0" precision="I32">
397
+ <dim>-1</dim>
398
+ </port>
399
+ <port id="1" precision="I32">
400
+ <dim>-1</dim>
401
+ </port>
402
+ <port id="2" precision="I32">
403
+ <dim>-1</dim>
404
+ </port>
405
+ <port id="3" precision="I32">
406
+ <dim>-1</dim>
407
+ </port>
408
+ <port id="4" precision="U8">
409
+ <dim>-1</dim>
410
+ </port>
411
+ <port id="5" precision="I32">
412
+ <dim>151665</dim>
413
+ </port>
414
+ <port id="6" precision="I32">
415
+ <dim>151665</dim>
416
+ </port>
417
+ <port id="7" precision="U8">
418
+ <dim>976273</dim>
419
+ </port>
420
+ <port id="8" precision="I32">
421
+ <dim>151387</dim>
422
+ </port>
423
+ <port id="9" precision="I32">
424
+ <dim>151387</dim>
425
+ </port>
426
+ <port id="10" precision="U8">
427
+ <dim>491359</dim>
428
+ </port>
429
+ <port id="11" precision="I32">
430
+ <dim>151387</dim>
431
+ </port>
432
+ <port id="12" precision="I32">
433
+ <dim>151387</dim>
434
+ </port>
435
+ <port id="13" precision="U8">
436
+ <dim>484354</dim>
437
+ </port>
438
+ <port id="14" precision="I32">
439
+ <dim>22</dim>
440
+ </port>
441
+ <port id="15" precision="I32">
442
+ <dim>22</dim>
443
+ </port>
444
+ <port id="16" precision="U8">
445
+ <dim>304</dim>
446
+ </port>
447
+ <port id="17" precision="I32">
448
+ <dim>22</dim>
449
+ </port>
450
+ </input>
451
+ <output>
452
+ <port id="18" precision="I32">
453
+ <dim>-1</dim>
454
+ </port>
455
+ <port id="19" precision="I32">
456
+ <dim>-1</dim>
457
+ </port>
458
+ <port id="20" precision="I32">
459
+ <dim>-1</dim>
460
+ </port>
461
+ </output>
462
+ </layer>
463
+ <layer id="36" name="Subtract_68528" type="Subtract" version="opset1">
464
+ <data auto_broadcast="numpy" />
465
+ <input>
466
+ <port id="0" precision="I32">
467
+ <dim>-1</dim>
468
+ </port>
469
+ <port id="1" precision="I32">
470
+ <dim>-1</dim>
471
+ </port>
472
+ </input>
473
+ <output>
474
+ <port id="2" precision="I32">
475
+ <dim>-1</dim>
476
+ </port>
477
+ </output>
478
+ </layer>
479
+ <layer id="37" name="Constant_68529" type="Const" version="opset1">
480
+ <data element_type="i32" shape="" offset="5588647" size="4" />
481
+ <output>
482
+ <port id="0" precision="I32" />
483
+ </output>
484
+ </layer>
485
+ <layer id="38" name="Minimum_68530" type="Minimum" version="opset1">
486
+ <data auto_broadcast="numpy" />
487
+ <input>
488
+ <port id="0" precision="I32">
489
+ <dim>-1</dim>
490
+ </port>
491
+ <port id="1" precision="I32" />
492
+ </input>
493
+ <output>
494
+ <port id="2" precision="I32">
495
+ <dim>-1</dim>
496
+ </port>
497
+ </output>
498
+ </layer>
499
+ <layer id="39" name="Subtract_68531" type="Subtract" version="opset1">
500
+ <data auto_broadcast="numpy" />
501
+ <input>
502
+ <port id="0" precision="I32">
503
+ <dim>-1</dim>
504
+ </port>
505
+ <port id="1" precision="I32">
506
+ <dim>-1</dim>
507
+ </port>
508
+ </input>
509
+ <output>
510
+ <port id="2" precision="I32">
511
+ <dim>-1</dim>
512
+ </port>
513
+ </output>
514
+ </layer>
515
+ <layer id="40" name="Constant_68535" type="Const" version="opset1">
516
+ <data element_type="i32" shape="2" offset="12" size="8" />
517
+ <output>
518
+ <port id="0" precision="I32">
519
+ <dim>2</dim>
520
+ </port>
521
+ </output>
522
+ </layer>
523
+ <layer id="41" name="CombineSegments_68536" type="CombineSegments" version="extension">
524
+ <input>
525
+ <port id="0" precision="I32" />
526
+ <port id="1" precision="I32" />
527
+ <port id="2" precision="I32">
528
+ <dim>1</dim>
529
+ </port>
530
+ <port id="3" precision="I32">
531
+ <dim>-1</dim>
532
+ </port>
533
+ <port id="4" precision="I32">
534
+ <dim>-1</dim>
535
+ </port>
536
+ <port id="5" precision="I32">
537
+ <dim>-1</dim>
538
+ </port>
539
+ <port id="6" precision="I32">
540
+ <dim>2</dim>
541
+ </port>
542
+ </input>
543
+ <output>
544
+ <port id="7" precision="I32">
545
+ <dim>-1</dim>
546
+ </port>
547
+ <port id="8" precision="I32">
548
+ <dim>-1</dim>
549
+ </port>
550
+ <port id="9" precision="I32">
551
+ <dim>-1</dim>
552
+ </port>
553
+ <port id="10" precision="I32">
554
+ <dim>-1</dim>
555
+ </port>
556
+ <port id="11" precision="I32">
557
+ <dim>-1</dim>
558
+ </port>
559
+ <port id="12" precision="I32">
560
+ <dim>-1</dim>
561
+ </port>
562
+ </output>
563
+ </layer>
564
+ <layer id="42" name="Subtract_68537" type="Subtract" version="opset1">
565
+ <data auto_broadcast="numpy" />
566
+ <input>
567
+ <port id="0" precision="I32">
568
+ <dim>-1</dim>
569
+ </port>
570
+ <port id="1" precision="I32">
571
+ <dim>-1</dim>
572
+ </port>
573
+ </input>
574
+ <output>
575
+ <port id="2" precision="I32">
576
+ <dim>-1</dim>
577
+ </port>
578
+ </output>
579
+ </layer>
580
+ <layer id="43" name="Constant_68538" type="Const" version="opset1">
581
+ <data element_type="i32" shape="" offset="0" size="4" />
582
+ <output>
583
+ <port id="0" precision="I32" />
584
+ </output>
585
+ </layer>
586
+ <layer id="44" name="ReduceMax_68539" type="ReduceMax" version="opset1">
587
+ <data keep_dims="false" />
588
+ <input>
589
+ <port id="0" precision="I32">
590
+ <dim>-1</dim>
591
+ </port>
592
+ <port id="1" precision="I32" />
593
+ </input>
594
+ <output>
595
+ <port id="2" precision="I32" />
596
+ </output>
597
+ </layer>
598
+ <layer id="45" name="Constant_68540" type="Const" version="opset1">
599
+ <data element_type="i32" shape="" offset="5588651" size="4" />
600
+ <output>
601
+ <port id="0" precision="I32" />
602
+ </output>
603
+ </layer>
604
+ <layer id="46" name="RaggedToDense_68541" type="RaggedToDense" version="extension">
605
+ <data pad_right="false" m_pad_max_length="false" />
606
+ <input>
607
+ <port id="0" precision="I32">
608
+ <dim>-1</dim>
609
+ </port>
610
+ <port id="1" precision="I32">
611
+ <dim>-1</dim>
612
+ </port>
613
+ <port id="2" precision="I32">
614
+ <dim>-1</dim>
615
+ </port>
616
+ <port id="3" precision="I32" />
617
+ <port id="4" precision="I32" />
618
+ </input>
619
+ <output>
620
+ <port id="5" precision="I32">
621
+ <dim>-1</dim>
622
+ <dim>-1</dim>
623
+ </port>
624
+ <port id="6" precision="BOOL">
625
+ <dim>-1</dim>
626
+ <dim>-1</dim>
627
+ </port>
628
+ </output>
629
+ </layer>
630
+ <layer id="47" name="Convert_68542" type="Convert" version="opset1">
631
+ <data destination_type="i32" />
632
+ <input>
633
+ <port id="0" precision="BOOL">
634
+ <dim>-1</dim>
635
+ <dim>-1</dim>
636
+ </port>
637
+ </input>
638
+ <output>
639
+ <port id="1" precision="I32">
640
+ <dim>-1</dim>
641
+ <dim>-1</dim>
642
+ </port>
643
+ </output>
644
+ </layer>
645
+ <layer id="48" name="Convert_68542.0" type="Convert" version="opset1">
646
+ <data destination_type="i64" />
647
+ <input>
648
+ <port id="0" precision="I32">
649
+ <dim>-1</dim>
650
+ <dim>-1</dim>
651
+ </port>
652
+ </input>
653
+ <output>
654
+ <port id="1" precision="I64" names="attention_mask">
655
+ <dim>-1</dim>
656
+ <dim>-1</dim>
657
+ </port>
658
+ </output>
659
+ </layer>
660
+ <layer id="50" name="RaggedToDense_68541.0" type="Convert" version="opset1">
661
+ <data destination_type="i64" />
662
+ <input>
663
+ <port id="0" precision="I32">
664
+ <dim>-1</dim>
665
+ <dim>-1</dim>
666
+ </port>
667
+ </input>
668
+ <output>
669
+ <port id="1" precision="I64" names="input_ids">
670
+ <dim>-1</dim>
671
+ <dim>-1</dim>
672
+ </port>
673
+ </output>
674
+ </layer>
675
+ <layer id="51" name="Result_68545" type="Result" version="opset1">
676
+ <input>
677
+ <port id="0" precision="I64">
678
+ <dim>-1</dim>
679
+ <dim>-1</dim>
680
+ </port>
681
+ </input>
682
+ </layer>
683
+ <layer id="49" name="Result_68547" type="Result" version="opset1">
684
+ <input>
685
+ <port id="0" precision="I64">
686
+ <dim>-1</dim>
687
+ <dim>-1</dim>
688
+ </port>
689
+ </input>
690
+ </layer>
691
+ </layers>
692
+ <edges>
693
+ <edge from-layer="0" from-port="0" to-layer="5" to-port="0" />
694
+ <edge from-layer="1" from-port="0" to-layer="41" to-port="0" />
695
+ <edge from-layer="2" from-port="0" to-layer="41" to-port="1" />
696
+ <edge from-layer="3" from-port="0" to-layer="41" to-port="2" />
697
+ <edge from-layer="4" from-port="0" to-layer="11" to-port="0" />
698
+ <edge from-layer="5" from-port="1" to-layer="6" to-port="0" />
699
+ <edge from-layer="5" from-port="3" to-layer="18" to-port="4" />
700
+ <edge from-layer="5" from-port="2" to-layer="18" to-port="3" />
701
+ <edge from-layer="5" from-port="1" to-layer="18" to-port="2" />
702
+ <edge from-layer="6" from-port="1" to-layer="9" to-port="0" />
703
+ <edge from-layer="7" from-port="0" to-layer="9" to-port="1" />
704
+ <edge from-layer="8" from-port="0" to-layer="9" to-port="2" />
705
+ <edge from-layer="9" from-port="3" to-layer="11" to-port="1" />
706
+ <edge from-layer="9" from-port="3" to-layer="14" to-port="0" />
707
+ <edge from-layer="10" from-port="0" to-layer="11" to-port="2" />
708
+ <edge from-layer="11" from-port="3" to-layer="18" to-port="0" />
709
+ <edge from-layer="12" from-port="0" to-layer="16" to-port="0" />
710
+ <edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
711
+ <edge from-layer="14" from-port="2" to-layer="16" to-port="1" />
712
+ <edge from-layer="15" from-port="0" to-layer="16" to-port="2" />
713
+ <edge from-layer="16" from-port="3" to-layer="18" to-port="1" />
714
+ <edge from-layer="17" from-port="0" to-layer="18" to-port="5" />
715
+ <edge from-layer="18" from-port="8" to-layer="19" to-port="0" />
716
+ <edge from-layer="18" from-port="9" to-layer="19" to-port="1" />
717
+ <edge from-layer="18" from-port="10" to-layer="19" to-port="2" />
718
+ <edge from-layer="18" from-port="11" to-layer="19" to-port="3" />
719
+ <edge from-layer="18" from-port="6" to-layer="21" to-port="0" />
720
+ <edge from-layer="18" from-port="7" to-layer="21" to-port="1" />
721
+ <edge from-layer="19" from-port="4" to-layer="21" to-port="2" />
722
+ <edge from-layer="19" from-port="7" to-layer="21" to-port="5" />
723
+ <edge from-layer="19" from-port="6" to-layer="21" to-port="4" />
724
+ <edge from-layer="19" from-port="5" to-layer="21" to-port="3" />
725
+ <edge from-layer="20" from-port="0" to-layer="21" to-port="6" />
726
+ <edge from-layer="21" from-port="7" to-layer="35" to-port="0" />
727
+ <edge from-layer="21" from-port="8" to-layer="35" to-port="1" />
728
+ <edge from-layer="21" from-port="9" to-layer="35" to-port="2" />
729
+ <edge from-layer="21" from-port="10" to-layer="35" to-port="3" />
730
+ <edge from-layer="21" from-port="11" to-layer="35" to-port="4" />
731
+ <edge from-layer="22" from-port="0" to-layer="35" to-port="5" />
732
+ <edge from-layer="23" from-port="0" to-layer="35" to-port="6" />
733
+ <edge from-layer="24" from-port="0" to-layer="35" to-port="7" />
734
+ <edge from-layer="25" from-port="0" to-layer="35" to-port="8" />
735
+ <edge from-layer="26" from-port="0" to-layer="35" to-port="9" />
736
+ <edge from-layer="27" from-port="0" to-layer="35" to-port="10" />
737
+ <edge from-layer="28" from-port="0" to-layer="35" to-port="11" />
738
+ <edge from-layer="29" from-port="0" to-layer="35" to-port="12" />
739
+ <edge from-layer="30" from-port="0" to-layer="35" to-port="13" />
740
+ <edge from-layer="31" from-port="0" to-layer="35" to-port="14" />
741
+ <edge from-layer="32" from-port="0" to-layer="35" to-port="15" />
742
+ <edge from-layer="33" from-port="0" to-layer="35" to-port="16" />
743
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="17" />
744
+ <edge from-layer="35" from-port="19" to-layer="41" to-port="4" />
745
+ <edge from-layer="35" from-port="20" to-layer="41" to-port="5" />
746
+ <edge from-layer="35" from-port="19" to-layer="39" to-port="0" />
747
+ <edge from-layer="35" from-port="18" to-layer="36" to-port="1" />
748
+ <edge from-layer="35" from-port="19" to-layer="36" to-port="0" />
749
+ <edge from-layer="36" from-port="2" to-layer="38" to-port="0" />
750
+ <edge from-layer="37" from-port="0" to-layer="38" to-port="1" />
751
+ <edge from-layer="38" from-port="2" to-layer="39" to-port="1" />
752
+ <edge from-layer="39" from-port="2" to-layer="41" to-port="3" />
753
+ <edge from-layer="40" from-port="0" to-layer="41" to-port="6" />
754
+ <edge from-layer="41" from-port="8" to-layer="42" to-port="0" />
755
+ <edge from-layer="41" from-port="7" to-layer="42" to-port="1" />
756
+ <edge from-layer="41" from-port="7" to-layer="46" to-port="0" />
757
+ <edge from-layer="41" from-port="8" to-layer="46" to-port="1" />
758
+ <edge from-layer="41" from-port="9" to-layer="46" to-port="2" />
759
+ <edge from-layer="42" from-port="2" to-layer="44" to-port="0" />
760
+ <edge from-layer="43" from-port="0" to-layer="44" to-port="1" />
761
+ <edge from-layer="44" from-port="2" to-layer="46" to-port="3" />
762
+ <edge from-layer="45" from-port="0" to-layer="46" to-port="4" />
763
+ <edge from-layer="46" from-port="6" to-layer="47" to-port="0" />
764
+ <edge from-layer="46" from-port="5" to-layer="50" to-port="0" />
765
+ <edge from-layer="47" from-port="1" to-layer="48" to-port="0" />
766
+ <edge from-layer="48" from-port="1" to-layer="49" to-port="0" />
767
+ <edge from-layer="50" from-port="1" to-layer="51" to-port="0" />
768
+ </edges>
769
+ <rt_info>
770
+ <add_attention_mask value="True" />
771
+ <add_prefix_space />
772
+ <add_special_tokens value="True" />
773
+ <bos_token_id value="151646" />
774
+ <chat_template value="{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'&lt;|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'&lt;|Assistant|>&lt;|tool▁calls▁begin|>&lt;|tool▁call▁begin|>' + tool['type'] + '&lt;|tool▁sep|>' + tool['function']['name'] + '\n' + '```json' + '\n' + tool['function']['arguments'] + '\n' + '```' + '&lt;|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\n' + '&lt;|tool▁call▁begin|>' + tool['type'] + '&lt;|tool▁sep|>' + tool['function']['name'] + '\n' + '```json' + '\n' + tool['function']['arguments'] + '\n' + '```' + '&lt;|tool▁call▁end|>'}}{{'&lt;|tool▁calls▁end|>&lt;|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'&lt;|tool▁outputs▁end|>' + message['content'] + '&lt;|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '&lt;/think>' in content %}{% set content = content.split('&lt;/think>')[-1] %}{% endif %}{{'&lt;|Assistant|>' + content + '&lt;|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'&lt;|tool▁outputs▁begin|>&lt;|tool▁output▁begin|>' + message['content'] + '&lt;|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\n&lt;|tool▁output▁begin|>' + message['content'] + 
'&lt;|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'&lt;|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'&lt;|Assistant|>&lt;think>\n'}}{% endif %}" />
775
+ <clean_up_tokenization_spaces />
776
+ <detokenizer_input_type value="i64" />
777
+ <eos_token_id value="151643" />
778
+ <handle_special_tokens_with_re />
779
+ <max_length />
780
+ <number_of_inputs value="1" />
781
+ <openvino_tokenizers_version value="2025.1.0.0-501-330cb574aec" />
782
+ <openvino_version value="2025.1.0-18119-f124b593312" />
783
+ <original_tokenizer_class value="&lt;class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
784
+ <pad_token_id value="151643" />
785
+ <sentencepiece_version value="0.2.0" />
786
+ <skip_special_tokens value="True" />
787
+ <streaming_detokenizer value="False" />
788
+ <tiktoken_version value="0.8.0" />
789
+ <tokenizer_output_type value="i64" />
790
+ <tokenizers_version value="0.20.1" />
791
+ <transformers_version value="4.45.0" />
792
+ <use_max_padding value="False" />
793
+ <use_sentencepiece_backend value="False" />
794
+ <utf8_replace_mode value="replace" />
795
+ <with_detokenizer value="True" />
796
+ </rt_info>
797
+ </net>
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<|begin▁of▁sentence|>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|end▁of▁sentence|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<|end▁of▁sentence|>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ }
23
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e20ddafc659ba90242154b55275402edeca0715e5dbb30f56815a4ce081f4893
3
+ size 11422778
tokenizer_config.json ADDED
@@ -0,0 +1,194 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": null,
5
+ "added_tokens_decoder": {
6
+ "151643": {
7
+ "content": "<|end▁of▁sentence|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false,
12
+ "special": true
13
+ },
14
+ "151644": {
15
+ "content": "<|User|>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false,
20
+ "special": false
21
+ },
22
+ "151645": {
23
+ "content": "<|Assistant|>",
24
+ "lstrip": false,
25
+ "normalized": false,
26
+ "rstrip": false,
27
+ "single_word": false,
28
+ "special": false
29
+ },
30
+ "151646": {
31
+ "content": "<|begin▁of▁sentence|>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": true
37
+ },
38
+ "151647": {
39
+ "content": "<|EOT|>",
40
+ "lstrip": false,
41
+ "normalized": false,
42
+ "rstrip": false,
43
+ "single_word": false,
44
+ "special": false
45
+ },
46
+ "151648": {
47
+ "content": "<think>",
48
+ "lstrip": false,
49
+ "normalized": false,
50
+ "rstrip": false,
51
+ "single_word": false,
52
+ "special": false
53
+ },
54
+ "151649": {
55
+ "content": "</think>",
56
+ "lstrip": false,
57
+ "normalized": false,
58
+ "rstrip": false,
59
+ "single_word": false,
60
+ "special": false
61
+ },
62
+ "151650": {
63
+ "content": "<|quad_start|>",
64
+ "lstrip": false,
65
+ "normalized": false,
66
+ "rstrip": false,
67
+ "single_word": false,
68
+ "special": true
69
+ },
70
+ "151651": {
71
+ "content": "<|quad_end|>",
72
+ "lstrip": false,
73
+ "normalized": false,
74
+ "rstrip": false,
75
+ "single_word": false,
76
+ "special": true
77
+ },
78
+ "151652": {
79
+ "content": "<|vision_start|>",
80
+ "lstrip": false,
81
+ "normalized": false,
82
+ "rstrip": false,
83
+ "single_word": false,
84
+ "special": true
85
+ },
86
+ "151653": {
87
+ "content": "<|vision_end|>",
88
+ "lstrip": false,
89
+ "normalized": false,
90
+ "rstrip": false,
91
+ "single_word": false,
92
+ "special": true
93
+ },
94
+ "151654": {
95
+ "content": "<|vision_pad|>",
96
+ "lstrip": false,
97
+ "normalized": false,
98
+ "rstrip": false,
99
+ "single_word": false,
100
+ "special": true
101
+ },
102
+ "151655": {
103
+ "content": "<|image_pad|>",
104
+ "lstrip": false,
105
+ "normalized": false,
106
+ "rstrip": false,
107
+ "single_word": false,
108
+ "special": true
109
+ },
110
+ "151656": {
111
+ "content": "<|video_pad|>",
112
+ "lstrip": false,
113
+ "normalized": false,
114
+ "rstrip": false,
115
+ "single_word": false,
116
+ "special": true
117
+ },
118
+ "151657": {
119
+ "content": "<tool_call>",
120
+ "lstrip": false,
121
+ "normalized": false,
122
+ "rstrip": false,
123
+ "single_word": false,
124
+ "special": false
125
+ },
126
+ "151658": {
127
+ "content": "</tool_call>",
128
+ "lstrip": false,
129
+ "normalized": false,
130
+ "rstrip": false,
131
+ "single_word": false,
132
+ "special": false
133
+ },
134
+ "151659": {
135
+ "content": "<|fim_prefix|>",
136
+ "lstrip": false,
137
+ "normalized": false,
138
+ "rstrip": false,
139
+ "single_word": false,
140
+ "special": false
141
+ },
142
+ "151660": {
143
+ "content": "<|fim_middle|>",
144
+ "lstrip": false,
145
+ "normalized": false,
146
+ "rstrip": false,
147
+ "single_word": false,
148
+ "special": false
149
+ },
150
+ "151661": {
151
+ "content": "<|fim_suffix|>",
152
+ "lstrip": false,
153
+ "normalized": false,
154
+ "rstrip": false,
155
+ "single_word": false,
156
+ "special": false
157
+ },
158
+ "151662": {
159
+ "content": "<|fim_pad|>",
160
+ "lstrip": false,
161
+ "normalized": false,
162
+ "rstrip": false,
163
+ "single_word": false,
164
+ "special": false
165
+ },
166
+ "151663": {
167
+ "content": "<|repo_name|>",
168
+ "lstrip": false,
169
+ "normalized": false,
170
+ "rstrip": false,
171
+ "single_word": false,
172
+ "special": false
173
+ },
174
+ "151664": {
175
+ "content": "<|file_sep|>",
176
+ "lstrip": false,
177
+ "normalized": false,
178
+ "rstrip": false,
179
+ "single_word": false,
180
+ "special": false
181
+ }
182
+ },
183
+ "bos_token": "<|begin▁of▁sentence|>",
184
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin��>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool 
%}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|><think>\\n'}}{% endif %}",
185
+ "clean_up_tokenization_spaces": false,
186
+ "eos_token": "<|end▁of▁sentence|>",
187
+ "legacy": true,
188
+ "model_max_length": 16384,
189
+ "pad_token": "<|end▁of▁sentence|>",
190
+ "sp_model_kwargs": {},
191
+ "tokenizer_class": "LlamaTokenizer",
192
+ "unk_token": null,
193
+ "use_default_system_prompt": false
194
+ }