YiYiXu committed
Commit 8486bb8 (verified) · 1 parent: 940e2ec

Add files using upload-large-folder tool
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
feature_extractor/preprocessor_config.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "do_convert_rgb": null,
+   "do_normalize": true,
+   "do_rescale": true,
+   "do_resize": true,
+   "image_mean": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "image_processor_type": "SiglipImageProcessor",
+   "image_std": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "processor_class": "SiglipProcessor",
+   "resample": 3,
+   "rescale_factor": 0.00392156862745098,
+   "size": {
+     "height": 384,
+     "width": 384
+   }
+ }
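For orientation, a minimal sketch of how this preprocessor config is consumed; "path/to/checkpoint" is a placeholder, not a confirmed repo id.

```python
# Minimal sketch, assuming the checkpoint path is known.
from PIL import Image
from transformers import SiglipImageProcessor

processor = SiglipImageProcessor.from_pretrained(
    "path/to/checkpoint", subfolder="feature_extractor"
)
# Resizes to 384x384 (bicubic, resample=3), rescales by 1/255
# (0.00392156862745098), then normalizes with mean=std=0.5 per channel.
image = Image.new("RGB", (640, 480))
pixel_values = processor(images=image, return_tensors="pt").pixel_values
print(pixel_values.shape)  # torch.Size([1, 3, 384, 384])
```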
guider/guider_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "_class_name": "ClassifierFreeGuidance",
+   "_diffusers_version": "0.36.0.dev0",
+   "enabled": true,
+   "guidance_rescale": 0.0,
+   "guidance_scale": 6.0,
+   "start": 0.0,
+   "stop": 1.0,
+   "use_original_formulation": false
+ }
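The guider serializes the classifier-free guidance settings. A sketch of what it encodes, assuming a diffusers build matching the `_diffusers_version` above; the guiders API is still evolving and the import path may move.

```python
# Classifier-free guidance combines the two noise predictions as
#   pred = pred_uncond + guidance_scale * (pred_cond - pred_uncond)
# here with guidance_scale=6.0, no rescaling (guidance_rescale=0.0), and
# active over the full trajectory (start=0.0 .. stop=1.0).
# Import path assumed from recent diffusers main.
from diffusers.guiders import ClassifierFreeGuidance

guider = ClassifierFreeGuidance(
    guidance_scale=6.0,
    guidance_rescale=0.0,
    start=0.0,
    stop=1.0,
    use_original_formulation=False,
)
```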
image_encoder/config.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "architectures": [
+     "SiglipVisionModel"
+   ],
+   "attention_dropout": 0.0,
+   "dtype": "bfloat16",
+   "hidden_act": "gelu_pytorch_tanh",
+   "hidden_size": 1152,
+   "image_size": 384,
+   "intermediate_size": 4304,
+   "layer_norm_eps": 1e-06,
+   "model_type": "siglip_vision_model",
+   "num_attention_heads": 16,
+   "num_channels": 3,
+   "num_hidden_layers": 27,
+   "patch_size": 14,
+   "transformers_version": "4.57.1"
+ }
image_encoder/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d769e3a32a6a9bac72d4d93b989e44491f71b50f02bfa14cd9187758d4a68ff1
+ size 856506120
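A sketch of loading the SigLIP vision tower declared above (27 layers, hidden size 1152, 384x384 inputs in 14x14 patches, i.e. 729 image tokens); "path/to/checkpoint" is a placeholder for this repository.

```python
# Minimal sketch, assuming the checkpoint path is known.
import torch
from transformers import SiglipVisionModel

image_encoder = SiglipVisionModel.from_pretrained(
    "path/to/checkpoint", subfolder="image_encoder", torch_dtype=torch.bfloat16
)
```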
model_index.json ADDED
@@ -0,0 +1,44 @@
+ {
+   "_class_name": "HunyuanVideo15ImageToVideoPipeline",
+   "_diffusers_version": "0.36.0.dev0",
+   "feature_extractor": [
+     "transformers",
+     "SiglipImageProcessor"
+   ],
+   "guider": [
+     "diffusers",
+     "ClassifierFreeGuidance"
+   ],
+   "image_encoder": [
+     "transformers",
+     "SiglipVisionModel"
+   ],
+   "scheduler": [
+     "diffusers",
+     "FlowMatchEulerDiscreteScheduler"
+   ],
+   "text_encoder": [
+     "transformers",
+     "Qwen2_5_VLTextModel"
+   ],
+   "text_encoder_2": [
+     "transformers",
+     "T5EncoderModel"
+   ],
+   "tokenizer": [
+     "transformers",
+     "Qwen2TokenizerFast"
+   ],
+   "tokenizer_2": [
+     "transformers",
+     "ByT5Tokenizer"
+   ],
+   "transformer": [
+     "diffusers",
+     "HunyuanVideo15Transformer3DModel"
+   ],
+   "vae": [
+     "diffusers",
+     "AutoencoderKLHunyuanVideo15"
+   ]
+ }
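model_index.json is the manifest DiffusionPipeline reads to assemble the components listed above; each entry maps a component subfolder to the (library, class) pair used to load it. A sketch of loading it end to end, assuming a diffusers build matching the 0.36.0.dev0 in the config; "path/to/checkpoint" is a placeholder.

```python
# Minimal sketch; _class_name resolves to HunyuanVideo15ImageToVideoPipeline.
import torch
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained(
    "path/to/checkpoint", torch_dtype=torch.bfloat16
)
```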
scheduler/scheduler_config.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "_class_name": "FlowMatchEulerDiscreteScheduler",
+   "_diffusers_version": "0.36.0.dev0",
+   "base_image_seq_len": 256,
+   "base_shift": 0.5,
+   "invert_sigmas": false,
+   "max_image_seq_len": 4096,
+   "max_shift": 1.15,
+   "num_train_timesteps": 1000,
+   "shift": 5.0,
+   "shift_terminal": null,
+   "stochastic_sampling": false,
+   "time_shift_type": "exponential",
+   "use_beta_sigmas": false,
+   "use_dynamic_shifting": false,
+   "use_exponential_sigmas": false,
+   "use_karras_sigmas": false
+ }
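A sketch of recreating the scheduler from this config. With use_dynamic_shifting=False, diffusers applies the static shift to the linear sigma grid, sigma' = shift * sigma / (1 + (shift - 1) * sigma) with shift = 5.0, which biases sampling toward high-noise timesteps.

```python
from diffusers import FlowMatchEulerDiscreteScheduler

scheduler = FlowMatchEulerDiscreteScheduler(
    num_train_timesteps=1000,
    shift=5.0,
    use_dynamic_shifting=False,
)
scheduler.set_timesteps(num_inference_steps=50)
print(scheduler.sigmas[:3])  # shifted sigmas, descending from 1.0
```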
text_encoder/config.json ADDED
@@ -0,0 +1,67 @@
+ {
+   "architectures": [
+     "Qwen2_5_VLTextModel"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "dtype": "bfloat16",
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 3584,
+   "initializer_range": 0.02,
+   "intermediate_size": 18944,
+   "layer_types": [
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention"
+   ],
+   "max_position_embeddings": 128000,
+   "max_window_layers": 28,
+   "model_type": "qwen2_5_vl_text",
+   "num_attention_heads": 28,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 4,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "transformers_version": "4.57.1",
+   "use_cache": true,
+   "use_sliding_window": false,
+   "vision_token_id": 151654,
+   "vocab_size": 152064
+ }
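The primary text encoder is the language tower of Qwen2.5-VL (28 layers, GQA with 28 query / 4 KV heads, mRoPE sections [16, 24, 24]). A sketch of loading it, assuming Qwen2_5_VLTextModel is exposed by a transformers release near the 4.57.1 in the config; "path/to/checkpoint" is a placeholder.

```python
import torch
from transformers import Qwen2_5_VLTextModel

text_encoder = Qwen2_5_VLTextModel.from_pretrained(
    "path/to/checkpoint", subfolder="text_encoder", torch_dtype=torch.bfloat16
)
```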
text_encoder/model-00001-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a1b9f95e60d6844963933dc20c11cf712d902441ef50d0926e5f2b3521a2f01d
+ size 4877660152
text_encoder/model-00002-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab32a0fe73edfa4af4eb0c7f17633c44704335d80755f9230200cf16fea42b7e
+ size 4932750280
text_encoder/model-00003-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe26b2e0da51f3c6522b46056b26603f0642f866a60eacec09ebe66a025cb7ee
+ size 4330864528
text_encoder/model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
+ {
+   "metadata": {
+     "total_parameters": 7070619136,
+     "total_size": 14141238272
+   },
+   "weight_map": {
+     "embed_tokens.weight": "model-00001-of-00003.safetensors",
+     "layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.10.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.10.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.10.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.10.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.10.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.10.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.10.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.10.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.10.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.10.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.10.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.10.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.18.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.18.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.19.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.19.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.19.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.19.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.19.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.19.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.19.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.19.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.19.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.19.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.19.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.19.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.20.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.20.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.20.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.20.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.20.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.20.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.20.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.20.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.20.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.20.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.20.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.20.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.21.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.21.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.21.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.21.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.21.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.21.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.21.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.21.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.21.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.21.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.21.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.21.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.22.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.22.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.22.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.22.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.22.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.8.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.8.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.8.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.8.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.8.self_attn.k_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.q_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.v_proj.bias": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.9.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.9.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.9.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.9.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.9.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.9.self_attn.k_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.9.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.9.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.9.self_attn.q_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.9.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.9.self_attn.v_proj.bias": "model-00002-of-00003.safetensors",
+     "layers.9.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "norm.weight": "model-00003-of-00003.safetensors"
+   }
+ }
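A sketch of resolving a single tensor through the shard index, assuming the three shards sit next to the index file locally. Note the shard boundaries: layer 8 keeps its attention weights in shard 1 while its MLP lives in shard 2, and layer 18 is split between shards 2 and 3.

```python
import json
from safetensors import safe_open

with open("text_encoder/model.safetensors.index.json") as f:
    index = json.load(f)

name = "layers.18.mlp.gate_proj.weight"
shard = index["weight_map"][name]  # "model-00002-of-00003.safetensors"
with safe_open(f"text_encoder/{shard}", framework="pt") as fp:
    tensor = fp.get_tensor(name)
```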
text_encoder_2/config.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "architectures": [
+     "T5EncoderModel"
+   ],
+   "classifier_dropout": 0.0,
+   "d_ff": 3584,
+   "d_kv": 64,
+   "d_model": 1472,
+   "decoder_start_token_id": 0,
+   "dense_act_fn": "gelu_new",
+   "dropout_rate": 0.1,
+   "dtype": "float32",
+   "eos_token_id": 1,
+   "feed_forward_proj": "gated-gelu",
+   "gradient_checkpointing": false,
+   "initializer_factor": 1.0,
+   "is_encoder_decoder": false,
+   "is_gated_act": true,
+   "layer_norm_epsilon": 1e-06,
+   "model_type": "t5",
+   "num_decoder_layers": 4,
+   "num_heads": 6,
+   "num_layers": 12,
+   "pad_token_id": 0,
+   "relative_attention_max_distance": 128,
+   "relative_attention_num_buckets": 32,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "ByT5Tokenizer",
+   "transformers_version": "4.57.1",
+   "use_cache": false,
+   "vocab_size": 1510
+ }
text_encoder_2/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f3b22f7926015b76022ca4842a3130d83c477d4ecee8a5275252650d96bf97a
+ size 877273128
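The secondary text encoder is a small byte-level T5 encoder (12 layers, d_model 1472, 1510-entry vocabulary), paired with the ByT5 tokenizer whose font/color tokens appear at the end of this diff. A loading sketch; "path/to/checkpoint" is a placeholder.

```python
from transformers import T5EncoderModel

# Kept in float32, per the "dtype" field in the config above.
text_encoder_2 = T5EncoderModel.from_pretrained(
    "path/to/checkpoint", subfolder="text_encoder_2"
)
```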
tokenizer/added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "</tool_call>": 151658,
+   "<tool_call>": 151657,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
tokenizer/chat_template.jinja ADDED
@@ -0,0 +1,7 @@
+ {% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system
+ You are a helpful assistant.<|im_end|>
+ {% endif %}<|im_start|>{{ message['role'] }}
+ {% if message['content'] is string %}{{ message['content'] }}<|im_end|>
+ {% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>
+ {% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant
+ {% endif %}
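A sketch of what this template renders for a plain-text prompt when applied through the tokenizer shipped in tokenizer/; "path/to/checkpoint" is a placeholder.

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint", subfolder="tokenizer")
prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Describe the scene."}],
    tokenize=False,
    add_generation_prompt=True,
)
# Renders:
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Describe the scene.<|im_end|>
# <|im_start|>assistant
```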
tokenizer/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
+ {
+   "add_bos_token": false,
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151645": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151646": {
+       "content": "<|object_ref_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|object_ref_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151648": {
+       "content": "<|box_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151649": {
+       "content": "<|box_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151650": {
+       "content": "<|quad_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|quad_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|vision_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|vision_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|vision_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|image_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|video_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151658": {
+       "content": "</tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151659": {
+       "content": "<|fim_prefix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151660": {
+       "content": "<|fim_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151661": {
+       "content": "<|fim_suffix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151662": {
+       "content": "<|fim_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151663": {
+       "content": "<|repo_name|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151664": {
+       "content": "<|file_sep|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "bos_token": null,
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "errors": "replace",
+   "extra_special_tokens": {},
+   "model_max_length": 131072,
+   "pad_token": "<|endoftext|>",
+   "padding_side": "right",
+   "split_special_tokens": false,
+   "tokenizer_class": "Qwen2Tokenizer",
+   "unk_token": null
+ }
tokenizer/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_2/added_tokens.json ADDED
@@ -0,0 +1,1253 @@
+ {
+   "<cn-font-0>": 1034,
+   "<cn-font-100>": 1134,
+   "<cn-font-101>": 1135,
+   "<cn-font-102>": 1136,
+   "<cn-font-103>": 1137,
+   "<cn-font-104>": 1138,
+   "<cn-font-105>": 1139,
+   "<cn-font-106>": 1140,
+   "<cn-font-107>": 1141,
+   "<cn-font-108>": 1142,
+   "<cn-font-109>": 1143,
+   "<cn-font-10>": 1044,
+   "<cn-font-110>": 1144,
+   "<cn-font-111>": 1145,
+   "<cn-font-112>": 1146,
+   "<cn-font-113>": 1147,
+   "<cn-font-114>": 1148,
+   "<cn-font-115>": 1149,
+   "<cn-font-116>": 1150,
+   "<cn-font-117>": 1151,
+   "<cn-font-118>": 1152,
+   "<cn-font-119>": 1153,
+   "<cn-font-11>": 1045,
+   "<cn-font-120>": 1154,
+   "<cn-font-121>": 1155,
+   "<cn-font-122>": 1156,
+   "<cn-font-123>": 1157,
+   "<cn-font-124>": 1158,
+   "<cn-font-125>": 1159,
+   "<cn-font-126>": 1160,
+   "<cn-font-127>": 1161,
+   "<cn-font-128>": 1162,
+   "<cn-font-129>": 1163,
+   "<cn-font-12>": 1046,
+   "<cn-font-130>": 1164,
+   "<cn-font-131>": 1165,
+   "<cn-font-132>": 1166,
+   "<cn-font-133>": 1167,
+   "<cn-font-134>": 1168,
+   "<cn-font-135>": 1169,
+   "<cn-font-136>": 1170,
+   "<cn-font-137>": 1171,
+   "<cn-font-138>": 1172,
+   "<cn-font-139>": 1173,
+   "<cn-font-13>": 1047,
+   "<cn-font-140>": 1174,
+   "<cn-font-141>": 1175,
+   "<cn-font-142>": 1176,
+   "<cn-font-14>": 1048,
+   "<cn-font-15>": 1049,
+   "<cn-font-16>": 1050,
+   "<cn-font-17>": 1051,
+   "<cn-font-18>": 1052,
+   "<cn-font-19>": 1053,
+   "<cn-font-1>": 1035,
+   "<cn-font-20>": 1054,
+   "<cn-font-21>": 1055,
+   "<cn-font-22>": 1056,
+   "<cn-font-23>": 1057,
+   "<cn-font-24>": 1058,
+   "<cn-font-25>": 1059,
+   "<cn-font-26>": 1060,
+   "<cn-font-27>": 1061,
+   "<cn-font-28>": 1062,
+   "<cn-font-29>": 1063,
+   "<cn-font-2>": 1036,
+   "<cn-font-30>": 1064,
+   "<cn-font-31>": 1065,
+   "<cn-font-32>": 1066,
+   "<cn-font-33>": 1067,
+   "<cn-font-34>": 1068,
+   "<cn-font-35>": 1069,
+   "<cn-font-36>": 1070,
+   "<cn-font-37>": 1071,
+   "<cn-font-38>": 1072,
+   "<cn-font-39>": 1073,
+   "<cn-font-3>": 1037,
+   "<cn-font-40>": 1074,
+   "<cn-font-41>": 1075,
+   "<cn-font-42>": 1076,
+   "<cn-font-43>": 1077,
+   "<cn-font-44>": 1078,
+   "<cn-font-45>": 1079,
+   "<cn-font-46>": 1080,
+   "<cn-font-47>": 1081,
+   "<cn-font-48>": 1082,
+   "<cn-font-49>": 1083,
+   "<cn-font-4>": 1038,
+   "<cn-font-50>": 1084,
+   "<cn-font-51>": 1085,
+   "<cn-font-52>": 1086,
+   "<cn-font-53>": 1087,
+   "<cn-font-54>": 1088,
+   "<cn-font-55>": 1089,
+   "<cn-font-56>": 1090,
+   "<cn-font-57>": 1091,
+   "<cn-font-58>": 1092,
+   "<cn-font-59>": 1093,
+   "<cn-font-5>": 1039,
+   "<cn-font-60>": 1094,
+   "<cn-font-61>": 1095,
+   "<cn-font-62>": 1096,
+   "<cn-font-63>": 1097,
+   "<cn-font-64>": 1098,
+   "<cn-font-65>": 1099,
+   "<cn-font-66>": 1100,
+   "<cn-font-67>": 1101,
+   "<cn-font-68>": 1102,
+   "<cn-font-69>": 1103,
+   "<cn-font-6>": 1040,
+   "<cn-font-70>": 1104,
+   "<cn-font-71>": 1105,
+   "<cn-font-72>": 1106,
+   "<cn-font-73>": 1107,
+   "<cn-font-74>": 1108,
+   "<cn-font-75>": 1109,
+   "<cn-font-76>": 1110,
+   "<cn-font-77>": 1111,
+   "<cn-font-78>": 1112,
+   "<cn-font-79>": 1113,
+   "<cn-font-7>": 1041,
+   "<cn-font-80>": 1114,
+   "<cn-font-81>": 1115,
+   "<cn-font-82>": 1116,
+   "<cn-font-83>": 1117,
+   "<cn-font-84>": 1118,
+   "<cn-font-85>": 1119,
+   "<cn-font-86>": 1120,
+   "<cn-font-87>": 1121,
+   "<cn-font-88>": 1122,
+   "<cn-font-89>": 1123,
+   "<cn-font-8>": 1042,
+   "<cn-font-90>": 1124,
+   "<cn-font-91>": 1125,
+   "<cn-font-92>": 1126,
+   "<cn-font-93>": 1127,
+   "<cn-font-94>": 1128,
+   "<cn-font-95>": 1129,
+   "<cn-font-96>": 1130,
+   "<cn-font-97>": 1131,
+   "<cn-font-98>": 1132,
+   "<cn-font-99>": 1133,
+   "<cn-font-9>": 1043,
+   "<color-0>": 384,
+   "<color-100>": 484,
+   "<color-101>": 485,
+   "<color-102>": 486,
+   "<color-103>": 487,
+   "<color-104>": 488,
+   "<color-105>": 489,
+   "<color-106>": 490,
+   "<color-107>": 491,
+   "<color-108>": 492,
+   "<color-109>": 493,
+   "<color-10>": 394,
+   "<color-110>": 494,
+   "<color-111>": 495,
+   "<color-112>": 496,
+   "<color-113>": 497,
+   "<color-114>": 498,
+   "<color-115>": 499,
+   "<color-116>": 500,
+   "<color-117>": 501,
+   "<color-118>": 502,
+   "<color-119>": 503,
+   "<color-11>": 395,
+   "<color-120>": 504,
+   "<color-121>": 505,
+   "<color-122>": 506,
+   "<color-123>": 507,
+   "<color-124>": 508,
+   "<color-125>": 509,
+   "<color-126>": 510,
+   "<color-127>": 511,
+   "<color-128>": 512,
+   "<color-129>": 513,
+   "<color-12>": 396,
+   "<color-130>": 514,
+   "<color-131>": 515,
+   "<color-132>": 516,
+   "<color-133>": 517,
+   "<color-134>": 518,
+   "<color-135>": 519,
+   "<color-136>": 520,
+   "<color-137>": 521,
+   "<color-13>": 397,
+   "<color-14>": 398,
+   "<color-15>": 399,
+   "<color-16>": 400,
+   "<color-17>": 401,
+   "<color-18>": 402,
+   "<color-19>": 403,
+   "<color-1>": 385,
+   "<color-20>": 404,
+   "<color-21>": 405,
+   "<color-22>": 406,
+   "<color-23>": 407,
+   "<color-24>": 408,
+   "<color-25>": 409,
+   "<color-26>": 410,
+   "<color-27>": 411,
+   "<color-28>": 412,
+   "<color-29>": 413,
+   "<color-2>": 386,
+   "<color-30>": 414,
+   "<color-31>": 415,
+   "<color-32>": 416,
+   "<color-33>": 417,
+   "<color-34>": 418,
+   "<color-35>": 419,
+   "<color-36>": 420,
+   "<color-37>": 421,
+   "<color-38>": 422,
+   "<color-39>": 423,
+   "<color-3>": 387,
+   "<color-40>": 424,
+   "<color-41>": 425,
+   "<color-42>": 426,
+   "<color-43>": 427,
+   "<color-44>": 428,
+   "<color-45>": 429,
+   "<color-46>": 430,
+   "<color-47>": 431,
+   "<color-48>": 432,
+   "<color-49>": 433,
+   "<color-4>": 388,
+   "<color-50>": 434,
+   "<color-51>": 435,
+   "<color-52>": 436,
+   "<color-53>": 437,
+   "<color-54>": 438,
+   "<color-55>": 439,
+   "<color-56>": 440,
+   "<color-57>": 441,
+   "<color-58>": 442,
+   "<color-59>": 443,
+   "<color-5>": 389,
+   "<color-60>": 444,
+   "<color-61>": 445,
+   "<color-62>": 446,
+   "<color-63>": 447,
+   "<color-64>": 448,
+   "<color-65>": 449,
+   "<color-66>": 450,
+   "<color-67>": 451,
+   "<color-68>": 452,
+   "<color-69>": 453,
+   "<color-6>": 390,
+   "<color-70>": 454,
+   "<color-71>": 455,
+   "<color-72>": 456,
+   "<color-73>": 457,
+   "<color-74>": 458,
+   "<color-75>": 459,
+   "<color-76>": 460,
+   "<color-77>": 461,
+   "<color-78>": 462,
+   "<color-79>": 463,
+   "<color-7>": 391,
+   "<color-80>": 464,
+   "<color-81>": 465,
+   "<color-82>": 466,
+   "<color-83>": 467,
+   "<color-84>": 468,
+   "<color-85>": 469,
+   "<color-86>": 470,
+   "<color-87>": 471,
+   "<color-88>": 472,
+   "<color-89>": 473,
+   "<color-8>": 392,
+   "<color-90>": 474,
+   "<color-91>": 475,
+   "<color-92>": 476,
+   "<color-93>": 477,
+   "<color-94>": 478,
+   "<color-95>": 479,
+   "<color-96>": 480,
+   "<color-97>": 481,
+   "<color-98>": 482,
+   "<color-99>": 483,
+   "<color-9>": 393,
+   "<en-font-0>": 522,
+   "<en-font-100>": 622,
+   "<en-font-101>": 623,
+   "<en-font-102>": 624,
+   "<en-font-103>": 625,
+   "<en-font-104>": 626,
+   "<en-font-105>": 627,
+   "<en-font-106>": 628,
+   "<en-font-107>": 629,
+   "<en-font-108>": 630,
+   "<en-font-109>": 631,
+   "<en-font-10>": 532,
+   "<en-font-110>": 632,
+   "<en-font-111>": 633,
+   "<en-font-112>": 634,
+   "<en-font-113>": 635,
+   "<en-font-114>": 636,
+   "<en-font-115>": 637,
+   "<en-font-116>": 638,
+   "<en-font-117>": 639,
+   "<en-font-118>": 640,
+   "<en-font-119>": 641,
+   "<en-font-11>": 533,
+   "<en-font-120>": 642,
+   "<en-font-121>": 643,
+   "<en-font-122>": 644,
+   "<en-font-123>": 645,
+   "<en-font-124>": 646,
+   "<en-font-125>": 647,
+   "<en-font-126>": 648,
+   "<en-font-127>": 649,
+   "<en-font-128>": 650,
+   "<en-font-129>": 651,
+   "<en-font-12>": 534,
+   "<en-font-130>": 652,
+   "<en-font-131>": 653,
+   "<en-font-132>": 654,
+   "<en-font-133>": 655,
+   "<en-font-134>": 656,
+   "<en-font-135>": 657,
+   "<en-font-136>": 658,
+   "<en-font-137>": 659,
+   "<en-font-138>": 660,
+   "<en-font-139>": 661,
+   "<en-font-13>": 535,
+   "<en-font-140>": 662,
+   "<en-font-141>": 663,
+   "<en-font-142>": 664,
+   "<en-font-143>": 665,
+   "<en-font-144>": 666,
+   "<en-font-145>": 667,
+   "<en-font-146>": 668,
+   "<en-font-147>": 669,
+   "<en-font-148>": 670,
+   "<en-font-149>": 671,
+   "<en-font-14>": 536,
+   "<en-font-150>": 672,
+   "<en-font-151>": 673,
+   "<en-font-152>": 674,
+   "<en-font-153>": 675,
+   "<en-font-154>": 676,
+   "<en-font-155>": 677,
+   "<en-font-156>": 678,
+   "<en-font-157>": 679,
+   "<en-font-158>": 680,
+   "<en-font-159>": 681,
+   "<en-font-15>": 537,
+   "<en-font-160>": 682,
+   "<en-font-161>": 683,
+   "<en-font-162>": 684,
+   "<en-font-163>": 685,
+   "<en-font-164>": 686,
+   "<en-font-165>": 687,
+   "<en-font-166>": 688,
+   "<en-font-167>": 689,
+   "<en-font-168>": 690,
+   "<en-font-169>": 691,
+   "<en-font-16>": 538,
+   "<en-font-170>": 692,
+   "<en-font-171>": 693,
+   "<en-font-172>": 694,
+   "<en-font-173>": 695,
+   "<en-font-174>": 696,
+   "<en-font-175>": 697,
+   "<en-font-176>": 698,
+   "<en-font-177>": 699,
+   "<en-font-178>": 700,
+   "<en-font-179>": 701,
+   "<en-font-17>": 539,
+   "<en-font-180>": 702,
+   "<en-font-181>": 703,
+   "<en-font-182>": 704,
+   "<en-font-183>": 705,
+   "<en-font-184>": 706,
+   "<en-font-185>": 707,
+   "<en-font-186>": 708,
+   "<en-font-187>": 709,
+   "<en-font-188>": 710,
+   "<en-font-189>": 711,
+   "<en-font-18>": 540,
+   "<en-font-190>": 712,
+   "<en-font-191>": 713,
+   "<en-font-192>": 714,
+   "<en-font-193>": 715,
+   "<en-font-194>": 716,
+   "<en-font-195>": 717,
+   "<en-font-196>": 718,
+   "<en-font-197>": 719,
+   "<en-font-198>": 720,
+   "<en-font-199>": 721,
+   "<en-font-19>": 541,
+   "<en-font-1>": 523,
+   "<en-font-200>": 722,
+   "<en-font-201>": 723,
+   "<en-font-202>": 724,
+   "<en-font-203>": 725,
+   "<en-font-204>": 726,
+   "<en-font-205>": 727,
+   "<en-font-206>": 728,
+   "<en-font-207>": 729,
+   "<en-font-208>": 730,
+   "<en-font-209>": 731,
+   "<en-font-20>": 542,
+   "<en-font-210>": 732,
+   "<en-font-211>": 733,
+   "<en-font-212>": 734,
+   "<en-font-213>": 735,
+   "<en-font-214>": 736,
+   "<en-font-215>": 737,
+   "<en-font-216>": 738,
+   "<en-font-217>": 739,
+   "<en-font-218>": 740,
+   "<en-font-219>": 741,
+   "<en-font-21>": 543,
+   "<en-font-220>": 742,
+   "<en-font-221>": 743,
+   "<en-font-222>": 744,
+   "<en-font-223>": 745,
+   "<en-font-224>": 746,
+   "<en-font-225>": 747,
+   "<en-font-226>": 748,
+   "<en-font-227>": 749,
+   "<en-font-228>": 750,
+   "<en-font-229>": 751,
+   "<en-font-22>": 544,
+   "<en-font-230>": 752,
+   "<en-font-231>": 753,
+   "<en-font-232>": 754,
+   "<en-font-233>": 755,
+   "<en-font-234>": 756,
+   "<en-font-235>": 757,
+   "<en-font-236>": 758,
+   "<en-font-237>": 759,
+   "<en-font-238>": 760,
+   "<en-font-239>": 761,
+   "<en-font-23>": 545,
+   "<en-font-240>": 762,
+   "<en-font-241>": 763,
+   "<en-font-242>": 764,
+   "<en-font-243>": 765,
+   "<en-font-244>": 766,
+   "<en-font-245>": 767,
+   "<en-font-246>": 768,
+   "<en-font-247>": 769,
+   "<en-font-248>": 770,
+   "<en-font-249>": 771,
+   "<en-font-24>": 546,
+   "<en-font-250>": 772,
+   "<en-font-251>": 773,
+   "<en-font-252>": 774,
+   "<en-font-253>": 775,
+   "<en-font-254>": 776,
+   "<en-font-255>": 777,
+   "<en-font-256>": 778,
+   "<en-font-257>": 779,
+   "<en-font-258>": 780,
+   "<en-font-259>": 781,
+   "<en-font-25>": 547,
+   "<en-font-260>": 782,
+   "<en-font-261>": 783,
+   "<en-font-262>": 784,
+   "<en-font-263>": 785,
+   "<en-font-264>": 786,
+   "<en-font-265>": 787,
+   "<en-font-266>": 788,
+   "<en-font-267>": 789,
+   "<en-font-268>": 790,
+   "<en-font-269>": 791,
+   "<en-font-26>": 548,
+   "<en-font-270>": 792,
+   "<en-font-271>": 793,
+   "<en-font-272>": 794,
+   "<en-font-273>": 795,
+   "<en-font-274>": 796,
+   "<en-font-275>": 797,
+   "<en-font-276>": 798,
+   "<en-font-277>": 799,
+   "<en-font-278>": 800,
+   "<en-font-279>": 801,
+   "<en-font-27>": 549,
+   "<en-font-280>": 802,
+   "<en-font-281>": 803,
+   "<en-font-282>": 804,
+   "<en-font-283>": 805,
+   "<en-font-284>": 806,
+   "<en-font-285>": 807,
+   "<en-font-286>": 808,
+   "<en-font-287>": 809,
+   "<en-font-288>": 810,
+   "<en-font-289>": 811,
+   "<en-font-28>": 550,
+   "<en-font-290>": 812,
+   "<en-font-291>": 813,
+   "<en-font-292>": 814,
+   "<en-font-293>": 815,
+   "<en-font-294>": 816,
+   "<en-font-295>": 817,
+   "<en-font-296>": 818,
+   "<en-font-297>": 819,
+   "<en-font-298>": 820,
+   "<en-font-299>": 821,
+   "<en-font-29>": 551,
+   "<en-font-2>": 524,
+   "<en-font-300>": 822,
+   "<en-font-301>": 823,
+   "<en-font-302>": 824,
+   "<en-font-303>": 825,
+   "<en-font-304>": 826,
+   "<en-font-305>": 827,
+   "<en-font-306>": 828,
+   "<en-font-307>": 829,
+   "<en-font-308>": 830,
+   "<en-font-309>": 831,
+   "<en-font-30>": 552,
+   "<en-font-310>": 832,
+   "<en-font-311>": 833,
+   "<en-font-312>": 834,
+   "<en-font-313>": 835,
+   "<en-font-314>": 836,
+   "<en-font-315>": 837,
+   "<en-font-316>": 838,
+   "<en-font-317>": 839,
+   "<en-font-318>": 840,
+   "<en-font-319>": 841,
+   "<en-font-31>": 553,
+   "<en-font-320>": 842,
+   "<en-font-321>": 843,
+   "<en-font-322>": 844,
+   "<en-font-323>": 845,
+   "<en-font-324>": 846,
+   "<en-font-325>": 847,
+   "<en-font-326>": 848,
+   "<en-font-327>": 849,
+   "<en-font-328>": 850,
+   "<en-font-329>": 851,
+   "<en-font-32>": 554,
+   "<en-font-330>": 852,
+   "<en-font-331>": 853,
+   "<en-font-332>": 854,
+   "<en-font-333>": 855,
+   "<en-font-334>": 856,
+   "<en-font-335>": 857,
+   "<en-font-336>": 858,
+   "<en-font-337>": 859,
+   "<en-font-338>": 860,
+   "<en-font-339>": 861,
+   "<en-font-33>": 555,
+   "<en-font-340>": 862,
+   "<en-font-341>": 863,
+   "<en-font-342>": 864,
+   "<en-font-343>": 865,
+   "<en-font-344>": 866,
+   "<en-font-345>": 867,
556
+ "<en-font-346>": 868,
557
+ "<en-font-347>": 869,
558
+ "<en-font-348>": 870,
559
+ "<en-font-349>": 871,
560
+ "<en-font-34>": 556,
561
+ "<en-font-350>": 872,
562
+ "<en-font-351>": 873,
563
+ "<en-font-352>": 874,
564
+ "<en-font-353>": 875,
565
+ "<en-font-354>": 876,
566
+ "<en-font-355>": 877,
567
+ "<en-font-356>": 878,
568
+ "<en-font-357>": 879,
569
+ "<en-font-358>": 880,
570
+ "<en-font-359>": 881,
571
+ "<en-font-35>": 557,
572
+ "<en-font-360>": 882,
573
+ "<en-font-361>": 883,
574
+ "<en-font-362>": 884,
575
+ "<en-font-363>": 885,
576
+ "<en-font-364>": 886,
577
+ "<en-font-365>": 887,
578
+ "<en-font-366>": 888,
579
+ "<en-font-367>": 889,
580
+ "<en-font-368>": 890,
581
+ "<en-font-369>": 891,
582
+ "<en-font-36>": 558,
583
+ "<en-font-370>": 892,
584
+ "<en-font-371>": 893,
585
+ "<en-font-372>": 894,
586
+ "<en-font-373>": 895,
587
+ "<en-font-374>": 896,
588
+ "<en-font-375>": 897,
589
+ "<en-font-376>": 898,
590
+ "<en-font-377>": 899,
591
+ "<en-font-378>": 900,
592
+ "<en-font-379>": 901,
593
+ "<en-font-37>": 559,
594
+ "<en-font-380>": 902,
595
+ "<en-font-381>": 903,
596
+ "<en-font-382>": 904,
597
+ "<en-font-383>": 905,
598
+ "<en-font-384>": 906,
599
+ "<en-font-385>": 907,
600
+ "<en-font-386>": 908,
601
+ "<en-font-387>": 909,
602
+ "<en-font-388>": 910,
603
+ "<en-font-389>": 911,
604
+ "<en-font-38>": 560,
605
+ "<en-font-390>": 912,
606
+ "<en-font-391>": 913,
607
+ "<en-font-392>": 914,
608
+ "<en-font-393>": 915,
609
+ "<en-font-394>": 916,
610
+ "<en-font-395>": 917,
611
+ "<en-font-396>": 918,
612
+ "<en-font-397>": 919,
613
+ "<en-font-398>": 920,
614
+ "<en-font-399>": 921,
615
+ "<en-font-39>": 561,
616
+ "<en-font-3>": 525,
617
+ "<en-font-400>": 922,
618
+ "<en-font-401>": 923,
619
+ "<en-font-402>": 924,
620
+ "<en-font-403>": 925,
621
+ "<en-font-404>": 926,
622
+ "<en-font-405>": 927,
623
+ "<en-font-406>": 928,
624
+ "<en-font-407>": 929,
625
+ "<en-font-408>": 930,
626
+ "<en-font-409>": 931,
627
+ "<en-font-40>": 562,
628
+ "<en-font-410>": 932,
629
+ "<en-font-411>": 933,
630
+ "<en-font-412>": 934,
631
+ "<en-font-413>": 935,
632
+ "<en-font-414>": 936,
633
+ "<en-font-415>": 937,
634
+ "<en-font-416>": 938,
635
+ "<en-font-417>": 939,
636
+ "<en-font-418>": 940,
637
+ "<en-font-419>": 941,
638
+ "<en-font-41>": 563,
639
+ "<en-font-420>": 942,
640
+ "<en-font-421>": 943,
641
+ "<en-font-422>": 944,
642
+ "<en-font-423>": 945,
643
+ "<en-font-424>": 946,
644
+ "<en-font-425>": 947,
645
+ "<en-font-426>": 948,
646
+ "<en-font-427>": 949,
647
+ "<en-font-428>": 950,
648
+ "<en-font-429>": 951,
649
+ "<en-font-42>": 564,
650
+ "<en-font-430>": 952,
651
+ "<en-font-431>": 953,
652
+ "<en-font-432>": 954,
653
+ "<en-font-433>": 955,
654
+ "<en-font-434>": 956,
655
+ "<en-font-435>": 957,
656
+ "<en-font-436>": 958,
657
+ "<en-font-437>": 959,
658
+ "<en-font-438>": 960,
659
+ "<en-font-439>": 961,
660
+ "<en-font-43>": 565,
661
+ "<en-font-440>": 962,
662
+ "<en-font-441>": 963,
663
+ "<en-font-442>": 964,
664
+ "<en-font-443>": 965,
665
+ "<en-font-444>": 966,
666
+ "<en-font-445>": 967,
667
+ "<en-font-446>": 968,
668
+ "<en-font-447>": 969,
669
+ "<en-font-448>": 970,
670
+ "<en-font-449>": 971,
671
+ "<en-font-44>": 566,
672
+ "<en-font-450>": 972,
673
+ "<en-font-451>": 973,
674
+ "<en-font-452>": 974,
675
+ "<en-font-453>": 975,
676
+ "<en-font-454>": 976,
677
+ "<en-font-455>": 977,
678
+ "<en-font-456>": 978,
679
+ "<en-font-457>": 979,
680
+ "<en-font-458>": 980,
681
+ "<en-font-459>": 981,
682
+ "<en-font-45>": 567,
683
+ "<en-font-460>": 982,
684
+ "<en-font-461>": 983,
685
+ "<en-font-462>": 984,
686
+ "<en-font-463>": 985,
687
+ "<en-font-464>": 986,
688
+ "<en-font-465>": 987,
689
+ "<en-font-466>": 988,
690
+ "<en-font-467>": 989,
691
+ "<en-font-468>": 990,
692
+ "<en-font-469>": 991,
693
+ "<en-font-46>": 568,
694
+ "<en-font-470>": 992,
695
+ "<en-font-471>": 993,
696
+ "<en-font-472>": 994,
697
+ "<en-font-473>": 995,
698
+ "<en-font-474>": 996,
699
+ "<en-font-475>": 997,
700
+ "<en-font-476>": 998,
701
+ "<en-font-477>": 999,
702
+ "<en-font-478>": 1000,
703
+ "<en-font-479>": 1001,
704
+ "<en-font-47>": 569,
705
+ "<en-font-480>": 1002,
706
+ "<en-font-481>": 1003,
707
+ "<en-font-482>": 1004,
708
+ "<en-font-483>": 1005,
709
+ "<en-font-484>": 1006,
710
+ "<en-font-485>": 1007,
711
+ "<en-font-486>": 1008,
712
+ "<en-font-487>": 1009,
713
+ "<en-font-488>": 1010,
714
+ "<en-font-489>": 1011,
715
+ "<en-font-48>": 570,
716
+ "<en-font-490>": 1012,
717
+ "<en-font-491>": 1013,
718
+ "<en-font-492>": 1014,
719
+ "<en-font-493>": 1015,
720
+ "<en-font-494>": 1016,
721
+ "<en-font-495>": 1017,
722
+ "<en-font-496>": 1018,
723
+ "<en-font-497>": 1019,
724
+ "<en-font-498>": 1020,
725
+ "<en-font-499>": 1021,
726
+ "<en-font-49>": 571,
727
+ "<en-font-4>": 526,
728
+ "<en-font-500>": 1022,
729
+ "<en-font-501>": 1023,
730
+ "<en-font-502>": 1024,
731
+ "<en-font-503>": 1025,
732
+ "<en-font-504>": 1026,
733
+ "<en-font-505>": 1027,
734
+ "<en-font-506>": 1028,
735
+ "<en-font-507>": 1029,
736
+ "<en-font-508>": 1030,
737
+ "<en-font-509>": 1031,
738
+ "<en-font-50>": 572,
739
+ "<en-font-510>": 1032,
740
+ "<en-font-511>": 1033,
741
+ "<en-font-51>": 573,
742
+ "<en-font-52>": 574,
743
+ "<en-font-53>": 575,
744
+ "<en-font-54>": 576,
745
+ "<en-font-55>": 577,
746
+ "<en-font-56>": 578,
747
+ "<en-font-57>": 579,
748
+ "<en-font-58>": 580,
749
+ "<en-font-59>": 581,
750
+ "<en-font-5>": 527,
751
+ "<en-font-60>": 582,
752
+ "<en-font-61>": 583,
753
+ "<en-font-62>": 584,
754
+ "<en-font-63>": 585,
755
+ "<en-font-64>": 586,
756
+ "<en-font-65>": 587,
757
+ "<en-font-66>": 588,
758
+ "<en-font-67>": 589,
759
+ "<en-font-68>": 590,
760
+ "<en-font-69>": 591,
761
+ "<en-font-6>": 528,
762
+ "<en-font-70>": 592,
763
+ "<en-font-71>": 593,
764
+ "<en-font-72>": 594,
765
+ "<en-font-73>": 595,
766
+ "<en-font-74>": 596,
767
+ "<en-font-75>": 597,
768
+ "<en-font-76>": 598,
769
+ "<en-font-77>": 599,
770
+ "<en-font-78>": 600,
771
+ "<en-font-79>": 601,
772
+ "<en-font-7>": 529,
773
+ "<en-font-80>": 602,
774
+ "<en-font-81>": 603,
775
+ "<en-font-82>": 604,
776
+ "<en-font-83>": 605,
777
+ "<en-font-84>": 606,
778
+ "<en-font-85>": 607,
779
+ "<en-font-86>": 608,
780
+ "<en-font-87>": 609,
781
+ "<en-font-88>": 610,
782
+ "<en-font-89>": 611,
783
+ "<en-font-8>": 530,
784
+ "<en-font-90>": 612,
785
+ "<en-font-91>": 613,
786
+ "<en-font-92>": 614,
787
+ "<en-font-93>": 615,
788
+ "<en-font-94>": 616,
789
+ "<en-font-95>": 617,
790
+ "<en-font-96>": 618,
791
+ "<en-font-97>": 619,
792
+ "<en-font-98>": 620,
793
+ "<en-font-99>": 621,
794
+ "<en-font-9>": 531,
795
+ "<extra_id_0>": 259,
796
+ "<extra_id_100>": 359,
797
+ "<extra_id_101>": 360,
798
+ "<extra_id_102>": 361,
799
+ "<extra_id_103>": 362,
800
+ "<extra_id_104>": 363,
801
+ "<extra_id_105>": 364,
802
+ "<extra_id_106>": 365,
803
+ "<extra_id_107>": 366,
804
+ "<extra_id_108>": 367,
805
+ "<extra_id_109>": 368,
806
+ "<extra_id_10>": 269,
807
+ "<extra_id_110>": 369,
808
+ "<extra_id_111>": 370,
809
+ "<extra_id_112>": 371,
810
+ "<extra_id_113>": 372,
811
+ "<extra_id_114>": 373,
812
+ "<extra_id_115>": 374,
813
+ "<extra_id_116>": 375,
814
+ "<extra_id_117>": 376,
815
+ "<extra_id_118>": 377,
816
+ "<extra_id_119>": 378,
817
+ "<extra_id_11>": 270,
818
+ "<extra_id_120>": 379,
819
+ "<extra_id_121>": 380,
820
+ "<extra_id_122>": 381,
821
+ "<extra_id_123>": 382,
822
+ "<extra_id_124>": 383,
823
+ "<extra_id_12>": 271,
824
+ "<extra_id_13>": 272,
825
+ "<extra_id_14>": 273,
826
+ "<extra_id_15>": 274,
827
+ "<extra_id_16>": 275,
828
+ "<extra_id_17>": 276,
829
+ "<extra_id_18>": 277,
830
+ "<extra_id_19>": 278,
831
+ "<extra_id_1>": 260,
832
+ "<extra_id_20>": 279,
833
+ "<extra_id_21>": 280,
834
+ "<extra_id_22>": 281,
835
+ "<extra_id_23>": 282,
836
+ "<extra_id_24>": 283,
837
+ "<extra_id_25>": 284,
838
+ "<extra_id_26>": 285,
839
+ "<extra_id_27>": 286,
840
+ "<extra_id_28>": 287,
841
+ "<extra_id_29>": 288,
842
+ "<extra_id_2>": 261,
843
+ "<extra_id_30>": 289,
844
+ "<extra_id_31>": 290,
845
+ "<extra_id_32>": 291,
846
+ "<extra_id_33>": 292,
847
+ "<extra_id_34>": 293,
848
+ "<extra_id_35>": 294,
849
+ "<extra_id_36>": 295,
850
+ "<extra_id_37>": 296,
851
+ "<extra_id_38>": 297,
852
+ "<extra_id_39>": 298,
853
+ "<extra_id_3>": 262,
854
+ "<extra_id_40>": 299,
855
+ "<extra_id_41>": 300,
856
+ "<extra_id_42>": 301,
857
+ "<extra_id_43>": 302,
858
+ "<extra_id_44>": 303,
859
+ "<extra_id_45>": 304,
860
+ "<extra_id_46>": 305,
861
+ "<extra_id_47>": 306,
862
+ "<extra_id_48>": 307,
863
+ "<extra_id_49>": 308,
864
+ "<extra_id_4>": 263,
865
+ "<extra_id_50>": 309,
866
+ "<extra_id_51>": 310,
867
+ "<extra_id_52>": 311,
868
+ "<extra_id_53>": 312,
869
+ "<extra_id_54>": 313,
870
+ "<extra_id_55>": 314,
871
+ "<extra_id_56>": 315,
872
+ "<extra_id_57>": 316,
873
+ "<extra_id_58>": 317,
874
+ "<extra_id_59>": 318,
875
+ "<extra_id_5>": 264,
876
+ "<extra_id_60>": 319,
877
+ "<extra_id_61>": 320,
878
+ "<extra_id_62>": 321,
879
+ "<extra_id_63>": 322,
880
+ "<extra_id_64>": 323,
881
+ "<extra_id_65>": 324,
882
+ "<extra_id_66>": 325,
883
+ "<extra_id_67>": 326,
884
+ "<extra_id_68>": 327,
885
+ "<extra_id_69>": 328,
886
+ "<extra_id_6>": 265,
887
+ "<extra_id_70>": 329,
888
+ "<extra_id_71>": 330,
889
+ "<extra_id_72>": 331,
890
+ "<extra_id_73>": 332,
891
+ "<extra_id_74>": 333,
892
+ "<extra_id_75>": 334,
893
+ "<extra_id_76>": 335,
894
+ "<extra_id_77>": 336,
895
+ "<extra_id_78>": 337,
896
+ "<extra_id_79>": 338,
897
+ "<extra_id_7>": 266,
898
+ "<extra_id_80>": 339,
899
+ "<extra_id_81>": 340,
900
+ "<extra_id_82>": 341,
901
+ "<extra_id_83>": 342,
902
+ "<extra_id_84>": 343,
903
+ "<extra_id_85>": 344,
904
+ "<extra_id_86>": 345,
905
+ "<extra_id_87>": 346,
906
+ "<extra_id_88>": 347,
907
+ "<extra_id_89>": 348,
908
+ "<extra_id_8>": 267,
909
+ "<extra_id_90>": 349,
910
+ "<extra_id_91>": 350,
911
+ "<extra_id_92>": 351,
912
+ "<extra_id_93>": 352,
913
+ "<extra_id_94>": 353,
914
+ "<extra_id_95>": 354,
915
+ "<extra_id_96>": 355,
916
+ "<extra_id_97>": 356,
917
+ "<extra_id_98>": 357,
918
+ "<extra_id_99>": 358,
919
+ "<extra_id_9>": 268,
920
+ "<jp-font-0>": 1177,
921
+ "<jp-font-100>": 1277,
922
+ "<jp-font-101>": 1278,
923
+ "<jp-font-102>": 1279,
924
+ "<jp-font-103>": 1280,
925
+ "<jp-font-104>": 1281,
926
+ "<jp-font-105>": 1282,
927
+ "<jp-font-106>": 1283,
928
+ "<jp-font-107>": 1284,
929
+ "<jp-font-108>": 1285,
930
+ "<jp-font-109>": 1286,
931
+ "<jp-font-10>": 1187,
932
+ "<jp-font-110>": 1287,
933
+ "<jp-font-111>": 1288,
934
+ "<jp-font-112>": 1289,
935
+ "<jp-font-113>": 1290,
936
+ "<jp-font-114>": 1291,
937
+ "<jp-font-115>": 1292,
938
+ "<jp-font-116>": 1293,
939
+ "<jp-font-117>": 1294,
940
+ "<jp-font-118>": 1295,
941
+ "<jp-font-119>": 1296,
942
+ "<jp-font-11>": 1188,
943
+ "<jp-font-120>": 1297,
944
+ "<jp-font-121>": 1298,
945
+ "<jp-font-122>": 1299,
946
+ "<jp-font-123>": 1300,
947
+ "<jp-font-124>": 1301,
948
+ "<jp-font-125>": 1302,
949
+ "<jp-font-126>": 1303,
950
+ "<jp-font-127>": 1304,
951
+ "<jp-font-128>": 1305,
952
+ "<jp-font-129>": 1306,
953
+ "<jp-font-12>": 1189,
954
+ "<jp-font-130>": 1307,
955
+ "<jp-font-131>": 1308,
956
+ "<jp-font-132>": 1309,
957
+ "<jp-font-133>": 1310,
958
+ "<jp-font-134>": 1311,
959
+ "<jp-font-135>": 1312,
960
+ "<jp-font-136>": 1313,
961
+ "<jp-font-137>": 1314,
962
+ "<jp-font-138>": 1315,
963
+ "<jp-font-139>": 1316,
964
+ "<jp-font-13>": 1190,
965
+ "<jp-font-140>": 1317,
966
+ "<jp-font-141>": 1318,
967
+ "<jp-font-142>": 1319,
968
+ "<jp-font-143>": 1320,
969
+ "<jp-font-144>": 1321,
970
+ "<jp-font-145>": 1322,
971
+ "<jp-font-146>": 1323,
972
+ "<jp-font-147>": 1324,
973
+ "<jp-font-148>": 1325,
974
+ "<jp-font-149>": 1326,
975
+ "<jp-font-14>": 1191,
976
+ "<jp-font-150>": 1327,
977
+ "<jp-font-151>": 1328,
978
+ "<jp-font-152>": 1329,
979
+ "<jp-font-153>": 1330,
980
+ "<jp-font-154>": 1331,
981
+ "<jp-font-155>": 1332,
982
+ "<jp-font-156>": 1333,
983
+ "<jp-font-157>": 1334,
984
+ "<jp-font-158>": 1335,
985
+ "<jp-font-159>": 1336,
986
+ "<jp-font-15>": 1192,
987
+ "<jp-font-160>": 1337,
988
+ "<jp-font-161>": 1338,
989
+ "<jp-font-162>": 1339,
990
+ "<jp-font-163>": 1340,
991
+ "<jp-font-164>": 1341,
992
+ "<jp-font-165>": 1342,
993
+ "<jp-font-166>": 1343,
994
+ "<jp-font-167>": 1344,
995
+ "<jp-font-168>": 1345,
996
+ "<jp-font-169>": 1346,
997
+ "<jp-font-16>": 1193,
998
+ "<jp-font-170>": 1347,
999
+ "<jp-font-171>": 1348,
1000
+ "<jp-font-172>": 1349,
1001
+ "<jp-font-173>": 1350,
1002
+ "<jp-font-174>": 1351,
1003
+ "<jp-font-175>": 1352,
1004
+ "<jp-font-176>": 1353,
1005
+ "<jp-font-177>": 1354,
1006
+ "<jp-font-178>": 1355,
1007
+ "<jp-font-179>": 1356,
1008
+ "<jp-font-17>": 1194,
1009
+ "<jp-font-180>": 1357,
1010
+ "<jp-font-181>": 1358,
1011
+ "<jp-font-182>": 1359,
1012
+ "<jp-font-183>": 1360,
1013
+ "<jp-font-184>": 1361,
1014
+ "<jp-font-185>": 1362,
1015
+ "<jp-font-186>": 1363,
1016
+ "<jp-font-187>": 1364,
1017
+ "<jp-font-188>": 1365,
1018
+ "<jp-font-189>": 1366,
1019
+ "<jp-font-18>": 1195,
1020
+ "<jp-font-190>": 1367,
1021
+ "<jp-font-191>": 1368,
1022
+ "<jp-font-192>": 1369,
1023
+ "<jp-font-193>": 1370,
1024
+ "<jp-font-194>": 1371,
1025
+ "<jp-font-195>": 1372,
1026
+ "<jp-font-196>": 1373,
1027
+ "<jp-font-197>": 1374,
1028
+ "<jp-font-198>": 1375,
1029
+ "<jp-font-199>": 1376,
1030
+ "<jp-font-19>": 1196,
1031
+ "<jp-font-1>": 1178,
1032
+ "<jp-font-200>": 1377,
1033
+ "<jp-font-201>": 1378,
1034
+ "<jp-font-202>": 1379,
1035
+ "<jp-font-203>": 1380,
1036
+ "<jp-font-204>": 1381,
1037
+ "<jp-font-205>": 1382,
1038
+ "<jp-font-206>": 1383,
1039
+ "<jp-font-207>": 1384,
1040
+ "<jp-font-208>": 1385,
1041
+ "<jp-font-209>": 1386,
1042
+ "<jp-font-20>": 1197,
1043
+ "<jp-font-210>": 1387,
1044
+ "<jp-font-21>": 1198,
1045
+ "<jp-font-22>": 1199,
1046
+ "<jp-font-23>": 1200,
1047
+ "<jp-font-24>": 1201,
1048
+ "<jp-font-25>": 1202,
1049
+ "<jp-font-26>": 1203,
1050
+ "<jp-font-27>": 1204,
1051
+ "<jp-font-28>": 1205,
1052
+ "<jp-font-29>": 1206,
1053
+ "<jp-font-2>": 1179,
1054
+ "<jp-font-30>": 1207,
1055
+ "<jp-font-31>": 1208,
1056
+ "<jp-font-32>": 1209,
1057
+ "<jp-font-33>": 1210,
1058
+ "<jp-font-34>": 1211,
1059
+ "<jp-font-35>": 1212,
1060
+ "<jp-font-36>": 1213,
1061
+ "<jp-font-37>": 1214,
1062
+ "<jp-font-38>": 1215,
1063
+ "<jp-font-39>": 1216,
1064
+ "<jp-font-3>": 1180,
1065
+ "<jp-font-40>": 1217,
1066
+ "<jp-font-41>": 1218,
1067
+ "<jp-font-42>": 1219,
1068
+ "<jp-font-43>": 1220,
1069
+ "<jp-font-44>": 1221,
1070
+ "<jp-font-45>": 1222,
1071
+ "<jp-font-46>": 1223,
1072
+ "<jp-font-47>": 1224,
1073
+ "<jp-font-48>": 1225,
1074
+ "<jp-font-49>": 1226,
1075
+ "<jp-font-4>": 1181,
1076
+ "<jp-font-50>": 1227,
1077
+ "<jp-font-51>": 1228,
1078
+ "<jp-font-52>": 1229,
1079
+ "<jp-font-53>": 1230,
1080
+ "<jp-font-54>": 1231,
1081
+ "<jp-font-55>": 1232,
1082
+ "<jp-font-56>": 1233,
1083
+ "<jp-font-57>": 1234,
1084
+ "<jp-font-58>": 1235,
1085
+ "<jp-font-59>": 1236,
1086
+ "<jp-font-5>": 1182,
1087
+ "<jp-font-60>": 1237,
1088
+ "<jp-font-61>": 1238,
1089
+ "<jp-font-62>": 1239,
1090
+ "<jp-font-63>": 1240,
1091
+ "<jp-font-64>": 1241,
1092
+ "<jp-font-65>": 1242,
1093
+ "<jp-font-66>": 1243,
1094
+ "<jp-font-67>": 1244,
1095
+ "<jp-font-68>": 1245,
1096
+ "<jp-font-69>": 1246,
1097
+ "<jp-font-6>": 1183,
1098
+ "<jp-font-70>": 1247,
1099
+ "<jp-font-71>": 1248,
1100
+ "<jp-font-72>": 1249,
1101
+ "<jp-font-73>": 1250,
1102
+ "<jp-font-74>": 1251,
1103
+ "<jp-font-75>": 1252,
1104
+ "<jp-font-76>": 1253,
1105
+ "<jp-font-77>": 1254,
1106
+ "<jp-font-78>": 1255,
1107
+ "<jp-font-79>": 1256,
1108
+ "<jp-font-7>": 1184,
1109
+ "<jp-font-80>": 1257,
1110
+ "<jp-font-81>": 1258,
1111
+ "<jp-font-82>": 1259,
1112
+ "<jp-font-83>": 1260,
1113
+ "<jp-font-84>": 1261,
1114
+ "<jp-font-85>": 1262,
1115
+ "<jp-font-86>": 1263,
1116
+ "<jp-font-87>": 1264,
1117
+ "<jp-font-88>": 1265,
1118
+ "<jp-font-89>": 1266,
1119
+ "<jp-font-8>": 1185,
1120
+ "<jp-font-90>": 1267,
1121
+ "<jp-font-91>": 1268,
1122
+ "<jp-font-92>": 1269,
1123
+ "<jp-font-93>": 1270,
1124
+ "<jp-font-94>": 1271,
1125
+ "<jp-font-95>": 1272,
1126
+ "<jp-font-96>": 1273,
1127
+ "<jp-font-97>": 1274,
1128
+ "<jp-font-98>": 1275,
1129
+ "<jp-font-99>": 1276,
1130
+ "<jp-font-9>": 1186,
1131
+ "<kr-font-0>": 1388,
1132
+ "<kr-font-100>": 1488,
1133
+ "<kr-font-101>": 1489,
1134
+ "<kr-font-102>": 1490,
1135
+ "<kr-font-103>": 1491,
1136
+ "<kr-font-104>": 1492,
1137
+ "<kr-font-105>": 1493,
1138
+ "<kr-font-106>": 1494,
1139
+ "<kr-font-107>": 1495,
1140
+ "<kr-font-108>": 1496,
1141
+ "<kr-font-109>": 1497,
1142
+ "<kr-font-10>": 1398,
1143
+ "<kr-font-110>": 1498,
1144
+ "<kr-font-111>": 1499,
1145
+ "<kr-font-112>": 1500,
1146
+ "<kr-font-113>": 1501,
1147
+ "<kr-font-114>": 1502,
1148
+ "<kr-font-115>": 1503,
1149
+ "<kr-font-116>": 1504,
1150
+ "<kr-font-117>": 1505,
1151
+ "<kr-font-118>": 1506,
1152
+ "<kr-font-119>": 1507,
1153
+ "<kr-font-11>": 1399,
1154
+ "<kr-font-120>": 1508,
1155
+ "<kr-font-121>": 1509,
1156
+ "<kr-font-12>": 1400,
1157
+ "<kr-font-13>": 1401,
1158
+ "<kr-font-14>": 1402,
1159
+ "<kr-font-15>": 1403,
1160
+ "<kr-font-16>": 1404,
1161
+ "<kr-font-17>": 1405,
1162
+ "<kr-font-18>": 1406,
1163
+ "<kr-font-19>": 1407,
1164
+ "<kr-font-1>": 1389,
1165
+ "<kr-font-20>": 1408,
1166
+ "<kr-font-21>": 1409,
1167
+ "<kr-font-22>": 1410,
1168
+ "<kr-font-23>": 1411,
1169
+ "<kr-font-24>": 1412,
1170
+ "<kr-font-25>": 1413,
1171
+ "<kr-font-26>": 1414,
1172
+ "<kr-font-27>": 1415,
1173
+ "<kr-font-28>": 1416,
1174
+ "<kr-font-29>": 1417,
1175
+ "<kr-font-2>": 1390,
1176
+ "<kr-font-30>": 1418,
1177
+ "<kr-font-31>": 1419,
1178
+ "<kr-font-32>": 1420,
1179
+ "<kr-font-33>": 1421,
1180
+ "<kr-font-34>": 1422,
1181
+ "<kr-font-35>": 1423,
1182
+ "<kr-font-36>": 1424,
1183
+ "<kr-font-37>": 1425,
1184
+ "<kr-font-38>": 1426,
1185
+ "<kr-font-39>": 1427,
1186
+ "<kr-font-3>": 1391,
1187
+ "<kr-font-40>": 1428,
1188
+ "<kr-font-41>": 1429,
1189
+ "<kr-font-42>": 1430,
1190
+ "<kr-font-43>": 1431,
1191
+ "<kr-font-44>": 1432,
1192
+ "<kr-font-45>": 1433,
1193
+ "<kr-font-46>": 1434,
1194
+ "<kr-font-47>": 1435,
1195
+ "<kr-font-48>": 1436,
1196
+ "<kr-font-49>": 1437,
1197
+ "<kr-font-4>": 1392,
1198
+ "<kr-font-50>": 1438,
1199
+ "<kr-font-51>": 1439,
1200
+ "<kr-font-52>": 1440,
1201
+ "<kr-font-53>": 1441,
1202
+ "<kr-font-54>": 1442,
1203
+ "<kr-font-55>": 1443,
1204
+ "<kr-font-56>": 1444,
1205
+ "<kr-font-57>": 1445,
1206
+ "<kr-font-58>": 1446,
1207
+ "<kr-font-59>": 1447,
1208
+ "<kr-font-5>": 1393,
1209
+ "<kr-font-60>": 1448,
1210
+ "<kr-font-61>": 1449,
1211
+ "<kr-font-62>": 1450,
1212
+ "<kr-font-63>": 1451,
1213
+ "<kr-font-64>": 1452,
1214
+ "<kr-font-65>": 1453,
1215
+ "<kr-font-66>": 1454,
1216
+ "<kr-font-67>": 1455,
1217
+ "<kr-font-68>": 1456,
1218
+ "<kr-font-69>": 1457,
1219
+ "<kr-font-6>": 1394,
1220
+ "<kr-font-70>": 1458,
1221
+ "<kr-font-71>": 1459,
1222
+ "<kr-font-72>": 1460,
1223
+ "<kr-font-73>": 1461,
1224
+ "<kr-font-74>": 1462,
1225
+ "<kr-font-75>": 1463,
1226
+ "<kr-font-76>": 1464,
1227
+ "<kr-font-77>": 1465,
1228
+ "<kr-font-78>": 1466,
1229
+ "<kr-font-79>": 1467,
1230
+ "<kr-font-7>": 1395,
1231
+ "<kr-font-80>": 1468,
1232
+ "<kr-font-81>": 1469,
1233
+ "<kr-font-82>": 1470,
1234
+ "<kr-font-83>": 1471,
1235
+ "<kr-font-84>": 1472,
1236
+ "<kr-font-85>": 1473,
1237
+ "<kr-font-86>": 1474,
1238
+ "<kr-font-87>": 1475,
1239
+ "<kr-font-88>": 1476,
1240
+ "<kr-font-89>": 1477,
1241
+ "<kr-font-8>": 1396,
1242
+ "<kr-font-90>": 1478,
1243
+ "<kr-font-91>": 1479,
1244
+ "<kr-font-92>": 1480,
1245
+ "<kr-font-93>": 1481,
1246
+ "<kr-font-94>": 1482,
1247
+ "<kr-font-95>": 1483,
1248
+ "<kr-font-96>": 1484,
1249
+ "<kr-font-97>": 1485,
1250
+ "<kr-font-98>": 1486,
1251
+ "<kr-font-99>": 1487,
1252
+ "<kr-font-9>": 1397
1253
+ }
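
The id layout above matches ByT5's vocabulary convention: ids 0-2 are the special tokens, ids 3-258 cover the 256 raw bytes, so added tokens begin at 259 ("<extra_id_0>") with the glyph/font tokens following. A minimal sketch of checking a few of these ids, assuming this map belongs to the ByT5 tokenizer_2 declared in model_index.json (the filename header for this map falls outside this excerpt) and using a placeholder repo id:

    # Sketch only: "<repo-id>" is a placeholder, and the assumption that this map
    # is tokenizer_2's added_tokens.json is not stated in the diff itself.
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("<repo-id>", subfolder="tokenizer_2")
    assert tok.convert_tokens_to_ids("<extra_id_0>") == 259
    assert tok.convert_tokens_to_ids("<en-font-1>") == 523
    assert tok.convert_tokens_to_ids("<kr-font-9>") == 1397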
tokenizer_2/special_tokens_map.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_2/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
transformer/config.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "_class_name": "HunyuanVideo15Transformer3DModel",
+ "_diffusers_version": "0.36.0.dev0",
+ "attention_head_dim": 128,
+ "image_embed_dim": 1152,
+ "in_channels": 65,
+ "mlp_ratio": 4.0,
+ "num_attention_heads": 16,
+ "num_layers": 54,
+ "num_refiner_layers": 2,
+ "out_channels": 32,
+ "patch_size": 1,
+ "patch_size_t": 1,
+ "qk_norm": "rms_norm",
+ "rope_axes_dim": [
+ 16,
+ 56,
+ 56
+ ],
+ "rope_theta": 256.0,
+ "target_size": 640,
+ "task_type": "i2v",
+ "text_embed_2_dim": 1472,
+ "text_embed_dim": 3584,
+ "use_meanflow": false
+ }
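
Two arithmetic sanity checks fall out of this config: the attention width is num_attention_heads x attention_head_dim = 16 x 128 = 2048, and the three rope_axes_dim entries (16 + 56 + 56) sum to the 128-dim head, as expected when rotary embeddings are split across the temporal and two spatial axes. A small sketch using only the values above:

    import json

    # Consistency checks derived purely from transformer/config.json.
    cfg = json.load(open("transformer/config.json"))
    assert cfg["num_attention_heads"] * cfg["attention_head_dim"] == 2048
    assert sum(cfg["rope_axes_dim"]) == cfg["attention_head_dim"]  # 16+56+56 == 128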
transformer/diffusion_pytorch_model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bae084552cbd9e7e8dafabb5ee70eb4fa79f9736a53d7c18e68d5e387d46444
+ size 9993142960
transformer/diffusion_pytorch_model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b037c84e046bafadb34d89082181f3ddfb7766ecaf9b5f6519d8caf3edf6ab6
+ size 9986577616
transformer/diffusion_pytorch_model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0245cb73b5ad1a4c7f82ef80d9fcd0702097fd7364fab93dbd6c66bbb4171aa4
+ size 9986627192
transformer/diffusion_pytorch_model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b255fcd8b5313bf71b29951c9f48b0af59dede83b694094946d68a2c5f4b9bf
+ size 3340295200
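
The four LFS pointers above put the sharded transformer at about 33.3 GB on disk:

    # Sum of the shard sizes recorded in the pointer files above.
    sizes = [9993142960, 9986577616, 9986627192, 3340295200]
    print(sum(sizes))            # 33306642968 bytes, ~33.3 GB
    # If the shards hold fp32 weights (4 bytes/param -- an assumption, since the
    # pointers do not record a dtype), that is roughly 8.3B parameters.
    print(sum(sizes) / 4 / 1e9)  # ~8.33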
transformer/diffusion_pytorch_model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
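
Although the index diff is not rendered, safetensors index files follow a standard shape: a metadata block recording the total tensor byte count plus a weight_map from parameter name to shard file. A hypothetical illustration (the parameter name is made up; only the layout is standard):

    # Illustrative only: "some.param.weight" is a placeholder key, and total_size
    # is approximated by the shard sizes summed above.
    index = {
        "metadata": {"total_size": 33306642968},
        "weight_map": {
            "some.param.weight": "diffusion_pytorch_model-00001-of-00004.safetensors",
        },
    }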
 
vae/config.json ADDED
@@ -0,0 +1,20 @@
+ {
+ "_class_name": "AutoencoderKLHunyuanVideo15",
+ "_diffusers_version": "0.36.0.dev0",
+ "block_out_channels": [
+ 128,
+ 256,
+ 512,
+ 1024,
+ 1024
+ ],
+ "downsample_match_channel": true,
+ "in_channels": 3,
+ "latent_channels": 32,
+ "layers_per_block": 2,
+ "out_channels": 3,
+ "scaling_factor": 1.03682,
+ "spatial_compression_ratio": 16,
+ "temporal_compression_ratio": 4,
+ "upsample_match_channel": true
+ }
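
With spatial_compression_ratio 16 and temporal_compression_ratio 4, this VAE maps pixels to a 32-channel latent grid 16x smaller per spatial side and roughly 4x shorter in time. A sketch of the implied latent shape, assuming the common "(frames - 1) // ratio + 1" convention for causal video VAEs (the config itself does not state the frame formula):

    # Latent geometry implied by vae/config.json for a 121-frame 720x1280 clip.
    latent_channels, sr, tr = 32, 16, 4
    frames, height, width = 121, 720, 1280
    print((latent_channels, (frames - 1) // tr + 1, height // sr, width // sr))
    # -> (32, 31, 45, 80)

The 32 latent channels match the transformer's out_channels of 32, and would also be consistent with its in_channels of 65 if the i2v input concatenates noisy latents, image-condition latents, and a one-channel mask (an inference, not stated in the configs).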
vae/diffusion_pytorch_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d54daf92cdecc2fb4e54bc681443dcd671eaa05cc912aa32cd28f8e7cb8d101
+ size 5042562948