Zery committed
Commit 7b78303 · verified · 1 Parent(s): 5273413

Upload without hidden files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,56 @@
+ ---
+ datasets: boot_osworld_vscode
+ library_name: transformers
+ tags:
+ - generated_from_trainer
+ - R1-V
+ licence: license
+ ---
+
+ # Model Card for None
+
+ This model is a fine-tuned version of [None](https://huggingface.co/None) on the [boot_osworld_vscode](https://huggingface.co/datasets/boot_osworld_vscode) dataset.
+ It has been trained using [TRL](https://github.com/huggingface/trl).
+
+ ## Quick start
+
+ ```python
+ from transformers import pipeline
+
+ question = "If you had a time machine, but could only go to the past or the future once and never return, which would you choose and why?"
+ generator = pipeline("text-generation", model="None", device="cuda")
+ output = generator([{"role": "user", "content": question}], max_new_tokens=128, return_full_text=False)[0]
+ print(output["generated_text"])
+ ```
+
+ ## Training procedure
+
+
+
+
+ This model was trained with SFT.
+
+ ### Framework versions
+
+ - TRL: 0.17.0
+ - Transformers: 4.51.3
+ - Pytorch: 2.6.0
+ - Datasets: 3.5.1
+ - Tokenizers: 0.21.1
+
+ ## Citations
+
+
+
+ Cite TRL as:
+
+ ```bibtex
+ @misc{vonwerra2022trl,
+     title = {{TRL: Transformer Reinforcement Learning}},
+     author = {Leandro von Werra and Younes Belkada and Lewis Tunstall and Edward Beeching and Tristan Thrush and Nathan Lambert and Shengyi Huang and Kashif Rasul and Quentin Gallou{\'e}dec},
+     year = 2020,
+     journal = {GitHub repository},
+     publisher = {GitHub},
+     howpublished = {\url{https://github.com/huggingface/trl}}
+ }
+ ```
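The card above records only that training used TRL's SFT path. As a rough, hypothetical sketch of what such a run could look like with the listed TRL 0.17 API (the base model id, dataset file, and output directory below are placeholders, since the card leaves the base model as None):

```python
# Hypothetical SFT sketch against the TRL 0.17 API listed in the card.
# Not the actual training script from this commit; the base model id,
# dataset file, and output directory are placeholders.
from datasets import load_dataset
from trl import SFTConfig, SFTTrainer

dataset = load_dataset("json", data_files="boot_osworld_vscode.jsonl", split="train")

trainer = SFTTrainer(
    model="Qwen/Qwen2.5-VL-32B-Instruct",  # assumed base; the card says None
    args=SFTConfig(output_dir="sft-output", bf16=True),
    train_dataset=dataset,
)
trainer.train()
```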
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "</tool_call>": 151658,
+   "<tool_call>": 151657,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
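These are Qwen's standard added special tokens (ChatML markers, vision delimiters, FIM and tool-call tokens) pinned to fixed ids at the top of the vocabulary. A quick sanity check that a loaded tokenizer agrees with this file (the repo id is a placeholder, since this page does not name one):

```python
# Check that the tokenizer resolves special tokens to the ids listed above.
# "your-org/your-model" stands in for this repo's actual id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-model")
for token in ("<|im_start|>", "<|im_end|>", "<|vision_start|>", "<|image_pad|>"):
    print(token, tok.convert_tokens_to_ids(token))
# Expected per added_tokens.json: 151644, 151645, 151652, 151655
```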
chat_template.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
+ }
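This is the stock Qwen2.5-VL ChatML-style template: it prepends a default system prompt when none is given, wraps every turn in <|im_start|>…<|im_end|>, and replaces each image or video part with a <|vision_start|><|image_pad|><|vision_end|> (or <|video_pad|>) span. A small sketch of rendering it through the processor (repo id again a placeholder):

```python
# Render the chat template above with AutoProcessor (placeholder repo id).
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("your-org/your-model")
messages = [{
    "role": "user",
    "content": [
        {"type": "image"},
        {"type": "text", "text": "Describe this screenshot."},
    ],
}]
print(processor.apply_chat_template(messages, add_generation_prompt=True))
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# <|vision_start|><|image_pad|><|vision_end|>Describe this screenshot.<|im_end|>
# <|im_start|>assistant
```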
config.json ADDED
@@ -0,0 +1,65 @@
+ {
+   "architectures": [
+     "Qwen2_5_VLForConditionalGeneration"
+   ],
+   "attention_dropout": 0.0,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "image_token_id": 151655,
+   "initializer_range": 0.02,
+   "intermediate_size": 27648,
+   "max_position_embeddings": 128000,
+   "max_window_layers": 64,
+   "model_type": "qwen2_5_vl",
+   "num_attention_heads": 40,
+   "num_hidden_layers": 64,
+   "num_key_value_heads": 8,
+   "pad_token_id": 151643,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 1000000.0,
+   "sliding_window": 32768,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.51.3",
+   "use_cache": true,
+   "use_sliding_window": false,
+   "video_token_id": 151656,
+   "vision_config": {
+     "depth": 32,
+     "fullatt_block_indexes": [
+       7,
+       15,
+       23,
+       31
+     ],
+     "hidden_act": "silu",
+     "hidden_size": 1280,
+     "in_channels": 3,
+     "in_chans": 3,
+     "intermediate_size": 3456,
+     "model_type": "qwen2_5_vl",
+     "num_heads": 16,
+     "out_hidden_size": 5120,
+     "patch_size": 14,
+     "spatial_merge_size": 2,
+     "spatial_patch_size": 14,
+     "temporal_patch_size": 2,
+     "tokens_per_second": 2,
+     "torch_dtype": "bfloat16",
+     "window_size": 112
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652,
+   "vision_token_id": 151654,
+   "vocab_size": 152064
+ }
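The dimensions here (64 decoder layers, hidden size 5120, 40 attention heads with 8 KV heads, a 32-block vision tower with windowed attention) are those of Qwen2.5-VL at the 32B scale. A minimal loading sketch for such a checkpoint, with the repo id as a placeholder:

```python
# Minimal loading sketch for the Qwen2.5-VL checkpoint configured above.
# "your-org/your-model" is a placeholder for this repo's id.
import torch
from transformers import AutoProcessor, Qwen2_5_VLForConditionalGeneration

model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
    "your-org/your-model",
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16"
    device_map="auto",           # the 14 bf16 shards total ~67 GB
)
processor = AutoProcessor.from_pretrained("your-org/your-model")
```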
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "attn_implementation": "flash_attention_2",
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "pad_token_id": 151643,
+   "repetition_penalty": 1.05,
+   "temperature": 1e-06,
+   "transformers_version": "4.51.3",
+   "use_cache": false
+ }
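Note that do_sample is true while temperature is 1e-06, which makes the softmax nearly one-hot, so decoding is effectively greedy (softened only by the 1.05 repetition penalty). Continuing the loading sketch above, generation picks these defaults up automatically:

```python
# Generate with the repo defaults from generation_config.json (effectively
# greedy decoding); continues the `model`/`processor` sketch above.
inputs = processor.apply_chat_template(
    [{"role": "user", "content": [{"type": "text", "text": "Hello!"}]}],
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
).to(model.device)

out = model.generate(**inputs, max_new_tokens=128)
print(processor.batch_decode(out[:, inputs["input_ids"].shape[1]:],
                             skip_special_tokens=True)[0])
```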
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa3c5530a78787633441e70d9d823f203ae4dfbe24af1d8178376dbf55c0ec92
+ size 4958700928
model-00002-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4465fdc5a5864493dbc1d48b358710339041afb5120384e4700b18efb7d0923
+ size 4928488272
model-00003-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb5eff31953a2988a2ea2fd2a585ac53db5dc9d66151f0dd59b8d1a846856836
+ size 4876059368
model-00004-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a83110daca67bf50de4fa12ea14c61b49110aa5769eb6d6c5f82583ab3da51a
+ size 4876059416
model-00005-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1819e41d0e4823525d961f4a83826711eec903df0921d43fc30d3db2876d66f0
+ size 4876059416
model-00006-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:98931f3dc22162514662b32c54e3026fe4c576debf7996faf61396dbf8b1a125
+ size 4876059416
model-00007-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:044f0bddf632aa6ffdb195e8a162d20b48018dd407e97a0ea65d29b5ff62783e
+ size 4876059416
model-00008-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:699e722df8bf352ba125a975afbfc82a5c56f9c359df5b7c7118703c2e9a6f47
+ size 4876059416
model-00009-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8b3e93677c40a89d7587ebcb9b307b20d953cccdd3e4ddce8fed0acc4e1e554
+ size 4876059416
model-00010-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d3b37a098a1954a8c0a481baa789ecacc2fadf192994365f88b81fc6481c9ce
+ size 4876059416
model-00011-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d56f2cb516152302ae143ca91f278eb7001c50a47d46c037acd05c39e9260df
+ size 4876059416
model-00012-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8e8a061d685574cc35f68a365512c283007944a23e72d0636b97d6571461aaf
+ size 4876059416
model-00013-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9de3febd28d4a7ef8b370b4cfed8f2d924f4de308d69739ef9f2528dd3319bc0
+ size 4876059416
model-00014-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0fb43cc12853b36230573c5622fee7a448e9c8f9d0d0db4422bb52b11e51e283
+ size 3381725320
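Each shard diff above is a Git LFS pointer file (spec version, sha256 oid, byte size) rather than the weights themselves. A small sketch for verifying a downloaded shard against its pointer:

```python
# Verify a downloaded shard against the oid/size in its LFS pointer above.
import hashlib
from pathlib import Path

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk), b""):
            h.update(block)
    return h.hexdigest()

shard = "model-00014-of-00014.safetensors"
print(Path(shard).stat().st_size)  # expect 3381725320 per the pointer
print(sha256_of(shard))  # expect 0fb43cc12853b36230573c5622fee7a448e9c8f9d0d0db4422bb52b11e51e283
```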
model.safetensors.index.json ADDED
@@ -0,0 +1,1168 @@
+ {
+   "metadata": {
+     "total_size": 66905436672
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00014-of-00014.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00003-of-00014.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.10.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+     "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.12.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.12.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.13.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.13.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.13.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.13.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.13.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.17.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.17.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.18.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.18.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.18.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.18.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.18.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00002-of-00014.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.2.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.2.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+     "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.20.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.22.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.22.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+     "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.23.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.23.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.23.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.23.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.23.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.23.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.23.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.23.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.24.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.27.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.27.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.27.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.27.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.27.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+     "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "model.layers.28.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.28.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.28.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.28.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.28.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.28.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.28.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.28.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.28.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.28.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.28.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.29.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.3.input_layernorm.weight": "model-00002-of-00014.safetensors",
+     "model.layers.3.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.3.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.3.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+     "model.layers.3.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.3.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.3.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.3.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.3.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.30.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.30.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.30.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.30.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.30.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.30.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.30.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.31.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.31.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.31.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.31.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.32.input_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.32.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.32.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.32.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.32.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.32.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.32.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.32.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.32.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.32.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.32.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+     "model.layers.32.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "model.layers.33.input_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.33.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.33.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.33.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.33.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.33.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.33.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.33.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.33.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.33.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.33.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.33.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.input_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.34.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.34.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.34.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.34.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.input_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.35.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.35.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.35.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.35.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.input_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.36.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.36.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.36.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.36.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.37.input_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.37.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.37.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.37.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.37.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.37.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.37.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.37.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.37.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.37.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.37.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+     "model.layers.37.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+     "model.layers.38.input_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.38.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.38.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.38.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.38.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.38.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.38.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.38.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.38.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.38.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.38.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.38.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.input_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.39.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.39.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.39.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.39.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.4.input_layernorm.weight": "model-00002-of-00014.safetensors",
+     "model.layers.4.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.4.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+     "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.40.input_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.40.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.40.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.40.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.40.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.40.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.40.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.40.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.40.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.40.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.40.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.40.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.input_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.41.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.41.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.41.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.41.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.42.input_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.42.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.42.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.42.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.42.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.42.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.42.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.42.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.42.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.42.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.42.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+     "model.layers.42.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+     "model.layers.43.input_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.43.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.43.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.43.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.43.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.43.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.43.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.43.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.43.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.43.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.43.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.43.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.input_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.44.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.44.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.44.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.44.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.input_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.45.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.45.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.45.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.45.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.input_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.46.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.46.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.46.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.46.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.47.input_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.47.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.47.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.47.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.47.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.47.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.47.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.47.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.47.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.47.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.47.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+     "model.layers.47.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+     "model.layers.48.input_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.48.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.48.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.48.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.48.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.48.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.48.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.48.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.48.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.48.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.48.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.48.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.input_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.49.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.49.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.49.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.49.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.5.input_layernorm.weight": "model-00002-of-00014.safetensors",
+     "model.layers.5.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.5.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+     "model.layers.5.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.5.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
+     "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
+     "model.layers.50.input_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.50.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.50.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.50.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.50.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.50.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.50.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.50.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.50.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.50.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.50.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.50.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.input_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.51.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.51.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.51.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+     "model.layers.51.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+     "model.layers.52.input_layernorm.weight": "model-00012-of-00014.safetensors",
+     "model.layers.52.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
586
+ "model.layers.52.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
587
+ "model.layers.52.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
588
+ "model.layers.52.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
589
+ "model.layers.52.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
590
+ "model.layers.52.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
591
+ "model.layers.52.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
592
+ "model.layers.52.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
593
+ "model.layers.52.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
594
+ "model.layers.52.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
595
+ "model.layers.52.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
596
+ "model.layers.53.input_layernorm.weight": "model-00012-of-00014.safetensors",
597
+ "model.layers.53.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
598
+ "model.layers.53.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
599
+ "model.layers.53.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
600
+ "model.layers.53.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
601
+ "model.layers.53.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
602
+ "model.layers.53.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
603
+ "model.layers.53.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
604
+ "model.layers.53.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
605
+ "model.layers.53.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
606
+ "model.layers.53.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
607
+ "model.layers.53.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
608
+ "model.layers.54.input_layernorm.weight": "model-00012-of-00014.safetensors",
609
+ "model.layers.54.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
610
+ "model.layers.54.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
611
+ "model.layers.54.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
612
+ "model.layers.54.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
613
+ "model.layers.54.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
614
+ "model.layers.54.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
615
+ "model.layers.54.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
616
+ "model.layers.54.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
617
+ "model.layers.54.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
618
+ "model.layers.54.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
619
+ "model.layers.54.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
620
+ "model.layers.55.input_layernorm.weight": "model-00012-of-00014.safetensors",
621
+ "model.layers.55.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
622
+ "model.layers.55.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
623
+ "model.layers.55.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
624
+ "model.layers.55.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
625
+ "model.layers.55.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
626
+ "model.layers.55.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
627
+ "model.layers.55.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
628
+ "model.layers.55.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
629
+ "model.layers.55.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
630
+ "model.layers.55.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
631
+ "model.layers.55.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
632
+ "model.layers.56.input_layernorm.weight": "model-00012-of-00014.safetensors",
633
+ "model.layers.56.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
634
+ "model.layers.56.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
635
+ "model.layers.56.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
636
+ "model.layers.56.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
637
+ "model.layers.56.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
638
+ "model.layers.56.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
639
+ "model.layers.56.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
640
+ "model.layers.56.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
641
+ "model.layers.56.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
642
+ "model.layers.56.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
643
+ "model.layers.56.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
644
+ "model.layers.57.input_layernorm.weight": "model-00013-of-00014.safetensors",
645
+ "model.layers.57.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
646
+ "model.layers.57.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
647
+ "model.layers.57.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
648
+ "model.layers.57.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
649
+ "model.layers.57.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
650
+ "model.layers.57.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
651
+ "model.layers.57.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
652
+ "model.layers.57.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
653
+ "model.layers.57.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
654
+ "model.layers.57.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
655
+ "model.layers.57.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
656
+ "model.layers.58.input_layernorm.weight": "model-00013-of-00014.safetensors",
657
+ "model.layers.58.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
658
+ "model.layers.58.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
659
+ "model.layers.58.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
660
+ "model.layers.58.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
661
+ "model.layers.58.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
662
+ "model.layers.58.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
663
+ "model.layers.58.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
664
+ "model.layers.58.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
665
+ "model.layers.58.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
666
+ "model.layers.58.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
667
+ "model.layers.58.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
668
+ "model.layers.59.input_layernorm.weight": "model-00013-of-00014.safetensors",
669
+ "model.layers.59.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
670
+ "model.layers.59.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
671
+ "model.layers.59.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
672
+ "model.layers.59.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
673
+ "model.layers.59.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
674
+ "model.layers.59.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
675
+ "model.layers.59.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
676
+ "model.layers.59.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
677
+ "model.layers.59.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
678
+ "model.layers.59.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
679
+ "model.layers.59.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
680
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00014.safetensors",
681
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
682
+ "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
683
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
684
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
685
+ "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
686
+ "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
687
+ "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
688
+ "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
689
+ "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
690
+ "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
691
+ "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
692
+ "model.layers.60.input_layernorm.weight": "model-00013-of-00014.safetensors",
693
+ "model.layers.60.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
694
+ "model.layers.60.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
695
+ "model.layers.60.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
696
+ "model.layers.60.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
697
+ "model.layers.60.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
698
+ "model.layers.60.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
699
+ "model.layers.60.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
700
+ "model.layers.60.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
701
+ "model.layers.60.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
702
+ "model.layers.60.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
703
+ "model.layers.60.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
704
+ "model.layers.61.input_layernorm.weight": "model-00013-of-00014.safetensors",
705
+ "model.layers.61.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
706
+ "model.layers.61.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
707
+ "model.layers.61.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
708
+ "model.layers.61.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
709
+ "model.layers.61.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
710
+ "model.layers.61.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
711
+ "model.layers.61.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
712
+ "model.layers.61.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
713
+ "model.layers.61.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
714
+ "model.layers.61.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
715
+ "model.layers.61.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
716
+ "model.layers.62.input_layernorm.weight": "model-00014-of-00014.safetensors",
717
+ "model.layers.62.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
718
+ "model.layers.62.mlp.gate_proj.weight": "model-00014-of-00014.safetensors",
719
+ "model.layers.62.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
720
+ "model.layers.62.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
721
+ "model.layers.62.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
722
+ "model.layers.62.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
723
+ "model.layers.62.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
724
+ "model.layers.62.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
725
+ "model.layers.62.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
726
+ "model.layers.62.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
727
+ "model.layers.62.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
728
+ "model.layers.63.input_layernorm.weight": "model-00014-of-00014.safetensors",
729
+ "model.layers.63.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
730
+ "model.layers.63.mlp.gate_proj.weight": "model-00014-of-00014.safetensors",
731
+ "model.layers.63.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
732
+ "model.layers.63.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
733
+ "model.layers.63.self_attn.k_proj.bias": "model-00014-of-00014.safetensors",
734
+ "model.layers.63.self_attn.k_proj.weight": "model-00014-of-00014.safetensors",
735
+ "model.layers.63.self_attn.o_proj.weight": "model-00014-of-00014.safetensors",
736
+ "model.layers.63.self_attn.q_proj.bias": "model-00014-of-00014.safetensors",
737
+ "model.layers.63.self_attn.q_proj.weight": "model-00014-of-00014.safetensors",
738
+ "model.layers.63.self_attn.v_proj.bias": "model-00014-of-00014.safetensors",
739
+ "model.layers.63.self_attn.v_proj.weight": "model-00014-of-00014.safetensors",
740
+ "model.layers.7.input_layernorm.weight": "model-00003-of-00014.safetensors",
741
+ "model.layers.7.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
742
+ "model.layers.7.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
743
+ "model.layers.7.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
744
+ "model.layers.7.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
745
+ "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
746
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
747
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
748
+ "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
749
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
750
+ "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
751
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
752
+ "model.layers.8.input_layernorm.weight": "model-00003-of-00014.safetensors",
753
+ "model.layers.8.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
754
+ "model.layers.8.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
755
+ "model.layers.8.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
756
+ "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
757
+ "model.layers.8.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
758
+ "model.layers.8.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
759
+ "model.layers.8.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
760
+ "model.layers.8.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
761
+ "model.layers.8.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
762
+ "model.layers.8.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
763
+ "model.layers.8.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
764
+ "model.layers.9.input_layernorm.weight": "model-00003-of-00014.safetensors",
765
+ "model.layers.9.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
766
+ "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
767
+ "model.layers.9.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
768
+ "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
769
+ "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
770
+ "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
771
+ "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
772
+ "model.layers.9.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
773
+ "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
774
+ "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
775
+ "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
776
+ "model.norm.weight": "model-00014-of-00014.safetensors",
777
+ "visual.blocks.0.attn.proj.bias": "model-00001-of-00014.safetensors",
778
+ "visual.blocks.0.attn.proj.weight": "model-00001-of-00014.safetensors",
779
+ "visual.blocks.0.attn.qkv.bias": "model-00001-of-00014.safetensors",
780
+ "visual.blocks.0.attn.qkv.weight": "model-00001-of-00014.safetensors",
781
+ "visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
782
+ "visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
783
+ "visual.blocks.0.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
784
+ "visual.blocks.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
785
+ "visual.blocks.0.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
786
+ "visual.blocks.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
787
+ "visual.blocks.0.norm1.weight": "model-00001-of-00014.safetensors",
788
+ "visual.blocks.0.norm2.weight": "model-00001-of-00014.safetensors",
789
+ "visual.blocks.1.attn.proj.bias": "model-00001-of-00014.safetensors",
790
+ "visual.blocks.1.attn.proj.weight": "model-00001-of-00014.safetensors",
791
+ "visual.blocks.1.attn.qkv.bias": "model-00001-of-00014.safetensors",
792
+ "visual.blocks.1.attn.qkv.weight": "model-00001-of-00014.safetensors",
793
+ "visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
794
+ "visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
795
+ "visual.blocks.1.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
796
+ "visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
797
+ "visual.blocks.1.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
798
+ "visual.blocks.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
799
+ "visual.blocks.1.norm1.weight": "model-00001-of-00014.safetensors",
800
+ "visual.blocks.1.norm2.weight": "model-00001-of-00014.safetensors",
801
+ "visual.blocks.10.attn.proj.bias": "model-00001-of-00014.safetensors",
802
+ "visual.blocks.10.attn.proj.weight": "model-00001-of-00014.safetensors",
803
+ "visual.blocks.10.attn.qkv.bias": "model-00001-of-00014.safetensors",
804
+ "visual.blocks.10.attn.qkv.weight": "model-00001-of-00014.safetensors",
805
+ "visual.blocks.10.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
806
+ "visual.blocks.10.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
807
+ "visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
808
+ "visual.blocks.10.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
809
+ "visual.blocks.10.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
810
+ "visual.blocks.10.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
811
+ "visual.blocks.10.norm1.weight": "model-00001-of-00014.safetensors",
812
+ "visual.blocks.10.norm2.weight": "model-00001-of-00014.safetensors",
813
+ "visual.blocks.11.attn.proj.bias": "model-00001-of-00014.safetensors",
814
+ "visual.blocks.11.attn.proj.weight": "model-00001-of-00014.safetensors",
815
+ "visual.blocks.11.attn.qkv.bias": "model-00001-of-00014.safetensors",
816
+ "visual.blocks.11.attn.qkv.weight": "model-00001-of-00014.safetensors",
817
+ "visual.blocks.11.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
818
+ "visual.blocks.11.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
819
+ "visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
820
+ "visual.blocks.11.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
821
+ "visual.blocks.11.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
822
+ "visual.blocks.11.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
823
+ "visual.blocks.11.norm1.weight": "model-00001-of-00014.safetensors",
824
+ "visual.blocks.11.norm2.weight": "model-00001-of-00014.safetensors",
825
+ "visual.blocks.12.attn.proj.bias": "model-00001-of-00014.safetensors",
826
+ "visual.blocks.12.attn.proj.weight": "model-00001-of-00014.safetensors",
827
+ "visual.blocks.12.attn.qkv.bias": "model-00001-of-00014.safetensors",
828
+ "visual.blocks.12.attn.qkv.weight": "model-00001-of-00014.safetensors",
829
+ "visual.blocks.12.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
830
+ "visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
831
+ "visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
832
+ "visual.blocks.12.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
833
+ "visual.blocks.12.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
834
+ "visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
835
+ "visual.blocks.12.norm1.weight": "model-00001-of-00014.safetensors",
836
+ "visual.blocks.12.norm2.weight": "model-00001-of-00014.safetensors",
837
+ "visual.blocks.13.attn.proj.bias": "model-00001-of-00014.safetensors",
838
+ "visual.blocks.13.attn.proj.weight": "model-00001-of-00014.safetensors",
839
+ "visual.blocks.13.attn.qkv.bias": "model-00001-of-00014.safetensors",
840
+ "visual.blocks.13.attn.qkv.weight": "model-00001-of-00014.safetensors",
841
+ "visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
842
+ "visual.blocks.13.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
843
+ "visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
844
+ "visual.blocks.13.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
845
+ "visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
846
+ "visual.blocks.13.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
847
+ "visual.blocks.13.norm1.weight": "model-00001-of-00014.safetensors",
848
+ "visual.blocks.13.norm2.weight": "model-00001-of-00014.safetensors",
849
+ "visual.blocks.14.attn.proj.bias": "model-00001-of-00014.safetensors",
850
+ "visual.blocks.14.attn.proj.weight": "model-00001-of-00014.safetensors",
851
+ "visual.blocks.14.attn.qkv.bias": "model-00001-of-00014.safetensors",
852
+ "visual.blocks.14.attn.qkv.weight": "model-00001-of-00014.safetensors",
853
+ "visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
854
+ "visual.blocks.14.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
855
+ "visual.blocks.14.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
856
+ "visual.blocks.14.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
857
+ "visual.blocks.14.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
858
+ "visual.blocks.14.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
859
+ "visual.blocks.14.norm1.weight": "model-00001-of-00014.safetensors",
860
+ "visual.blocks.14.norm2.weight": "model-00001-of-00014.safetensors",
861
+ "visual.blocks.15.attn.proj.bias": "model-00001-of-00014.safetensors",
862
+ "visual.blocks.15.attn.proj.weight": "model-00001-of-00014.safetensors",
863
+ "visual.blocks.15.attn.qkv.bias": "model-00001-of-00014.safetensors",
864
+ "visual.blocks.15.attn.qkv.weight": "model-00001-of-00014.safetensors",
865
+ "visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
866
+ "visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
867
+ "visual.blocks.15.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
868
+ "visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
869
+ "visual.blocks.15.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
870
+ "visual.blocks.15.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
871
+ "visual.blocks.15.norm1.weight": "model-00001-of-00014.safetensors",
872
+ "visual.blocks.15.norm2.weight": "model-00001-of-00014.safetensors",
873
+ "visual.blocks.16.attn.proj.bias": "model-00001-of-00014.safetensors",
874
+ "visual.blocks.16.attn.proj.weight": "model-00001-of-00014.safetensors",
875
+ "visual.blocks.16.attn.qkv.bias": "model-00001-of-00014.safetensors",
876
+ "visual.blocks.16.attn.qkv.weight": "model-00001-of-00014.safetensors",
877
+ "visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
878
+ "visual.blocks.16.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
879
+ "visual.blocks.16.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
880
+ "visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
881
+ "visual.blocks.16.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
882
+ "visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
883
+ "visual.blocks.16.norm1.weight": "model-00001-of-00014.safetensors",
884
+ "visual.blocks.16.norm2.weight": "model-00001-of-00014.safetensors",
885
+ "visual.blocks.17.attn.proj.bias": "model-00001-of-00014.safetensors",
886
+ "visual.blocks.17.attn.proj.weight": "model-00001-of-00014.safetensors",
887
+ "visual.blocks.17.attn.qkv.bias": "model-00001-of-00014.safetensors",
888
+ "visual.blocks.17.attn.qkv.weight": "model-00001-of-00014.safetensors",
889
+ "visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
890
+ "visual.blocks.17.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
891
+ "visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
892
+ "visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
893
+ "visual.blocks.17.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
894
+ "visual.blocks.17.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
895
+ "visual.blocks.17.norm1.weight": "model-00001-of-00014.safetensors",
896
+ "visual.blocks.17.norm2.weight": "model-00001-of-00014.safetensors",
897
+ "visual.blocks.18.attn.proj.bias": "model-00001-of-00014.safetensors",
898
+ "visual.blocks.18.attn.proj.weight": "model-00001-of-00014.safetensors",
899
+ "visual.blocks.18.attn.qkv.bias": "model-00001-of-00014.safetensors",
900
+ "visual.blocks.18.attn.qkv.weight": "model-00001-of-00014.safetensors",
901
+ "visual.blocks.18.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
902
+ "visual.blocks.18.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
903
+ "visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
904
+ "visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
905
+ "visual.blocks.18.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
906
+ "visual.blocks.18.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
907
+ "visual.blocks.18.norm1.weight": "model-00001-of-00014.safetensors",
908
+ "visual.blocks.18.norm2.weight": "model-00001-of-00014.safetensors",
909
+ "visual.blocks.19.attn.proj.bias": "model-00001-of-00014.safetensors",
910
+ "visual.blocks.19.attn.proj.weight": "model-00001-of-00014.safetensors",
911
+ "visual.blocks.19.attn.qkv.bias": "model-00001-of-00014.safetensors",
912
+ "visual.blocks.19.attn.qkv.weight": "model-00001-of-00014.safetensors",
913
+ "visual.blocks.19.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
914
+ "visual.blocks.19.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
915
+ "visual.blocks.19.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
916
+ "visual.blocks.19.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
917
+ "visual.blocks.19.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
918
+ "visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
919
+ "visual.blocks.19.norm1.weight": "model-00001-of-00014.safetensors",
920
+ "visual.blocks.19.norm2.weight": "model-00001-of-00014.safetensors",
921
+ "visual.blocks.2.attn.proj.bias": "model-00001-of-00014.safetensors",
922
+ "visual.blocks.2.attn.proj.weight": "model-00001-of-00014.safetensors",
923
+ "visual.blocks.2.attn.qkv.bias": "model-00001-of-00014.safetensors",
924
+ "visual.blocks.2.attn.qkv.weight": "model-00001-of-00014.safetensors",
925
+ "visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
926
+ "visual.blocks.2.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
927
+ "visual.blocks.2.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
928
+ "visual.blocks.2.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
929
+ "visual.blocks.2.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
930
+ "visual.blocks.2.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
931
+ "visual.blocks.2.norm1.weight": "model-00001-of-00014.safetensors",
932
+ "visual.blocks.2.norm2.weight": "model-00001-of-00014.safetensors",
933
+ "visual.blocks.20.attn.proj.bias": "model-00001-of-00014.safetensors",
934
+ "visual.blocks.20.attn.proj.weight": "model-00001-of-00014.safetensors",
935
+ "visual.blocks.20.attn.qkv.bias": "model-00001-of-00014.safetensors",
936
+ "visual.blocks.20.attn.qkv.weight": "model-00001-of-00014.safetensors",
937
+ "visual.blocks.20.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
938
+ "visual.blocks.20.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
939
+ "visual.blocks.20.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
940
+ "visual.blocks.20.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
941
+ "visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
942
+ "visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
943
+ "visual.blocks.20.norm1.weight": "model-00001-of-00014.safetensors",
944
+ "visual.blocks.20.norm2.weight": "model-00001-of-00014.safetensors",
945
+ "visual.blocks.21.attn.proj.bias": "model-00001-of-00014.safetensors",
946
+ "visual.blocks.21.attn.proj.weight": "model-00001-of-00014.safetensors",
947
+ "visual.blocks.21.attn.qkv.bias": "model-00001-of-00014.safetensors",
948
+ "visual.blocks.21.attn.qkv.weight": "model-00001-of-00014.safetensors",
949
+ "visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
950
+ "visual.blocks.21.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
951
+ "visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
952
+ "visual.blocks.21.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
953
+ "visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
954
+ "visual.blocks.21.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
955
+ "visual.blocks.21.norm1.weight": "model-00001-of-00014.safetensors",
956
+ "visual.blocks.21.norm2.weight": "model-00001-of-00014.safetensors",
957
+ "visual.blocks.22.attn.proj.bias": "model-00001-of-00014.safetensors",
958
+ "visual.blocks.22.attn.proj.weight": "model-00001-of-00014.safetensors",
959
+ "visual.blocks.22.attn.qkv.bias": "model-00001-of-00014.safetensors",
960
+ "visual.blocks.22.attn.qkv.weight": "model-00001-of-00014.safetensors",
961
+ "visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
962
+ "visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
963
+ "visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
964
+ "visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
965
+ "visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
966
+ "visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
967
+ "visual.blocks.22.norm1.weight": "model-00001-of-00014.safetensors",
968
+ "visual.blocks.22.norm2.weight": "model-00001-of-00014.safetensors",
969
+ "visual.blocks.23.attn.proj.bias": "model-00001-of-00014.safetensors",
970
+ "visual.blocks.23.attn.proj.weight": "model-00001-of-00014.safetensors",
971
+ "visual.blocks.23.attn.qkv.bias": "model-00001-of-00014.safetensors",
972
+ "visual.blocks.23.attn.qkv.weight": "model-00001-of-00014.safetensors",
973
+ "visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
974
+ "visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
975
+ "visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
976
+ "visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
977
+ "visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
978
+ "visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
979
+ "visual.blocks.23.norm1.weight": "model-00001-of-00014.safetensors",
980
+ "visual.blocks.23.norm2.weight": "model-00001-of-00014.safetensors",
981
+ "visual.blocks.24.attn.proj.bias": "model-00001-of-00014.safetensors",
982
+ "visual.blocks.24.attn.proj.weight": "model-00001-of-00014.safetensors",
983
+ "visual.blocks.24.attn.qkv.bias": "model-00001-of-00014.safetensors",
984
+ "visual.blocks.24.attn.qkv.weight": "model-00001-of-00014.safetensors",
985
+ "visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
986
+ "visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
987
+ "visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
988
+ "visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
989
+ "visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
990
+ "visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
991
+ "visual.blocks.24.norm1.weight": "model-00001-of-00014.safetensors",
992
+ "visual.blocks.24.norm2.weight": "model-00001-of-00014.safetensors",
993
+ "visual.blocks.25.attn.proj.bias": "model-00001-of-00014.safetensors",
994
+ "visual.blocks.25.attn.proj.weight": "model-00001-of-00014.safetensors",
995
+ "visual.blocks.25.attn.qkv.bias": "model-00001-of-00014.safetensors",
996
+ "visual.blocks.25.attn.qkv.weight": "model-00001-of-00014.safetensors",
997
+ "visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
998
+ "visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
999
+ "visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1000
+ "visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1001
+ "visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1002
+ "visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1003
+ "visual.blocks.25.norm1.weight": "model-00001-of-00014.safetensors",
1004
+ "visual.blocks.25.norm2.weight": "model-00001-of-00014.safetensors",
1005
+ "visual.blocks.26.attn.proj.bias": "model-00001-of-00014.safetensors",
1006
+ "visual.blocks.26.attn.proj.weight": "model-00001-of-00014.safetensors",
1007
+ "visual.blocks.26.attn.qkv.bias": "model-00001-of-00014.safetensors",
1008
+ "visual.blocks.26.attn.qkv.weight": "model-00001-of-00014.safetensors",
1009
+ "visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1010
+ "visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1011
+ "visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1012
+ "visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1013
+ "visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1014
+ "visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1015
+ "visual.blocks.26.norm1.weight": "model-00001-of-00014.safetensors",
1016
+ "visual.blocks.26.norm2.weight": "model-00001-of-00014.safetensors",
1017
+ "visual.blocks.27.attn.proj.bias": "model-00001-of-00014.safetensors",
1018
+ "visual.blocks.27.attn.proj.weight": "model-00001-of-00014.safetensors",
1019
+ "visual.blocks.27.attn.qkv.bias": "model-00001-of-00014.safetensors",
1020
+ "visual.blocks.27.attn.qkv.weight": "model-00001-of-00014.safetensors",
1021
+ "visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1022
+ "visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1023
+ "visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1024
+ "visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1025
+ "visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1026
+ "visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1027
+ "visual.blocks.27.norm1.weight": "model-00001-of-00014.safetensors",
1028
+ "visual.blocks.27.norm2.weight": "model-00001-of-00014.safetensors",
1029
+ "visual.blocks.28.attn.proj.bias": "model-00001-of-00014.safetensors",
1030
+ "visual.blocks.28.attn.proj.weight": "model-00001-of-00014.safetensors",
1031
+ "visual.blocks.28.attn.qkv.bias": "model-00001-of-00014.safetensors",
1032
+ "visual.blocks.28.attn.qkv.weight": "model-00001-of-00014.safetensors",
1033
+ "visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1034
+ "visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1035
+ "visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1036
+ "visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1037
+ "visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1038
+ "visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1039
+ "visual.blocks.28.norm1.weight": "model-00001-of-00014.safetensors",
1040
+ "visual.blocks.28.norm2.weight": "model-00001-of-00014.safetensors",
1041
+ "visual.blocks.29.attn.proj.bias": "model-00001-of-00014.safetensors",
1042
+ "visual.blocks.29.attn.proj.weight": "model-00001-of-00014.safetensors",
1043
+ "visual.blocks.29.attn.qkv.bias": "model-00001-of-00014.safetensors",
1044
+ "visual.blocks.29.attn.qkv.weight": "model-00001-of-00014.safetensors",
1045
+ "visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1046
+ "visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1047
+ "visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1048
+ "visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1049
+ "visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1050
+ "visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1051
+ "visual.blocks.29.norm1.weight": "model-00001-of-00014.safetensors",
1052
+ "visual.blocks.29.norm2.weight": "model-00001-of-00014.safetensors",
1053
+ "visual.blocks.3.attn.proj.bias": "model-00001-of-00014.safetensors",
1054
+ "visual.blocks.3.attn.proj.weight": "model-00001-of-00014.safetensors",
1055
+ "visual.blocks.3.attn.qkv.bias": "model-00001-of-00014.safetensors",
1056
+ "visual.blocks.3.attn.qkv.weight": "model-00001-of-00014.safetensors",
1057
+ "visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1058
+ "visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1059
+ "visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1060
+ "visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1061
+ "visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1062
+ "visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1063
+ "visual.blocks.3.norm1.weight": "model-00001-of-00014.safetensors",
1064
+ "visual.blocks.3.norm2.weight": "model-00001-of-00014.safetensors",
1065
+ "visual.blocks.30.attn.proj.bias": "model-00001-of-00014.safetensors",
1066
+ "visual.blocks.30.attn.proj.weight": "model-00001-of-00014.safetensors",
1067
+ "visual.blocks.30.attn.qkv.bias": "model-00001-of-00014.safetensors",
1068
+ "visual.blocks.30.attn.qkv.weight": "model-00001-of-00014.safetensors",
1069
+ "visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1070
+ "visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1071
+ "visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1072
+ "visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1073
+ "visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1074
+ "visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1075
+ "visual.blocks.30.norm1.weight": "model-00001-of-00014.safetensors",
1076
+ "visual.blocks.30.norm2.weight": "model-00001-of-00014.safetensors",
1077
+ "visual.blocks.31.attn.proj.bias": "model-00001-of-00014.safetensors",
1078
+ "visual.blocks.31.attn.proj.weight": "model-00001-of-00014.safetensors",
1079
+ "visual.blocks.31.attn.qkv.bias": "model-00001-of-00014.safetensors",
1080
+ "visual.blocks.31.attn.qkv.weight": "model-00001-of-00014.safetensors",
1081
+ "visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1082
+ "visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1083
+ "visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1084
+ "visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1085
+ "visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1086
+ "visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1087
+ "visual.blocks.31.norm1.weight": "model-00001-of-00014.safetensors",
1088
+ "visual.blocks.31.norm2.weight": "model-00001-of-00014.safetensors",
1089
+ "visual.blocks.4.attn.proj.bias": "model-00001-of-00014.safetensors",
1090
+ "visual.blocks.4.attn.proj.weight": "model-00001-of-00014.safetensors",
1091
+ "visual.blocks.4.attn.qkv.bias": "model-00001-of-00014.safetensors",
1092
+ "visual.blocks.4.attn.qkv.weight": "model-00001-of-00014.safetensors",
1093
+ "visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1094
+ "visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1095
+ "visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1096
+ "visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1097
+ "visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1098
+ "visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1099
+ "visual.blocks.4.norm1.weight": "model-00001-of-00014.safetensors",
1100
+ "visual.blocks.4.norm2.weight": "model-00001-of-00014.safetensors",
1101
+ "visual.blocks.5.attn.proj.bias": "model-00001-of-00014.safetensors",
1102
+ "visual.blocks.5.attn.proj.weight": "model-00001-of-00014.safetensors",
1103
+ "visual.blocks.5.attn.qkv.bias": "model-00001-of-00014.safetensors",
1104
+ "visual.blocks.5.attn.qkv.weight": "model-00001-of-00014.safetensors",
1105
+ "visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1106
+ "visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1107
+ "visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1108
+ "visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1109
+ "visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1110
+ "visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1111
+ "visual.blocks.5.norm1.weight": "model-00001-of-00014.safetensors",
1112
+ "visual.blocks.5.norm2.weight": "model-00001-of-00014.safetensors",
1113
+ "visual.blocks.6.attn.proj.bias": "model-00001-of-00014.safetensors",
1114
+ "visual.blocks.6.attn.proj.weight": "model-00001-of-00014.safetensors",
1115
+ "visual.blocks.6.attn.qkv.bias": "model-00001-of-00014.safetensors",
1116
+ "visual.blocks.6.attn.qkv.weight": "model-00001-of-00014.safetensors",
1117
+ "visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1118
+ "visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1119
+ "visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1120
+ "visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1121
+ "visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1122
+ "visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1123
+ "visual.blocks.6.norm1.weight": "model-00001-of-00014.safetensors",
1124
+ "visual.blocks.6.norm2.weight": "model-00001-of-00014.safetensors",
1125
+ "visual.blocks.7.attn.proj.bias": "model-00001-of-00014.safetensors",
1126
+ "visual.blocks.7.attn.proj.weight": "model-00001-of-00014.safetensors",
1127
+ "visual.blocks.7.attn.qkv.bias": "model-00001-of-00014.safetensors",
1128
+ "visual.blocks.7.attn.qkv.weight": "model-00001-of-00014.safetensors",
1129
+ "visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1130
+ "visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1131
+ "visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1132
+ "visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1133
+ "visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1134
+ "visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1135
+ "visual.blocks.7.norm1.weight": "model-00001-of-00014.safetensors",
1136
+ "visual.blocks.7.norm2.weight": "model-00001-of-00014.safetensors",
1137
+ "visual.blocks.8.attn.proj.bias": "model-00001-of-00014.safetensors",
1138
+ "visual.blocks.8.attn.proj.weight": "model-00001-of-00014.safetensors",
1139
+ "visual.blocks.8.attn.qkv.bias": "model-00001-of-00014.safetensors",
1140
+ "visual.blocks.8.attn.qkv.weight": "model-00001-of-00014.safetensors",
1141
+ "visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1142
+ "visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1143
+ "visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1144
+ "visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1145
+ "visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1146
+ "visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1147
+ "visual.blocks.8.norm1.weight": "model-00001-of-00014.safetensors",
1148
+ "visual.blocks.8.norm2.weight": "model-00001-of-00014.safetensors",
1149
+ "visual.blocks.9.attn.proj.bias": "model-00001-of-00014.safetensors",
1150
+ "visual.blocks.9.attn.proj.weight": "model-00001-of-00014.safetensors",
1151
+ "visual.blocks.9.attn.qkv.bias": "model-00001-of-00014.safetensors",
1152
+ "visual.blocks.9.attn.qkv.weight": "model-00001-of-00014.safetensors",
1153
+ "visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00014.safetensors",
1154
+ "visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
1155
+ "visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00014.safetensors",
1156
+ "visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
1157
+ "visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00014.safetensors",
1158
+ "visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
1159
+ "visual.blocks.9.norm1.weight": "model-00001-of-00014.safetensors",
1160
+ "visual.blocks.9.norm2.weight": "model-00001-of-00014.safetensors",
1161
+ "visual.merger.ln_q.weight": "model-00001-of-00014.safetensors",
1162
+ "visual.merger.mlp.0.bias": "model-00001-of-00014.safetensors",
1163
+ "visual.merger.mlp.0.weight": "model-00001-of-00014.safetensors",
1164
+ "visual.merger.mlp.2.bias": "model-00001-of-00014.safetensors",
1165
+ "visual.merger.mlp.2.weight": "model-00001-of-00014.safetensors",
1166
+ "visual.patch_embed.proj.weight": "model-00001-of-00014.safetensors"
1167
+ }
1168
+ }
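The `weight_map` above is the standard safetensors shard index: every tensor name is keyed to the one shard (of 14) that stores it, so a loader can open only the files it needs. A minimal sketch of consuming such an index, assuming the shards and `model.safetensors.index.json` sit in the working directory:

```python
import json
from safetensors import safe_open

# Read the shard index; "weight_map" maps tensor names to shard files.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.46.mlp.down_proj.weight"  # any key from the weight_map
shard = index["weight_map"][name]              # -> "model-00010-of-00014.safetensors"

# Open only the shard that holds this tensor, instead of loading all 14 files.
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape), "<-", shard)
```

Entries are split per tensor, which is why a layer can straddle a shard boundary: layer 47's attention projections live in shard 10 while its layernorms and MLP live in shard 11.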
preprocessor_config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "do_convert_rgb": true,
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.48145466,
+ 0.4578275,
+ 0.40821073
+ ],
+ "image_processor_type": "Qwen2VLImageProcessor",
+ "image_std": [
+ 0.26862954,
+ 0.26130258,
+ 0.27577711
+ ],
+ "max_pixels": 12845056,
+ "merge_size": 2,
+ "min_pixels": 3136,
+ "patch_size": 14,
+ "processor_class": "Qwen2_5_VLProcessor",
+ "resample": 3,
+ "rescale_factor": 0.00392156862745098,
+ "size": {
+ "longest_edge": 12845056,
+ "shortest_edge": 3136
+ },
+ "temporal_patch_size": 2
+ }
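In this config, `min_pixels` (3136 = 56 × 56) and `max_pixels` (12845056) bound the dynamic-resolution resizing of the Qwen2-VL image processor; pixels are then rescaled by `rescale_factor` (1/255) and normalized with the CLIP mean/std listed above. A small sketch of exercising it, assuming the files are loaded from a local checkpoint directory (the path is a placeholder):

```python
from PIL import Image
from transformers import AutoImageProcessor

# Instantiate the Qwen2VLImageProcessor described by preprocessor_config.json.
processor = AutoImageProcessor.from_pretrained("./checkpoint")  # hypothetical local path

image = Image.new("RGB", (640, 480))  # stand-in input image
out = processor(images=image, return_tensors="pt")

# The image is resized so its pixel count falls within [min_pixels, max_pixels],
# cut into 14x14 patches, rescaled, and normalized before being flattened.
print(out["pixel_values"].shape)
```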
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
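These entries mirror the tokenizer config that follows: `<|im_end|>` is the end-of-sequence token and `<|endoftext|>` the padding token. A quick sanity check, again assuming a local checkpoint directory:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # hypothetical local path

# eos/pad should match special_tokens_map.json above.
print(tok.eos_token, tok.eos_token_id)  # <|im_end|> 151645
print(tok.pad_token, tok.pad_token_id)  # <|endoftext|> 151643
```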
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba0c439f7be467bf47d12a7e6f9adc6116201056fc60c67f431c679b7c16afc8
+ size 11422064
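Note that this diff shows a Git LFS pointer, not the tokenizer itself: the real `tokenizer.json` (11,422,064 bytes) is fetched by `git lfs pull` and should hash to the `oid` above. A small verification sketch:

```python
import hashlib

# SHA-256 recorded in the LFS pointer above.
expected = "ba0c439f7be467bf47d12a7e6f9adc6116201056fc60c67f431c679b7c16afc8"

h = hashlib.sha256()
with open("tokenizer.json", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected, "tokenizer.json does not match its LFS pointer"
print("verified:", h.hexdigest())
```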
tokenizer_config.json ADDED
@@ -0,0 +1,209 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ }
181
+ },
182
+ "additional_special_tokens": [
183
+ "<|im_start|>",
184
+ "<|im_end|>",
185
+ "<|object_ref_start|>",
186
+ "<|object_ref_end|>",
187
+ "<|box_start|>",
188
+ "<|box_end|>",
189
+ "<|quad_start|>",
190
+ "<|quad_end|>",
191
+ "<|vision_start|>",
192
+ "<|vision_end|>",
193
+ "<|vision_pad|>",
194
+ "<|image_pad|>",
195
+ "<|video_pad|>"
196
+ ],
197
+ "bos_token": null,
198
+ "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}",
199
+ "clean_up_tokenization_spaces": false,
200
+ "eos_token": "<|im_end|>",
201
+ "errors": "replace",
202
+ "extra_special_tokens": {},
203
+ "model_max_length": 131072,
204
+ "pad_token": "<|endoftext|>",
205
+ "processor_class": "Qwen2_5_VLProcessor",
206
+ "split_special_tokens": false,
207
+ "tokenizer_class": "Qwen2Tokenizer",
208
+ "unk_token": null
209
+ }
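The tokenizer_config.json tail above registers the remaining special tokens (IDs 151647–151664, covering the object/box/quad grounding markers, the vision placeholders, the tool-call tags, and the FIM/repo tokens), lists the `additional_special_tokens`, and fixes the runtime defaults: `eos_token` = `<|im_end|>`, `pad_token` = `<|endoftext|>`, `model_max_length` = 131072, plus the same Jinja chat template as `chat_template.json`. A minimal sketch of how these settings are picked up, assuming the uploaded files sit in a local directory `./checkpoint` (a placeholder path, since the card does not name a final repo id):

```python
from transformers import AutoTokenizer

# A minimal sketch, assuming the files above are saved locally in "./checkpoint"
# (a placeholder path -- the model card does not name the final repo id).
tokenizer = AutoTokenizer.from_pretrained("./checkpoint")

# Special tokens declared in tokenizer_config.json resolve to their fixed IDs.
print(tokenizer.convert_tokens_to_ids("<|vision_start|>"))  # 151652
print(tokenizer.convert_tokens_to_ids("<|image_pad|>"))     # 151655

# The chat template wraps each turn in <|im_start|>/<|im_end|> and injects a
# default system prompt when the first message is not a system message.
messages = [{"role": "user", "content": "Hello"}]
print(tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
```

Because these tokens are marked `"special": true` with `"normalized": false`, they are matched atomically before BPE runs, so the grounding and vision markers are never split, normalized, or merged with neighbouring text.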
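`trainer_state.json`, added below, is the Trainer's own record of the run: a final state of `global_step` 230 at epoch ≈ 9.6, plus a `log_history` array logging loss, gradient norm, learning rate, mean token accuracy, and cumulative token count at every step. The learning-rate column shows a linear warmup to the 2e-05 peak at step 24 followed by what looks like a cosine decay. A short sketch for turning that log into a loss curve, assuming the file has been downloaded locally and matplotlib is installed:

```python
import json

import matplotlib.pyplot as plt  # assumed available; any plotting library works

# A sketch, assuming trainer_state.json has been downloaded next to this script.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry carries per-step metrics; keep the ones with a loss.
logged = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logged]
losses = [e["loss"] for e in logged]

plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.savefig("loss_curve.png")
```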
trainer_state.json ADDED
@@ -0,0 +1,2113 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 9.602150537634408,
6
+ "eval_steps": 500,
7
+ "global_step": 230,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.043010752688172046,
14
+ "grad_norm": 11.91717546170225,
15
+ "learning_rate": 0.0,
16
+ "loss": 0.7999,
17
+ "mean_token_accuracy": 0.7625279724597931,
18
+ "num_tokens": 367812.0,
19
+ "step": 1
20
+ },
21
+ {
22
+ "epoch": 0.08602150537634409,
23
+ "grad_norm": 9.023052509105142,
24
+ "learning_rate": 8.695652173913044e-07,
25
+ "loss": 0.8777,
26
+ "mean_token_accuracy": 0.7521283626556396,
27
+ "num_tokens": 746486.0,
28
+ "step": 2
29
+ },
30
+ {
31
+ "epoch": 0.12903225806451613,
32
+ "grad_norm": 8.573471835574493,
33
+ "learning_rate": 1.7391304347826088e-06,
34
+ "loss": 0.8113,
35
+ "mean_token_accuracy": 0.749506875872612,
36
+ "num_tokens": 1126566.0,
37
+ "step": 3
38
+ },
39
+ {
40
+ "epoch": 0.17204301075268819,
41
+ "grad_norm": 5.577877324067109,
42
+ "learning_rate": 2.6086956521739132e-06,
43
+ "loss": 0.7698,
44
+ "mean_token_accuracy": 0.7678339183330536,
45
+ "num_tokens": 1507062.0,
46
+ "step": 4
47
+ },
48
+ {
49
+ "epoch": 0.21505376344086022,
50
+ "grad_norm": 4.129947299735385,
51
+ "learning_rate": 3.4782608695652175e-06,
52
+ "loss": 0.7601,
53
+ "mean_token_accuracy": 0.7700327038764954,
54
+ "num_tokens": 1881779.0,
55
+ "step": 5
56
+ },
57
+ {
58
+ "epoch": 0.25806451612903225,
59
+ "grad_norm": 3.4130681291525984,
60
+ "learning_rate": 4.347826086956522e-06,
61
+ "loss": 0.7044,
62
+ "mean_token_accuracy": 0.7722686380147934,
63
+ "num_tokens": 2265581.0,
64
+ "step": 6
65
+ },
66
+ {
67
+ "epoch": 0.3010752688172043,
68
+ "grad_norm": 2.9935342196668655,
69
+ "learning_rate": 5.2173913043478265e-06,
70
+ "loss": 0.66,
71
+ "mean_token_accuracy": 0.7871796786785126,
72
+ "num_tokens": 2646672.0,
73
+ "step": 7
74
+ },
75
+ {
76
+ "epoch": 0.34408602150537637,
77
+ "grad_norm": 2.4639857428718863,
78
+ "learning_rate": 6.086956521739132e-06,
79
+ "loss": 0.6528,
80
+ "mean_token_accuracy": 0.7861378192901611,
81
+ "num_tokens": 3024415.0,
82
+ "step": 8
83
+ },
84
+ {
85
+ "epoch": 0.3870967741935484,
86
+ "grad_norm": 2.1893799102976637,
87
+ "learning_rate": 6.956521739130435e-06,
88
+ "loss": 0.6118,
89
+ "mean_token_accuracy": 0.7930850088596344,
90
+ "num_tokens": 3400301.0,
91
+ "step": 9
92
+ },
93
+ {
94
+ "epoch": 0.43010752688172044,
95
+ "grad_norm": 2.186514396506674,
96
+ "learning_rate": 7.82608695652174e-06,
97
+ "loss": 0.589,
98
+ "mean_token_accuracy": 0.798459991812706,
99
+ "num_tokens": 3782217.0,
100
+ "step": 10
101
+ },
102
+ {
103
+ "epoch": 0.4731182795698925,
104
+ "grad_norm": 3.0465126347830602,
105
+ "learning_rate": 8.695652173913044e-06,
106
+ "loss": 0.6752,
107
+ "mean_token_accuracy": 0.7804096639156342,
108
+ "num_tokens": 4150420.0,
109
+ "step": 11
110
+ },
111
+ {
112
+ "epoch": 0.5161290322580645,
113
+ "grad_norm": 2.6537477460552275,
114
+ "learning_rate": 9.565217391304349e-06,
115
+ "loss": 0.6027,
116
+ "mean_token_accuracy": 0.7908397912979126,
117
+ "num_tokens": 4524870.0,
118
+ "step": 12
119
+ },
120
+ {
121
+ "epoch": 0.5591397849462365,
122
+ "grad_norm": 2.396118650881142,
123
+ "learning_rate": 1.0434782608695653e-05,
124
+ "loss": 0.5859,
125
+ "mean_token_accuracy": 0.792891800403595,
126
+ "num_tokens": 4916036.0,
127
+ "step": 13
128
+ },
129
+ {
130
+ "epoch": 0.6021505376344086,
131
+ "grad_norm": 2.2226168829192114,
132
+ "learning_rate": 1.1304347826086957e-05,
133
+ "loss": 0.5514,
134
+ "mean_token_accuracy": 0.8134036660194397,
135
+ "num_tokens": 5284361.0,
136
+ "step": 14
137
+ },
138
+ {
139
+ "epoch": 0.6451612903225806,
140
+ "grad_norm": 1.8673665641432782,
141
+ "learning_rate": 1.2173913043478263e-05,
142
+ "loss": 0.5486,
143
+ "mean_token_accuracy": 0.8076689690351486,
144
+ "num_tokens": 5648932.0,
145
+ "step": 15
146
+ },
147
+ {
148
+ "epoch": 0.6881720430107527,
149
+ "grad_norm": 1.8442596020009865,
150
+ "learning_rate": 1.3043478260869566e-05,
151
+ "loss": 0.5263,
152
+ "mean_token_accuracy": 0.8252855390310287,
153
+ "num_tokens": 6013624.0,
154
+ "step": 16
155
+ },
156
+ {
157
+ "epoch": 0.7311827956989247,
158
+ "grad_norm": 1.7829423593903526,
159
+ "learning_rate": 1.391304347826087e-05,
160
+ "loss": 0.5096,
161
+ "mean_token_accuracy": 0.8202553987503052,
162
+ "num_tokens": 6392059.0,
163
+ "step": 17
164
+ },
165
+ {
166
+ "epoch": 0.7741935483870968,
167
+ "grad_norm": 1.8090352579295244,
168
+ "learning_rate": 1.4782608695652174e-05,
169
+ "loss": 0.521,
170
+ "mean_token_accuracy": 0.8225749135017395,
171
+ "num_tokens": 6790097.0,
172
+ "step": 18
173
+ },
174
+ {
175
+ "epoch": 0.8172043010752689,
176
+ "grad_norm": 1.75919933696252,
177
+ "learning_rate": 1.565217391304348e-05,
178
+ "loss": 0.4948,
179
+ "mean_token_accuracy": 0.828051283955574,
180
+ "num_tokens": 7160612.0,
181
+ "step": 19
182
+ },
183
+ {
184
+ "epoch": 0.8602150537634409,
185
+ "grad_norm": 1.9385645040102704,
186
+ "learning_rate": 1.6521739130434785e-05,
187
+ "loss": 0.5251,
188
+ "mean_token_accuracy": 0.8155874758958817,
189
+ "num_tokens": 7527524.0,
190
+ "step": 20
191
+ },
192
+ {
193
+ "epoch": 0.9032258064516129,
194
+ "grad_norm": 1.8027184161622232,
195
+ "learning_rate": 1.739130434782609e-05,
196
+ "loss": 0.5057,
197
+ "mean_token_accuracy": 0.8297092467546463,
198
+ "num_tokens": 7888136.0,
199
+ "step": 21
200
+ },
201
+ {
202
+ "epoch": 0.946236559139785,
203
+ "grad_norm": 2.1820648482478187,
204
+ "learning_rate": 1.8260869565217393e-05,
205
+ "loss": 0.5036,
206
+ "mean_token_accuracy": 0.8194401413202286,
207
+ "num_tokens": 8267314.0,
208
+ "step": 22
209
+ },
210
+ {
211
+ "epoch": 0.989247311827957,
212
+ "grad_norm": 1.9518940278276342,
213
+ "learning_rate": 1.9130434782608697e-05,
214
+ "loss": 0.4965,
215
+ "mean_token_accuracy": 0.8308457583189011,
216
+ "num_tokens": 8644626.0,
217
+ "step": 23
218
+ },
219
+ {
220
+ "epoch": 1.0,
221
+ "grad_norm": 1.9518940278276342,
222
+ "learning_rate": 2e-05,
223
+ "loss": 0.0981,
224
+ "mean_token_accuracy": 0.8572691082954407,
225
+ "num_tokens": 8735951.0,
226
+ "step": 24
227
+ },
228
+ {
229
+ "epoch": 1.043010752688172,
230
+ "grad_norm": 1.658616504963653,
231
+ "learning_rate": 1.999884834944106e-05,
232
+ "loss": 0.3613,
233
+ "mean_token_accuracy": 0.8678812235593796,
234
+ "num_tokens": 9111064.0,
235
+ "step": 25
236
+ },
237
+ {
238
+ "epoch": 1.086021505376344,
239
+ "grad_norm": 1.6457610214496745,
240
+ "learning_rate": 1.9995393663024054e-05,
241
+ "loss": 0.3551,
242
+ "mean_token_accuracy": 0.8692608177661896,
243
+ "num_tokens": 9496130.0,
244
+ "step": 26
245
+ },
246
+ {
247
+ "epoch": 1.129032258064516,
248
+ "grad_norm": 3.5904835403286754,
249
+ "learning_rate": 1.9989636736467278e-05,
250
+ "loss": 0.3695,
251
+ "mean_token_accuracy": 0.866924524307251,
252
+ "num_tokens": 9858972.0,
253
+ "step": 27
254
+ },
255
+ {
256
+ "epoch": 1.1720430107526882,
257
+ "grad_norm": 1.7874385949477343,
258
+ "learning_rate": 1.9981578895764272e-05,
259
+ "loss": 0.3212,
260
+ "mean_token_accuracy": 0.890799954533577,
261
+ "num_tokens": 10231593.0,
262
+ "step": 28
263
+ },
264
+ {
265
+ "epoch": 1.2150537634408602,
266
+ "grad_norm": 1.8049361403495439,
267
+ "learning_rate": 1.9971221996878395e-05,
268
+ "loss": 0.3554,
269
+ "mean_token_accuracy": 0.8701900094747543,
270
+ "num_tokens": 10601477.0,
271
+ "step": 29
272
+ },
273
+ {
274
+ "epoch": 1.2580645161290323,
275
+ "grad_norm": 1.3711396350791194,
276
+ "learning_rate": 1.9958568425315316e-05,
277
+ "loss": 0.309,
278
+ "mean_token_accuracy": 0.8979966044425964,
279
+ "num_tokens": 10971400.0,
280
+ "step": 30
281
+ },
282
+ {
283
+ "epoch": 1.3010752688172043,
284
+ "grad_norm": 1.5009618715368216,
285
+ "learning_rate": 1.9943621095573588e-05,
286
+ "loss": 0.3301,
287
+ "mean_token_accuracy": 0.8754479885101318,
288
+ "num_tokens": 11338917.0,
289
+ "step": 31
290
+ },
291
+ {
292
+ "epoch": 1.3440860215053765,
293
+ "grad_norm": 1.4417013210198348,
294
+ "learning_rate": 1.9926383450473344e-05,
295
+ "loss": 0.2909,
296
+ "mean_token_accuracy": 0.8934924155473709,
297
+ "num_tokens": 11708711.0,
298
+ "step": 32
299
+ },
300
+ {
301
+ "epoch": 1.3870967741935485,
302
+ "grad_norm": 1.3285539132923128,
303
+ "learning_rate": 1.9906859460363307e-05,
304
+ "loss": 0.3018,
305
+ "mean_token_accuracy": 0.8906232416629791,
306
+ "num_tokens": 12088883.0,
307
+ "step": 33
308
+ },
309
+ {
310
+ "epoch": 1.4301075268817205,
311
+ "grad_norm": 1.5218190255666082,
312
+ "learning_rate": 1.9885053622206305e-05,
313
+ "loss": 0.3363,
314
+ "mean_token_accuracy": 0.8796575218439102,
315
+ "num_tokens": 12468165.0,
316
+ "step": 34
317
+ },
318
+ {
319
+ "epoch": 1.4731182795698925,
320
+ "grad_norm": 1.3816402287843272,
321
+ "learning_rate": 1.986097095854347e-05,
322
+ "loss": 0.3157,
323
+ "mean_token_accuracy": 0.8860495537519455,
324
+ "num_tokens": 12841352.0,
325
+ "step": 35
326
+ },
327
+ {
328
+ "epoch": 1.5161290322580645,
329
+ "grad_norm": 1.442754299402919,
330
+ "learning_rate": 1.9834617016337424e-05,
331
+ "loss": 0.3211,
332
+ "mean_token_accuracy": 0.8844448328018188,
333
+ "num_tokens": 13227719.0,
334
+ "step": 36
335
+ },
336
+ {
337
+ "epoch": 1.5591397849462365,
338
+ "grad_norm": 1.4155340183010523,
339
+ "learning_rate": 1.9805997865694616e-05,
340
+ "loss": 0.3453,
341
+ "mean_token_accuracy": 0.8787361979484558,
342
+ "num_tokens": 13607472.0,
343
+ "step": 37
344
+ },
345
+ {
346
+ "epoch": 1.6021505376344085,
347
+ "grad_norm": 1.1437615335312725,
348
+ "learning_rate": 1.9775120098467212e-05,
349
+ "loss": 0.2762,
350
+ "mean_token_accuracy": 0.9029232263565063,
351
+ "num_tokens": 13982708.0,
352
+ "step": 38
353
+ },
354
+ {
355
+ "epoch": 1.6451612903225805,
356
+ "grad_norm": 1.3406964229739868,
357
+ "learning_rate": 1.9741990826734793e-05,
358
+ "loss": 0.2998,
359
+ "mean_token_accuracy": 0.8943506330251694,
360
+ "num_tokens": 14364424.0,
361
+ "step": 39
362
+ },
363
+ {
364
+ "epoch": 1.6881720430107527,
365
+ "grad_norm": 1.25822076002706,
366
+ "learning_rate": 1.970661768116622e-05,
367
+ "loss": 0.2537,
368
+ "mean_token_accuracy": 0.9153124392032623,
369
+ "num_tokens": 14755621.0,
370
+ "step": 40
371
+ },
372
+ {
373
+ "epoch": 1.7311827956989247,
374
+ "grad_norm": 1.1260474975551218,
375
+ "learning_rate": 1.9669008809262064e-05,
376
+ "loss": 0.2936,
377
+ "mean_token_accuracy": 0.8934088349342346,
378
+ "num_tokens": 15127506.0,
379
+ "step": 41
380
+ },
381
+ {
382
+ "epoch": 1.7741935483870968,
383
+ "grad_norm": 1.1356922171937402,
384
+ "learning_rate": 1.9629172873477995e-05,
385
+ "loss": 0.2639,
386
+ "mean_token_accuracy": 0.9112239480018616,
387
+ "num_tokens": 15503327.0,
388
+ "step": 42
389
+ },
390
+ {
391
+ "epoch": 1.817204301075269,
392
+ "grad_norm": 1.1298046162246171,
393
+ "learning_rate": 1.9587119049229558e-05,
394
+ "loss": 0.29,
395
+ "mean_token_accuracy": 0.8936085551977158,
396
+ "num_tokens": 15880530.0,
397
+ "step": 43
398
+ },
399
+ {
400
+ "epoch": 1.860215053763441,
401
+ "grad_norm": 1.1389639144335877,
402
+ "learning_rate": 1.954285702277879e-05,
403
+ "loss": 0.2934,
404
+ "mean_token_accuracy": 0.8923312425613403,
405
+ "num_tokens": 16263155.0,
406
+ "step": 44
407
+ },
408
+ {
409
+ "epoch": 1.903225806451613,
410
+ "grad_norm": 1.0066342748134163,
411
+ "learning_rate": 1.9496396989003195e-05,
412
+ "loss": 0.2798,
413
+ "mean_token_accuracy": 0.8991601765155792,
414
+ "num_tokens": 16634276.0,
415
+ "step": 45
416
+ },
417
+ {
418
+ "epoch": 1.946236559139785,
419
+ "grad_norm": 1.2385856620277536,
420
+ "learning_rate": 1.944774964904754e-05,
421
+ "loss": 0.3036,
422
+ "mean_token_accuracy": 0.8929028362035751,
423
+ "num_tokens": 17013425.0,
424
+ "step": 46
425
+ },
426
+ {
427
+ "epoch": 1.989247311827957,
428
+ "grad_norm": 1.0574616204979852,
429
+ "learning_rate": 1.9396926207859085e-05,
430
+ "loss": 0.2916,
431
+ "mean_token_accuracy": 0.8939765691757202,
432
+ "num_tokens": 17380847.0,
433
+ "step": 47
434
+ },
435
+ {
436
+ "epoch": 2.0,
437
+ "grad_norm": 1.0574616204979852,
438
+ "learning_rate": 1.9343938371606714e-05,
439
+ "loss": 0.0465,
440
+ "mean_token_accuracy": 0.9277108311653137,
441
+ "num_tokens": 17474187.0,
442
+ "step": 48
443
+ },
444
+ {
445
+ "epoch": 2.043010752688172,
446
+ "grad_norm": 1.1539619815555024,
447
+ "learning_rate": 1.9288798344984673e-05,
448
+ "loss": 0.1697,
449
+ "mean_token_accuracy": 0.9405381679534912,
450
+ "num_tokens": 17848143.0,
451
+ "step": 49
452
+ },
453
+ {
454
+ "epoch": 2.086021505376344,
455
+ "grad_norm": 0.9946315524935219,
456
+ "learning_rate": 1.9231518828401458e-05,
457
+ "loss": 0.1587,
458
+ "mean_token_accuracy": 0.946861207485199,
459
+ "num_tokens": 18231846.0,
460
+ "step": 50
461
+ },
462
+ {
463
+ "epoch": 2.129032258064516,
464
+ "grad_norm": 1.0205876962667297,
465
+ "learning_rate": 1.917211301505453e-05,
466
+ "loss": 0.1631,
467
+ "mean_token_accuracy": 0.9437925517559052,
468
+ "num_tokens": 18629481.0,
469
+ "step": 51
470
+ },
471
+ {
472
+ "epoch": 2.172043010752688,
473
+ "grad_norm": 1.2180236071671326,
474
+ "learning_rate": 1.911059458789152e-05,
475
+ "loss": 0.1596,
476
+ "mean_token_accuracy": 0.9489908963441849,
477
+ "num_tokens": 19012798.0,
478
+ "step": 52
479
+ },
480
+ {
481
+ "epoch": 2.21505376344086,
482
+ "grad_norm": 1.1932774403236657,
483
+ "learning_rate": 1.9046977716458627e-05,
484
+ "loss": 0.1524,
485
+ "mean_token_accuracy": 0.9486334323883057,
486
+ "num_tokens": 19397188.0,
487
+ "step": 53
488
+ },
489
+ {
490
+ "epoch": 2.258064516129032,
491
+ "grad_norm": 1.1190279919994257,
492
+ "learning_rate": 1.8981277053636963e-05,
493
+ "loss": 0.1767,
494
+ "mean_token_accuracy": 0.9350408464670181,
495
+ "num_tokens": 19779411.0,
496
+ "step": 54
497
+ },
498
+ {
499
+ "epoch": 2.3010752688172045,
500
+ "grad_norm": 1.0935887572069407,
501
+ "learning_rate": 1.891350773226754e-05,
502
+ "loss": 0.1575,
503
+ "mean_token_accuracy": 0.9455375522375107,
504
+ "num_tokens": 20148217.0,
505
+ "step": 55
506
+ },
507
+ {
508
+ "epoch": 2.3440860215053765,
509
+ "grad_norm": 0.8268906453812284,
510
+ "learning_rate": 1.8843685361665724e-05,
511
+ "loss": 0.1495,
512
+ "mean_token_accuracy": 0.9489219486713409,
513
+ "num_tokens": 20517539.0,
514
+ "step": 56
515
+ },
516
+ {
517
+ "epoch": 2.3870967741935485,
518
+ "grad_norm": 0.8463600848872256,
519
+ "learning_rate": 1.8771826024025944e-05,
520
+ "loss": 0.1625,
521
+ "mean_token_accuracy": 0.9414703994989395,
522
+ "num_tokens": 20886139.0,
523
+ "step": 57
524
+ },
525
+ {
526
+ "epoch": 2.4301075268817205,
527
+ "grad_norm": 0.8514202559815353,
528
+ "learning_rate": 1.8697946270717468e-05,
529
+ "loss": 0.1444,
530
+ "mean_token_accuracy": 0.9490186870098114,
531
+ "num_tokens": 21255999.0,
532
+ "step": 58
533
+ },
534
+ {
535
+ "epoch": 2.4731182795698925,
536
+ "grad_norm": 0.8543888251991751,
537
+ "learning_rate": 1.8622063118472135e-05,
538
+ "loss": 0.1714,
539
+ "mean_token_accuracy": 0.9381113350391388,
540
+ "num_tokens": 21626463.0,
541
+ "step": 59
542
+ },
543
+ {
544
+ "epoch": 2.5161290322580645,
545
+ "grad_norm": 0.9035063674891346,
546
+ "learning_rate": 1.8544194045464888e-05,
547
+ "loss": 0.1843,
548
+ "mean_token_accuracy": 0.9337188005447388,
549
+ "num_tokens": 21991893.0,
550
+ "step": 60
551
+ },
552
+ {
553
+ "epoch": 2.5591397849462365,
554
+ "grad_norm": 0.8418292533035925,
555
+ "learning_rate": 1.8464356987288012e-05,
556
+ "loss": 0.1526,
557
+ "mean_token_accuracy": 0.9460138976573944,
558
+ "num_tokens": 22361724.0,
559
+ "step": 61
560
+ },
561
+ {
562
+ "epoch": 2.6021505376344085,
563
+ "grad_norm": 0.7418502959925466,
564
+ "learning_rate": 1.8382570332820045e-05,
565
+ "loss": 0.142,
566
+ "mean_token_accuracy": 0.9545767903327942,
567
+ "num_tokens": 22736647.0,
568
+ "step": 62
569
+ },
570
+ {
571
+ "epoch": 2.6451612903225805,
572
+ "grad_norm": 0.9322325837904922,
573
+ "learning_rate": 1.8298852919990254e-05,
574
+ "loss": 0.1498,
575
+ "mean_token_accuracy": 0.9440304934978485,
576
+ "num_tokens": 23100836.0,
577
+ "step": 63
578
+ },
579
+ {
580
+ "epoch": 2.688172043010753,
581
+ "grad_norm": 0.7735956031556342,
582
+ "learning_rate": 1.821322403143969e-05,
583
+ "loss": 0.1392,
584
+ "mean_token_accuracy": 0.9508139789104462,
585
+ "num_tokens": 23482604.0,
586
+ "step": 64
587
+ },
588
+ {
589
+ "epoch": 2.731182795698925,
590
+ "grad_norm": 0.9120084675913216,
591
+ "learning_rate": 1.812570339007983e-05,
592
+ "loss": 0.1506,
593
+ "mean_token_accuracy": 0.9453353136777878,
594
+ "num_tokens": 23865993.0,
595
+ "step": 65
596
+ },
597
+ {
598
+ "epoch": 2.774193548387097,
599
+ "grad_norm": 0.7503811468513143,
600
+ "learning_rate": 1.8036311154549783e-05,
601
+ "loss": 0.1363,
602
+ "mean_token_accuracy": 0.9510210454463959,
603
+ "num_tokens": 24224549.0,
604
+ "step": 66
605
+ },
606
+ {
607
+ "epoch": 2.817204301075269,
608
+ "grad_norm": 1.1460712341286583,
609
+ "learning_rate": 1.7945067914573147e-05,
610
+ "loss": 0.1664,
611
+ "mean_token_accuracy": 0.9413717687129974,
612
+ "num_tokens": 24598626.0,
613
+ "step": 67
614
+ },
615
+ {
616
+ "epoch": 2.860215053763441,
617
+ "grad_norm": 0.8670992068464418,
618
+ "learning_rate": 1.7851994686215592e-05,
619
+ "loss": 0.1577,
620
+ "mean_token_accuracy": 0.9477098882198334,
621
+ "num_tokens": 24981628.0,
622
+ "step": 68
623
+ },
624
+ {
625
+ "epoch": 2.903225806451613,
626
+ "grad_norm": 0.7836229996954126,
627
+ "learning_rate": 1.77571129070442e-05,
628
+ "loss": 0.1427,
629
+ "mean_token_accuracy": 0.9498772025108337,
630
+ "num_tokens": 25357328.0,
631
+ "step": 69
632
+ },
633
+ {
634
+ "epoch": 2.946236559139785,
635
+ "grad_norm": 0.9864794829369287,
636
+ "learning_rate": 1.766044443118978e-05,
637
+ "loss": 0.1548,
638
+ "mean_token_accuracy": 0.9467099010944366,
639
+ "num_tokens": 25736995.0,
640
+ "step": 70
641
+ },
642
+ {
643
+ "epoch": 2.989247311827957,
644
+ "grad_norm": 0.8453374404480977,
645
+ "learning_rate": 1.7562011524313187e-05,
646
+ "loss": 0.1328,
647
+ "mean_token_accuracy": 0.9533079415559769,
648
+ "num_tokens": 26119812.0,
649
+ "step": 71
650
+ },
651
+ {
652
+ "epoch": 3.0,
653
+ "grad_norm": 0.8453374404480977,
654
+ "learning_rate": 1.7461836858476858e-05,
655
+ "loss": 0.023,
656
+ "mean_token_accuracy": 0.9692671298980713,
657
+ "num_tokens": 26217050.0,
658
+ "step": 72
659
+ },
660
+ {
661
+ "epoch": 3.043010752688172,
662
+ "grad_norm": 0.7191008184533193,
663
+ "learning_rate": 1.7359943506922775e-05,
664
+ "loss": 0.085,
665
+ "mean_token_accuracy": 0.9692452400922775,
666
+ "num_tokens": 26579836.0,
667
+ "step": 73
668
+ },
669
+ {
670
+ "epoch": 3.086021505376344,
671
+ "grad_norm": 0.7033653480672031,
672
+ "learning_rate": 1.725635493875799e-05,
673
+ "loss": 0.084,
674
+ "mean_token_accuracy": 0.9705889225006104,
675
+ "num_tokens": 26954186.0,
676
+ "step": 74
677
+ },
678
+ {
679
+ "epoch": 3.129032258064516,
680
+ "grad_norm": 0.5815187767804908,
681
+ "learning_rate": 1.7151095013548996e-05,
682
+ "loss": 0.0795,
683
+ "mean_token_accuracy": 0.9710359871387482,
684
+ "num_tokens": 27331222.0,
685
+ "step": 75
686
+ },
687
+ {
688
+ "epoch": 3.172043010752688,
689
+ "grad_norm": 0.6837466146484604,
690
+ "learning_rate": 1.7044187975826126e-05,
691
+ "loss": 0.0779,
692
+ "mean_token_accuracy": 0.9715389311313629,
693
+ "num_tokens": 27695626.0,
694
+ "step": 76
695
+ },
696
+ {
697
+ "epoch": 3.21505376344086,
698
+ "grad_norm": 0.7647564443576131,
699
+ "learning_rate": 1.693565844949933e-05,
700
+ "loss": 0.0592,
701
+ "mean_token_accuracy": 0.9778129011392593,
702
+ "num_tokens": 28074766.0,
703
+ "step": 77
704
+ },
705
+ {
706
+ "epoch": 3.258064516129032,
707
+ "grad_norm": 0.6960445168765227,
708
+ "learning_rate": 1.6825531432186545e-05,
709
+ "loss": 0.0825,
710
+ "mean_token_accuracy": 0.9732242673635483,
711
+ "num_tokens": 28441758.0,
712
+ "step": 78
713
+ },
714
+ {
715
+ "epoch": 3.3010752688172045,
716
+ "grad_norm": 0.8162134588348661,
717
+ "learning_rate": 1.671383228945597e-05,
718
+ "loss": 0.0795,
719
+ "mean_token_accuracy": 0.9723010808229446,
720
+ "num_tokens": 28816556.0,
721
+ "step": 79
722
+ },
723
+ {
724
+ "epoch": 3.3440860215053765,
725
+ "grad_norm": 0.7752408104041487,
726
+ "learning_rate": 1.6600586748983642e-05,
727
+ "loss": 0.0791,
728
+ "mean_token_accuracy": 0.9741756767034531,
729
+ "num_tokens": 29190153.0,
730
+ "step": 80
731
+ },
732
+ {
733
+ "epoch": 3.3870967741935485,
734
+ "grad_norm": 0.6264771832821134,
735
+ "learning_rate": 1.648582089462756e-05,
736
+ "loss": 0.0885,
737
+ "mean_token_accuracy": 0.9700659811496735,
738
+ "num_tokens": 29556225.0,
739
+ "step": 81
740
+ },
741
+ {
742
+ "epoch": 3.4301075268817205,
743
+ "grad_norm": 0.7087453451133532,
744
+ "learning_rate": 1.6369561160419783e-05,
745
+ "loss": 0.0756,
746
+ "mean_token_accuracy": 0.9748696535825729,
747
+ "num_tokens": 29944628.0,
748
+ "step": 82
749
+ },
750
+ {
751
+ "epoch": 3.4731182795698925,
752
+ "grad_norm": 0.6666804960250854,
753
+ "learning_rate": 1.625183432447789e-05,
754
+ "loss": 0.0768,
755
+ "mean_token_accuracy": 0.9723282903432846,
756
+ "num_tokens": 30326403.0,
757
+ "step": 83
758
+ },
759
+ {
760
+ "epoch": 3.5161290322580645,
761
+ "grad_norm": 0.7492247001485264,
762
+ "learning_rate": 1.6132667502837164e-05,
763
+ "loss": 0.0862,
764
+ "mean_token_accuracy": 0.9687108993530273,
765
+ "num_tokens": 30707604.0,
766
+ "step": 84
767
+ },
768
+ {
769
+ "epoch": 3.5591397849462365,
770
+ "grad_norm": 0.6413601535507213,
771
+ "learning_rate": 1.6012088143204953e-05,
772
+ "loss": 0.0771,
773
+ "mean_token_accuracy": 0.9721613973379135,
774
+ "num_tokens": 31094255.0,
775
+ "step": 85
776
+ },
777
+ {
778
+ "epoch": 3.6021505376344085,
779
+ "grad_norm": 0.7214880008358299,
780
+ "learning_rate": 1.589012401863864e-05,
781
+ "loss": 0.0902,
782
+ "mean_token_accuracy": 0.9686519354581833,
783
+ "num_tokens": 31465554.0,
784
+ "step": 86
785
+ },
786
+ {
787
+ "epoch": 3.6451612903225805,
788
+ "grad_norm": 0.6072340945600113,
789
+ "learning_rate": 1.5766803221148676e-05,
790
+ "loss": 0.0783,
791
+ "mean_token_accuracy": 0.97231625020504,
792
+ "num_tokens": 31841954.0,
793
+ "step": 87
794
+ },
795
+ {
796
+ "epoch": 3.688172043010753,
797
+ "grad_norm": 0.7141144509929378,
798
+ "learning_rate": 1.5642154155228124e-05,
799
+ "loss": 0.0784,
800
+ "mean_token_accuracy": 0.9737986773252487,
801
+ "num_tokens": 32231678.0,
802
+ "step": 88
803
+ },
804
+ {
805
+ "epoch": 3.731182795698925,
806
+ "grad_norm": 0.5930435846744901,
807
+ "learning_rate": 1.5516205531310272e-05,
808
+ "loss": 0.0773,
809
+ "mean_token_accuracy": 0.9729326516389847,
810
+ "num_tokens": 32611952.0,
811
+ "step": 89
812
+ },
813
+ {
814
+ "epoch": 3.774193548387097,
815
+ "grad_norm": 0.6873339835587674,
816
+ "learning_rate": 1.538898635915576e-05,
817
+ "loss": 0.0917,
818
+ "mean_token_accuracy": 0.9682262241840363,
819
+ "num_tokens": 32987305.0,
820
+ "step": 90
821
+ },
822
+ {
823
+ "epoch": 3.817204301075269,
824
+ "grad_norm": 0.628718128223866,
825
+ "learning_rate": 1.526052594117071e-05,
826
+ "loss": 0.0769,
827
+ "mean_token_accuracy": 0.9746036380529404,
828
+ "num_tokens": 33361173.0,
829
+ "step": 91
830
+ },
831
+ {
832
+ "epoch": 3.860215053763441,
833
+ "grad_norm": 0.7444168004427851,
834
+ "learning_rate": 1.513085386565758e-05,
835
+ "loss": 0.0766,
836
+ "mean_token_accuracy": 0.9750553965568542,
837
+ "num_tokens": 33744871.0,
838
+ "step": 92
839
+ },
840
+ {
841
+ "epoch": 3.903225806451613,
842
+ "grad_norm": 0.6080354599152884,
843
+ "learning_rate": 1.5000000000000002e-05,
844
+ "loss": 0.0881,
845
+ "mean_token_accuracy": 0.9682539403438568,
846
+ "num_tokens": 34122873.0,
847
+ "step": 93
848
+ },
849
+ {
850
+ "epoch": 3.946236559139785,
851
+ "grad_norm": 0.6254599667505842,
852
+ "learning_rate": 1.4867994483783485e-05,
853
+ "loss": 0.0674,
854
+ "mean_token_accuracy": 0.9770089983940125,
855
+ "num_tokens": 34492949.0,
856
+ "step": 94
857
+ },
858
+ {
859
+ "epoch": 3.989247311827957,
860
+ "grad_norm": 0.5928494398285244,
861
+ "learning_rate": 1.4734867721853341e-05,
862
+ "loss": 0.0723,
863
+ "mean_token_accuracy": 0.9745485931634903,
864
+ "num_tokens": 34864951.0,
865
+ "step": 95
866
+ },
867
+ {
868
+ "epoch": 4.0,
869
+ "grad_norm": 0.6180172580268953,
870
+ "learning_rate": 1.4600650377311523e-05,
871
+ "loss": 0.0162,
872
+ "mean_token_accuracy": 0.9717925190925598,
873
+ "num_tokens": 34956848.0,
874
+ "step": 96
875
+ },
876
+ {
877
+ "epoch": 4.043010752688172,
878
+ "grad_norm": 0.4798007088122171,
879
+ "learning_rate": 1.4465373364454001e-05,
880
+ "loss": 0.0415,
881
+ "mean_token_accuracy": 0.9860930442810059,
882
+ "num_tokens": 35327401.0,
883
+ "step": 97
884
+ },
885
+ {
886
+ "epoch": 4.086021505376344,
887
+ "grad_norm": 0.5030678863912684,
888
+ "learning_rate": 1.4329067841650274e-05,
889
+ "loss": 0.0512,
890
+ "mean_token_accuracy": 0.9837829172611237,
891
+ "num_tokens": 35697068.0,
892
+ "step": 98
893
+ },
894
+ {
895
+ "epoch": 4.129032258064516,
896
+ "grad_norm": 0.4887047754497154,
897
+ "learning_rate": 1.4191765204166643e-05,
898
+ "loss": 0.035,
899
+ "mean_token_accuracy": 0.9878916591405869,
900
+ "num_tokens": 36078911.0,
901
+ "step": 99
902
+ },
903
+ {
904
+ "epoch": 4.172043010752688,
905
+ "grad_norm": 0.556764696779543,
906
+ "learning_rate": 1.4053497076934948e-05,
907
+ "loss": 0.0388,
908
+ "mean_token_accuracy": 0.9873095601797104,
909
+ "num_tokens": 36457881.0,
910
+ "step": 100
911
+ },
912
+ {
913
+ "epoch": 4.21505376344086,
914
+ "grad_norm": 0.6105146685797401,
915
+ "learning_rate": 1.3914295307268396e-05,
916
+ "loss": 0.0426,
917
+ "mean_token_accuracy": 0.9871442914009094,
918
+ "num_tokens": 36840608.0,
919
+ "step": 101
920
+ },
921
+ {
922
+ "epoch": 4.258064516129032,
923
+ "grad_norm": 0.5520281427427763,
924
+ "learning_rate": 1.3774191957526144e-05,
925
+ "loss": 0.0353,
926
+ "mean_token_accuracy": 0.9887934029102325,
927
+ "num_tokens": 37215456.0,
928
+ "step": 102
929
+ },
930
+ {
931
+ "epoch": 4.301075268817204,
932
+ "grad_norm": 0.5752554242126501,
933
+ "learning_rate": 1.3633219297728415e-05,
934
+ "loss": 0.049,
935
+ "mean_token_accuracy": 0.9847937226295471,
936
+ "num_tokens": 37600868.0,
937
+ "step": 103
938
+ },
939
+ {
940
+ "epoch": 4.344086021505376,
941
+ "grad_norm": 0.6228700798176202,
942
+ "learning_rate": 1.3491409798123687e-05,
943
+ "loss": 0.041,
944
+ "mean_token_accuracy": 0.9858721643686295,
945
+ "num_tokens": 37964996.0,
946
+ "step": 104
947
+ },
948
+ {
949
+ "epoch": 4.387096774193548,
950
+ "grad_norm": 0.545565814617341,
951
+ "learning_rate": 1.3348796121709862e-05,
952
+ "loss": 0.0432,
953
+ "mean_token_accuracy": 0.9834143966436386,
954
+ "num_tokens": 38331633.0,
955
+ "step": 105
956
+ },
957
+ {
958
+ "epoch": 4.43010752688172,
959
+ "grad_norm": 0.6431910601756529,
960
+ "learning_rate": 1.3205411116710973e-05,
961
+ "loss": 0.044,
962
+ "mean_token_accuracy": 0.9857050627470016,
963
+ "num_tokens": 38703641.0,
964
+ "step": 106
965
+ },
966
+ {
967
+ "epoch": 4.473118279569892,
968
+ "grad_norm": 0.6183635335692609,
969
+ "learning_rate": 1.3061287809011243e-05,
970
+ "loss": 0.0486,
971
+ "mean_token_accuracy": 0.9833229929208755,
972
+ "num_tokens": 39075811.0,
973
+ "step": 107
974
+ },
975
+ {
976
+ "epoch": 4.516129032258064,
977
+ "grad_norm": 0.5734112514278982,
978
+ "learning_rate": 1.291645939454825e-05,
979
+ "loss": 0.0466,
980
+ "mean_token_accuracy": 0.982891172170639,
981
+ "num_tokens": 39453732.0,
982
+ "step": 108
983
+ },
984
+ {
985
+ "epoch": 4.559139784946236,
986
+ "grad_norm": 0.5062927460342779,
987
+ "learning_rate": 1.277095923166689e-05,
988
+ "loss": 0.0407,
989
+ "mean_token_accuracy": 0.9859268963336945,
990
+ "num_tokens": 39846622.0,
991
+ "step": 109
992
+ },
993
+ {
994
+ "epoch": 4.602150537634409,
995
+ "grad_norm": 0.6337986686205553,
996
+ "learning_rate": 1.2624820833435939e-05,
997
+ "loss": 0.0433,
998
+ "mean_token_accuracy": 0.9856242388486862,
999
+ "num_tokens": 40230600.0,
1000
+ "step": 110
1001
+ },
1002
+ {
1003
+ "epoch": 4.645161290322581,
1004
+ "grad_norm": 0.4815068983509369,
1005
+ "learning_rate": 1.2478077859929e-05,
1006
+ "loss": 0.042,
1007
+ "mean_token_accuracy": 0.9860651940107346,
1008
+ "num_tokens": 40608421.0,
1009
+ "step": 111
1010
+ },
1011
+ {
1012
+ "epoch": 4.688172043010753,
1013
+ "grad_norm": 0.3922313992389828,
1014
+ "learning_rate": 1.2330764110471567e-05,
1015
+ "loss": 0.0297,
1016
+ "mean_token_accuracy": 0.9892211109399796,
1017
+ "num_tokens": 40988293.0,
1018
+ "step": 112
1019
+ },
1020
+ {
1021
+ "epoch": 4.731182795698925,
1022
+ "grad_norm": 0.46224024107519485,
1023
+ "learning_rate": 1.2182913515856016e-05,
1024
+ "loss": 0.0449,
1025
+ "mean_token_accuracy": 0.984123483300209,
1026
+ "num_tokens": 41357882.0,
1027
+ "step": 113
1028
+ },
1029
+ {
1030
+ "epoch": 4.774193548387097,
1031
+ "grad_norm": 0.5508908953527153,
1032
+ "learning_rate": 1.2034560130526341e-05,
1033
+ "loss": 0.0467,
1034
+ "mean_token_accuracy": 0.9867139011621475,
1035
+ "num_tokens": 41725815.0,
1036
+ "step": 114
1037
+ },
1038
+ {
1039
+ "epoch": 4.817204301075269,
1040
+ "grad_norm": 0.4834883935633812,
1041
+ "learning_rate": 1.1885738124734359e-05,
1042
+ "loss": 0.052,
1043
+ "mean_token_accuracy": 0.9825968146324158,
1044
+ "num_tokens": 42093847.0,
1045
+ "step": 115
1046
+ },
1047
+ {
1048
+ "epoch": 4.860215053763441,
1049
+ "grad_norm": 0.5017978176670704,
1050
+ "learning_rate": 1.1736481776669307e-05,
1051
+ "loss": 0.0373,
1052
+ "mean_token_accuracy": 0.9864843785762787,
1053
+ "num_tokens": 42470652.0,
1054
+ "step": 116
1055
+ },
1056
+ {
1057
+ "epoch": 4.903225806451613,
1058
+ "grad_norm": 0.436532105510215,
1059
+ "learning_rate": 1.1586825464562515e-05,
1060
+ "loss": 0.0302,
1061
+ "mean_token_accuracy": 0.989154502749443,
1062
+ "num_tokens": 42844022.0,
1063
+ "step": 117
1064
+ },
1065
+ {
1066
+ "epoch": 4.946236559139785,
1067
+ "grad_norm": 0.5594322206619027,
1068
+ "learning_rate": 1.1436803658769082e-05,
1069
+ "loss": 0.0336,
1070
+ "mean_token_accuracy": 0.9877772778272629,
1071
+ "num_tokens": 43217049.0,
1072
+ "step": 118
1073
+ },
1074
+ {
1075
+ "epoch": 4.989247311827957,
1076
+ "grad_norm": 0.46803693804934354,
1077
+ "learning_rate": 1.1286450913828313e-05,
1078
+ "loss": 0.0441,
1079
+ "mean_token_accuracy": 0.9841972589492798,
1080
+ "num_tokens": 43601245.0,
1081
+ "step": 119
1082
+ },
1083
+ {
1084
+ "epoch": 5.0,
1085
+ "grad_norm": 0.46803693804934354,
1086
+ "learning_rate": 1.113580186050475e-05,
1087
+ "loss": 0.0072,
1088
+ "mean_token_accuracy": 0.9910504221916199,
1089
+ "num_tokens": 43691552.0,
1090
+ "step": 120
1091
+ },
1092
+ {
1093
+ "epoch": 5.043010752688172,
1094
+ "grad_norm": 0.36589314652289223,
1095
+ "learning_rate": 1.0984891197811686e-05,
1096
+ "loss": 0.0189,
1097
+ "mean_token_accuracy": 0.9931964129209518,
1098
+ "num_tokens": 44069981.0,
1099
+ "step": 121
1100
+ },
1101
+ {
1102
+ "epoch": 5.086021505376344,
1103
+ "grad_norm": 0.3262477287689747,
1104
+ "learning_rate": 1.0833753685018935e-05,
1105
+ "loss": 0.0172,
1106
+ "mean_token_accuracy": 0.9938466399908066,
1107
+ "num_tokens": 44452961.0,
1108
+ "step": 122
1109
+ },
1110
+ {
1111
+ "epoch": 5.129032258064516,
1112
+ "grad_norm": 0.37126458666561746,
1113
+ "learning_rate": 1.0682424133646712e-05,
1114
+ "loss": 0.0246,
1115
+ "mean_token_accuracy": 0.9916689246892929,
1116
+ "num_tokens": 44829261.0,
1117
+ "step": 123
1118
+ },
1119
+ {
1120
+ "epoch": 5.172043010752688,
1121
+ "grad_norm": 0.39854735969327243,
1122
+ "learning_rate": 1.0530937399447496e-05,
1123
+ "loss": 0.0208,
1124
+ "mean_token_accuracy": 0.9931624680757523,
1125
+ "num_tokens": 45195677.0,
1126
+ "step": 124
1127
+ },
1128
+ {
1129
+ "epoch": 5.21505376344086,
1130
+ "grad_norm": 0.4011633602771004,
1131
+ "learning_rate": 1.0379328374377715e-05,
1132
+ "loss": 0.0187,
1133
+ "mean_token_accuracy": 0.9931042641401291,
1134
+ "num_tokens": 45582399.0,
1135
+ "step": 125
1136
+ },
1137
+ {
1138
+ "epoch": 5.258064516129032,
1139
+ "grad_norm": 0.4149531963831674,
1140
+ "learning_rate": 1.0227631978561057e-05,
1141
+ "loss": 0.0201,
1142
+ "mean_token_accuracy": 0.9933370649814606,
1143
+ "num_tokens": 45957909.0,
1144
+ "step": 126
1145
+ },
1146
+ {
1147
+ "epoch": 5.301075268817204,
1148
+ "grad_norm": 0.35580739164687514,
1149
+ "learning_rate": 1.0075883152245334e-05,
1150
+ "loss": 0.0182,
1151
+ "mean_token_accuracy": 0.9931957274675369,
1152
+ "num_tokens": 46331702.0,
1153
+ "step": 127
1154
+ },
1155
+ {
1156
+ "epoch": 5.344086021505376,
1157
+ "grad_norm": 0.3659638692288794,
1158
+ "learning_rate": 9.92411684775467e-06,
1159
+ "loss": 0.0211,
1160
+ "mean_token_accuracy": 0.992146834731102,
1161
+ "num_tokens": 46699926.0,
1162
+ "step": 128
1163
+ },
1164
+ {
1165
+ "epoch": 5.387096774193548,
1166
+ "grad_norm": 0.5124446313766259,
1167
+ "learning_rate": 9.772368021438943e-06,
1168
+ "loss": 0.0224,
1169
+ "mean_token_accuracy": 0.9933983087539673,
1170
+ "num_tokens": 47084802.0,
1171
+ "step": 129
1172
+ },
1173
+ {
1174
+ "epoch": 5.43010752688172,
1175
+ "grad_norm": 0.3646431524853468,
1176
+ "learning_rate": 9.620671625622287e-06,
1177
+ "loss": 0.0213,
1178
+ "mean_token_accuracy": 0.9931617379188538,
1179
+ "num_tokens": 47454900.0,
1180
+ "step": 130
1181
+ },
1182
+ {
1183
+ "epoch": 5.473118279569892,
1184
+ "grad_norm": 0.43644622149160334,
1185
+ "learning_rate": 9.469062600552509e-06,
1186
+ "loss": 0.0183,
1187
+ "mean_token_accuracy": 0.9936078786849976,
1188
+ "num_tokens": 47824110.0,
1189
+ "step": 131
1190
+ },
1191
+ {
1192
+ "epoch": 5.516129032258064,
1193
+ "grad_norm": 0.40737855715835,
1194
+ "learning_rate": 9.317575866353293e-06,
1195
+ "loss": 0.0167,
1196
+ "mean_token_accuracy": 0.9936787039041519,
1197
+ "num_tokens": 48199410.0,
1198
+ "step": 132
1199
+ },
1200
+ {
1201
+ "epoch": 5.559139784946236,
1202
+ "grad_norm": 0.3453003430092695,
1203
+ "learning_rate": 9.166246314981066e-06,
1204
+ "loss": 0.016,
1205
+ "mean_token_accuracy": 0.9941088110208511,
1206
+ "num_tokens": 48571600.0,
1207
+ "step": 133
1208
+ },
1209
+ {
1210
+ "epoch": 5.602150537634409,
1211
+ "grad_norm": 0.4044531156287549,
1212
+ "learning_rate": 9.015108802188314e-06,
1213
+ "loss": 0.0203,
1214
+ "mean_token_accuracy": 0.9922708123922348,
1215
+ "num_tokens": 48944562.0,
1216
+ "step": 134
1217
+ },
1218
+ {
1219
+ "epoch": 5.645161290322581,
1220
+ "grad_norm": 0.39933569411586073,
1221
+ "learning_rate": 8.86419813949525e-06,
1222
+ "loss": 0.026,
1223
+ "mean_token_accuracy": 0.9936483949422836,
1224
+ "num_tokens": 49329440.0,
1225
+ "step": 135
1226
+ },
1227
+ {
1228
+ "epoch": 5.688172043010753,
1229
+ "grad_norm": 0.5189339190149178,
1230
+ "learning_rate": 8.71354908617169e-06,
1231
+ "loss": 0.0142,
1232
+ "mean_token_accuracy": 0.9950688779354095,
1233
+ "num_tokens": 49715705.0,
1234
+ "step": 136
1235
+ },
1236
+ {
1237
+ "epoch": 5.731182795698925,
1238
+ "grad_norm": 0.4042026330549438,
1239
+ "learning_rate": 8.56319634123092e-06,
1240
+ "loss": 0.0192,
1241
+ "mean_token_accuracy": 0.9928712397813797,
1242
+ "num_tokens": 50105592.0,
1243
+ "step": 137
1244
+ },
1245
+ {
1246
+ "epoch": 5.774193548387097,
1247
+ "grad_norm": 0.4159641462800478,
1248
+ "learning_rate": 8.413174535437486e-06,
1249
+ "loss": 0.0197,
1250
+ "mean_token_accuracy": 0.9930095821619034,
1251
+ "num_tokens": 50473889.0,
1252
+ "step": 138
1253
+ },
1254
+ {
1255
+ "epoch": 5.817204301075269,
1256
+ "grad_norm": 0.46704221505297566,
1257
+ "learning_rate": 8.263518223330698e-06,
1258
+ "loss": 0.0264,
1259
+ "mean_token_accuracy": 0.9904347956180573,
1260
+ "num_tokens": 50848608.0,
1261
+ "step": 139
1262
+ },
1263
+ {
1264
+ "epoch": 5.860215053763441,
1265
+ "grad_norm": 0.4423535789298909,
1266
+ "learning_rate": 8.114261875265643e-06,
1267
+ "loss": 0.0202,
1268
+ "mean_token_accuracy": 0.9932683557271957,
1269
+ "num_tokens": 51213311.0,
1270
+ "step": 140
1271
+ },
1272
+ {
1273
+ "epoch": 5.903225806451613,
1274
+ "grad_norm": 0.39628026138404,
1275
+ "learning_rate": 7.965439869473664e-06,
1276
+ "loss": 0.02,
1277
+ "mean_token_accuracy": 0.9924024939537048,
1278
+ "num_tokens": 51582321.0,
1279
+ "step": 141
1280
+ },
1281
+ {
1282
+ "epoch": 5.946236559139785,
1283
+ "grad_norm": 0.40141553841310146,
1284
+ "learning_rate": 7.817086484143987e-06,
1285
+ "loss": 0.0251,
1286
+ "mean_token_accuracy": 0.9912929236888885,
1287
+ "num_tokens": 51957175.0,
1288
+ "step": 142
1289
+ },
1290
+ {
1291
+ "epoch": 5.989247311827957,
1292
+ "grad_norm": 0.37692584612441205,
1293
+ "learning_rate": 7.669235889528436e-06,
1294
+ "loss": 0.0192,
1295
+ "mean_token_accuracy": 0.9928417950868607,
1296
+ "num_tokens": 52337657.0,
1297
+ "step": 143
1298
+ },
1299
+ {
1300
+ "epoch": 6.0,
1301
+ "grad_norm": 0.37692584612441205,
1302
+ "learning_rate": 7.521922140071003e-06,
1303
+ "loss": 0.0029,
1304
+ "mean_token_accuracy": 0.9955406785011292,
1305
+ "num_tokens": 52435404.0,
1306
+ "step": 144
1307
+ },
1308
+ {
1309
+ "epoch": 6.043010752688172,
1310
+ "grad_norm": 0.2777527936559124,
1311
+ "learning_rate": 7.375179166564062e-06,
1312
+ "loss": 0.0096,
1313
+ "mean_token_accuracy": 0.9978060573339462,
1314
+ "num_tokens": 52801343.0,
1315
+ "step": 145
1316
+ },
1317
+ {
1318
+ "epoch": 6.086021505376344,
1319
+ "grad_norm": 0.22061818508623526,
1320
+ "learning_rate": 7.2290407683331154e-06,
1321
+ "loss": 0.0128,
1322
+ "mean_token_accuracy": 0.9961294829845428,
1323
+ "num_tokens": 53170023.0,
1324
+ "step": 146
1325
+ },
1326
+ {
1327
+ "epoch": 6.129032258064516,
1328
+ "grad_norm": 0.2450891259162475,
1329
+ "learning_rate": 7.0835406054517505e-06,
1330
+ "loss": 0.0098,
1331
+ "mean_token_accuracy": 0.9963818788528442,
1332
+ "num_tokens": 53540818.0,
1333
+ "step": 147
1334
+ },
1335
+ {
1336
+ "epoch": 6.172043010752688,
1337
+ "grad_norm": 0.27354650244267165,
1338
+ "learning_rate": 6.93871219098876e-06,
1339
+ "loss": 0.0083,
1340
+ "mean_token_accuracy": 0.9976299554109573,
1341
+ "num_tokens": 53912206.0,
1342
+ "step": 148
1343
+ },
1344
+ {
1345
+ "epoch": 6.21505376344086,
1346
+ "grad_norm": 0.24292132564074775,
1347
+ "learning_rate": 6.79458888328903e-06,
1348
+ "loss": 0.0091,
1349
+ "mean_token_accuracy": 0.9975238144397736,
1350
+ "num_tokens": 54290644.0,
1351
+ "step": 149
1352
+ },
1353
+ {
1354
+ "epoch": 6.258064516129032,
1355
+ "grad_norm": 0.25903061511703607,
1356
+ "learning_rate": 6.651203878290139e-06,
1357
+ "loss": 0.0084,
1358
+ "mean_token_accuracy": 0.9973570704460144,
1359
+ "num_tokens": 54671572.0,
1360
+ "step": 150
1361
+ },
1362
+ {
1363
+ "epoch": 6.301075268817204,
1364
+ "grad_norm": 0.27785773779575323,
1365
+ "learning_rate": 6.508590201876317e-06,
1366
+ "loss": 0.0068,
1367
+ "mean_token_accuracy": 0.9980671256780624,
1368
+ "num_tokens": 55043557.0,
1369
+ "step": 151
1370
+ },
1371
+ {
1372
+ "epoch": 6.344086021505376,
1373
+ "grad_norm": 0.22495303178328654,
1374
+ "learning_rate": 6.366780702271589e-06,
1375
+ "loss": 0.0081,
1376
+ "mean_token_accuracy": 0.9977790415287018,
1377
+ "num_tokens": 55434084.0,
1378
+ "step": 152
1379
+ },
1380
+ {
1381
+ "epoch": 6.387096774193548,
1382
+ "grad_norm": 0.21767145483330294,
1383
+ "learning_rate": 6.225808042473857e-06,
1384
+ "loss": 0.0073,
1385
+ "mean_token_accuracy": 0.9982246607542038,
1386
+ "num_tokens": 55816748.0,
1387
+ "step": 153
1388
+ },
1389
+ {
1390
+ "epoch": 6.43010752688172,
1391
+ "grad_norm": 0.1720994423666938,
1392
+ "learning_rate": 6.085704692731609e-06,
1393
+ "loss": 0.0089,
1394
+ "mean_token_accuracy": 0.9972024708986282,
1395
+ "num_tokens": 56204052.0,
1396
+ "step": 154
1397
+ },
1398
+ {
1399
+ "epoch": 6.473118279569892,
1400
+ "grad_norm": 0.4151871031911838,
1401
+ "learning_rate": 5.946502923065054e-06,
1402
+ "loss": 0.0064,
1403
+ "mean_token_accuracy": 0.9978921562433243,
1404
+ "num_tokens": 56584279.0,
1405
+ "step": 155
1406
+ },
1407
+ {
1408
+ "epoch": 6.516129032258064,
1409
+ "grad_norm": 0.24009379583099646,
1410
+ "learning_rate": 5.8082347958333625e-06,
1411
+ "loss": 0.0061,
1412
+ "mean_token_accuracy": 0.9981395900249481,
1413
+ "num_tokens": 56966123.0,
1414
+ "step": 156
1415
+ },
1416
+ {
1417
+ "epoch": 6.559139784946236,
1418
+ "grad_norm": 0.24984505593518833,
1419
+ "learning_rate": 5.670932158349732e-06,
1420
+ "loss": 0.0079,
1421
+ "mean_token_accuracy": 0.9977651834487915,
1422
+ "num_tokens": 57340499.0,
1423
+ "step": 157
1424
+ },
1425
+ {
1426
+ "epoch": 6.602150537634409,
1427
+ "grad_norm": 0.22800352911247235,
1428
+ "learning_rate": 5.534626635546e-06,
1429
+ "loss": 0.0066,
1430
+ "mean_token_accuracy": 0.9973956197500229,
1431
+ "num_tokens": 57722362.0,
1432
+ "step": 158
1433
+ },
1434
+ {
1435
+ "epoch": 6.645161290322581,
1436
+ "grad_norm": 0.3972684492677971,
1437
+ "learning_rate": 5.399349622688479e-06,
1438
+ "loss": 0.0129,
1439
+ "mean_token_accuracy": 0.9956333786249161,
1440
+ "num_tokens": 58089687.0,
1441
+ "step": 159
1442
+ },
1443
+ {
1444
+ "epoch": 6.688172043010753,
1445
+ "grad_norm": 0.2855037997818557,
1446
+ "learning_rate": 5.2651322781466606e-06,
1447
+ "loss": 0.0097,
1448
+ "mean_token_accuracy": 0.9961634278297424,
1449
+ "num_tokens": 58440751.0,
1450
+ "step": 160
1451
+ },
1452
+ {
1453
+ "epoch": 6.731182795698925,
1454
+ "grad_norm": 0.2561924349864501,
1455
+ "learning_rate": 5.132005516216512e-06,
1456
+ "loss": 0.0095,
1457
+ "mean_token_accuracy": 0.9967732727527618,
1458
+ "num_tokens": 58816520.0,
1459
+ "step": 161
1460
+ },
1461
+ {
1462
+ "epoch": 6.774193548387097,
1463
+ "grad_norm": 0.2550205734143902,
1464
+ "learning_rate": 5.000000000000003e-06,
1465
+ "loss": 0.0096,
1466
+ "mean_token_accuracy": 0.9970249980688095,
1467
+ "num_tokens": 59199244.0,
1468
+ "step": 162
1469
+ },
1470
+ {
1471
+ "epoch": 6.817204301075269,
1472
+ "grad_norm": 0.2895162694155573,
1473
+ "learning_rate": 4.869146134342426e-06,
1474
+ "loss": 0.0112,
1475
+ "mean_token_accuracy": 0.995991975069046,
1476
+ "num_tokens": 59564510.0,
1477
+ "step": 163
1478
+ },
1479
+ {
1480
+ "epoch": 6.860215053763441,
1481
+ "grad_norm": 0.27109152841491596,
1482
+ "learning_rate": 4.739474058829288e-06,
1483
+ "loss": 0.0098,
1484
+ "mean_token_accuracy": 0.9966453909873962,
1485
+ "num_tokens": 59933755.0,
1486
+ "step": 164
1487
+ },
1488
+ {
1489
+ "epoch": 6.903225806451613,
1490
+ "grad_norm": 0.20196018529315143,
1491
+ "learning_rate": 4.611013640844245e-06,
1492
+ "loss": 0.0072,
1493
+ "mean_token_accuracy": 0.9972820430994034,
1494
+ "num_tokens": 60318049.0,
1495
+ "step": 165
1496
+ },
1497
+ {
1498
+ "epoch": 6.946236559139785,
1499
+ "grad_norm": 0.28770462101147276,
1500
+ "learning_rate": 4.483794468689728e-06,
1501
+ "loss": 0.0075,
1502
+ "mean_token_accuracy": 0.9979050308465958,
1503
+ "num_tokens": 60702707.0,
1504
+ "step": 166
1505
+ },
1506
+ {
1507
+ "epoch": 6.989247311827957,
1508
+ "grad_norm": 0.2570986978659673,
1509
+ "learning_rate": 4.357845844771881e-06,
1510
+ "loss": 0.0072,
1511
+ "mean_token_accuracy": 0.9979168772697449,
1512
+ "num_tokens": 61080551.0,
1513
+ "step": 167
1514
+ },
1515
+ {
1516
+ "epoch": 7.0,
1517
+ "grad_norm": 0.2570986978659673,
1518
+ "learning_rate": 4.2331967788513295e-06,
1519
+ "loss": 0.001,
1520
+ "mean_token_accuracy": 0.9990871548652649,
1521
+ "num_tokens": 61169909.0,
1522
+ "step": 168
1523
+ },
1524
+ {
1525
+ "epoch": 7.043010752688172,
1526
+ "grad_norm": 0.18638689720743132,
1527
+ "learning_rate": 4.109875981361363e-06,
1528
+ "loss": 0.0029,
1529
+ "mean_token_accuracy": 0.9993208199739456,
1530
+ "num_tokens": 61548169.0,
1531
+ "step": 169
1532
+ },
1533
+ {
1534
+ "epoch": 7.086021505376344,
1535
+ "grad_norm": 0.09274868532077754,
1536
+ "learning_rate": 3.987911856795047e-06,
1537
+ "loss": 0.0024,
1538
+ "mean_token_accuracy": 0.9993245899677277,
1539
+ "num_tokens": 61927780.0,
1540
+ "step": 170
1541
+ },
1542
+ {
1543
+ "epoch": 7.129032258064516,
1544
+ "grad_norm": 0.11314592234156305,
1545
+ "learning_rate": 3.867332497162836e-06,
1546
+ "loss": 0.0043,
1547
+ "mean_token_accuracy": 0.9991153180599213,
1548
+ "num_tokens": 62295765.0,
1549
+ "step": 171
1550
+ },
1551
+ {
1552
+ "epoch": 7.172043010752688,
1553
+ "grad_norm": 0.125324034467096,
1554
+ "learning_rate": 3.748165675522113e-06,
1555
+ "loss": 0.0043,
1556
+ "mean_token_accuracy": 0.998317152261734,
1557
+ "num_tokens": 62680703.0,
1558
+ "step": 172
1559
+ },
1560
+ {
1561
+ "epoch": 7.21505376344086,
1562
+ "grad_norm": 0.162207800509228,
1563
+ "learning_rate": 3.630438839580217e-06,
1564
+ "loss": 0.0036,
1565
+ "mean_token_accuracy": 0.9989175051450729,
1566
+ "num_tokens": 63048051.0,
1567
+ "step": 173
1568
+ },
1569
+ {
1570
+ "epoch": 7.258064516129032,
1571
+ "grad_norm": 0.14736979534555758,
1572
+ "learning_rate": 3.5141791053724405e-06,
1573
+ "loss": 0.0031,
1574
+ "mean_token_accuracy": 0.9988811016082764,
1575
+ "num_tokens": 63420296.0,
1576
+ "step": 174
1577
+ },
1578
+ {
1579
+ "epoch": 7.301075268817204,
1580
+ "grad_norm": 0.24994209797043623,
1581
+ "learning_rate": 3.399413251016359e-06,
1582
+ "loss": 0.0045,
1583
+ "mean_token_accuracy": 0.9984373897314072,
1584
+ "num_tokens": 63802044.0,
1585
+ "step": 175
1586
+ },
1587
+ {
1588
+ "epoch": 7.344086021505376,
1589
+ "grad_norm": 0.19480816289308553,
1590
+ "learning_rate": 3.2861677105440335e-06,
1591
+ "loss": 0.004,
1592
+ "mean_token_accuracy": 0.9981936365365982,
1593
+ "num_tokens": 64179447.0,
1594
+ "step": 176
1595
+ },
1596
+ {
1597
+ "epoch": 7.387096774193548,
1598
+ "grad_norm": 0.13577926396280693,
1599
+ "learning_rate": 3.174468567813461e-06,
1600
+ "loss": 0.0038,
1601
+ "mean_token_accuracy": 0.9986472874879837,
1602
+ "num_tokens": 64553814.0,
1603
+ "step": 177
1604
+ },
1605
+ {
1606
+ "epoch": 7.43010752688172,
1607
+ "grad_norm": 0.1558952871547043,
1608
+ "learning_rate": 3.0643415505006733e-06,
1609
+ "loss": 0.0025,
1610
+ "mean_token_accuracy": 0.9991411715745926,
1611
+ "num_tokens": 64937169.0,
1612
+ "step": 178
1613
+ },
1614
+ {
1615
+ "epoch": 7.473118279569892,
1616
+ "grad_norm": 0.19500035034736574,
1617
+ "learning_rate": 2.9558120241738786e-06,
1618
+ "loss": 0.0047,
1619
+ "mean_token_accuracy": 0.9986667931079865,
1620
+ "num_tokens": 65304620.0,
1621
+ "step": 179
1622
+ },
1623
+ {
1624
+ "epoch": 7.516129032258064,
1625
+ "grad_norm": 0.11403208704075335,
1626
+ "learning_rate": 2.8489049864510053e-06,
1627
+ "loss": 0.002,
1628
+ "mean_token_accuracy": 0.9994394332170486,
1629
+ "num_tokens": 65683792.0,
1630
+ "step": 180
1631
+ },
1632
+ {
1633
+ "epoch": 7.559139784946236,
1634
+ "grad_norm": 0.09515486390531766,
1635
+ "learning_rate": 2.7436450612420098e-06,
1636
+ "loss": 0.0033,
1637
+ "mean_token_accuracy": 0.9987542629241943,
1638
+ "num_tokens": 66052076.0,
1639
+ "step": 181
1640
+ },
1641
+ {
1642
+ "epoch": 7.602150537634409,
1643
+ "grad_norm": 0.11427433435445018,
1644
+ "learning_rate": 2.640056493077231e-06,
1645
+ "loss": 0.0039,
1646
+ "mean_token_accuracy": 0.9984430074691772,
1647
+ "num_tokens": 66429336.0,
1648
+ "step": 182
+ },
+ {
+ "epoch": 7.645161290322581,
+ "grad_norm": 0.14803270246342048,
+ "learning_rate": 2.5381631415231455e-06,
+ "loss": 0.0024,
+ "mean_token_accuracy": 0.9995441436767578,
+ "num_tokens": 66808483.0,
+ "step": 183
+ },
+ {
+ "epoch": 7.688172043010753,
+ "grad_norm": 0.060271743291256424,
+ "learning_rate": 2.4379884756868167e-06,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9994434714317322,
+ "num_tokens": 67196644.0,
+ "step": 184
+ },
+ {
+ "epoch": 7.731182795698925,
+ "grad_norm": 0.15449382158867014,
+ "learning_rate": 2.339555568810221e-06,
+ "loss": 0.0033,
+ "mean_token_accuracy": 0.998866930603981,
+ "num_tokens": 67576705.0,
+ "step": 185
+ },
+ {
+ "epoch": 7.774193548387097,
+ "grad_norm": 0.15970881151513372,
+ "learning_rate": 2.2428870929558012e-06,
+ "loss": 0.0022,
+ "mean_token_accuracy": 0.9994314312934875,
+ "num_tokens": 67955564.0,
+ "step": 186
+ },
+ {
+ "epoch": 7.817204301075269,
+ "grad_norm": 0.17023262969614245,
+ "learning_rate": 2.1480053137844115e-06,
+ "loss": 0.0023,
+ "mean_token_accuracy": 0.999400720000267,
+ "num_tokens": 68336162.0,
+ "step": 187
+ },
+ {
+ "epoch": 7.860215053763441,
+ "grad_norm": 0.12445830166048269,
+ "learning_rate": 2.054932085426856e-06,
+ "loss": 0.0025,
+ "mean_token_accuracy": 0.9985401034355164,
+ "num_tokens": 68713903.0,
+ "step": 188
+ },
+ {
+ "epoch": 7.903225806451613,
+ "grad_norm": 0.10664444106034968,
+ "learning_rate": 1.963688845450218e-06,
+ "loss": 0.0042,
+ "mean_token_accuracy": 0.9983135461807251,
+ "num_tokens": 69083101.0,
+ "step": 189
+ },
+ {
+ "epoch": 7.946236559139785,
+ "grad_norm": 0.20267328769221749,
+ "learning_rate": 1.8742966099201699e-06,
+ "loss": 0.0025,
+ "mean_token_accuracy": 0.9991088360548019,
+ "num_tokens": 69450911.0,
+ "step": 190
+ },
+ {
+ "epoch": 7.989247311827957,
+ "grad_norm": 0.07966294331340328,
+ "learning_rate": 1.7867759685603115e-06,
+ "loss": 0.0043,
+ "mean_token_accuracy": 0.9985426664352417,
+ "num_tokens": 69816019.0,
+ "step": 191
+ },
+ {
+ "epoch": 8.0,
+ "grad_norm": 0.160752083075703,
+ "learning_rate": 1.7011470800097496e-06,
+ "loss": 0.0006,
+ "mean_token_accuracy": 0.9995448589324951,
+ "num_tokens": 69909418.0,
+ "step": 192
+ },
+ {
+ "epoch": 8.043010752688172,
+ "grad_norm": 0.12953983503212843,
+ "learning_rate": 1.6174296671799571e-06,
+ "loss": 0.0023,
+ "mean_token_accuracy": 0.9991968870162964,
+ "num_tokens": 70287063.0,
+ "step": 193
+ },
+ {
+ "epoch": 8.086021505376344,
+ "grad_norm": 0.11355014942214375,
+ "learning_rate": 1.5356430127119915e-06,
+ "loss": 0.0023,
+ "mean_token_accuracy": 0.9992813766002655,
+ "num_tokens": 70677976.0,
+ "step": 194
+ },
+ {
+ "epoch": 8.129032258064516,
+ "grad_norm": 0.09452954664073393,
+ "learning_rate": 1.4558059545351144e-06,
+ "loss": 0.0022,
+ "mean_token_accuracy": 0.9995724707841873,
+ "num_tokens": 71039103.0,
+ "step": 195
+ },
+ {
+ "epoch": 8.172043010752688,
+ "grad_norm": 0.0611357247699829,
+ "learning_rate": 1.3779368815278648e-06,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9998900592327118,
+ "num_tokens": 71408946.0,
+ "step": 196
+ },
+ {
+ "epoch": 8.21505376344086,
+ "grad_norm": 0.05878301201751446,
+ "learning_rate": 1.302053729282533e-06,
+ "loss": 0.0015,
+ "mean_token_accuracy": 0.9993925988674164,
+ "num_tokens": 71787495.0,
+ "step": 197
+ },
+ {
+ "epoch": 8.258064516129032,
+ "grad_norm": 0.05644673250508509,
+ "learning_rate": 1.2281739759740575e-06,
+ "loss": 0.0012,
+ "mean_token_accuracy": 1.0,
+ "num_tokens": 72167744.0,
+ "step": 198
+ },
+ {
+ "epoch": 8.301075268817204,
+ "grad_norm": 0.0678174975886251,
+ "learning_rate": 1.156314638334277e-06,
+ "loss": 0.0014,
+ "mean_token_accuracy": 0.9995383620262146,
+ "num_tokens": 72535197.0,
+ "step": 199
+ },
+ {
+ "epoch": 8.344086021505376,
+ "grad_norm": 0.07187441226828475,
+ "learning_rate": 1.086492267732462e-06,
+ "loss": 0.0017,
+ "mean_token_accuracy": 0.9995486289262772,
+ "num_tokens": 72912819.0,
+ "step": 200
+ },
+ {
+ "epoch": 8.387096774193548,
+ "grad_norm": 0.07795205289710616,
+ "learning_rate": 1.01872294636304e-06,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9996720552444458,
+ "num_tokens": 73288384.0,
+ "step": 201
+ },
+ {
+ "epoch": 8.43010752688172,
+ "grad_norm": 0.07266542626737585,
+ "learning_rate": 9.530222835413739e-07,
+ "loss": 0.002,
+ "mean_token_accuracy": 0.9992012083530426,
+ "num_tokens": 73649231.0,
+ "step": 202
+ },
+ {
+ "epoch": 8.473118279569892,
+ "grad_norm": 0.05690867261143057,
+ "learning_rate": 8.894054121084839e-07,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9995424598455429,
+ "num_tokens": 74024940.0,
+ "step": 203
+ },
+ {
+ "epoch": 8.516129032258064,
+ "grad_norm": 0.07784424768246319,
+ "learning_rate": 8.278869849454718e-07,
+ "loss": 0.0015,
+ "mean_token_accuracy": 0.9997651875019073,
+ "num_tokens": 74393707.0,
+ "step": 204
+ },
+ {
+ "epoch": 8.559139784946236,
+ "grad_norm": 0.08544567215406322,
+ "learning_rate": 7.684811715985429e-07,
+ "loss": 0.0016,
+ "mean_token_accuracy": 0.9993268847465515,
+ "num_tokens": 74769770.0,
+ "step": 205
+ },
+ {
+ "epoch": 8.602150537634408,
+ "grad_norm": 0.04848205681346562,
+ "learning_rate": 7.1120165501533e-07,
+ "loss": 0.0011,
+ "mean_token_accuracy": 0.9996765702962875,
+ "num_tokens": 75144638.0,
+ "step": 206
+ },
+ {
+ "epoch": 8.64516129032258,
+ "grad_norm": 0.07153581712473589,
+ "learning_rate": 6.560616283932897e-07,
+ "loss": 0.0019,
+ "mean_token_accuracy": 0.9993336498737335,
+ "num_tokens": 75517948.0,
+ "step": 207
+ },
+ {
+ "epoch": 8.688172043010752,
+ "grad_norm": 0.03218818118067709,
+ "learning_rate": 6.030737921409169e-07,
+ "loss": 0.0005,
+ "mean_token_accuracy": 1.0,
+ "num_tokens": 75913411.0,
+ "step": 208
+ },
+ {
+ "epoch": 8.731182795698924,
+ "grad_norm": 0.043871722359634206,
+ "learning_rate": 5.522503509524591e-07,
+ "loss": 0.0011,
+ "mean_token_accuracy": 0.9997778683900833,
+ "num_tokens": 76292535.0,
+ "step": 209
+ },
+ {
+ "epoch": 8.774193548387096,
+ "grad_norm": 0.11523726962843446,
+ "learning_rate": 5.036030109968082e-07,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9996742606163025,
+ "num_tokens": 76668845.0,
+ "step": 210
+ },
+ {
+ "epoch": 8.817204301075268,
+ "grad_norm": 0.0558278621305881,
+ "learning_rate": 4.5714297722121105e-07,
+ "loss": 0.0012,
+ "mean_token_accuracy": 0.9997794181108475,
+ "num_tokens": 77057588.0,
+ "step": 211
+ },
+ {
+ "epoch": 8.86021505376344,
+ "grad_norm": 0.05082679865672205,
+ "learning_rate": 4.128809507704445e-07,
+ "loss": 0.0008,
+ "mean_token_accuracy": 1.0,
+ "num_tokens": 77440951.0,
+ "step": 212
+ },
+ {
+ "epoch": 8.903225806451612,
+ "grad_norm": 0.06077000745250929,
+ "learning_rate": 3.708271265220087e-07,
+ "loss": 0.0014,
+ "mean_token_accuracy": 0.9994602203369141,
+ "num_tokens": 77816116.0,
+ "step": 213
+ },
+ {
+ "epoch": 8.946236559139784,
+ "grad_norm": 0.05162284627072263,
+ "learning_rate": 3.309911907379393e-07,
+ "loss": 0.0011,
+ "mean_token_accuracy": 0.9997801780700684,
+ "num_tokens": 78172575.0,
+ "step": 214
+ },
+ {
+ "epoch": 8.989247311827956,
+ "grad_norm": 0.05274284118745483,
+ "learning_rate": 2.9338231883378365e-07,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.999669536948204,
+ "num_tokens": 78554073.0,
+ "step": 215
+ },
+ {
+ "epoch": 9.0,
+ "grad_norm": 0.05274284118745483,
+ "learning_rate": 2.5800917326521013e-07,
+ "loss": 0.0006,
+ "mean_token_accuracy": 0.9990884065628052,
+ "num_tokens": 78652031.0,
+ "step": 216
+ },
+ {
+ "epoch": 9.043010752688172,
+ "grad_norm": 0.057243512265435566,
+ "learning_rate": 2.248799015327907e-07,
+ "loss": 0.0008,
+ "mean_token_accuracy": 0.9998939335346222,
+ "num_tokens": 79037063.0,
+ "step": 217
+ },
+ {
+ "epoch": 9.086021505376344,
+ "grad_norm": 0.05131590701841364,
+ "learning_rate": 1.9400213430538773e-07,
+ "loss": 0.0016,
+ "mean_token_accuracy": 0.9996659904718399,
+ "num_tokens": 79408170.0,
+ "step": 218
+ },
+ {
+ "epoch": 9.129032258064516,
+ "grad_norm": 0.06907734176650242,
+ "learning_rate": 1.6538298366257975e-07,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9998951852321625,
+ "num_tokens": 79777017.0,
+ "step": 219
+ },
+ {
+ "epoch": 9.172043010752688,
+ "grad_norm": 0.040367078778572214,
+ "learning_rate": 1.3902904145653094e-07,
+ "loss": 0.0008,
+ "mean_token_accuracy": 1.0,
+ "num_tokens": 80135769.0,
+ "step": 220
+ },
+ {
+ "epoch": 9.21505376344086,
+ "grad_norm": 0.054348476268462176,
+ "learning_rate": 1.1494637779369766e-07,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9996242523193359,
+ "num_tokens": 80517437.0,
+ "step": 221
+ },
+ {
+ "epoch": 9.258064516129032,
+ "grad_norm": 0.04194468920603892,
+ "learning_rate": 9.314053963669245e-08,
+ "loss": 0.0009,
+ "mean_token_accuracy": 0.9998854249715805,
+ "num_tokens": 80904669.0,
+ "step": 222
+ },
+ {
+ "epoch": 9.301075268817204,
+ "grad_norm": 0.050517226281810455,
+ "learning_rate": 7.361654952665608e-08,
+ "loss": 0.0009,
+ "mean_token_accuracy": 1.0,
+ "num_tokens": 81283528.0,
+ "step": 223
+ },
+ {
+ "epoch": 9.344086021505376,
+ "grad_norm": 0.04714202439779049,
+ "learning_rate": 5.637890442641403e-08,
+ "loss": 0.0011,
+ "mean_token_accuracy": 0.9998873323202133,
+ "num_tokens": 81655270.0,
+ "step": 224
+ },
+ {
+ "epoch": 9.387096774193548,
+ "grad_norm": 0.055175970745747455,
+ "learning_rate": 4.143157468468717e-08,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9995371699333191,
+ "num_tokens": 82027314.0,
+ "step": 225
+ },
+ {
+ "epoch": 9.43010752688172,
+ "grad_norm": 0.044056512200001,
+ "learning_rate": 2.8778003121607834e-08,
+ "loss": 0.0015,
+ "mean_token_accuracy": 0.9993147253990173,
+ "num_tokens": 82402877.0,
+ "step": 226
+ },
+ {
+ "epoch": 9.473118279569892,
+ "grad_norm": 0.06449776282190266,
+ "learning_rate": 1.8421104235727406e-08,
+ "loss": 0.0013,
+ "mean_token_accuracy": 0.9998880922794342,
+ "num_tokens": 82793640.0,
+ "step": 227
+ },
+ {
+ "epoch": 9.516129032258064,
+ "grad_norm": 0.04738484622052601,
+ "learning_rate": 1.0363263532724433e-08,
+ "loss": 0.001,
+ "mean_token_accuracy": 0.999885693192482,
+ "num_tokens": 83167856.0,
+ "step": 228
+ },
+ {
+ "epoch": 9.559139784946236,
+ "grad_norm": 0.05120616744078798,
+ "learning_rate": 4.606336975948589e-09,
+ "loss": 0.0015,
+ "mean_token_accuracy": 0.9992931187152863,
+ "num_tokens": 83549841.0,
+ "step": 229
+ },
+ {
+ "epoch": 9.602150537634408,
+ "grad_norm": 0.06416283623906281,
+ "learning_rate": 1.1516505589381777e-09,
+ "loss": 0.0008,
+ "mean_token_accuracy": 0.9997781366109848,
+ "num_tokens": 83929557.0,
+ "step": 230
+ },
+ {
+ "epoch": 9.602150537634408,
+ "step": 230,
+ "total_flos": 824307871121408.0,
+ "train_loss": 0.1251811162315552,
+ "train_runtime": 58145.7461,
+ "train_samples_per_second": 0.507,
+ "train_steps_per_second": 0.004
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 230,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 10,
+ "save_steps": 30,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 824307871121408.0,
+ "train_batch_size": 1,
+ "trial_name": null,
+ "trial_params": null
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:634cd4545b54a0ee557e2eacb54767aba282d12b0f39b7c5636c0cf0675f5fc4
+ size 7928
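`training_args.bin` is checked in as a Git LFS pointer: the three lines above are the entire pointer file (spec version, SHA-256 object id, and size in bytes), not the binary itself. A small sketch of reading such a pointer, assuming the file on disk still contains the pointer text rather than the resolved blob:

```python
def parse_lfs_pointer(path: str) -> dict[str, str]:
    """Split each 'key value' line of a Git LFS pointer into a dict."""
    with open(path) as f:
        return dict(line.strip().split(" ", 1) for line in f if line.strip())

# Expected output for the pointer above:
# {'version': 'https://git-lfs.github.com/spec/v1',
#  'oid': 'sha256:634cd4545b54a0ee...', 'size': '7928'}
print(parse_lfs_pointer("training_args.bin"))
```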
vocab.json ADDED
The diff for this file is too large to render. See raw diff