danielhanchen committed
Commit 5a6cc5b · verified · 1 parent: a2362c1

Upload folder using huggingface_hub

Files changed (3)
  1. README.md +0 -4
  2. config.json +4 -1
  3. generation_config.json +12 -13
README.md CHANGED
@@ -7,10 +7,6 @@ license: apache-2.0
 pipeline_tag: image-text-to-text
 library_name: transformers
 ---
-> [!NOTE]
-> Includes Unsloth **chat template fixes**! <br> For `llama.cpp`, use `--jinja`
->
-
 <div>
 <p style="margin-top: 0;margin-bottom: 0;">
 <em><a href="https://docs.unsloth.ai/basics/unsloth-dynamic-v2.0-gguf">Unsloth Dynamic 2.0</a> achieves superior accuracy & outperforms other leading quants.</em>
config.json CHANGED
@@ -2,6 +2,8 @@
   "architectures": [
     "Qwen3VLForConditionalGeneration"
   ],
+  "torch_dtype": "bfloat16",
+  "eos_token_id": 151645,
   "image_token_id": 151655,
   "model_type": "qwen3_vl",
   "pad_token_id": 151654,
@@ -47,6 +49,7 @@
       17
     ],
     "depth": 24,
+    "torch_dtype": "bfloat16",
     "hidden_act": "gelu_pytorch_tanh",
     "hidden_size": 1024,
     "in_channels": 3,
@@ -62,4 +65,4 @@
   },
   "vision_end_token_id": 151653,
   "vision_start_token_id": 151652
-}
+}
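The two top-level additions to config.json ("torch_dtype" and "eos_token_id") are ordinary PretrainedConfig fields, so the result of this commit can be checked by loading the config with transformers. A minimal sketch, where "unsloth/this-repo" is a hypothetical placeholder for this repository's actual id:

from transformers import AutoConfig

# "unsloth/this-repo" is a placeholder; substitute this repository's real id.
config = AutoConfig.from_pretrained("unsloth/this-repo")

# Fields added at the top level of config.json in this commit.
print(config.eos_token_id)  # expected: 151645
print(config.torch_dtype)   # expected: torch.bfloat16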
generation_config.json CHANGED
@@ -1,14 +1,13 @@
 {
-  "bos_token_id": 151643,
-  "pad_token_id": 151643,
-  "do_sample": true,
-  "eos_token_id": [
-    151645,
-    151643
-  ],
-  "top_p": 0.8,
-  "top_k": 20,
-  "temperature": 0.7,
-  "repetition_penalty": 1.0,
-  "transformers_version": "4.56.0"
-}
+  "bos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151654,
+  "temperature": 0.7,
+  "top_k": 20,
+  "top_p": 0.8,
+  "transformers_version": "4.57.1"
+}
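Beyond reordering the keys, the generation_config.json change updates pad_token_id from 151643 to 151654 (matching config.json), drops repetition_penalty (which falls back to the library default of 1.0, so behaviour is unchanged), and bumps transformers_version to 4.57.1. A quick way to inspect the resulting sampling defaults, again using a placeholder repo id:

from transformers import GenerationConfig

# Placeholder id; substitute this repository's actual id.
gen = GenerationConfig.from_pretrained("unsloth/this-repo")

print(gen.do_sample, gen.temperature, gen.top_p, gen.top_k)  # True 0.7 0.8 20
print(gen.pad_token_id)  # 151654 after this commit
print(gen.eos_token_id)  # [151645, 151643]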