Update config.json
config.json  CHANGED  +3 -3

@@ -113,8 +113,8 @@
   "num_hidden_layers": 27,
   "out_hidden_size": 1024,
   "patch_size": 16,
-  "use_qk_norm":
-  "attention_bias":
+  "use_qk_norm": false,
+  "attention_bias": true,
   "rms_norm_eps": 1e-05,
   "xdrope_section": [
     0.25,
@@ -126,7 +126,7 @@
   "position_embedding_xdrope": false,
   "max_position_embeddings": 262144,
   "rope_theta": 10000.0,
-  "mlp_bias":
+  "mlp_bias": true,
   "norm_type": "torch_nn",
   "anyres_pooling_size": 2,
   "anyres_vit_max_image_size": 2048,
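For reference, a minimal sketch of how one might sanity-check the values set by this commit against a local copy of config.json. The recursive lookup is an assumption to cover the case where these keys sit in a nested sub-config rather than at the top level; the local file path is likewise assumed, not part of the commit itself.

```python
import json

def find_key(obj, key):
    """Depth-first search for `key` in nested dicts/lists; returns the first match."""
    if isinstance(obj, dict):
        if key in obj:
            return obj[key]
        children = obj.values()
    elif isinstance(obj, list):
        children = obj
    else:
        return None
    for child in children:
        found = find_key(child, key)
        if found is not None:
            return found
    return None

# Assumes config.json from this repo has been downloaded to the working directory.
with open("config.json") as f:
    cfg = json.load(f)

# Values set by this change (the diff does not show the previous values in full).
print("use_qk_norm:", find_key(cfg, "use_qk_norm"))        # expected: False
print("attention_bias:", find_key(cfg, "attention_bias"))  # expected: True
print("mlp_bias:", find_key(cfg, "mlp_bias"))              # expected: True
```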