upload w lora trained dpo model
adapter/adapter_config.json
CHANGED
@@ -17,12 +17,12 @@
   "revision": null,
   "target_modules": [
     "q_proj",
-    "
-    "down_proj",
+    "up_proj",
     "gate_proj",
     "o_proj",
+    "k_proj",
     "v_proj",
-    "
+    "down_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
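For reference, a minimal sketch (not part of this commit) of how the updated target_modules list could be expressed as a peft LoraConfig; the r, lora_alpha, and lora_dropout values mirror the adapter_config.json added further down in this commit.

from peft import LoraConfig

# Sketch only: recreates the adapter settings recorded in this commit.
lora_config = LoraConfig(
    r=32,                      # from adapter_config.json below
    lora_alpha=16,
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[           # the post-change list from the diff above
        "q_proj",
        "up_proj",
        "gate_proj",
        "o_proj",
        "k_proj",
        "v_proj",
        "down_proj",
    ],
)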
adapter/adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:478b9d7f425ceb4c19fc793ecbb7df899d54fd3fb97ff104b465c603b697cc90
+size 335604696
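The file above is stored as a Git LFS pointer: the oid field is the SHA-256 digest of the actual 335,604,696-byte weights file. A minimal sketch, assuming a local clone with the LFS objects pulled, of checking the downloaded file against that digest:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so large checkpoints don't have to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Expected value taken from the pointer above.
expected = "478b9d7f425ceb4c19fc793ecbb7df899d54fd3fb97ff104b465c603b697cc90"
assert sha256_of("adapter/adapter_model.safetensors") == expected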
adapter_config.json
ADDED
@@ -0,0 +1,28 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "teknium/OpenHermes-2.5-Mistral-7B",
+  "bias": "none",
+  "fan_in_fan_out": null,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 16,
+  "lora_dropout": 0.05,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 32,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "up_proj",
+    "gate_proj",
+    "o_proj",
+    "k_proj",
+    "v_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
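A minimal usage sketch, assuming the transformers and peft libraries: load the base model named in adapter_config.json above and attach the LoRA adapter from the adapter/ directory added in this commit. The bf16 dtype and device_map are assumptions, not settings recorded in the repo.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "teknium/OpenHermes-2.5-Mistral-7B"  # base_model_name_or_path above
adapter_dir = "adapter"                        # directory added in this commit

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(
    base_id,
    torch_dtype=torch.bfloat16,   # assumption: 16-bit inference
    device_map="auto",
)
model = PeftModel.from_pretrained(base, adapter_dir)  # DPO-trained LoRA weights
model.eval()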
model-00001-of-00003.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a184bff13ae9e1b78a650d398b76d307e151e9c418a30d63f7bd117833188ccd
 size 4943178720
model-00002-of-00003.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ad4ddc5ce0481630fe46c156707e2cb7aad8e011eeb5e4e92425a4ba86608e6d
 size 4999819336
model-00003-of-00003.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:08e4d59d049b97867acb658f6ea1fcfb0a3a5d9de5b63f80ef0675094536b984
 size 4540532728
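The three updated shards total roughly 14.5 GB, which is consistent with a full 7B-parameter checkpoint in 16-bit precision stored alongside the adapter. A minimal sketch of loading them directly with transformers; the repo id is a placeholder, since the destination repository is not shown in this diff.

import torch
from transformers import AutoModelForCausalLM

# "your-username/your-dpo-model" is a placeholder repo id (assumption); transformers
# resolves the model-0000x-of-00003.safetensors shards from the index automatically.
model = AutoModelForCausalLM.from_pretrained(
    "your-username/your-dpo-model",
    torch_dtype=torch.bfloat16,
    device_map="auto",
)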