EmilRyd committed on
Commit 8d3610e · verified · 1 Parent(s): 37cb1ae

Model save

README.md CHANGED
@@ -102,7 +102,7 @@ eot_tokens:
 
 </details><br>
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="200" height="32"/>](https://wandb.ai/mats-low-stakes/low-stakes-control-sft/runs/mtix722l)
+[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="200" height="32"/>](https://wandb.ai/mats-low-stakes/low-stakes-control-sft/runs/418fen93)
 # gpt-oss-20b-olympiads-qwen0point6b-malign-prompt-benign-answer-4
 
 This model is a fine-tuned version of [openai/gpt-oss-20b](https://huggingface.co/openai/gpt-oss-20b) on an unknown dataset.
adapter_config.json CHANGED
@@ -25,9 +25,9 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "v_proj",
     "k_proj",
     "o_proj",
-    "v_proj",
     "q_proj"
   ],
   "target_parameters": [],
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:94f0bffb2a05d68b674ca5be2c6435378ef49a85b2419248e48614b71e67509c
+oid sha256:f3c749b137e38a7f3c891856bba3d12559dc1ef122f5f2f67281a7af0579bdd2
 size 63726760
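The weights file is tracked with Git LFS, so only the sha256 oid in the pointer changes when the adapter is re-saved. Loading the saved adapter on top of the base model would look roughly like the sketch below; the adapter repo id is an assumption pieced together from the author and model name shown above.

```python
from peft import PeftModel
from transformers import AutoModelForCausalLM

# Sketch: attach the saved LoRA adapter to the base checkpoint.
# The adapter repo id is an assumption based on the model name above.
base = AutoModelForCausalLM.from_pretrained("openai/gpt-oss-20b", torch_dtype="bfloat16")
model = PeftModel.from_pretrained(
    base,
    "EmilRyd/gpt-oss-20b-olympiads-qwen0point6b-malign-prompt-benign-answer-4",
)
```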
config.json CHANGED
@@ -63,6 +63,6 @@
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.55.2",
-  "use_cache": true,
+  "use_cache": false,
   "vocab_size": 201088
 }
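Flipping `use_cache` to false is the usual pattern when fine-tuning with gradient checkpointing, since the KV cache cannot be reused across checkpointed backward passes. A minimal sketch of the training-time toggle, assuming gradient checkpointing is in play (the diff itself only records the resulting config value):

```python
from transformers import AutoModelForCausalLM

# Sketch: disable the KV cache before training. Gradient checkpointing
# is an assumption here; the diff only shows use_cache flipping to false.
model = AutoModelForCausalLM.from_pretrained("openai/gpt-oss-20b", torch_dtype="bfloat16")
model.config.use_cache = False
model.gradient_checkpointing_enable()
```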
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:98f89e9a5981e1c370017ee41dda219e88a43a7061bbe4e3ad9da14cadd90573
+oid sha256:875f11117a5350661903402484da69c9e2880bcb0f8c6a0b5ca5e169d6ad11e1
 size 7032