mlabonne committed on
Commit 22fa82c (verified)
1 Parent(s): cdf4eda

Upload Qwen3ForCausalLM

Files changed (2)
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -22,7 +22,7 @@
  "rope_theta": 1000000,
  "sliding_window": null,
  "tie_word_embeddings": true,
- "torch_dtype": "bfloat16",
+ "torch_dtype": "float32",
  "transformers_version": "4.51.3",
  "use_cache": true,
  "use_sliding_window": false,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c5fa6dc3398612fcf687c1cdc853b9dd70c5dcadb457931a5f17899634e6f2c8
- size 1192135096
+ oid sha256:385f47a2d08b25026157ae75a337f3a7af1e383fc795b66e5f98242d764933f3
+ size 2384234968
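
The size change is consistent with the dtype change above: float32 stores 4 bytes per parameter versus 2 for bfloat16, so the file roughly doubles. A back-of-the-envelope check (the parameter count is an estimate derived from the file sizes, not something stated in the commit; the small mismatch between the two estimates comes from safetensors header overhead):

```python
# Rough check that the new size matches a bfloat16 -> float32 re-serialization.
old_size = 1_192_135_096   # bytes, bfloat16 checkpoint (2 bytes/param)
new_size = 2_384_234_968   # bytes, float32 checkpoint (4 bytes/param)

params_from_old = old_size / 2
params_from_new = new_size / 4

print(f"{params_from_old:,.0f} params implied by the old file")  # ~596M
print(f"{params_from_new:,.0f} params implied by the new file")  # ~596M
print(f"size ratio: {new_size / old_size:.3f}")                  # ~2.000
```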