shanjiaz committed
Commit 36f9404 · verified · 1 Parent(s): 933d05b

Upload folder using huggingface_hub

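The commit message indicates the folder was pushed with the huggingface_hub client. A minimal sketch of such an upload, assuming huggingface_hub is installed and the caller is authenticated; the local path and repo_id below are placeholders, not taken from this commit:

# Sketch of an upload_folder call with huggingface_hub.
# folder_path and repo_id are hypothetical; authentication (e.g. `huggingface-cli login`) is assumed.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./tiny_test_model",         # local directory holding config.json, shards, index
    repo_id="shanjiaz/tiny-test-model",      # placeholder repository id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)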
config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "model_type": "test_tiny_model",
+ "vocab_size": 1000,
+ "hidden_size": 128,
+ "intermediate_size": 256,
+ "num_hidden_layers": 1,
+ "architectures": [
+ "TinyTestModel"
+ ]
+ }
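The config describes a tiny single-layer test model. A sketch of reading it, assuming the transformers library is available; "test_tiny_model" is not a registered model_type, so AutoConfig would not resolve it and the generic PretrainedConfig is used instead:

# Sketch: read config.json for the tiny test model (assumes transformers is installed).
# The unregistered model_type means we deliberately avoid AutoConfig here.
from transformers import PretrainedConfig

config = PretrainedConfig.from_json_file("config.json")
print(config.vocab_size)       # 1000
print(config.hidden_size)      # 128
print(config.architectures)    # ["TinyTestModel"]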
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b8523a121065d512d58f624e0f366e4bf88e1fa0655a34a4562bd8b32c518c32
+ size 775064
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b6da2e1a2a6ef5ec681f1a75263ca86f2c0d4fe8ebc42a52d2ffe7f17e84136
+ size 643856
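Both .safetensors entries are Git LFS pointer files: only the spec version, a sha256 oid, and the byte size are committed, while the shard bytes live in LFS storage. A small sketch of checking a downloaded shard against its pointer, assuming the real file has already been fetched (e.g. via git lfs pull or hf_hub_download):

# Sketch: verify a local shard against the sha256 oid and size from its LFS pointer.
import hashlib
import os

def verify_shard(path, expected_oid, expected_size):
    # Cheap size check first, then a streaming sha256 digest.
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid

ok = verify_shard(
    "model-00001-of-00002.safetensors",
    "b8523a121065d512d58f624e0f366e4bf88e1fa0655a34a4562bd8b32c518c32",
    775064,
)
print("shard 1 matches pointer:", ok)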
model.safetensors.index.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "metadata": {
+ "total_size": 1000000
+ },
+ "weight_map": {
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "lm_head.weight": "model-00002-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
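The index maps each tensor name to the shard that stores it, so a loader only needs to open the files listed in weight_map. A sketch of rebuilding the full state dict from the two shards, assuming the safetensors package and local copies of both shard files:

# Sketch: assemble the state dict from the sharded checkpoint using the
# weight_map in model.safetensors.index.json (assumes safetensors is installed).
import json
from safetensors.torch import load_file

with open("model.safetensors.index.json") as f:
    index = json.load(f)

state_dict = {}
for shard_file in sorted(set(index["weight_map"].values())):
    # Each shard contributes the tensors that the weight_map assigns to it.
    state_dict.update(load_file(shard_file))

print(len(state_dict), "tensors loaded")
print(index["weight_map"]["lm_head.weight"])  # model-00002-of-00002.safetensors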