Ex0bit committed
Commit 5481592 · verified · 1 Parent(s): 9948413

Add config.json to root for TGI compatibility

Files changed (1):
  config.json +104 -0
config.json ADDED
@@ -0,0 +1,104 @@
+ {
+   "architectures": [
+     "Glm4MoeLiteForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 0,
+   "dtype": "bfloat16",
+   "eos_token_id": [
+     154820,
+     154827,
+     154829
+   ],
+   "first_k_dense_replace": 1,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 10240,
+   "kv_lora_rank": 512,
+   "max_position_embeddings": 202752,
+   "mlp_layer_types": [
+     "dense",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse",
+     "sparse"
+   ],
+   "model_type": "glm4_moe_lite",
+   "moe_intermediate_size": 1536,
+   "n_group": 1,
+   "n_routed_experts": 64,
+   "n_shared_experts": 1,
+   "norm_topk_prob": true,
+   "num_attention_heads": 20,
+   "num_experts_per_tok": 4,
+   "num_hidden_layers": 47,
+   "num_key_value_heads": 20,
+   "num_nextn_predict_layers": 1,
+   "pad_token_id": 154820,
+   "partial_rotary_factor": 1.0,
+   "pretraining_tp": 1,
+   "q_lora_rank": 768,
+   "qk_head_dim": 256,
+   "qk_nope_head_dim": 192,
+   "qk_rope_head_dim": 64,
+   "rms_norm_eps": 1e-05,
+   "rope_interleave": true,
+   "rope_parameters": {
+     "partial_rotary_factor": 1.0,
+     "rope_theta": 1000000,
+     "rope_type": "default"
+   },
+   "routed_scaling_factor": 1.8,
+   "tie_word_embeddings": false,
+   "topk_group": 1,
+   "topk_method": "noaux_tc",
+   "transformers_version": "5.0.0.dev0",
+   "use_cache": true,
+   "v_head_dim": 256,
+   "vocab_size": 154880
+ }
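
Below is a minimal sketch (not part of the commit) of how one might sanity-check the newly added root-level config.json before pointing TGI or transformers at the repository. The local path and the specific checks are assumptions for illustration, based only on the values shown in the diff above.

import json

# Load the config.json added in this commit. Assumption: the script is run
# from a local checkout of the repository root where the file now lives.
with open("config.json") as f:
    cfg = json.load(f)

# Fields that TGI / transformers read to pick the model implementation.
print(cfg["architectures"])   # ["Glm4MoeLiteForCausalLM"]
print(cfg["model_type"])      # "glm4_moe_lite"
print(cfg["dtype"])           # "bfloat16"

# MoE layout: one dense MLP layer followed by sparse MoE layers,
# consistent with first_k_dense_replace and num_hidden_layers in the file.
assert len(cfg["mlp_layer_types"]) == cfg["num_hidden_layers"] == 47
assert cfg["mlp_layer_types"][: cfg["first_k_dense_replace"]] == ["dense"]
assert set(cfg["mlp_layer_types"][cfg["first_k_dense_replace"]:]) == {"sparse"}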