CoruNethron committed on
Commit 30e28bf · verified · 1 Parent(s): 75b11eb

Upload config.json with huggingface_hub

Files changed (1)
config.json +432 -0
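The commit message states the file was uploaded with huggingface_hub. A minimal sketch of how such an upload might be performed; the repo id below is a placeholder for illustration, not taken from this page:

from huggingface_hub import HfApi

api = HfApi()
# Upload the local config.json to the root of the model repository.
# "user/model-repo" is a hypothetical repo id used only for illustration.
api.upload_file(
    path_or_fileobj="config.json",
    path_in_repo="config.json",
    repo_id="user/model-repo",
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)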
config.json ADDED
@@ -0,0 +1,432 @@
+ {
+   "architectures": [
+     "Qwen3MoeForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "decoder_sparse_step": 1,
+   "eos_token_id": 151645,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 5472,
+   "max_position_embeddings": 262144,
+   "max_window_layers": 28,
+   "mlp_only_layers": [],
+   "model_type": "qwen3_moe",
+   "moe_intermediate_size": 768,
+   "norm_topk_prob": true,
+   "num_attention_heads": 32,
+   "num_experts": 103,
+   "num_experts_per_tok": 8,
+   "num_hidden_layers": 48,
+   "num_key_value_heads": 4,
+   "output_router_logits": false,
+   "qkv_bias": false,
+   "quantization": {
+     "group_size": 64,
+     "bits": 8,
+     "model.layers.0.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.1.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.2.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.3.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.4.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.5.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.6.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.7.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.8.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.9.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.10.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.11.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.12.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.13.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.14.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.15.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.16.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.17.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.18.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.19.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.20.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.21.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.22.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.23.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.24.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.25.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.26.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.27.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.28.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.29.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.30.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.31.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.32.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.33.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.34.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.35.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.36.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.37.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.38.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.39.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.40.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.41.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.42.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.43.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.44.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.45.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.46.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.47.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     }
+   },
+   "quantization_config": {
+     "group_size": 64,
+     "bits": 8,
+     "model.layers.0.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.1.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.2.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.3.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.4.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.5.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.6.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.7.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.8.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.9.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.10.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.11.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.12.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.13.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.14.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.15.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.16.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.17.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.18.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.19.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.20.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.21.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.22.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.23.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.24.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.25.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.26.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.27.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.28.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.29.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.30.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.31.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.32.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.33.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.34.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.35.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.36.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.37.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.38.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.39.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.40.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.41.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.42.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.43.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.44.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.45.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.46.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.47.mlp.gate": {
+       "group_size": 64,
+       "bits": 8
+     }
+   },
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 10000000,
+   "router_aux_loss_coef": 0.0,
+   "shared_expert_intermediate_size": 0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.55.0",
+   "use_cache": true,
+   "use_qk_norm": true,
+   "use_sliding_window": false,
+   "vocab_size": 151936
+ }
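As a quick sanity check, the uploaded config can be fetched and inspected with huggingface_hub and the standard json module. This is a minimal sketch; the repo id is a placeholder, not taken from this page:

import json
from huggingface_hub import hf_hub_download

# "user/model-repo" is a hypothetical repo id used only for illustration.
path = hf_hub_download(repo_id="user/model-repo", filename="config.json")

with open(path) as f:
    cfg = json.load(f)

# A few of the fields defined in the diff above.
print(cfg["model_type"])                                 # qwen3_moe
print(cfg["num_hidden_layers"])                          # 48
print(cfg["num_experts"], cfg["num_experts_per_tok"])    # 103 8
print(cfg["quantization"]["bits"], cfg["quantization"]["group_size"])  # 8 64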