Upload config.json with huggingface_hub
config.json  CHANGED  (+7 -2)
@@ -11,7 +11,7 @@
   "hidden_size": 2048,
   "initializer_range": 0.02,
   "intermediate_size": 6144,
-  "max_position_embeddings":
+  "max_position_embeddings": 40960,
   "max_window_layers": 48,
   "mlp_only_layers": [],
   "model_type": "qwen3_moe",
@@ -33,5 +33,10 @@
   "transformers_version": "4.51.3",
   "use_cache": true,
   "use_sliding_window": false,
-  "vocab_size": 151936
+  "vocab_size": 151936,
+  "rope_scaling": {
+    "rope_type": "yarn",
+    "factor": 4.0,
+    "original_max_position_embeddings": 32768
+  }
 }
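The added rope_scaling block matches the YaRN long-context recipe documented for Qwen3: a scaling factor of 4.0 over an original window of 32768 tokens, i.e. roughly a 131072-token context once the loader applies the scaling. The commit message indicates the file was pushed with huggingface_hub; what follows is a minimal sketch of how such an edit-and-upload could look. The repo id (your-org/qwen3-moe-model) and the local file path are placeholders, not values taken from this commit.

import json
from huggingface_hub import HfApi

# Load the local copy of the model config and add the YaRN rope-scaling
# block shown in the diff above.
with open("config.json") as f:
    config = json.load(f)

config["rope_scaling"] = {
    "rope_type": "yarn",
    "factor": 4.0,
    "original_max_position_embeddings": 32768,
}

with open("config.json", "w") as f:
    json.dump(config, f, indent=2)

# Push the edited file back to the Hub. The repo id is a placeholder;
# authentication comes from a local `huggingface-cli login` or HF_TOKEN.
api = HfApi()
api.upload_file(
    path_or_fileobj="config.json",
    path_in_repo="config.json",
    repo_id="your-org/qwen3-moe-model",
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)

Since transformers reads rope_scaling from config.json at load time, the change takes effect the next time the model is loaded from the repo. Note that common open-source stacks implement static YaRN, applying the factor regardless of input length, which is why Qwen's documentation suggests adding this block only when long contexts are actually needed.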