thehekimoghlu committed
Commit 9bd01d3 · verified · 1 Parent(s): c0f79fc

Update config.json

Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "OpenModelMoe"
+    "OpenModel"
   ],
   "attention_dropout": 0.0,
   "auto_map": {
@@ -16,7 +16,7 @@
   "first_k_dense_replace": 4,
   "hidden_act": "silu",
   "max_position_embeddings": 32768,
-  "model_type": "bailing_moe",
+  "model_type": "openmodel",
   "moe_intermediate_size": 2048,
   "norm_topk_prob": true,
   "num_experts_per_tok": 8,