OrionZheng committed on
Commit
76721d5
1 Parent(s): d2bb3d1

Update config.json

Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -6,13 +6,13 @@
6   "AutoModelForCausalLM": "modeling_openmoe.OpenMoeForCausalLM"
7   },
8   "attention_bias": false,
9 - "bos_token_id": 1,
9 + "bos_token_id": 2,
10   "dropout_rate": 0.0,
11   "enable_comm_overlap": false,
12   "enable_hierarchical_alltoall": false,
13   "enable_kernel": false,
14   "enable_load_balance": false,
15 - "eos_token_id": 2,
15 + "eos_token_id": 1,
16   "expert_parallel": null,
17   "head_dim": 64,
18   "hidden_act": "swiglu",
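
The commit swaps the two special-token ids: bos_token_id changes from 1 to 2 and eos_token_id from 2 to 1. A minimal sketch of how a downstream user could check the updated values, assuming a placeholder repo id (not confirmed by this commit) and using transformers' AutoConfig; trust_remote_code is needed because the config points at custom modeling code (modeling_openmoe.OpenMoeForCausalLM):

from transformers import AutoConfig

# Placeholder repo id for illustration only; substitute the actual model repo.
config = AutoConfig.from_pretrained(
    "OrionZheng/openmoe-base",
    trust_remote_code=True,
)

# After this commit, the ids should be swapped relative to the old config.
assert config.bos_token_id == 2
assert config.eos_token_id == 1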