sijunhe committed on
Commit
95de04d
1 Parent(s): b5729e9

Update model_config.json

Files changed (1)
  1. model_config.json +18 -21
model_config.json CHANGED
@@ -1,23 +1,20 @@
 {
-  "architectures": [
-    "BertForMaskedLM"
+  "init_args": [
+    {
+      "attention_probs_dropout_prob": 0.1,
+      "hidden_act": "relu",
+      "hidden_dropout_prob": 0.1,
+      "hidden_size": 8,
+      "intermediate_size": 8,
+      "initializer_range": 0.02,
+      "max_position_embeddings": 512,
+      "num_attention_heads": 2,
+      "num_hidden_layers": 2,
+      "type_vocab_size": 2,
+      "vocab_size": 30522,
+      "pad_token_id": 0,
+      "init_class": "BertModel"
+    }
   ],
-  "attention_probs_dropout_prob": 0.1,
-  "dtype": "float32",
-  "fuse": false,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
-  "hidden_size": 8,
-  "initializer_range": 0.02,
-  "intermediate_size": 8,
-  "layer_norm_eps": 1e-12,
-  "max_position_embeddings": 512,
-  "model_type": "bert",
-  "num_attention_heads": 2,
-  "num_hidden_layers": 2,
-  "pad_token_id": 0,
-  "paddlenlp_version": null,
-  "pool_act": "tanh",
-  "type_vocab_size": 2,
-  "vocab_size": 30522
-}
+  "init_class": "BertForMaskedLM"
+}
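
The change replaces the flat, Transformers-style config (top-level "architectures", "model_type", "dtype", and so on) with PaddleNLP's nested "init_class" / "init_args" layout, where the outer "init_class" names the task model and the entry inside "init_args" (carrying its own "init_class") configures the backbone. The sketch below is not PaddleNLP's actual loading code; it is a minimal illustration of how that nesting could be resolved into a model, assuming a legacy PaddleNLP release whose BertModel and BertForMaskedLM constructors accept these arguments, and with a hypothetical local path to the config file.

# Illustrative sketch only (not PaddleNLP's loader). Assumes a legacy
# PaddleNLP release whose Bert classes accept these constructor arguments.
import json

import paddlenlp.transformers as T

with open("model_config.json") as f:   # hypothetical local path
    config = json.load(f)

# The outer "init_class" names the task model; the nested entry in
# "init_args" carries its own "init_class" for the backbone.
backbone_args = dict(config["init_args"][0])
backbone_cls = getattr(T, backbone_args.pop("init_class"))   # BertModel
head_cls = getattr(T, config["init_class"])                   # BertForMaskedLM

backbone = backbone_cls(**backbone_args)   # randomly initialized tiny BERT backbone
model = head_cls(backbone)                 # wrap the backbone with the masked-LM head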