{
  "hidden_size": 5120,
  "inner_hidden_size": 13696,
  "head_hidden_size": 128,
  "hidden_act": "silu",
  "num_attention_heads": 40,
  "num_key_value_heads": 40,
  "num_layers": 40,
  "qkv_bias": false,
  "o_bias": false,
  "vocab_size": 125696,
  "dropout_rate": 0.0,
  "layernorm_epsilon": 1e-06,
  "max_sequence_length": 4096,
  "use_alibi": true
}