{
 "architectures": [
  "NewModel"
 ],
 "attention_probs_dropout_prob": 0.0,
 "auto_map": {
  "AutoConfig": "configuration.NewConfig",
  "AutoModel": "modeling.NewModel"
 },
 "classifier_dropout": null,
 "hidden_act": "gelu",
 "hidden_dropout_prob": 0.1,
 "hidden_size": 1024,
 "initializer_range": 0.02,
 "intermediate_size": 4096,
 "layer_norm_eps": 1e-12,
 "layer_norm_type": "layer_norm",
 "logn_attention_clip1": false,
 "logn_attention_scale": false,
 "max_position_embeddings": 8192,
 "model_type": "new",
 "num_attention_heads": 16,
 "num_hidden_layers": 24,
 "pack_qkv": true,
 "pad_token_id": 0,
 "position_embedding_type": "rope",
 "rope_scaling": {
  "factor": 2.0,
  "type": "ntk"
 },
 "rope_theta": 160000,
 "torch_dtype": "float32",
 "transformers_version": "4.41.2",
 "type_vocab_size": 2,
 "unpad_inputs": true,
 "use_memory_efficient_attention": true,
 "vocab_size": 30528
}
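Note that the "auto_map" entries point at custom configuration.py / modeling.py files shipped alongside this config, so loading the model through transformers requires trust_remote_code=True. A minimal loading sketch, assuming a placeholder repo id (the actual hub path is not given in the config itself):

    from transformers import AutoModel, AutoTokenizer

    # Placeholder repo id -- substitute the hub path that hosts this config.json.
    model_id = "your-org/your-model"

    # "auto_map" resolves AutoConfig/AutoModel to the repo's own
    # configuration.NewConfig and modeling.NewModel classes, so
    # transformers must be allowed to import that remote code.
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModel.from_pretrained(model_id, trust_remote_code=True)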