elephantmipt committed on
Commit b44637b
1 Parent(s): 5605171

Upload config.json

Files changed (1)
  1. config.json +68 -0
config.json ADDED
@@ -0,0 +1,68 @@
+ {
+   "activation_function": "swiglu",
+   "alt_mixer": {
+     "_target_": "aicl.model.models.mixers.linear_attention.LinearAttention",
+     "feature_dim": 16,
+     "feature_map": {
+       "_target_": "aicl.model.models.mixers.linear_attention.Squared",
+       "input_dim": 16
+     },
+     "l_max": 2048,
+     "num_heads": 12
+   },
+   "alt_mixer_layers": [
+     1,
+     3,
+     5,
+     7,
+     9,
+     11,
+     13,
+     15,
+     17
+   ],
+   "attn_pdrop": 0,
+   "bos_token_id": 50256,
+   "embd_pdrop": 0,
+   "eos_token_id": 50256,
+   "fused_bias_fc": true,
+   "fused_dropout_add_ln": true,
+   "fused_mlp": false,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "mixer": {
+     "_target_": "aicl.model.models.mixers.convolution.BaseConv",
+     "expand_proj": 4,
+     "kernel_sizes": 3,
+     "l_max": 2048,
+     "use_bias": true
+   },
+   "mlp_fc1_bias": false,
+   "mlp_fc2_bias": false,
+   "model_type": "gpt2",
+   "n_embd": 768,
+   "n_head": 12,
+   "n_inner": 1536,
+   "n_layer": 18,
+   "n_positions": 0,
+   "out_proj_bias": false,
+   "pad_vocab_size_multiple": 8,
+   "qkv_proj_bias": false,
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0,
+   "residual_in_fp32": true,
+   "rms_norm": true,
+   "rotary_emb_fraction": 1,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "special_initializer": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "transformers_version": "4.38.2",
+   "use_cache": true,
+   "use_flash_attn": true,
+   "vocab_size": 50277
+ }
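
The config describes a hybrid 18-layer stack: most layers use the BaseConv mixer, while the layers listed in "alt_mixer_layers" use LinearAttention. As a quick sanity check of that layout, the following minimal sketch reads the uploaded config.json using only the Python standard library; it assumes nothing beyond the JSON itself (no aicl or transformers code is required).

  # Minimal sketch: inspect the per-layer mixer layout described by config.json.
  # Assumption: only the JSON file is available; the aicl model code is not imported.
  import json

  with open("config.json") as f:
      cfg = json.load(f)

  alt_layers = set(cfg["alt_mixer_layers"])   # layers that use the alternate mixer
  base_target = cfg["mixer"]["_target_"]      # ...convolution.BaseConv
  alt_target = cfg["alt_mixer"]["_target_"]   # ...linear_attention.LinearAttention

  for i in range(cfg["n_layer"]):
      target = alt_target if i in alt_layers else base_target
      print(f"layer {i:2d}: {target.rsplit('.', 1)[-1]}")

With the values above this prints BaseConv for the even-indexed layers and LinearAttention for layers 1, 3, ..., 17, i.e. the two mixers alternate through the 18-layer stack. Actually instantiating the "_target_" classes (for example via Hydra-style instantiation) would additionally require the aicl package referenced in the config.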