Update config.yaml
config.yaml (+3 -3)
@@ -2,7 +2,7 @@
 # Please resolve any `null` fields before launching!
 
 precision: amp_bf16
-max_seq_len:
+max_seq_len: 32768
 
 # Tokenizer for dataset creation
 tokenizer_name: bert-base-uncased
@@ -16,10 +16,10 @@ model:
   num_attention_heads: 12
   num_hidden_layers: 12
   attention_probs_dropout_prob: 0.0
-  max_position_embeddings:
+  max_position_embeddings: 32768
 
   monarch_mixer_sequence_mixing: True
-  long_conv_l_max:
+  long_conv_l_max: 32768
   long_conv_kernel_learning_rate: 1e-3
   hyena_lr_pos_emb: 1e-5
   hyena_w: 10
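The commit resolves the three length-related placeholder fields (`max_seq_len`, `max_position_embeddings`, and `long_conv_l_max`), setting all three to 32768, presumably so the position embeddings and the long-convolution kernel cover the full input length. The config's header comment asks that any remaining `null` fields be resolved before launching; a minimal pre-launch check in that spirit (a sketch assuming PyYAML is installed; the helper name `find_null_fields` and the `config.yaml` path are illustrative, not part of the repository) might look like:

import yaml

def find_null_fields(node, prefix=""):
    """Recursively collect dotted paths of keys whose value is null (None)."""
    nulls = []
    if isinstance(node, dict):
        for key, value in node.items():
            path = f"{prefix}.{key}" if prefix else str(key)
            if value is None:
                nulls.append(path)
            else:
                nulls.extend(find_null_fields(value, path))
    elif isinstance(node, list):
        for i, value in enumerate(node):
            nulls.extend(find_null_fields(value, f"{prefix}[{i}]"))
    return nulls

with open("config.yaml") as f:
    config = yaml.safe_load(f)

missing = find_null_fields(config)
if missing:
    raise SystemExit(f"Unresolved null fields: {', '.join(missing)}")
print("config.yaml: no null fields left, OK to launch")

Note that a bare `max_seq_len:` with no value, as in the removed lines above, parses to `None` in YAML, which is exactly what this check flags.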