---
# Model / training hyperparameters. Key names follow the COMET toolkit's
# UnifiedMetric configuration — NOTE(review): confirm semantics against the
# consuming loader; comments below are hedged where the file alone can't
# establish them.
activations: Tanh  # activation name for the feed-forward estimator head
batch_size: 4
class_identifier: unified_metric  # model class the loader instantiates
dropout: 0.1
encoder_learning_rate: 1.0e-06  # presumably a separate (smaller) LR for the encoder
encoder_model: XLM-RoBERTa  # encoder family; actual checkpoint is pretrained_model below
final_activation: null  # no activation on the regression output
hidden_sizes:  # widths of the feed-forward head layers
- 3072
- 1024
input_segments:  # segments fed to the model: machine translation + source
- mt
- src
keep_embeddings_frozen: true  # embedding layer excluded from training
layer: mix  # presumably a learned mix over encoder layers — TODO confirm
layer_norm: false
layer_transformation: sparsemax  # transformation applied to layer-mix weights
layerwise_decay: 0.95  # per-layer LR decay factor for the encoder
learning_rate: 1.5e-05  # presumably the LR for non-encoder parameters
loss: mse
loss_lambda: 0.65  # weighting between loss terms — TODO confirm which terms
nr_frozen_epochs: 0.3  # fractional value suggests a fraction of an epoch frozen — verify
optimizer: AdamW
pool: avg  # sentence-embedding pooling strategy
pretrained_model: microsoft/infoxlm-large  # HF checkpoint actually loaded
sent_layer: mix  # layer selection for sentence-level features
train_data:
- data/1720-da.mlqe-src.csv
validation_data:
- data/wmt-ende-newstest2021.csv
- data/wmt-enru-newstest2021.csv
- data/wmt-zhen-newstest2021.csv
word_layer: 24  # encoder layer used for word-level features
word_level_training: false  # word-level objective disabled
word_weights:  # presumably class weights for word-level labels (e.g. OK/BAD)
- 0.15
- 0.85