#!/bin/bash
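# Pretrains a T5 model with the Flax masked-language-modeling (span corruption)
# example script. The variables below are expected to be set before running;
# the example values are placeholders, not the ones from the original run:
#
#   export MODEL_PATH="./my-t5-model"                    # placeholder path; must hold config.json + tokenizer
#   export DATASET="oscar"                               # placeholder HF dataset name
#   export DATASET_CONFIG="unshuffled_deduplicated_en"   # placeholder dataset config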

python run_t5_mlm_flax.py \
    --output_dir="${MODEL_PATH}" \
    --model_type="t5" \
    --config_name="${MODEL_PATH}" \
    --tokenizer_name="${MODEL_PATH}" \
    --preprocessing_num_workers="96" \
    --do_train --do_eval \
    --dataset_name="${DATASET}" \
    --dataset_config_name="${DATASET_CONFIG}" \
    --max_seq_length="512" \
    --per_device_train_batch_size="16" \
    --per_device_eval_batch_size="16" \
    --adafactor \
    --learning_rate="0.005" \
    --overwrite_output_dir \
    --num_train_epochs="1" \
    --logging_steps="500" \
    --save_steps="80000" \
    --eval_steps="2500" \
    --weight_decay="0.01" \
    --warmup_steps="10000" \
    --validation_split_count="15000" \
    --push_to_hub
# Optional flags — to use one, uncomment it and move it into the command above:
#    --adam_beta1="0.9" \
#    --adam_beta2="0.98" \
#    --resume_from_checkpoint="${MODEL_PATH}" \   # resume from the last saved checkpoint
#    --max_train_samples="100000" \               # cap the train set for a quick test run
#    --max_eval_samples="1000" \                  # cap the eval set


# To train with AdamW instead of Adafactor, remove --adafactor from the
# command above and uncomment the adam_beta1/adam_beta2 flags.
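
# A minimal sketch for seeding MODEL_PATH with a config and tokenizer before
# the first run, so that --config_name/--tokenizer_name resolve locally.
# It assumes the transformers Python API is installed; "google/t5-v1_1-base"
# is a placeholder base model, not necessarily the one used here:
#
# python - <<'EOF'
# import os
# from transformers import AutoTokenizer, T5Config
#
# model_path = os.environ["MODEL_PATH"]
# # Copy a base T5 config and tokenizer into MODEL_PATH.
# T5Config.from_pretrained("google/t5-v1_1-base").save_pretrained(model_path)
# AutoTokenizer.from_pretrained("google/t5-v1_1-base").save_pretrained(model_path)
# EOF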