ancient-ancient committed
Commit a877374
Parent(s): 812ea87
initial commit
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
- README.md +1 -0
- cola/roberta-base_lr1e-05/config.json +27 -0
- cola/roberta-base_lr1e-05/merges.txt +0 -0
- cola/roberta-base_lr1e-05/pytorch_model.bin +3 -0
- cola/roberta-base_lr1e-05/special_tokens_map.json +15 -0
- cola/roberta-base_lr1e-05/tokenizer.json +0 -0
- cola/roberta-base_lr1e-05/tokenizer_config.json +15 -0
- cola/roberta-base_lr1e-05/trainer_state.json +172 -0
- cola/roberta-base_lr1e-05/training_args.bin +3 -0
- cola/roberta-base_lr1e-05/vocab.json +0 -0
- mnli/roberta-base_lr1e-05/config.json +37 -0
- mnli/roberta-base_lr1e-05/merges.txt +0 -0
- mnli/roberta-base_lr1e-05/pytorch_model.bin +3 -0
- mnli/roberta-base_lr1e-05/special_tokens_map.json +15 -0
- mnli/roberta-base_lr1e-05/tokenizer.json +0 -0
- mnli/roberta-base_lr1e-05/tokenizer_config.json +15 -0
- mnli/roberta-base_lr1e-05/trainer_state.json +2764 -0
- mnli/roberta-base_lr1e-05/training_args.bin +3 -0
- mnli/roberta-base_lr1e-05/vocab.json +0 -0
- mrpc/roberta-base_lr5e-05/config.json +27 -0
- mrpc/roberta-base_lr5e-05/merges.txt +0 -0
- mrpc/roberta-base_lr5e-05/pytorch_model.bin +3 -0
- mrpc/roberta-base_lr5e-05/special_tokens_map.json +15 -0
- mrpc/roberta-base_lr5e-05/tokenizer.json +0 -0
- mrpc/roberta-base_lr5e-05/tokenizer_config.json +15 -0
- mrpc/roberta-base_lr5e-05/trainer_state.json +162 -0
- mrpc/roberta-base_lr5e-05/training_args.bin +3 -0
- mrpc/roberta-base_lr5e-05/vocab.json +0 -0
- qnli/roberta-base_lr1e-05/config.json +27 -0
- qnli/roberta-base_lr1e-05/merges.txt +0 -0
- qnli/roberta-base_lr1e-05/pytorch_model.bin +3 -0
- qnli/roberta-base_lr1e-05/special_tokens_map.json +15 -0
- qnli/roberta-base_lr1e-05/tokenizer.json +0 -0
- qnli/roberta-base_lr1e-05/tokenizer_config.json +15 -0
- qnli/roberta-base_lr1e-05/trainer_state.json +820 -0
- qnli/roberta-base_lr1e-05/training_args.bin +3 -0
- qnli/roberta-base_lr1e-05/vocab.json +0 -0
- qqp/roberta-base_lr1e-05/config.json +27 -0
- qqp/roberta-base_lr1e-05/merges.txt +0 -0
- qqp/roberta-base_lr1e-05/pytorch_model.bin +3 -0
- qqp/roberta-base_lr1e-05/special_tokens_map.json +15 -0
- qqp/roberta-base_lr1e-05/tokenizer.json +0 -0
- qqp/roberta-base_lr1e-05/tokenizer_config.json +15 -0
- qqp/roberta-base_lr1e-05/trainer_state.json +2592 -0
- qqp/roberta-base_lr1e-05/training_args.bin +3 -0
- qqp/roberta-base_lr1e-05/vocab.json +0 -0
- rte/roberta-base_lr1e-05/config.json +27 -0
- rte/roberta-base_lr1e-05/merges.txt +0 -0
- rte/roberta-base_lr1e-05/pytorch_model.bin +3 -0
- rte/roberta-base_lr1e-05/special_tokens_map.json +15 -0
README.md
CHANGED
@@ -1,3 +1,4 @@
 ---
 license: mit
 ---
+This repository contains the checkpoints of [roberta-base](https://huggingface.co/FacebookAI/roberta-base) finetuned on eight GLUE datasets. The checkpoints can be downloaded for reproducing the experiments in [DARE](https://arxiv.org/abs/2311.03099).
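Each checkpoint directory in this commit is self-contained (config, tokenizer files, and weights), so it can be loaded with transformers via the subfolder argument. A minimal sketch in Python; the repository id is a placeholder, since this page does not show it, and the 2-label CoLA head is inferred from the config further below rather than stated anywhere:

from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Placeholder repo id: substitute the actual Hub repository this commit belongs to.
repo_id = "<user>/<this-repo>"
subfolder = "cola/roberta-base_lr1e-05"  # any of the checkpoint directories listed above

tokenizer = AutoTokenizer.from_pretrained(repo_id, subfolder=subfolder)
model = AutoModelForSequenceClassification.from_pretrained(repo_id, subfolder=subfolder)

inputs = tokenizer("The book was read by the student.", return_tensors="pt")
logits = model(**inputs).logits  # CoLA: 2 labels (the config records no id2label, so transformers defaults apply)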
cola/roberta-base_lr1e-05/config.json
ADDED
@@ -0,0 +1,27 @@
{
  "_name_or_path": "/mnt/data/yule/.cache/roberta-base",
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.33.1",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
cola/roberta-base_lr1e-05/merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
cola/roberta-base_lr1e-05/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:769d52cd0ec7a0292b955ce20b3ad6e8278096c6d5b9f824d8268da16f0af7c5
size 498654833
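The .bin files are stored as Git LFS pointers, so the diff above records only the object hash and size, not the weights themselves. A short sketch, assuming the real file has already been fetched (for example with git lfs pull), that checks a download against the advertised sha256:

# Sketch: verify a downloaded weight file against the sha256 in its LFS pointer.
import hashlib

expected = "769d52cd0ec7a0292b955ce20b3ad6e8278096c6d5b9f824d8268da16f0af7c5"

h = hashlib.sha256()
with open("cola/roberta-base_lr1e-05/pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected, "checksum mismatch"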
cola/roberta-base_lr1e-05/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
{
  "bos_token": "<s>",
  "cls_token": "<s>",
  "eos_token": "</s>",
  "mask_token": {
    "content": "<mask>",
    "lstrip": true,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "unk_token": "<unk>"
}
cola/roberta-base_lr1e-05/tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
cola/roberta-base_lr1e-05/tokenizer_config.json
ADDED
@@ -0,0 +1,15 @@
{
  "add_prefix_space": false,
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "errors": "replace",
  "mask_token": "<mask>",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "tokenizer_class": "RobertaTokenizer",
  "trim_offsets": true,
  "unk_token": "<unk>"
}
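Note that model_max_length here is transformers' very-large sentinel rather than a real limit, while the config above caps position embeddings at 514. A small sketch (repo id again a placeholder) that truncates explicitly when tokenizing:

from transformers import AutoTokenizer

repo_id = "<user>/<this-repo>"  # placeholder; the actual Hub id is not shown on this page
tok = AutoTokenizer.from_pretrained(repo_id, subfolder="cola/roberta-base_lr1e-05")

# Cap sequence length explicitly, since model_max_length is effectively unbounded here.
enc = tok("a very long sentence " * 200, truncation=True, max_length=512)
print(len(enc["input_ids"]))  # <= 512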
cola/roberta-base_lr1e-05/trainer_state.json
ADDED
@@ -0,0 +1,172 @@
{
  "best_metric": 0.636893310208111,
  "best_model_checkpoint": "./save_models/cola/roberta-base_lr1e-05_run0/checkpoint-3848",
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 4810,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_loss": 0.45413094758987427,
      "eval_matthews_correlation": 0.47785514722622213,
      "eval_runtime": 0.5981,
      "eval_samples_per_second": 1431.104,
      "eval_steps_per_second": 90.28,
      "step": 481
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.533289095332891e-06,
      "loss": 0.5451,
      "step": 500
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.4294413626194,
      "eval_matthews_correlation": 0.5559024469662472,
      "eval_runtime": 0.6646,
      "eval_samples_per_second": 1288.013,
      "eval_steps_per_second": 81.253,
      "step": 962
    },
    {
      "epoch": 2.08,
      "learning_rate": 8.427339084273391e-06,
      "loss": 0.3643,
      "step": 1000
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.5282544493675232,
      "eval_matthews_correlation": 0.6061655951679263,
      "eval_runtime": 0.5226,
      "eval_samples_per_second": 1638.098,
      "eval_steps_per_second": 103.338,
      "step": 1443
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.3213890732138915e-06,
      "loss": 0.263,
      "step": 1500
    },
    {
      "epoch": 4.0,
      "eval_loss": 0.5712747573852539,
      "eval_matthews_correlation": 0.6315329091741146,
      "eval_runtime": 0.5375,
      "eval_samples_per_second": 1592.64,
      "eval_steps_per_second": 100.47,
      "step": 1924
    },
    {
      "epoch": 4.16,
      "learning_rate": 6.2154390621543915e-06,
      "loss": 0.2115,
      "step": 2000
    },
    {
      "epoch": 5.0,
      "eval_loss": 0.6360122561454773,
      "eval_matthews_correlation": 0.6312355301626161,
      "eval_runtime": 0.5144,
      "eval_samples_per_second": 1664.168,
      "eval_steps_per_second": 104.983,
      "step": 2405
    },
    {
      "epoch": 5.2,
      "learning_rate": 5.1094890510948916e-06,
      "loss": 0.1785,
      "step": 2500
    },
    {
      "epoch": 6.0,
      "eval_loss": 0.8011564612388611,
      "eval_matthews_correlation": 0.6138643516919586,
      "eval_runtime": 0.5202,
      "eval_samples_per_second": 1645.404,
      "eval_steps_per_second": 103.799,
      "step": 2886
    },
    {
      "epoch": 6.24,
      "learning_rate": 4.003539040035391e-06,
      "loss": 0.1659,
      "step": 3000
    },
    {
      "epoch": 7.0,
      "eval_loss": 0.812069833278656,
      "eval_matthews_correlation": 0.6262286365318072,
      "eval_runtime": 0.5378,
      "eval_samples_per_second": 1591.669,
      "eval_steps_per_second": 100.409,
      "step": 3367
    },
    {
      "epoch": 7.28,
      "learning_rate": 2.897589028975891e-06,
      "loss": 0.1218,
      "step": 3500
    },
    {
      "epoch": 8.0,
      "eval_loss": 0.7686376571655273,
      "eval_matthews_correlation": 0.636893310208111,
      "eval_runtime": 0.5131,
      "eval_samples_per_second": 1668.28,
      "eval_steps_per_second": 105.242,
      "step": 3848
    },
    {
      "epoch": 8.32,
      "learning_rate": 1.7916390179163902e-06,
      "loss": 0.1063,
      "step": 4000
    },
    {
      "epoch": 9.0,
      "eval_loss": 0.8879063129425049,
      "eval_matthews_correlation": 0.6314014927014334,
      "eval_runtime": 0.5155,
      "eval_samples_per_second": 1660.431,
      "eval_steps_per_second": 104.747,
      "step": 4329
    },
    {
      "epoch": 9.36,
      "learning_rate": 6.856890068568902e-07,
      "loss": 0.0838,
      "step": 4500
    },
    {
      "epoch": 10.0,
      "eval_loss": 0.8707919716835022,
      "eval_matthews_correlation": 0.6253690295963492,
      "eval_runtime": 0.5401,
      "eval_samples_per_second": 1584.804,
      "eval_steps_per_second": 99.976,
      "step": 4810
    },
    {
      "epoch": 10.0,
      "step": 4810,
      "total_flos": 808993009136460.0,
      "train_loss": 0.21800182366321588,
      "train_runtime": 226.2198,
      "train_samples_per_second": 340.156,
      "train_steps_per_second": 21.263
    }
  ],
  "logging_steps": 500,
  "max_steps": 4810,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 808993009136460.0,
  "trial_name": null,
  "trial_params": null
}
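The log_history above alternates Trainer loss records (every 500 steps) with one evaluation per epoch, keyed by eval_matthews_correlation for CoLA. A short sketch, assuming the file has been downloaded locally, that extracts the per-epoch metric:

# Sketch: pull per-epoch eval metrics out of a trainer_state.json of this shape.
import json

with open("cola/roberta-base_lr1e-05/trainer_state.json") as f:
    state = json.load(f)

# Eval entries carry eval_* keys; training entries carry loss/learning_rate.
evals = [e for e in state["log_history"] if "eval_matthews_correlation" in e]
for e in evals:
    print(f'epoch {e["epoch"]:>4}: MCC={e["eval_matthews_correlation"]:.4f}')

print("best:", state["best_metric"], "at", state["best_model_checkpoint"])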
cola/roberta-base_lr1e-05/training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f105f3efde4732d7bfbb62ddd33fe996ca550c7c990c745187f28514c8c60c83
size 4091
cola/roberta-base_lr1e-05/vocab.json
ADDED
The diff for this file is too large to render.
See raw diff
mnli/roberta-base_lr1e-05/config.json
ADDED
@@ -0,0 +1,37 @@
{
  "_name_or_path": "/mnt/data/yule/.cache/roberta-base",
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_2": 2
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.33.1",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
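Unlike the CoLA config, this one records a 3-way head, but only with generic LABEL_0..LABEL_2 names. A sketch of mapping predictions back to class names; the entailment/neutral/contradiction order is an assumption based on the GLUE mnli label convention, not something this config states, and the repo id is a placeholder:

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

repo_id = "<user>/<this-repo>"  # placeholder; not shown on this page
sub = "mnli/roberta-base_lr1e-05"
tok = AutoTokenizer.from_pretrained(repo_id, subfolder=sub)
model = AutoModelForSequenceClassification.from_pretrained(repo_id, subfolder=sub)

names = ["entailment", "neutral", "contradiction"]  # assumed GLUE mnli order; verify against the training script
inputs = tok("A man is playing a guitar.", "A person plays music.", return_tensors="pt")
with torch.no_grad():
    pred = model(**inputs).logits.argmax(-1).item()
print(names[pred])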
mnli/roberta-base_lr1e-05/merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
mnli/roberta-base_lr1e-05/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f8e37e128bef6b3cca696c691d2f3481399caa55e6d79cc7e1558cad5bdf3245
size 498657905
mnli/roberta-base_lr1e-05/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
{
  "bos_token": "<s>",
  "cls_token": "<s>",
  "eos_token": "</s>",
  "mask_token": {
    "content": "<mask>",
    "lstrip": true,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "unk_token": "<unk>"
}
mnli/roberta-base_lr1e-05/tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
mnli/roberta-base_lr1e-05/tokenizer_config.json
ADDED
@@ -0,0 +1,15 @@
{
  "add_prefix_space": false,
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "errors": "replace",
  "mask_token": "<mask>",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "tokenizer_class": "RobertaTokenizer",
  "trim_offsets": true,
  "unk_token": "<unk>"
}
mnli/roberta-base_lr1e-05/trainer_state.json
ADDED
@@ -0,0 +1,2764 @@
{
  "best_metric": 0.867408520282142,
  "best_model_checkpoint": "./save_models/mnli/roberta-base_lr1e-05_run2/checkpoint-220900",
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 220900,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 3.772446054021428e-07,
      "loss": 1.1005,
      "step": 500
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.544892108042856e-07,
      "loss": 1.1004,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.1317338162064282e-06,
      "loss": 1.0915,
      "step": 1500
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.5089784216085712e-06,
      "loss": 0.9695,
      "step": 2000
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.886223027010714e-06,
      "loss": 0.8466,
      "step": 2500
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.2634676324128565e-06,
      "loss": 0.6904,
      "step": 3000
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.6407122378149996e-06,
      "loss": 0.6293,
      "step": 3500
    },
    {
      "epoch": 0.18,
      "learning_rate": 3.0179568432171424e-06,
      "loss": 0.5855,
      "step": 4000
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.395201448619285e-06,
      "loss": 0.5505,
      "step": 4500
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.772446054021428e-06,
      "loss": 0.5519,
      "step": 5000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.149690659423571e-06,
      "loss": 0.5192,
      "step": 5500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.526935264825713e-06,
      "loss": 0.5156,
      "step": 6000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.904179870227856e-06,
      "loss": 0.5082,
      "step": 6500
    },
    {
      "epoch": 0.32,
      "learning_rate": 5.281424475629999e-06,
      "loss": 0.5001,
      "step": 7000
    },
    {
      "epoch": 0.34,
      "learning_rate": 5.658669081032142e-06,
      "loss": 0.489,
      "step": 7500
    },
    {
      "epoch": 0.36,
      "learning_rate": 6.035913686434285e-06,
      "loss": 0.4944,
      "step": 8000
    },
    {
      "epoch": 0.38,
      "learning_rate": 6.4131582918364275e-06,
      "loss": 0.4852,
      "step": 8500
    },
    {
      "epoch": 0.41,
      "learning_rate": 6.79040289723857e-06,
      "loss": 0.4802,
      "step": 9000
    },
    {
      "epoch": 0.43,
      "learning_rate": 7.167647502640713e-06,
      "loss": 0.4721,
      "step": 9500
    },
    {
      "epoch": 0.45,
      "learning_rate": 7.544892108042856e-06,
      "loss": 0.4506,
      "step": 10000
    },
    {
      "epoch": 0.48,
      "learning_rate": 7.922136713445e-06,
      "loss": 0.4756,
      "step": 10500
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.299381318847142e-06,
      "loss": 0.4595,
      "step": 11000
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.676625924249283e-06,
      "loss": 0.474,
      "step": 11500
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.053870529651426e-06,
      "loss": 0.4419,
      "step": 12000
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.431115135053569e-06,
      "loss": 0.4571,
      "step": 12500
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.808359740455711e-06,
      "loss": 0.4544,
      "step": 13000
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.98815291409418e-06,
      "loss": 0.4595,
      "step": 13500
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.964073471196171e-06,
      "loss": 0.441,
      "step": 14000
    },
    {
      "epoch": 0.66,
      "learning_rate": 9.939994028298163e-06,
      "loss": 0.4483,
      "step": 14500
    },
    {
      "epoch": 0.68,
      "learning_rate": 9.915914585400153e-06,
      "loss": 0.4395,
      "step": 15000
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.891835142502145e-06,
      "loss": 0.4484,
      "step": 15500
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.867755699604135e-06,
      "loss": 0.4378,
      "step": 16000
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.843676256706126e-06,
      "loss": 0.4422,
      "step": 16500
    },
    {
      "epoch": 0.77,
      "learning_rate": 9.819596813808116e-06,
      "loss": 0.4293,
      "step": 17000
    },
    {
      "epoch": 0.79,
      "learning_rate": 9.795517370910108e-06,
      "loss": 0.4349,
      "step": 17500
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.771437928012098e-06,
      "loss": 0.4292,
      "step": 18000
    },
    {
      "epoch": 0.84,
      "learning_rate": 9.74735848511409e-06,
      "loss": 0.4403,
      "step": 18500
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.723279042216081e-06,
      "loss": 0.4299,
      "step": 19000
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.699199599318071e-06,
      "loss": 0.4308,
      "step": 19500
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.675120156420061e-06,
      "loss": 0.4141,
      "step": 20000
    },
    {
      "epoch": 0.93,
      "learning_rate": 9.651040713522053e-06,
      "loss": 0.4163,
      "step": 20500
    },
    {
      "epoch": 0.95,
      "learning_rate": 9.626961270624043e-06,
      "loss": 0.41,
      "step": 21000
    },
    {
      "epoch": 0.97,
      "learning_rate": 9.602881827726035e-06,
      "loss": 0.4285,
      "step": 21500
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.578802384828026e-06,
      "loss": 0.4202,
      "step": 22000
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8505512973950243,
      "eval_loss": 0.4125332534313202,
      "eval_runtime": 48.6401,
      "eval_samples_per_second": 807.379,
      "eval_steps_per_second": 50.473,
      "step": 22090
    },
    {
      "epoch": 1.02,
      "learning_rate": 9.554722941930016e-06,
      "loss": 0.3827,
      "step": 22500
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.530643499032008e-06,
      "loss": 0.3678,
      "step": 23000
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.506564056133998e-06,
      "loss": 0.365,
      "step": 23500
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.48248461323599e-06,
      "loss": 0.3697,
      "step": 24000
    },
    {
      "epoch": 1.11,
      "learning_rate": 9.45840517033798e-06,
      "loss": 0.3585,
      "step": 24500
    },
    {
      "epoch": 1.13,
      "learning_rate": 9.43432572743997e-06,
      "loss": 0.3727,
      "step": 25000
    },
    {
      "epoch": 1.15,
      "learning_rate": 9.410246284541961e-06,
      "loss": 0.3513,
      "step": 25500
    },
    {
      "epoch": 1.18,
      "learning_rate": 9.386166841643953e-06,
      "loss": 0.3754,
      "step": 26000
    },
    {
      "epoch": 1.2,
      "learning_rate": 9.362087398745945e-06,
      "loss": 0.3585,
      "step": 26500
    },
    {
      "epoch": 1.22,
      "learning_rate": 9.338007955847935e-06,
      "loss": 0.3661,
      "step": 27000
    },
    {
      "epoch": 1.24,
      "learning_rate": 9.313928512949925e-06,
      "loss": 0.3646,
      "step": 27500
    },
    {
      "epoch": 1.27,
      "learning_rate": 9.289849070051916e-06,
      "loss": 0.3604,
      "step": 28000
    },
    {
      "epoch": 1.29,
      "learning_rate": 9.265769627153906e-06,
      "loss": 0.3721,
      "step": 28500
    },
    {
      "epoch": 1.31,
      "learning_rate": 9.241690184255898e-06,
      "loss": 0.3673,
      "step": 29000
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.217610741357888e-06,
      "loss": 0.3508,
      "step": 29500
    },
    {
      "epoch": 1.36,
      "learning_rate": 9.19353129845988e-06,
      "loss": 0.3543,
      "step": 30000
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.169451855561871e-06,
      "loss": 0.3585,
      "step": 30500
    },
    {
      "epoch": 1.4,
      "learning_rate": 9.145372412663861e-06,
      "loss": 0.359,
      "step": 31000
    },
    {
      "epoch": 1.43,
      "learning_rate": 9.121292969765853e-06,
      "loss": 0.3597,
      "step": 31500
    },
    {
      "epoch": 1.45,
      "learning_rate": 9.097213526867843e-06,
      "loss": 0.3513,
      "step": 32000
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.073134083969835e-06,
      "loss": 0.3618,
      "step": 32500
    },
    {
      "epoch": 1.49,
      "learning_rate": 9.049054641071825e-06,
      "loss": 0.3643,
      "step": 33000
    },
    {
      "epoch": 1.52,
      "learning_rate": 9.024975198173815e-06,
      "loss": 0.3737,
      "step": 33500
    },
    {
      "epoch": 1.54,
      "learning_rate": 9.000895755275806e-06,
      "loss": 0.3586,
      "step": 34000
    },
    {
      "epoch": 1.56,
      "learning_rate": 8.976816312377798e-06,
      "loss": 0.3577,
      "step": 34500
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.95273686947979e-06,
      "loss": 0.3594,
      "step": 35000
    },
    {
      "epoch": 1.61,
      "learning_rate": 8.92865742658178e-06,
      "loss": 0.3438,
      "step": 35500
    },
    {
      "epoch": 1.63,
      "learning_rate": 8.90457798368377e-06,
      "loss": 0.3631,
      "step": 36000
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.880498540785761e-06,
      "loss": 0.3498,
      "step": 36500
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.856419097887751e-06,
      "loss": 0.3611,
      "step": 37000
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.832339654989743e-06,
      "loss": 0.3473,
      "step": 37500
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.808260212091733e-06,
      "loss": 0.3476,
      "step": 38000
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.784180769193725e-06,
      "loss": 0.3565,
      "step": 38500
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.760101326295716e-06,
      "loss": 0.3572,
      "step": 39000
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.736021883397706e-06,
      "loss": 0.3501,
      "step": 39500
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.711942440499698e-06,
      "loss": 0.3535,
      "step": 40000
    },
    {
      "epoch": 1.83,
      "learning_rate": 8.687862997601688e-06,
      "loss": 0.3545,
      "step": 40500
    },
    {
      "epoch": 1.86,
      "learning_rate": 8.663783554703678e-06,
      "loss": 0.3615,
      "step": 41000
    },
    {
      "epoch": 1.88,
      "learning_rate": 8.63970411180567e-06,
      "loss": 0.3651,
      "step": 41500
    },
    {
      "epoch": 1.9,
      "learning_rate": 8.615624668907661e-06,
      "loss": 0.3508,
      "step": 42000
    },
    {
      "epoch": 1.92,
      "learning_rate": 8.591545226009653e-06,
      "loss": 0.344,
      "step": 42500
    },
    {
      "epoch": 1.95,
      "learning_rate": 8.567465783111643e-06,
      "loss": 0.3536,
      "step": 43000
    },
    {
      "epoch": 1.97,
      "learning_rate": 8.543386340213633e-06,
      "loss": 0.359,
      "step": 43500
    },
    {
      "epoch": 1.99,
      "learning_rate": 8.519306897315625e-06,
      "loss": 0.3471,
      "step": 44000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.8628759135239744,
      "eval_loss": 0.3846328854560852,
      "eval_runtime": 48.5564,
      "eval_samples_per_second": 808.77,
      "eval_steps_per_second": 50.56,
      "step": 44180
    },
    {
      "epoch": 2.01,
      "learning_rate": 8.495227454417615e-06,
      "loss": 0.2986,
      "step": 44500
    },
    {
      "epoch": 2.04,
      "learning_rate": 8.471148011519606e-06,
      "loss": 0.2883,
      "step": 45000
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.447068568621596e-06,
      "loss": 0.2719,
      "step": 45500
    },
    {
      "epoch": 2.08,
      "learning_rate": 8.422989125723588e-06,
      "loss": 0.2827,
      "step": 46000
    },
    {
      "epoch": 2.11,
      "learning_rate": 8.39890968282558e-06,
      "loss": 0.2827,
      "step": 46500
    },
    {
      "epoch": 2.13,
      "learning_rate": 8.37483023992757e-06,
      "loss": 0.2976,
      "step": 47000
    },
    {
      "epoch": 2.15,
      "learning_rate": 8.350750797029561e-06,
      "loss": 0.2714,
      "step": 47500
    },
    {
      "epoch": 2.17,
      "learning_rate": 8.326671354131551e-06,
      "loss": 0.2895,
      "step": 48000
    },
    {
      "epoch": 2.2,
      "learning_rate": 8.302591911233543e-06,
      "loss": 0.2826,
      "step": 48500
    },
    {
      "epoch": 2.22,
      "learning_rate": 8.278512468335533e-06,
      "loss": 0.2783,
      "step": 49000
    },
    {
      "epoch": 2.24,
      "learning_rate": 8.254433025437523e-06,
      "loss": 0.2843,
      "step": 49500
    },
    {
      "epoch": 2.26,
      "learning_rate": 8.230353582539515e-06,
      "loss": 0.2913,
      "step": 50000
    },
    {
      "epoch": 2.29,
      "learning_rate": 8.206274139641506e-06,
      "loss": 0.2694,
      "step": 50500
    },
    {
      "epoch": 2.31,
      "learning_rate": 8.182194696743498e-06,
      "loss": 0.2847,
      "step": 51000
    },
    {
      "epoch": 2.33,
      "learning_rate": 8.158115253845488e-06,
      "loss": 0.2911,
      "step": 51500
    },
    {
      "epoch": 2.35,
      "learning_rate": 8.134035810947478e-06,
      "loss": 0.2818,
      "step": 52000
    },
    {
      "epoch": 2.38,
      "learning_rate": 8.10995636804947e-06,
      "loss": 0.282,
      "step": 52500
    },
    {
      "epoch": 2.4,
      "learning_rate": 8.08587692515146e-06,
      "loss": 0.2868,
      "step": 53000
    },
    {
      "epoch": 2.42,
      "learning_rate": 8.061797482253451e-06,
      "loss": 0.2849,
      "step": 53500
    },
    {
      "epoch": 2.44,
      "learning_rate": 8.037718039355441e-06,
      "loss": 0.2791,
      "step": 54000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.013638596457433e-06,
      "loss": 0.2833,
      "step": 54500
    },
    {
      "epoch": 2.49,
      "learning_rate": 7.989559153559425e-06,
      "loss": 0.2771,
      "step": 55000
    },
    {
      "epoch": 2.51,
      "learning_rate": 7.965479710661415e-06,
      "loss": 0.2865,
      "step": 55500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.941400267763406e-06,
      "loss": 0.2856,
      "step": 56000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.917320824865396e-06,
      "loss": 0.2989,
      "step": 56500
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.893241381967386e-06,
      "loss": 0.286,
      "step": 57000
    },
    {
      "epoch": 2.6,
      "learning_rate": 7.869161939069378e-06,
      "loss": 0.2818,
      "step": 57500
    },
    {
      "epoch": 2.63,
      "learning_rate": 7.84508249617137e-06,
      "loss": 0.2781,
      "step": 58000
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.82100305327336e-06,
      "loss": 0.2822,
      "step": 58500
    },
    {
      "epoch": 2.67,
      "learning_rate": 7.796923610375351e-06,
      "loss": 0.2888,
      "step": 59000
    },
    {
      "epoch": 2.69,
      "learning_rate": 7.772844167477341e-06,
      "loss": 0.2922,
      "step": 59500
    },
    {
      "epoch": 2.72,
      "learning_rate": 7.748764724579333e-06,
      "loss": 0.2817,
      "step": 60000
    },
    {
      "epoch": 2.74,
      "learning_rate": 7.724685281681323e-06,
      "loss": 0.2779,
      "step": 60500
    },
    {
      "epoch": 2.76,
      "learning_rate": 7.700605838783315e-06,
      "loss": 0.2846,
      "step": 61000
    },
    {
      "epoch": 2.78,
      "learning_rate": 7.676526395885305e-06,
      "loss": 0.2935,
      "step": 61500
    },
    {
      "epoch": 2.81,
      "learning_rate": 7.652446952987296e-06,
      "loss": 0.2787,
      "step": 62000
    },
    {
      "epoch": 2.83,
      "learning_rate": 7.628367510089287e-06,
      "loss": 0.2701,
      "step": 62500
    },
    {
      "epoch": 2.85,
      "learning_rate": 7.604288067191278e-06,
      "loss": 0.2888,
      "step": 63000
    },
    {
      "epoch": 2.87,
      "learning_rate": 7.58020862429327e-06,
      "loss": 0.2827,
      "step": 63500
    },
    {
      "epoch": 2.9,
      "learning_rate": 7.55612918139526e-06,
      "loss": 0.2746,
      "step": 64000
    },
    {
      "epoch": 2.92,
      "learning_rate": 7.532049738497251e-06,
      "loss": 0.2766,
      "step": 64500
    },
    {
      "epoch": 2.94,
      "learning_rate": 7.507970295599241e-06,
      "loss": 0.2976,
      "step": 65000
    },
    {
      "epoch": 2.97,
      "learning_rate": 7.483890852701232e-06,
      "loss": 0.2902,
      "step": 65500
    },
    {
      "epoch": 2.99,
      "learning_rate": 7.459811409803224e-06,
      "loss": 0.2989,
      "step": 66000
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.8646838634106593,
      "eval_loss": 0.4122258126735687,
      "eval_runtime": 48.5877,
      "eval_samples_per_second": 808.249,
      "eval_steps_per_second": 50.527,
      "step": 66270
    },
    {
      "epoch": 3.01,
      "learning_rate": 7.435731966905214e-06,
      "loss": 0.2587,
      "step": 66500
    },
    {
      "epoch": 3.03,
      "learning_rate": 7.4116525240072056e-06,
      "loss": 0.219,
      "step": 67000
    },
    {
      "epoch": 3.06,
      "learning_rate": 7.387573081109196e-06,
      "loss": 0.2385,
      "step": 67500
    },
    {
      "epoch": 3.08,
      "learning_rate": 7.363493638211186e-06,
      "loss": 0.2408,
      "step": 68000
    },
    {
      "epoch": 3.1,
      "learning_rate": 7.339414195313178e-06,
      "loss": 0.2248,
      "step": 68500
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.315334752415169e-06,
      "loss": 0.2335,
      "step": 69000
    },
    {
      "epoch": 3.15,
      "learning_rate": 7.29125530951716e-06,
      "loss": 0.2361,
      "step": 69500
    },
    {
      "epoch": 3.17,
      "learning_rate": 7.2671758666191506e-06,
      "loss": 0.2272,
      "step": 70000
    },
    {
      "epoch": 3.19,
      "learning_rate": 7.2430964237211406e-06,
      "loss": 0.2342,
      "step": 70500
    },
    {
      "epoch": 3.21,
      "learning_rate": 7.219016980823132e-06,
      "loss": 0.2288,
      "step": 71000
    },
    {
      "epoch": 3.24,
      "learning_rate": 7.194937537925123e-06,
      "loss": 0.2266,
      "step": 71500
    },
    {
      "epoch": 3.26,
      "learning_rate": 7.170858095027115e-06,
      "loss": 0.2295,
      "step": 72000
    },
    {
      "epoch": 3.28,
      "learning_rate": 7.146778652129105e-06,
      "loss": 0.2118,
      "step": 72500
    },
    {
      "epoch": 3.3,
      "learning_rate": 7.1226992092310956e-06,
      "loss": 0.2187,
      "step": 73000
    },
    {
      "epoch": 3.33,
      "learning_rate": 7.098619766333087e-06,
      "loss": 0.2349,
      "step": 73500
    },
    {
      "epoch": 3.35,
      "learning_rate": 7.074540323435077e-06,
      "loss": 0.2389,
      "step": 74000
    },
    {
      "epoch": 3.37,
      "learning_rate": 7.050460880537069e-06,
      "loss": 0.2285,
      "step": 74500
    },
    {
      "epoch": 3.4,
      "learning_rate": 7.026381437639059e-06,
      "loss": 0.2197,
      "step": 75000
    },
    {
      "epoch": 3.42,
      "learning_rate": 7.00230199474105e-06,
      "loss": 0.2359,
      "step": 75500
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.9782225518430414e-06,
      "loss": 0.2424,
      "step": 76000
    },
    {
      "epoch": 3.46,
      "learning_rate": 6.954143108945031e-06,
      "loss": 0.2292,
      "step": 76500
    },
    {
      "epoch": 3.49,
      "learning_rate": 6.930063666047023e-06,
      "loss": 0.234,
      "step": 77000
    },
    {
      "epoch": 3.51,
      "learning_rate": 6.905984223149014e-06,
      "loss": 0.2342,
      "step": 77500
    },
    {
      "epoch": 3.53,
      "learning_rate": 6.881904780251004e-06,
      "loss": 0.227,
      "step": 78000
    },
    {
      "epoch": 3.55,
      "learning_rate": 6.857825337352996e-06,
      "loss": 0.2349,
      "step": 78500
    },
    {
      "epoch": 3.58,
      "learning_rate": 6.8337458944549864e-06,
      "loss": 0.2356,
      "step": 79000
    },
    {
      "epoch": 3.6,
      "learning_rate": 6.809666451556978e-06,
      "loss": 0.2322,
      "step": 79500
    },
    {
      "epoch": 3.62,
      "learning_rate": 6.785587008658968e-06,
      "loss": 0.2368,
      "step": 80000
    },
    {
      "epoch": 3.64,
      "learning_rate": 6.761507565760959e-06,
      "loss": 0.2254,
      "step": 80500
    },
    {
      "epoch": 3.67,
      "learning_rate": 6.73742812286295e-06,
      "loss": 0.2488,
      "step": 81000
    },
    {
      "epoch": 3.69,
      "learning_rate": 6.713348679964941e-06,
      "loss": 0.2403,
      "step": 81500
    },
    {
      "epoch": 3.71,
      "learning_rate": 6.689269237066932e-06,
      "loss": 0.2371,
      "step": 82000
    },
    {
      "epoch": 3.73,
      "learning_rate": 6.665189794168922e-06,
      "loss": 0.2305,
      "step": 82500
    },
    {
      "epoch": 3.76,
      "learning_rate": 6.641110351270914e-06,
      "loss": 0.2355,
      "step": 83000
    },
    {
      "epoch": 3.78,
      "learning_rate": 6.617030908372905e-06,
      "loss": 0.2453,
      "step": 83500
    },
    {
      "epoch": 3.8,
      "learning_rate": 6.592951465474895e-06,
      "loss": 0.2406,
      "step": 84000
    },
    {
      "epoch": 3.83,
      "learning_rate": 6.5688720225768865e-06,
      "loss": 0.2417,
      "step": 84500
    },
    {
      "epoch": 3.85,
      "learning_rate": 6.544792579678877e-06,
      "loss": 0.2453,
      "step": 85000
    },
    {
      "epoch": 3.87,
      "learning_rate": 6.520713136780868e-06,
      "loss": 0.2365,
      "step": 85500
    },
    {
      "epoch": 3.89,
      "learning_rate": 6.496633693882859e-06,
      "loss": 0.2447,
      "step": 86000
    },
    {
      "epoch": 3.92,
      "learning_rate": 6.472554250984849e-06,
      "loss": 0.2441,
      "step": 86500
    },
    {
      "epoch": 3.94,
      "learning_rate": 6.448474808086841e-06,
      "loss": 0.2314,
      "step": 87000
    },
    {
      "epoch": 3.96,
      "learning_rate": 6.4243953651888315e-06,
      "loss": 0.2402,
      "step": 87500
    },
    {
      "epoch": 3.98,
      "learning_rate": 6.400315922290823e-06,
      "loss": 0.235,
      "step": 88000
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.8673575921163199,
      "eval_loss": 0.500419020652771,
      "eval_runtime": 48.6048,
      "eval_samples_per_second": 807.965,
      "eval_steps_per_second": 50.509,
      "step": 88360
    },
    {
      "epoch": 4.01,
      "learning_rate": 6.376236479392813e-06,
      "loss": 0.2287,
      "step": 88500
    },
    {
      "epoch": 4.03,
      "learning_rate": 6.352157036494804e-06,
      "loss": 0.1927,
      "step": 89000
    },
    {
      "epoch": 4.05,
      "learning_rate": 6.328077593596796e-06,
      "loss": 0.1863,
      "step": 89500
    },
    {
      "epoch": 4.07,
      "learning_rate": 6.303998150698786e-06,
      "loss": 0.1964,
      "step": 90000
    },
    {
      "epoch": 4.1,
      "learning_rate": 6.279918707800777e-06,
      "loss": 0.2044,
      "step": 90500
    },
    {
      "epoch": 4.12,
      "learning_rate": 6.255839264902767e-06,
      "loss": 0.1911,
      "step": 91000
    },
    {
      "epoch": 4.14,
      "learning_rate": 6.231759822004758e-06,
      "loss": 0.2,
      "step": 91500
    },
    {
      "epoch": 4.16,
      "learning_rate": 6.20768037910675e-06,
      "loss": 0.1866,
      "step": 92000
    },
    {
      "epoch": 4.19,
      "learning_rate": 6.18360093620874e-06,
      "loss": 0.2139,
      "step": 92500
    },
    {
      "epoch": 4.21,
      "learning_rate": 6.1595214933107315e-06,
      "loss": 0.2048,
      "step": 93000
    },
    {
      "epoch": 4.23,
      "learning_rate": 6.135442050412722e-06,
      "loss": 0.1956,
      "step": 93500
    },
    {
      "epoch": 4.26,
      "learning_rate": 6.111362607514712e-06,
      "loss": 0.2057,
      "step": 94000
    },
    {
      "epoch": 4.28,
      "learning_rate": 6.087283164616704e-06,
      "loss": 0.1986,
      "step": 94500
    },
    {
      "epoch": 4.3,
      "learning_rate": 6.063203721718695e-06,
      "loss": 0.2022,
      "step": 95000
    },
    {
      "epoch": 4.32,
      "learning_rate": 6.039124278820686e-06,
      "loss": 0.1933,
      "step": 95500
    },
    {
      "epoch": 4.35,
      "learning_rate": 6.0150448359226765e-06,
      "loss": 0.1929,
      "step": 96000
    },
    {
      "epoch": 4.37,
      "learning_rate": 5.990965393024667e-06,
      "loss": 0.2034,
      "step": 96500
    },
    {
      "epoch": 4.39,
      "learning_rate": 5.966885950126658e-06,
      "loss": 0.1978,
      "step": 97000
    },
    {
      "epoch": 4.41,
      "learning_rate": 5.942806507228649e-06,
      "loss": 0.1988,
      "step": 97500
    },
    {
      "epoch": 4.44,
      "learning_rate": 5.918727064330641e-06,
      "loss": 0.2131,
      "step": 98000
    },
    {
      "epoch": 4.46,
      "learning_rate": 5.894647621432631e-06,
      "loss": 0.1982,
      "step": 98500
    },
    {
      "epoch": 4.48,
      "learning_rate": 5.8705681785346215e-06,
      "loss": 0.2076,
      "step": 99000
    },
    {
      "epoch": 4.5,
      "learning_rate": 5.846488735636613e-06,
      "loss": 0.1987,
      "step": 99500
    },
    {
      "epoch": 4.53,
      "learning_rate": 5.822409292738603e-06,
      "loss": 0.2144,
      "step": 100000
    },
    {
      "epoch": 4.55,
      "learning_rate": 5.798329849840595e-06,
      "loss": 0.2038,
      "step": 100500
    },
    {
      "epoch": 4.57,
      "learning_rate": 5.774250406942586e-06,
      "loss": 0.1908,
      "step": 101000
    },
    {
      "epoch": 4.59,
      "learning_rate": 5.7501709640445765e-06,
      "loss": 0.2056,
      "step": 101500
    },
    {
      "epoch": 4.62,
      "learning_rate": 5.726091521146567e-06,
      "loss": 0.2038,
      "step": 102000
    },
    {
      "epoch": 4.64,
      "learning_rate": 5.702012078248557e-06,
      "loss": 0.2099,
      "step": 102500
    },
    {
      "epoch": 4.66,
      "learning_rate": 5.677932635350549e-06,
      "loss": 0.2048,
      "step": 103000
    },
    {
      "epoch": 4.69,
      "learning_rate": 5.65385319245254e-06,
      "loss": 0.2072,
      "step": 103500
    },
    {
      "epoch": 4.71,
      "learning_rate": 5.6297737495545315e-06,
      "loss": 0.2155,
      "step": 104000
    },
    {
      "epoch": 4.73,
      "learning_rate": 5.6056943066565215e-06,
      "loss": 0.2044,
      "step": 104500
    },
    {
      "epoch": 4.75,
      "learning_rate": 5.581614863758512e-06,
      "loss": 0.203,
      "step": 105000
    },
    {
      "epoch": 4.78,
      "learning_rate": 5.557535420860504e-06,
      "loss": 0.2003,
      "step": 105500
    },
    {
      "epoch": 4.8,
      "learning_rate": 5.533455977962494e-06,
      "loss": 0.1985,
      "step": 106000
    },
    {
      "epoch": 4.82,
      "learning_rate": 5.509376535064486e-06,
      "loss": 0.2197,
      "step": 106500
    },
    {
      "epoch": 4.84,
      "learning_rate": 5.485297092166476e-06,
      "loss": 0.1965,
      "step": 107000
    },
    {
      "epoch": 4.87,
      "learning_rate": 5.4612176492684665e-06,
      "loss": 0.2217,
      "step": 107500
    },
    {
      "epoch": 4.89,
      "learning_rate": 5.437138206370458e-06,
      "loss": 0.1977,
      "step": 108000
    },
    {
      "epoch": 4.91,
      "learning_rate": 5.413058763472448e-06,
      "loss": 0.214,
      "step": 108500
    },
    {
      "epoch": 4.93,
      "learning_rate": 5.38897932057444e-06,
      "loss": 0.196,
      "step": 109000
    },
|
1355 |
+
{
|
1356 |
+
"epoch": 4.96,
|
1357 |
+
"learning_rate": 5.364899877676431e-06,
|
1358 |
+
"loss": 0.2213,
|
1359 |
+
"step": 109500
|
1360 |
+
},
|
1361 |
+
{
|
1362 |
+
"epoch": 4.98,
|
1363 |
+
"learning_rate": 5.340820434778421e-06,
|
1364 |
+
"loss": 0.2114,
|
1365 |
+
"step": 110000
|
1366 |
+
},
|
1367 |
+
{
|
1368 |
+
"epoch": 5.0,
|
1369 |
+
"eval_accuracy": 0.8639454050062387,
|
1370 |
+
"eval_loss": 0.601565957069397,
|
1371 |
+
"eval_runtime": 48.6094,
|
1372 |
+
"eval_samples_per_second": 807.89,
|
1373 |
+
"eval_steps_per_second": 50.505,
|
1374 |
+
"step": 110450
|
1375 |
+
},
|
1376 |
+
{
|
1377 |
+
"epoch": 5.0,
|
1378 |
+
"learning_rate": 5.316740991880412e-06,
|
1379 |
+
"loss": 0.2042,
|
1380 |
+
"step": 110500
|
1381 |
+
},
|
1382 |
+
{
|
1383 |
+
"epoch": 5.02,
|
1384 |
+
"learning_rate": 5.292661548982403e-06,
|
1385 |
+
"loss": 0.1637,
|
1386 |
+
"step": 111000
|
1387 |
+
},
|
1388 |
+
{
|
1389 |
+
"epoch": 5.05,
|
1390 |
+
"learning_rate": 5.268582106084394e-06,
|
1391 |
+
"loss": 0.157,
|
1392 |
+
"step": 111500
|
1393 |
+
},
|
1394 |
+
{
|
1395 |
+
"epoch": 5.07,
|
1396 |
+
"learning_rate": 5.244502663186385e-06,
|
1397 |
+
"loss": 0.1711,
|
1398 |
+
"step": 112000
|
1399 |
+
},
|
1400 |
+
{
|
1401 |
+
"epoch": 5.09,
|
1402 |
+
"learning_rate": 5.220423220288376e-06,
|
1403 |
+
"loss": 0.1664,
|
1404 |
+
"step": 112500
|
1405 |
+
},
|
1406 |
+
{
|
1407 |
+
"epoch": 5.12,
|
1408 |
+
"learning_rate": 5.1963437773903666e-06,
|
1409 |
+
"loss": 0.181,
|
1410 |
+
"step": 113000
|
1411 |
+
},
|
1412 |
+
{
|
1413 |
+
"epoch": 5.14,
|
1414 |
+
"learning_rate": 5.172264334492357e-06,
|
1415 |
+
"loss": 0.1686,
|
1416 |
+
"step": 113500
|
1417 |
+
},
|
1418 |
+
{
|
1419 |
+
"epoch": 5.16,
|
1420 |
+
"learning_rate": 5.148184891594349e-06,
|
1421 |
+
"loss": 0.165,
|
1422 |
+
"step": 114000
|
1423 |
+
},
|
1424 |
+
{
|
1425 |
+
"epoch": 5.18,
|
1426 |
+
"learning_rate": 5.124105448696339e-06,
|
1427 |
+
"loss": 0.1811,
|
1428 |
+
"step": 114500
|
1429 |
+
},
|
1430 |
+
{
|
1431 |
+
"epoch": 5.21,
|
1432 |
+
"learning_rate": 5.10002600579833e-06,
|
1433 |
+
"loss": 0.1765,
|
1434 |
+
"step": 115000
|
1435 |
+
},
|
1436 |
+
{
|
1437 |
+
"epoch": 5.23,
|
1438 |
+
"learning_rate": 5.075946562900322e-06,
|
1439 |
+
"loss": 0.1722,
|
1440 |
+
"step": 115500
|
1441 |
+
},
|
1442 |
+
{
|
1443 |
+
"epoch": 5.25,
|
1444 |
+
"learning_rate": 5.0518671200023116e-06,
|
1445 |
+
"loss": 0.1681,
|
1446 |
+
"step": 116000
|
1447 |
+
},
|
1448 |
+
{
|
1449 |
+
"epoch": 5.27,
|
1450 |
+
"learning_rate": 5.027787677104303e-06,
|
1451 |
+
"loss": 0.1981,
|
1452 |
+
"step": 116500
|
1453 |
+
},
|
1454 |
+
{
|
1455 |
+
"epoch": 5.3,
|
1456 |
+
"learning_rate": 5.003708234206294e-06,
|
1457 |
+
"loss": 0.1709,
|
1458 |
+
"step": 117000
|
1459 |
+
},
|
1460 |
+
{
|
1461 |
+
"epoch": 5.32,
|
1462 |
+
"learning_rate": 4.979628791308285e-06,
|
1463 |
+
"loss": 0.1647,
|
1464 |
+
"step": 117500
|
1465 |
+
},
|
1466 |
+
{
|
1467 |
+
"epoch": 5.34,
|
1468 |
+
"learning_rate": 4.955549348410276e-06,
|
1469 |
+
"loss": 0.1816,
|
1470 |
+
"step": 118000
|
1471 |
+
},
|
1472 |
+
{
|
1473 |
+
"epoch": 5.36,
|
1474 |
+
"learning_rate": 4.931469905512267e-06,
|
1475 |
+
"loss": 0.175,
|
1476 |
+
"step": 118500
|
1477 |
+
},
|
1478 |
+
{
|
1479 |
+
"epoch": 5.39,
|
1480 |
+
"learning_rate": 4.9073904626142574e-06,
|
1481 |
+
"loss": 0.187,
|
1482 |
+
"step": 119000
|
1483 |
+
},
|
1484 |
+
{
|
1485 |
+
"epoch": 5.41,
|
1486 |
+
"learning_rate": 4.883311019716248e-06,
|
1487 |
+
"loss": 0.1799,
|
1488 |
+
"step": 119500
|
1489 |
+
},
|
1490 |
+
{
|
1491 |
+
"epoch": 5.43,
|
1492 |
+
"learning_rate": 4.859231576818239e-06,
|
1493 |
+
"loss": 0.1813,
|
1494 |
+
"step": 120000
|
1495 |
+
},
|
1496 |
+
{
|
1497 |
+
"epoch": 5.45,
|
1498 |
+
"learning_rate": 4.83515213392023e-06,
|
1499 |
+
"loss": 0.1883,
|
1500 |
+
"step": 120500
|
1501 |
+
},
|
1502 |
+
{
|
1503 |
+
"epoch": 5.48,
|
1504 |
+
"learning_rate": 4.811072691022221e-06,
|
1505 |
+
"loss": 0.1863,
|
1506 |
+
"step": 121000
|
1507 |
+
},
|
1508 |
+
{
|
1509 |
+
"epoch": 5.5,
|
1510 |
+
"learning_rate": 4.7869932481242124e-06,
|
1511 |
+
"loss": 0.1856,
|
1512 |
+
"step": 121500
|
1513 |
+
},
|
1514 |
+
{
|
1515 |
+
"epoch": 5.52,
|
1516 |
+
"learning_rate": 4.7629138052262024e-06,
|
1517 |
+
"loss": 0.1633,
|
1518 |
+
"step": 122000
|
1519 |
+
},
|
1520 |
+
{
|
1521 |
+
"epoch": 5.55,
|
1522 |
+
"learning_rate": 4.738834362328193e-06,
|
1523 |
+
"loss": 0.1863,
|
1524 |
+
"step": 122500
|
1525 |
+
},
|
1526 |
+
{
|
1527 |
+
"epoch": 5.57,
|
1528 |
+
"learning_rate": 4.714754919430184e-06,
|
1529 |
+
"loss": 0.1779,
|
1530 |
+
"step": 123000
|
1531 |
+
},
|
1532 |
+
{
|
1533 |
+
"epoch": 5.59,
|
1534 |
+
"learning_rate": 4.690675476532176e-06,
|
1535 |
+
"loss": 0.1851,
|
1536 |
+
"step": 123500
|
1537 |
+
},
|
1538 |
+
{
|
1539 |
+
"epoch": 5.61,
|
1540 |
+
"learning_rate": 4.666596033634167e-06,
|
1541 |
+
"loss": 0.173,
|
1542 |
+
"step": 124000
|
1543 |
+
},
|
1544 |
+
{
|
1545 |
+
"epoch": 5.64,
|
1546 |
+
"learning_rate": 4.642516590736157e-06,
|
1547 |
+
"loss": 0.1748,
|
1548 |
+
"step": 124500
|
1549 |
+
},
|
1550 |
+
{
|
1551 |
+
"epoch": 5.66,
|
1552 |
+
"learning_rate": 4.618437147838148e-06,
|
1553 |
+
"loss": 0.1782,
|
1554 |
+
"step": 125000
|
1555 |
+
},
|
1556 |
+
{
|
1557 |
+
"epoch": 5.68,
|
1558 |
+
"learning_rate": 4.594357704940139e-06,
|
1559 |
+
"loss": 0.1777,
|
1560 |
+
"step": 125500
|
1561 |
+
},
|
1562 |
+
{
|
1563 |
+
"epoch": 5.7,
|
1564 |
+
"learning_rate": 4.57027826204213e-06,
|
1565 |
+
"loss": 0.1807,
|
1566 |
+
"step": 126000
|
1567 |
+
},
|
1568 |
+
{
|
1569 |
+
"epoch": 5.73,
|
1570 |
+
"learning_rate": 4.546198819144121e-06,
|
1571 |
+
"loss": 0.179,
|
1572 |
+
"step": 126500
|
1573 |
+
},
|
1574 |
+
{
|
1575 |
+
"epoch": 5.75,
|
1576 |
+
"learning_rate": 4.522119376246112e-06,
|
1577 |
+
"loss": 0.1745,
|
1578 |
+
"step": 127000
|
1579 |
+
},
|
1580 |
+
{
|
1581 |
+
"epoch": 5.77,
|
1582 |
+
"learning_rate": 4.4980399333481025e-06,
|
1583 |
+
"loss": 0.1886,
|
1584 |
+
"step": 127500
|
1585 |
+
},
|
1586 |
+
{
|
1587 |
+
"epoch": 5.79,
|
1588 |
+
"learning_rate": 4.473960490450093e-06,
|
1589 |
+
"loss": 0.1708,
|
1590 |
+
"step": 128000
|
1591 |
+
},
|
1592 |
+
{
|
1593 |
+
"epoch": 5.82,
|
1594 |
+
"learning_rate": 4.449881047552084e-06,
|
1595 |
+
"loss": 0.1712,
|
1596 |
+
"step": 128500
|
1597 |
+
},
|
1598 |
+
{
|
1599 |
+
"epoch": 5.84,
|
1600 |
+
"learning_rate": 4.425801604654075e-06,
|
1601 |
+
"loss": 0.1946,
|
1602 |
+
"step": 129000
|
1603 |
+
},
|
1604 |
+
{
|
1605 |
+
"epoch": 5.86,
|
1606 |
+
"learning_rate": 4.401722161756066e-06,
|
1607 |
+
"loss": 0.1772,
|
1608 |
+
"step": 129500
|
1609 |
+
},
|
1610 |
+
{
|
1611 |
+
"epoch": 5.89,
|
1612 |
+
"learning_rate": 4.377642718858057e-06,
|
1613 |
+
"loss": 0.1873,
|
1614 |
+
"step": 130000
|
1615 |
+
},
|
1616 |
+
{
|
1617 |
+
"epoch": 5.91,
|
1618 |
+
"learning_rate": 4.3535632759600475e-06,
|
1619 |
+
"loss": 0.1895,
|
1620 |
+
"step": 130500
|
1621 |
+
},
|
1622 |
+
{
|
1623 |
+
"epoch": 5.93,
|
1624 |
+
"learning_rate": 4.329483833062038e-06,
|
1625 |
+
"loss": 0.1782,
|
1626 |
+
"step": 131000
|
1627 |
+
},
|
1628 |
+
{
|
1629 |
+
"epoch": 5.95,
|
1630 |
+
"learning_rate": 4.30540439016403e-06,
|
1631 |
+
"loss": 0.1915,
|
1632 |
+
"step": 131500
|
1633 |
+
},
|
1634 |
+
{
|
1635 |
+
"epoch": 5.98,
|
1636 |
+
"learning_rate": 4.281324947266021e-06,
|
1637 |
+
"loss": 0.1826,
|
1638 |
+
"step": 132000
|
1639 |
+
},
|
1640 |
+
{
|
1641 |
+
"epoch": 6.0,
|
1642 |
+
"learning_rate": 4.257245504368011e-06,
|
1643 |
+
"loss": 0.1833,
|
1644 |
+
"step": 132500
|
1645 |
+
},
|
1646 |
+
{
|
1647 |
+
"epoch": 6.0,
|
1648 |
+
"eval_accuracy": 0.8642509740011713,
|
1649 |
+
"eval_loss": 0.6854547262191772,
|
1650 |
+
"eval_runtime": 48.5441,
|
1651 |
+
"eval_samples_per_second": 808.975,
|
1652 |
+
"eval_steps_per_second": 50.573,
|
1653 |
+
"step": 132540
|
1654 |
+
},
|
1655 |
+
{
|
1656 |
+
"epoch": 6.02,
|
1657 |
+
"learning_rate": 4.2331660614700025e-06,
|
1658 |
+
"loss": 0.1409,
|
1659 |
+
"step": 133000
|
1660 |
+
},
|
1661 |
+
{
|
1662 |
+
"epoch": 6.04,
|
1663 |
+
"learning_rate": 4.209086618571993e-06,
|
1664 |
+
"loss": 0.1583,
|
1665 |
+
"step": 133500
|
1666 |
+
},
|
1667 |
+
{
|
1668 |
+
"epoch": 6.07,
|
1669 |
+
"learning_rate": 4.185007175673984e-06,
|
1670 |
+
"loss": 0.1503,
|
1671 |
+
"step": 134000
|
1672 |
+
},
|
1673 |
+
{
|
1674 |
+
"epoch": 6.09,
|
1675 |
+
"learning_rate": 4.160927732775975e-06,
|
1676 |
+
"loss": 0.1372,
|
1677 |
+
"step": 134500
|
1678 |
+
},
|
1679 |
+
{
|
1680 |
+
"epoch": 6.11,
|
1681 |
+
"learning_rate": 4.136848289877966e-06,
|
1682 |
+
"loss": 0.1332,
|
1683 |
+
"step": 135000
|
1684 |
+
},
|
1685 |
+
{
|
1686 |
+
"epoch": 6.13,
|
1687 |
+
"learning_rate": 4.112768846979957e-06,
|
1688 |
+
"loss": 0.1528,
|
1689 |
+
"step": 135500
|
1690 |
+
},
|
1691 |
+
{
|
1692 |
+
"epoch": 6.16,
|
1693 |
+
"learning_rate": 4.0886894040819475e-06,
|
1694 |
+
"loss": 0.1367,
|
1695 |
+
"step": 136000
|
1696 |
+
},
|
1697 |
+
{
|
1698 |
+
"epoch": 6.18,
|
1699 |
+
"learning_rate": 4.064609961183938e-06,
|
1700 |
+
"loss": 0.1456,
|
1701 |
+
"step": 136500
|
1702 |
+
},
|
1703 |
+
{
|
1704 |
+
"epoch": 6.2,
|
1705 |
+
"learning_rate": 4.040530518285929e-06,
|
1706 |
+
"loss": 0.1674,
|
1707 |
+
"step": 137000
|
1708 |
+
},
|
1709 |
+
{
|
1710 |
+
"epoch": 6.22,
|
1711 |
+
"learning_rate": 4.01645107538792e-06,
|
1712 |
+
"loss": 0.1465,
|
1713 |
+
"step": 137500
|
1714 |
+
},
|
1715 |
+
{
|
1716 |
+
"epoch": 6.25,
|
1717 |
+
"learning_rate": 3.992371632489911e-06,
|
1718 |
+
"loss": 0.1492,
|
1719 |
+
"step": 138000
|
1720 |
+
},
|
1721 |
+
{
|
1722 |
+
"epoch": 6.27,
|
1723 |
+
"learning_rate": 3.968292189591902e-06,
|
1724 |
+
"loss": 0.149,
|
1725 |
+
"step": 138500
|
1726 |
+
},
|
1727 |
+
{
|
1728 |
+
"epoch": 6.29,
|
1729 |
+
"learning_rate": 3.9442127466938925e-06,
|
1730 |
+
"loss": 0.1539,
|
1731 |
+
"step": 139000
|
1732 |
+
},
|
1733 |
+
{
|
1734 |
+
"epoch": 6.32,
|
1735 |
+
"learning_rate": 3.920133303795884e-06,
|
1736 |
+
"loss": 0.1339,
|
1737 |
+
"step": 139500
|
1738 |
+
},
|
1739 |
+
{
|
1740 |
+
"epoch": 6.34,
|
1741 |
+
"learning_rate": 3.896053860897875e-06,
|
1742 |
+
"loss": 0.142,
|
1743 |
+
"step": 140000
|
1744 |
+
},
|
1745 |
+
{
|
1746 |
+
"epoch": 6.36,
|
1747 |
+
"learning_rate": 3.871974417999865e-06,
|
1748 |
+
"loss": 0.1504,
|
1749 |
+
"step": 140500
|
1750 |
+
},
|
1751 |
+
{
|
1752 |
+
"epoch": 6.38,
|
1753 |
+
"learning_rate": 3.847894975101857e-06,
|
1754 |
+
"loss": 0.1578,
|
1755 |
+
"step": 141000
|
1756 |
+
},
|
1757 |
+
{
|
1758 |
+
"epoch": 6.41,
|
1759 |
+
"learning_rate": 3.8238155322038475e-06,
|
1760 |
+
"loss": 0.1491,
|
1761 |
+
"step": 141500
|
1762 |
+
},
|
1763 |
+
{
|
1764 |
+
"epoch": 6.43,
|
1765 |
+
"learning_rate": 3.7997360893058384e-06,
|
1766 |
+
"loss": 0.1673,
|
1767 |
+
"step": 142000
|
1768 |
+
},
|
1769 |
+
{
|
1770 |
+
"epoch": 6.45,
|
1771 |
+
"learning_rate": 3.775656646407829e-06,
|
1772 |
+
"loss": 0.1416,
|
1773 |
+
"step": 142500
|
1774 |
+
},
|
1775 |
+
{
|
1776 |
+
"epoch": 6.47,
|
1777 |
+
"learning_rate": 3.7515772035098196e-06,
|
1778 |
+
"loss": 0.1607,
|
1779 |
+
"step": 143000
|
1780 |
+
},
|
1781 |
+
{
|
1782 |
+
"epoch": 6.5,
|
1783 |
+
"learning_rate": 3.727497760611811e-06,
|
1784 |
+
"loss": 0.1386,
|
1785 |
+
"step": 143500
|
1786 |
+
},
|
1787 |
+
{
|
1788 |
+
"epoch": 6.52,
|
1789 |
+
"learning_rate": 3.7034183177138017e-06,
|
1790 |
+
"loss": 0.1455,
|
1791 |
+
"step": 144000
|
1792 |
+
},
|
1793 |
+
{
|
1794 |
+
"epoch": 6.54,
|
1795 |
+
"learning_rate": 3.6793388748157925e-06,
|
1796 |
+
"loss": 0.152,
|
1797 |
+
"step": 144500
|
1798 |
+
},
|
1799 |
+
{
|
1800 |
+
"epoch": 6.56,
|
1801 |
+
"learning_rate": 3.6552594319177838e-06,
|
1802 |
+
"loss": 0.1533,
|
1803 |
+
"step": 145000
|
1804 |
+
},
|
1805 |
+
{
|
1806 |
+
"epoch": 6.59,
|
1807 |
+
"learning_rate": 3.631179989019774e-06,
|
1808 |
+
"loss": 0.1491,
|
1809 |
+
"step": 145500
|
1810 |
+
},
|
1811 |
+
{
|
1812 |
+
"epoch": 6.61,
|
1813 |
+
"learning_rate": 3.607100546121765e-06,
|
1814 |
+
"loss": 0.1392,
|
1815 |
+
"step": 146000
|
1816 |
+
},
|
1817 |
+
{
|
1818 |
+
"epoch": 6.63,
|
1819 |
+
"learning_rate": 3.583021103223756e-06,
|
1820 |
+
"loss": 0.1454,
|
1821 |
+
"step": 146500
|
1822 |
+
},
|
1823 |
+
{
|
1824 |
+
"epoch": 6.65,
|
1825 |
+
"learning_rate": 3.558941660325747e-06,
|
1826 |
+
"loss": 0.1639,
|
1827 |
+
"step": 147000
|
1828 |
+
},
|
1829 |
+
{
|
1830 |
+
"epoch": 6.68,
|
1831 |
+
"learning_rate": 3.534862217427738e-06,
|
1832 |
+
"loss": 0.1458,
|
1833 |
+
"step": 147500
|
1834 |
+
},
|
1835 |
+
{
|
1836 |
+
"epoch": 6.7,
|
1837 |
+
"learning_rate": 3.5107827745297292e-06,
|
1838 |
+
"loss": 0.1563,
|
1839 |
+
"step": 148000
|
1840 |
+
},
|
1841 |
+
{
|
1842 |
+
"epoch": 6.72,
|
1843 |
+
"learning_rate": 3.4867033316317196e-06,
|
1844 |
+
"loss": 0.1613,
|
1845 |
+
"step": 148500
|
1846 |
+
},
|
1847 |
+
{
|
1848 |
+
"epoch": 6.75,
|
1849 |
+
"learning_rate": 3.4626238887337105e-06,
|
1850 |
+
"loss": 0.1332,
|
1851 |
+
"step": 149000
|
1852 |
+
},
|
1853 |
+
{
|
1854 |
+
"epoch": 6.77,
|
1855 |
+
"learning_rate": 3.4385444458357013e-06,
|
1856 |
+
"loss": 0.1543,
|
1857 |
+
"step": 149500
|
1858 |
+
},
|
1859 |
+
{
|
1860 |
+
"epoch": 6.79,
|
1861 |
+
"learning_rate": 3.4144650029376926e-06,
|
1862 |
+
"loss": 0.1649,
|
1863 |
+
"step": 150000
|
1864 |
+
},
|
1865 |
+
{
|
1866 |
+
"epoch": 6.81,
|
1867 |
+
"learning_rate": 3.3903855600396834e-06,
|
1868 |
+
"loss": 0.1393,
|
1869 |
+
"step": 150500
|
1870 |
+
},
|
1871 |
+
{
|
1872 |
+
"epoch": 6.84,
|
1873 |
+
"learning_rate": 3.366306117141674e-06,
|
1874 |
+
"loss": 0.1463,
|
1875 |
+
"step": 151000
|
1876 |
+
},
|
1877 |
+
{
|
1878 |
+
"epoch": 6.86,
|
1879 |
+
"learning_rate": 3.342226674243665e-06,
|
1880 |
+
"loss": 0.1468,
|
1881 |
+
"step": 151500
|
1882 |
+
},
|
1883 |
+
{
|
1884 |
+
"epoch": 6.88,
|
1885 |
+
"learning_rate": 3.318147231345656e-06,
|
1886 |
+
"loss": 0.152,
|
1887 |
+
"step": 152000
|
1888 |
+
},
|
1889 |
+
{
|
1890 |
+
"epoch": 6.9,
|
1891 |
+
"learning_rate": 3.2940677884476467e-06,
|
1892 |
+
"loss": 0.1509,
|
1893 |
+
"step": 152500
|
1894 |
+
},
|
1895 |
+
{
|
1896 |
+
"epoch": 6.93,
|
1897 |
+
"learning_rate": 3.269988345549638e-06,
|
1898 |
+
"loss": 0.1548,
|
1899 |
+
"step": 153000
|
1900 |
+
},
|
1901 |
+
{
|
1902 |
+
"epoch": 6.95,
|
1903 |
+
"learning_rate": 3.2459089026516284e-06,
|
1904 |
+
"loss": 0.1452,
|
1905 |
+
"step": 153500
|
1906 |
+
},
|
1907 |
+
{
|
1908 |
+
"epoch": 6.97,
|
1909 |
+
"learning_rate": 3.2218294597536192e-06,
|
1910 |
+
"loss": 0.1461,
|
1911 |
+
"step": 154000
|
1912 |
+
},
|
1913 |
+
{
|
1914 |
+
"epoch": 6.99,
|
1915 |
+
"learning_rate": 3.19775001685561e-06,
|
1916 |
+
"loss": 0.1568,
|
1917 |
+
"step": 154500
|
1918 |
+
},
|
1919 |
+
{
|
1920 |
+
"epoch": 7.0,
|
1921 |
+
"eval_accuracy": 0.8665936696289883,
|
1922 |
+
"eval_loss": 0.773478090763092,
|
1923 |
+
"eval_runtime": 48.6355,
|
1924 |
+
"eval_samples_per_second": 807.455,
|
1925 |
+
"eval_steps_per_second": 50.478,
|
1926 |
+
"step": 154630
|
1927 |
+
},
|
1928 |
+
{
|
1929 |
+
"epoch": 7.02,
|
1930 |
+
"learning_rate": 3.1736705739576013e-06,
|
1931 |
+
"loss": 0.1257,
|
1932 |
+
"step": 155000
|
1933 |
+
},
|
1934 |
+
{
|
1935 |
+
"epoch": 7.04,
|
1936 |
+
"learning_rate": 3.149591131059592e-06,
|
1937 |
+
"loss": 0.1142,
|
1938 |
+
"step": 155500
|
1939 |
+
},
|
1940 |
+
{
|
1941 |
+
"epoch": 7.06,
|
1942 |
+
"learning_rate": 3.1255116881615826e-06,
|
1943 |
+
"loss": 0.1063,
|
1944 |
+
"step": 156000
|
1945 |
+
},
|
1946 |
+
{
|
1947 |
+
"epoch": 7.08,
|
1948 |
+
"learning_rate": 3.101432245263574e-06,
|
1949 |
+
"loss": 0.1294,
|
1950 |
+
"step": 156500
|
1951 |
+
},
|
1952 |
+
{
|
1953 |
+
"epoch": 7.11,
|
1954 |
+
"learning_rate": 3.0773528023655647e-06,
|
1955 |
+
"loss": 0.1166,
|
1956 |
+
"step": 157000
|
1957 |
+
},
|
1958 |
+
{
|
1959 |
+
"epoch": 7.13,
|
1960 |
+
"learning_rate": 3.0532733594675555e-06,
|
1961 |
+
"loss": 0.1169,
|
1962 |
+
"step": 157500
|
1963 |
+
},
|
1964 |
+
{
|
1965 |
+
"epoch": 7.15,
|
1966 |
+
"learning_rate": 3.0291939165695468e-06,
|
1967 |
+
"loss": 0.1244,
|
1968 |
+
"step": 158000
|
1969 |
+
},
|
1970 |
+
{
|
1971 |
+
"epoch": 7.18,
|
1972 |
+
"learning_rate": 3.0051144736715376e-06,
|
1973 |
+
"loss": 0.1211,
|
1974 |
+
"step": 158500
|
1975 |
+
},
|
1976 |
+
{
|
1977 |
+
"epoch": 7.2,
|
1978 |
+
"learning_rate": 2.981035030773528e-06,
|
1979 |
+
"loss": 0.131,
|
1980 |
+
"step": 159000
|
1981 |
+
},
|
1982 |
+
{
|
1983 |
+
"epoch": 7.22,
|
1984 |
+
"learning_rate": 2.9569555878755193e-06,
|
1985 |
+
"loss": 0.1229,
|
1986 |
+
"step": 159500
|
1987 |
+
},
|
1988 |
+
{
|
1989 |
+
"epoch": 7.24,
|
1990 |
+
"learning_rate": 2.93287614497751e-06,
|
1991 |
+
"loss": 0.1208,
|
1992 |
+
"step": 160000
|
1993 |
+
},
|
1994 |
+
{
|
1995 |
+
"epoch": 7.27,
|
1996 |
+
"learning_rate": 2.908796702079501e-06,
|
1997 |
+
"loss": 0.1207,
|
1998 |
+
"step": 160500
|
1999 |
+
},
|
2000 |
+
{
|
2001 |
+
"epoch": 7.29,
|
2002 |
+
"learning_rate": 2.884717259181492e-06,
|
2003 |
+
"loss": 0.1161,
|
2004 |
+
"step": 161000
|
2005 |
+
},
|
2006 |
+
{
|
2007 |
+
"epoch": 7.31,
|
2008 |
+
"learning_rate": 2.8606378162834826e-06,
|
2009 |
+
"loss": 0.1362,
|
2010 |
+
"step": 161500
|
2011 |
+
},
|
2012 |
+
{
|
2013 |
+
"epoch": 7.33,
|
2014 |
+
"learning_rate": 2.8365583733854734e-06,
|
2015 |
+
"loss": 0.1261,
|
2016 |
+
"step": 162000
|
2017 |
+
},
|
2018 |
+
{
|
2019 |
+
"epoch": 7.36,
|
2020 |
+
"learning_rate": 2.8124789304874643e-06,
|
2021 |
+
"loss": 0.1091,
|
2022 |
+
"step": 162500
|
2023 |
+
},
|
2024 |
+
{
|
2025 |
+
"epoch": 7.38,
|
2026 |
+
"learning_rate": 2.7883994875894555e-06,
|
2027 |
+
"loss": 0.126,
|
2028 |
+
"step": 163000
|
2029 |
+
},
|
2030 |
+
{
|
2031 |
+
"epoch": 7.4,
|
2032 |
+
"learning_rate": 2.7643200446914464e-06,
|
2033 |
+
"loss": 0.127,
|
2034 |
+
"step": 163500
|
2035 |
+
},
|
2036 |
+
{
|
2037 |
+
"epoch": 7.42,
|
2038 |
+
"learning_rate": 2.7402406017934368e-06,
|
2039 |
+
"loss": 0.118,
|
2040 |
+
"step": 164000
|
2041 |
+
},
|
2042 |
+
{
|
2043 |
+
"epoch": 7.45,
|
2044 |
+
"learning_rate": 2.716161158895428e-06,
|
2045 |
+
"loss": 0.1368,
|
2046 |
+
"step": 164500
|
2047 |
+
},
|
2048 |
+
{
|
2049 |
+
"epoch": 7.47,
|
2050 |
+
"learning_rate": 2.692081715997419e-06,
|
2051 |
+
"loss": 0.1225,
|
2052 |
+
"step": 165000
|
2053 |
+
},
|
2054 |
+
{
|
2055 |
+
"epoch": 7.49,
|
2056 |
+
"learning_rate": 2.6680022730994097e-06,
|
2057 |
+
"loss": 0.1095,
|
2058 |
+
"step": 165500
|
2059 |
+
},
|
2060 |
+
{
|
2061 |
+
"epoch": 7.51,
|
2062 |
+
"learning_rate": 2.643922830201401e-06,
|
2063 |
+
"loss": 0.126,
|
2064 |
+
"step": 166000
|
2065 |
+
},
|
2066 |
+
{
|
2067 |
+
"epoch": 7.54,
|
2068 |
+
"learning_rate": 2.6198433873033918e-06,
|
2069 |
+
"loss": 0.1212,
|
2070 |
+
"step": 166500
|
2071 |
+
},
|
2072 |
+
{
|
2073 |
+
"epoch": 7.56,
|
2074 |
+
"learning_rate": 2.595763944405382e-06,
|
2075 |
+
"loss": 0.1326,
|
2076 |
+
"step": 167000
|
2077 |
+
},
|
2078 |
+
{
|
2079 |
+
"epoch": 7.58,
|
2080 |
+
"learning_rate": 2.5716845015073735e-06,
|
2081 |
+
"loss": 0.1343,
|
2082 |
+
"step": 167500
|
2083 |
+
},
|
2084 |
+
{
|
2085 |
+
"epoch": 7.61,
|
2086 |
+
"learning_rate": 2.5476050586093643e-06,
|
2087 |
+
"loss": 0.1089,
|
2088 |
+
"step": 168000
|
2089 |
+
},
|
2090 |
+
{
|
2091 |
+
"epoch": 7.63,
|
2092 |
+
"learning_rate": 2.523525615711355e-06,
|
2093 |
+
"loss": 0.1358,
|
2094 |
+
"step": 168500
|
2095 |
+
},
|
2096 |
+
{
|
2097 |
+
"epoch": 7.65,
|
2098 |
+
"learning_rate": 2.499446172813346e-06,
|
2099 |
+
"loss": 0.1231,
|
2100 |
+
"step": 169000
|
2101 |
+
},
|
2102 |
+
{
|
2103 |
+
"epoch": 7.67,
|
2104 |
+
"learning_rate": 2.475366729915337e-06,
|
2105 |
+
"loss": 0.1261,
|
2106 |
+
"step": 169500
|
2107 |
+
},
|
2108 |
+
{
|
2109 |
+
"epoch": 7.7,
|
2110 |
+
"learning_rate": 2.4512872870173276e-06,
|
2111 |
+
"loss": 0.122,
|
2112 |
+
"step": 170000
|
2113 |
+
},
|
2114 |
+
{
|
2115 |
+
"epoch": 7.72,
|
2116 |
+
"learning_rate": 2.4272078441193185e-06,
|
2117 |
+
"loss": 0.1233,
|
2118 |
+
"step": 170500
|
2119 |
+
},
|
2120 |
+
{
|
2121 |
+
"epoch": 7.74,
|
2122 |
+
"learning_rate": 2.4031284012213097e-06,
|
2123 |
+
"loss": 0.1327,
|
2124 |
+
"step": 171000
|
2125 |
+
},
|
2126 |
+
{
|
2127 |
+
"epoch": 7.76,
|
2128 |
+
"learning_rate": 2.3790489583233006e-06,
|
2129 |
+
"loss": 0.1214,
|
2130 |
+
"step": 171500
|
2131 |
+
},
|
2132 |
+
{
|
2133 |
+
"epoch": 7.79,
|
2134 |
+
"learning_rate": 2.3549695154252914e-06,
|
2135 |
+
"loss": 0.1225,
|
2136 |
+
"step": 172000
|
2137 |
+
},
|
2138 |
+
{
|
2139 |
+
"epoch": 7.81,
|
2140 |
+
"learning_rate": 2.3308900725272822e-06,
|
2141 |
+
"loss": 0.1247,
|
2142 |
+
"step": 172500
|
2143 |
+
},
|
2144 |
+
{
|
2145 |
+
"epoch": 7.83,
|
2146 |
+
"learning_rate": 2.306810629629273e-06,
|
2147 |
+
"loss": 0.119,
|
2148 |
+
"step": 173000
|
2149 |
+
},
|
2150 |
+
{
|
2151 |
+
"epoch": 7.85,
|
2152 |
+
"learning_rate": 2.282731186731264e-06,
|
2153 |
+
"loss": 0.1275,
|
2154 |
+
"step": 173500
|
2155 |
+
},
|
2156 |
+
{
|
2157 |
+
"epoch": 7.88,
|
2158 |
+
"learning_rate": 2.2586517438332547e-06,
|
2159 |
+
"loss": 0.1158,
|
2160 |
+
"step": 174000
|
2161 |
+
},
|
2162 |
+
{
|
2163 |
+
"epoch": 7.9,
|
2164 |
+
"learning_rate": 2.2345723009352456e-06,
|
2165 |
+
"loss": 0.1156,
|
2166 |
+
"step": 174500
|
2167 |
+
},
|
2168 |
+
{
|
2169 |
+
"epoch": 7.92,
|
2170 |
+
"learning_rate": 2.210492858037237e-06,
|
2171 |
+
"loss": 0.1313,
|
2172 |
+
"step": 175000
|
2173 |
+
},
|
2174 |
+
{
|
2175 |
+
"epoch": 7.94,
|
2176 |
+
"learning_rate": 2.1864134151392277e-06,
|
2177 |
+
"loss": 0.1301,
|
2178 |
+
"step": 175500
|
2179 |
+
},
|
2180 |
+
{
|
2181 |
+
"epoch": 7.97,
|
2182 |
+
"learning_rate": 2.1623339722412185e-06,
|
2183 |
+
"loss": 0.1198,
|
2184 |
+
"step": 176000
|
2185 |
+
},
|
2186 |
+
{
|
2187 |
+
"epoch": 7.99,
|
2188 |
+
"learning_rate": 2.1382545293432093e-06,
|
2189 |
+
"loss": 0.1237,
|
2190 |
+
"step": 176500
|
2191 |
+
},
|
2192 |
+
{
|
2193 |
+
"epoch": 8.0,
|
2194 |
+
"eval_accuracy": 0.8664918132973441,
|
2195 |
+
"eval_loss": 0.8249724507331848,
|
2196 |
+
"eval_runtime": 48.4679,
|
2197 |
+
"eval_samples_per_second": 810.247,
|
2198 |
+
"eval_steps_per_second": 50.652,
|
2199 |
+
"step": 176720
|
2200 |
+
},
|
2201 |
+
{
|
2202 |
+
"epoch": 8.01,
|
2203 |
+
"learning_rate": 2.1141750864452e-06,
|
2204 |
+
"loss": 0.1008,
|
2205 |
+
"step": 177000
|
2206 |
+
},
|
2207 |
+
{
|
2208 |
+
"epoch": 8.04,
|
2209 |
+
"learning_rate": 2.090095643547191e-06,
|
2210 |
+
"loss": 0.0971,
|
2211 |
+
"step": 177500
|
2212 |
+
},
|
2213 |
+
{
|
2214 |
+
"epoch": 8.06,
|
2215 |
+
"learning_rate": 2.066016200649182e-06,
|
2216 |
+
"loss": 0.1021,
|
2217 |
+
"step": 178000
|
2218 |
+
},
|
2219 |
+
{
|
2220 |
+
"epoch": 8.08,
|
2221 |
+
"learning_rate": 2.0419367577511727e-06,
|
2222 |
+
"loss": 0.1067,
|
2223 |
+
"step": 178500
|
2224 |
+
},
|
2225 |
+
{
|
2226 |
+
"epoch": 8.1,
|
2227 |
+
"learning_rate": 2.017857314853164e-06,
|
2228 |
+
"loss": 0.0989,
|
2229 |
+
"step": 179000
|
2230 |
+
},
|
2231 |
+
{
|
2232 |
+
"epoch": 8.13,
|
2233 |
+
"learning_rate": 1.9937778719551548e-06,
|
2234 |
+
"loss": 0.1049,
|
2235 |
+
"step": 179500
|
2236 |
+
},
|
2237 |
+
{
|
2238 |
+
"epoch": 8.15,
|
2239 |
+
"learning_rate": 1.9696984290571456e-06,
|
2240 |
+
"loss": 0.1154,
|
2241 |
+
"step": 180000
|
2242 |
+
},
|
2243 |
+
{
|
2244 |
+
"epoch": 8.17,
|
2245 |
+
"learning_rate": 1.9456189861591364e-06,
|
2246 |
+
"loss": 0.0919,
|
2247 |
+
"step": 180500
|
2248 |
+
},
|
2249 |
+
{
|
2250 |
+
"epoch": 8.19,
|
2251 |
+
"learning_rate": 1.9215395432611273e-06,
|
2252 |
+
"loss": 0.1185,
|
2253 |
+
"step": 181000
|
2254 |
+
},
|
2255 |
+
{
|
2256 |
+
"epoch": 8.22,
|
2257 |
+
"learning_rate": 1.8974601003631183e-06,
|
2258 |
+
"loss": 0.0943,
|
2259 |
+
"step": 181500
|
2260 |
+
},
|
2261 |
+
{
|
2262 |
+
"epoch": 8.24,
|
2263 |
+
"learning_rate": 1.873380657465109e-06,
|
2264 |
+
"loss": 0.1093,
|
2265 |
+
"step": 182000
|
2266 |
+
},
|
2267 |
+
{
|
2268 |
+
"epoch": 8.26,
|
2269 |
+
"learning_rate": 1.8493012145671e-06,
|
2270 |
+
"loss": 0.0948,
|
2271 |
+
"step": 182500
|
2272 |
+
},
|
2273 |
+
{
|
2274 |
+
"epoch": 8.28,
|
2275 |
+
"learning_rate": 1.825221771669091e-06,
|
2276 |
+
"loss": 0.0984,
|
2277 |
+
"step": 183000
|
2278 |
+
},
|
2279 |
+
{
|
2280 |
+
"epoch": 8.31,
|
2281 |
+
"learning_rate": 1.8011423287710816e-06,
|
2282 |
+
"loss": 0.1063,
|
2283 |
+
"step": 183500
|
2284 |
+
},
|
2285 |
+
{
|
2286 |
+
"epoch": 8.33,
|
2287 |
+
"learning_rate": 1.7770628858730727e-06,
|
2288 |
+
"loss": 0.1084,
|
2289 |
+
"step": 184000
|
2290 |
+
},
|
2291 |
+
{
|
2292 |
+
"epoch": 8.35,
|
2293 |
+
"learning_rate": 1.7529834429750633e-06,
|
2294 |
+
"loss": 0.0988,
|
2295 |
+
"step": 184500
|
2296 |
+
},
|
2297 |
+
{
|
2298 |
+
"epoch": 8.37,
|
2299 |
+
"learning_rate": 1.7289040000770544e-06,
|
2300 |
+
"loss": 0.091,
|
2301 |
+
"step": 185000
|
2302 |
+
},
|
2303 |
+
{
|
2304 |
+
"epoch": 8.4,
|
2305 |
+
"learning_rate": 1.7048245571790454e-06,
|
2306 |
+
"loss": 0.1077,
|
2307 |
+
"step": 185500
|
2308 |
+
},
|
2309 |
+
{
|
2310 |
+
"epoch": 8.42,
|
2311 |
+
"learning_rate": 1.680745114281036e-06,
|
2312 |
+
"loss": 0.0997,
|
2313 |
+
"step": 186000
|
2314 |
+
},
|
2315 |
+
{
|
2316 |
+
"epoch": 8.44,
|
2317 |
+
"learning_rate": 1.656665671383027e-06,
|
2318 |
+
"loss": 0.0898,
|
2319 |
+
"step": 186500
|
2320 |
+
},
|
2321 |
+
{
|
2322 |
+
"epoch": 8.47,
|
2323 |
+
"learning_rate": 1.6325862284850181e-06,
|
2324 |
+
"loss": 0.1011,
|
2325 |
+
"step": 187000
|
2326 |
+
},
|
2327 |
+
{
|
2328 |
+
"epoch": 8.49,
|
2329 |
+
"learning_rate": 1.6085067855870087e-06,
|
2330 |
+
"loss": 0.1043,
|
2331 |
+
"step": 187500
|
2332 |
+
},
|
2333 |
+
{
|
2334 |
+
"epoch": 8.51,
|
2335 |
+
"learning_rate": 1.5844273426889998e-06,
|
2336 |
+
"loss": 0.0891,
|
2337 |
+
"step": 188000
|
2338 |
+
},
|
2339 |
+
{
|
2340 |
+
"epoch": 8.53,
|
2341 |
+
"learning_rate": 1.5603478997909904e-06,
|
2342 |
+
"loss": 0.0992,
|
2343 |
+
"step": 188500
|
2344 |
+
},
|
2345 |
+
{
|
2346 |
+
"epoch": 8.56,
|
2347 |
+
"learning_rate": 1.5362684568929815e-06,
|
2348 |
+
"loss": 0.1055,
|
2349 |
+
"step": 189000
|
2350 |
+
},
|
2351 |
+
{
|
2352 |
+
"epoch": 8.58,
|
2353 |
+
"learning_rate": 1.5121890139949725e-06,
|
2354 |
+
"loss": 0.1032,
|
2355 |
+
"step": 189500
|
2356 |
+
},
|
2357 |
+
{
|
2358 |
+
"epoch": 8.6,
|
2359 |
+
"learning_rate": 1.4881095710969631e-06,
|
2360 |
+
"loss": 0.0945,
|
2361 |
+
"step": 190000
|
2362 |
+
},
|
2363 |
+
{
|
2364 |
+
"epoch": 8.62,
|
2365 |
+
"learning_rate": 1.4640301281989542e-06,
|
2366 |
+
"loss": 0.1159,
|
2367 |
+
"step": 190500
|
2368 |
+
},
|
2369 |
+
{
|
2370 |
+
"epoch": 8.65,
|
2371 |
+
"learning_rate": 1.4399506853009452e-06,
|
2372 |
+
"loss": 0.1016,
|
2373 |
+
"step": 191000
|
2374 |
+
},
|
2375 |
+
{
|
2376 |
+
"epoch": 8.67,
|
2377 |
+
"learning_rate": 1.4158712424029358e-06,
|
2378 |
+
"loss": 0.1024,
|
2379 |
+
"step": 191500
|
2380 |
+
},
|
2381 |
+
{
|
2382 |
+
"epoch": 8.69,
|
2383 |
+
"learning_rate": 1.3917917995049269e-06,
|
2384 |
+
"loss": 0.101,
|
2385 |
+
"step": 192000
|
2386 |
+
},
|
2387 |
+
{
|
2388 |
+
"epoch": 8.71,
|
2389 |
+
"learning_rate": 1.3677123566069175e-06,
|
2390 |
+
"loss": 0.0962,
|
2391 |
+
"step": 192500
|
2392 |
+
},
|
2393 |
+
{
|
2394 |
+
"epoch": 8.74,
|
2395 |
+
"learning_rate": 1.3436329137089086e-06,
|
2396 |
+
"loss": 0.0986,
|
2397 |
+
"step": 193000
|
2398 |
+
},
|
2399 |
+
{
|
2400 |
+
"epoch": 8.76,
|
2401 |
+
"learning_rate": 1.3195534708108996e-06,
|
2402 |
+
"loss": 0.0963,
|
2403 |
+
"step": 193500
|
2404 |
+
},
|
2405 |
+
{
|
2406 |
+
"epoch": 8.78,
|
2407 |
+
"learning_rate": 1.2954740279128902e-06,
|
2408 |
+
"loss": 0.1238,
|
2409 |
+
"step": 194000
|
2410 |
+
},
|
2411 |
+
{
|
2412 |
+
"epoch": 8.8,
|
2413 |
+
"learning_rate": 1.2713945850148813e-06,
|
2414 |
+
"loss": 0.1001,
|
2415 |
+
"step": 194500
|
2416 |
+
},
|
2417 |
+
{
|
2418 |
+
"epoch": 8.83,
|
2419 |
+
"learning_rate": 1.247315142116872e-06,
|
2420 |
+
"loss": 0.0972,
|
2421 |
+
"step": 195000
|
2422 |
+
},
|
2423 |
+
{
|
2424 |
+
"epoch": 8.85,
|
2425 |
+
"learning_rate": 1.223235699218863e-06,
|
2426 |
+
"loss": 0.091,
|
2427 |
+
"step": 195500
|
2428 |
+
},
|
2429 |
+
{
|
2430 |
+
"epoch": 8.87,
|
2431 |
+
"learning_rate": 1.199156256320854e-06,
|
2432 |
+
"loss": 0.1058,
|
2433 |
+
"step": 196000
|
2434 |
+
},
|
2435 |
+
{
|
2436 |
+
"epoch": 8.9,
|
2437 |
+
"learning_rate": 1.1750768134228448e-06,
|
2438 |
+
"loss": 0.0957,
|
2439 |
+
"step": 196500
|
2440 |
+
},
|
2441 |
+
{
|
2442 |
+
"epoch": 8.92,
|
2443 |
+
"learning_rate": 1.1509973705248357e-06,
|
2444 |
+
"loss": 0.0979,
|
2445 |
+
"step": 197000
|
2446 |
+
},
|
2447 |
+
{
|
2448 |
+
"epoch": 8.94,
|
2449 |
+
"learning_rate": 1.1269179276268265e-06,
|
2450 |
+
"loss": 0.1084,
|
2451 |
+
"step": 197500
|
2452 |
+
},
|
2453 |
+
{
|
2454 |
+
"epoch": 8.96,
|
2455 |
+
"learning_rate": 1.1028384847288175e-06,
|
2456 |
+
"loss": 0.0963,
|
2457 |
+
"step": 198000
|
2458 |
+
},
|
2459 |
+
{
|
2460 |
+
"epoch": 8.99,
|
2461 |
+
"learning_rate": 1.0787590418308084e-06,
|
2462 |
+
"loss": 0.0908,
|
2463 |
+
"step": 198500
|
2464 |
+
},
|
2465 |
+
{
|
2466 |
+
"epoch": 9.0,
|
2467 |
+
"eval_accuracy": 0.8667973822922768,
|
2468 |
+
"eval_loss": 0.9133633375167847,
|
2469 |
+
"eval_runtime": 48.4125,
|
2470 |
+
"eval_samples_per_second": 811.174,
|
2471 |
+
"eval_steps_per_second": 50.71,
|
2472 |
+
"step": 198810
|
2473 |
+
},
|
2474 |
+
{
|
2475 |
+
"epoch": 9.01,
|
2476 |
+
"learning_rate": 1.0546795989327992e-06,
|
2477 |
+
"loss": 0.084,
|
2478 |
+
"step": 199000
|
2479 |
+
},
|
2480 |
+
{
|
2481 |
+
"epoch": 9.03,
|
2482 |
+
"learning_rate": 1.03060015603479e-06,
|
2483 |
+
"loss": 0.0891,
|
2484 |
+
"step": 199500
|
2485 |
+
},
|
2486 |
+
{
|
2487 |
+
"epoch": 9.05,
|
2488 |
+
"learning_rate": 1.006520713136781e-06,
|
2489 |
+
"loss": 0.1038,
|
2490 |
+
"step": 200000
|
2491 |
+
},
|
2492 |
+
{
|
2493 |
+
"epoch": 9.08,
|
2494 |
+
"learning_rate": 9.82441270238772e-07,
|
2495 |
+
"loss": 0.0778,
|
2496 |
+
"step": 200500
|
2497 |
+
},
|
2498 |
+
{
|
2499 |
+
"epoch": 9.1,
|
2500 |
+
"learning_rate": 9.583618273407628e-07,
|
2501 |
+
"loss": 0.0917,
|
2502 |
+
"step": 201000
|
2503 |
+
},
|
2504 |
+
{
|
2505 |
+
"epoch": 9.12,
|
2506 |
+
"learning_rate": 9.342823844427536e-07,
|
2507 |
+
"loss": 0.0924,
|
2508 |
+
"step": 201500
|
2509 |
+
},
|
2510 |
+
{
|
2511 |
+
"epoch": 9.14,
|
2512 |
+
"learning_rate": 9.102029415447445e-07,
|
2513 |
+
"loss": 0.077,
|
2514 |
+
"step": 202000
|
2515 |
+
},
|
2516 |
+
{
|
2517 |
+
"epoch": 9.17,
|
2518 |
+
"learning_rate": 8.861234986467354e-07,
|
2519 |
+
"loss": 0.0851,
|
2520 |
+
"step": 202500
|
2521 |
+
},
|
2522 |
+
{
|
2523 |
+
"epoch": 9.19,
|
2524 |
+
"learning_rate": 8.620440557487263e-07,
|
2525 |
+
"loss": 0.0897,
|
2526 |
+
"step": 203000
|
2527 |
+
},
|
2528 |
+
{
|
2529 |
+
"epoch": 9.21,
|
2530 |
+
"learning_rate": 8.379646128507171e-07,
|
2531 |
+
"loss": 0.0844,
|
2532 |
+
"step": 203500
|
2533 |
+
},
|
2534 |
+
{
|
2535 |
+
"epoch": 9.23,
|
2536 |
+
"learning_rate": 8.138851699527081e-07,
|
2537 |
+
"loss": 0.0845,
|
2538 |
+
"step": 204000
|
2539 |
+
},
|
2540 |
+
{
|
2541 |
+
"epoch": 9.26,
|
2542 |
+
"learning_rate": 7.898057270546989e-07,
|
2543 |
+
"loss": 0.0918,
|
2544 |
+
"step": 204500
|
2545 |
+
},
|
2546 |
+
{
|
2547 |
+
"epoch": 9.28,
|
2548 |
+
"learning_rate": 7.657262841566899e-07,
|
2549 |
+
"loss": 0.0792,
|
2550 |
+
"step": 205000
|
2551 |
+
},
|
2552 |
+
{
|
2553 |
+
"epoch": 9.3,
|
2554 |
+
"learning_rate": 7.416468412586807e-07,
|
2555 |
+
"loss": 0.0737,
|
2556 |
+
"step": 205500
|
2557 |
+
},
|
2558 |
+
{
|
2559 |
+
"epoch": 9.33,
|
2560 |
+
"learning_rate": 7.175673983606715e-07,
|
2561 |
+
"loss": 0.0806,
|
2562 |
+
"step": 206000
|
2563 |
+
},
|
2564 |
+
{
|
2565 |
+
"epoch": 9.35,
|
2566 |
+
"learning_rate": 6.934879554626625e-07,
|
2567 |
+
"loss": 0.0843,
|
2568 |
+
"step": 206500
|
2569 |
+
},
|
2570 |
+
{
|
2571 |
+
"epoch": 9.37,
|
2572 |
+
"learning_rate": 6.694085125646534e-07,
|
2573 |
+
"loss": 0.0823,
|
2574 |
+
"step": 207000
|
2575 |
+
},
|
2576 |
+
{
|
2577 |
+
"epoch": 9.39,
|
2578 |
+
"learning_rate": 6.453290696666442e-07,
|
2579 |
+
"loss": 0.0851,
|
2580 |
+
"step": 207500
|
2581 |
+
},
|
2582 |
+
{
|
2583 |
+
"epoch": 9.42,
|
2584 |
+
"learning_rate": 6.212496267686352e-07,
|
2585 |
+
"loss": 0.073,
|
2586 |
+
"step": 208000
|
2587 |
+
},
|
2588 |
+
{
|
2589 |
+
"epoch": 9.44,
|
2590 |
+
"learning_rate": 5.97170183870626e-07,
|
2591 |
+
"loss": 0.0872,
|
2592 |
+
"step": 208500
|
2593 |
+
},
|
2594 |
+
{
|
2595 |
+
"epoch": 9.46,
|
2596 |
+
"learning_rate": 5.73090740972617e-07,
|
2597 |
+
"loss": 0.0905,
|
2598 |
+
"step": 209000
|
2599 |
+
},
|
2600 |
+
{
|
2601 |
+
"epoch": 9.48,
|
2602 |
+
"learning_rate": 5.490112980746078e-07,
|
2603 |
+
"loss": 0.0846,
|
2604 |
+
"step": 209500
|
2605 |
+
},
|
2606 |
+
{
|
2607 |
+
"epoch": 9.51,
|
2608 |
+
"learning_rate": 5.249318551765987e-07,
|
2609 |
+
"loss": 0.0733,
|
2610 |
+
"step": 210000
|
2611 |
+
},
|
2612 |
+
{
|
2613 |
+
"epoch": 9.53,
|
2614 |
+
"learning_rate": 5.008524122785896e-07,
|
2615 |
+
"loss": 0.0926,
|
2616 |
+
"step": 210500
|
2617 |
+
},
|
2618 |
+
{
|
2619 |
+
"epoch": 9.55,
|
2620 |
+
"learning_rate": 4.7677296938058045e-07,
|
2621 |
+
"loss": 0.0767,
|
2622 |
+
"step": 211000
|
2623 |
+
},
|
2624 |
+
{
|
2625 |
+
"epoch": 9.57,
|
2626 |
+
"learning_rate": 4.526935264825713e-07,
|
2627 |
+
"loss": 0.0852,
|
2628 |
+
"step": 211500
|
2629 |
+
},
|
2630 |
+
{
|
2631 |
+
"epoch": 9.6,
|
2632 |
+
"learning_rate": 4.286140835845622e-07,
|
2633 |
+
"loss": 0.0815,
|
2634 |
+
"step": 212000
|
2635 |
+
},
|
2636 |
+
{
|
2637 |
+
"epoch": 9.62,
|
2638 |
+
"learning_rate": 4.0453464068655306e-07,
|
2639 |
+
"loss": 0.0965,
|
2640 |
+
"step": 212500
|
2641 |
+
},
|
2642 |
+
{
|
2643 |
+
"epoch": 9.64,
|
2644 |
+
"learning_rate": 3.80455197788544e-07,
|
2645 |
+
"loss": 0.0858,
|
2646 |
+
"step": 213000
|
2647 |
+
},
|
2648 |
+
{
|
2649 |
+
"epoch": 9.67,
|
2650 |
+
"learning_rate": 3.5637575489053483e-07,
|
2651 |
+
"loss": 0.0845,
|
2652 |
+
"step": 213500
|
2653 |
+
},
|
2654 |
+
{
|
2655 |
+
"epoch": 9.69,
|
2656 |
+
"learning_rate": 3.322963119925258e-07,
|
2657 |
+
"loss": 0.0901,
|
2658 |
+
"step": 214000
|
2659 |
+
},
|
2660 |
+
{
|
2661 |
+
"epoch": 9.71,
|
2662 |
+
"learning_rate": 3.0821686909451666e-07,
|
2663 |
+
"loss": 0.0871,
|
2664 |
+
"step": 214500
|
2665 |
+
},
|
2666 |
+
{
|
2667 |
+
"epoch": 9.73,
|
2668 |
+
"learning_rate": 2.8413742619650755e-07,
|
2669 |
+
"loss": 0.0868,
|
2670 |
+
"step": 215000
|
2671 |
+
},
|
2672 |
+
{
|
2673 |
+
"epoch": 9.76,
|
2674 |
+
"learning_rate": 2.6005798329849844e-07,
|
2675 |
+
"loss": 0.0817,
|
2676 |
+
"step": 215500
|
2677 |
+
},
|
2678 |
+
{
|
2679 |
+
"epoch": 9.78,
|
2680 |
+
"learning_rate": 2.3597854040048932e-07,
|
2681 |
+
"loss": 0.0793,
|
2682 |
+
"step": 216000
|
2683 |
+
},
|
2684 |
+
{
|
2685 |
+
"epoch": 9.8,
|
2686 |
+
"learning_rate": 2.118990975024802e-07,
|
2687 |
+
"loss": 0.0981,
|
2688 |
+
"step": 216500
|
2689 |
+
},
|
2690 |
+
{
|
2691 |
+
"epoch": 9.82,
|
2692 |
+
"learning_rate": 1.878196546044711e-07,
|
2693 |
+
"loss": 0.0746,
|
2694 |
+
"step": 217000
|
2695 |
+
},
|
2696 |
+
{
|
2697 |
+
"epoch": 9.85,
|
2698 |
+
"learning_rate": 1.6374021170646199e-07,
|
2699 |
+
"loss": 0.0812,
|
2700 |
+
"step": 217500
|
2701 |
+
},
|
2702 |
+
{
|
2703 |
+
"epoch": 9.87,
|
2704 |
+
"learning_rate": 1.3966076880845285e-07,
|
2705 |
+
"loss": 0.0848,
|
2706 |
+
"step": 218000
|
2707 |
+
},
|
2708 |
+
{
|
2709 |
+
"epoch": 9.89,
|
2710 |
+
"learning_rate": 1.1558132591044375e-07,
|
2711 |
+
"loss": 0.0777,
|
2712 |
+
"step": 218500
|
2713 |
+
},
|
2714 |
+
{
|
2715 |
+
"epoch": 9.91,
|
2716 |
+
"learning_rate": 9.150188301243464e-08,
|
2717 |
+
"loss": 0.0876,
|
2718 |
+
"step": 219000
|
2719 |
+
},
|
2720 |
+
{
|
2721 |
+
"epoch": 9.94,
|
2722 |
+
"learning_rate": 6.742244011442552e-08,
|
2723 |
+
"loss": 0.0948,
|
2724 |
+
"step": 219500
|
2725 |
+
},
|
2726 |
+
{
|
2727 |
+
"epoch": 9.96,
|
2728 |
+
"learning_rate": 4.3342997216416404e-08,
|
2729 |
+
"loss": 0.0859,
|
2730 |
+
"step": 220000
|
2731 |
+
},
|
2732 |
+
{
|
2733 |
+
"epoch": 9.98,
|
2734 |
+
"learning_rate": 1.926355431840729e-08,
|
2735 |
+
"loss": 0.0832,
|
2736 |
+
"step": 220500
|
2737 |
+
},
|
2738 |
+
{
|
2739 |
+
"epoch": 10.0,
|
2740 |
+
"eval_accuracy": 0.867408520282142,
|
2741 |
+
"eval_loss": 0.9269108176231384,
|
2742 |
+
"eval_runtime": 48.3924,
|
2743 |
+
"eval_samples_per_second": 811.511,
|
2744 |
+
"eval_steps_per_second": 50.731,
|
2745 |
+
"step": 220900
|
2746 |
+
},
|
2747 |
+
{
|
2748 |
+
"epoch": 10.0,
|
2749 |
+
"step": 220900,
|
2750 |
+
"total_flos": 1.4385611002158144e+17,
|
2751 |
+
"train_loss": 0.2250993330244459,
|
2752 |
+
"train_runtime": 14146.4436,
|
2753 |
+
"train_samples_per_second": 249.837,
|
2754 |
+
"train_steps_per_second": 15.615
|
2755 |
+
}
|
2756 |
+
],
|
2757 |
+
"logging_steps": 500,
|
2758 |
+
"max_steps": 220900,
|
2759 |
+
"num_train_epochs": 10,
|
2760 |
+
"save_steps": 500,
|
2761 |
+
"total_flos": 1.4385611002158144e+17,
|
2762 |
+
"trial_name": null,
|
2763 |
+
"trial_params": null
|
2764 |
+
}
|
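The trainer_state.json files in this commit are plain JSON, so the logged learning-rate, loss, and eval entries above can be inspected without loading any model weights. A minimal sketch using only the Python standard library; the local path assumes a hypothetical clone of this repository, and only keys visible in the diff above ("log_history", "epoch", "eval_accuracy") are relied on:

```python
import json

# Hypothetical local path into a cloned copy of this repository.
with open("mnli/roberta-base_lr1e-05/trainer_state.json") as f:
    state = json.load(f)

# Eval entries in log_history carry "eval_accuracy"; training-loss
# entries carry "loss" instead, so filtering on the key separates them.
for entry in state["log_history"]:
    if "eval_accuracy" in entry:
        print(f'epoch {entry["epoch"]}: accuracy {entry["eval_accuracy"]:.4f}')
```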
mnli/roberta-base_lr1e-05/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:239b779aeb51d9b42758e26ba4d8bd72821222f0005a38f5c8122c9300ce9470
+size 4091
mnli/roberta-base_lr1e-05/vocab.json
ADDED
The diff for this file is too large to render. See raw diff.
mrpc/roberta-base_lr5e-05/config.json
ADDED
@@ -0,0 +1,27 @@
+{
+  "_name_or_path": "/mnt/data/yule/.cache/roberta-base",
+  "architectures": [
+    "RobertaForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "classifier_dropout": null,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "roberta",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.33.1",
+  "type_vocab_size": 1,
+  "use_cache": true,
+  "vocab_size": 50265
+}
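Each config.json in this commit describes a standard RobertaForSequenceClassification checkpoint, so the accompanying pytorch_model.bin should load with the stock transformers API (the config records transformers_version 4.33.1). A hedged sketch, assuming a local clone of the repository; the checkpoint path and example sentence pair are illustrative only:

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Hypothetical local checkpoint directory from this repository.
ckpt = "mrpc/roberta-base_lr5e-05"
tokenizer = AutoTokenizer.from_pretrained(ckpt)
model = AutoModelForSequenceClassification.from_pretrained(ckpt)
model.eval()

# MRPC is a sentence-pair paraphrase task, so encode two sentences at once.
inputs = tokenizer("The cat sat.", "A cat was sitting.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print("predicted label id:", logits.argmax(dim=-1).item())
```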
mrpc/roberta-base_lr5e-05/merges.txt
ADDED
The diff for this file is too large to render. See raw diff.
mrpc/roberta-base_lr5e-05/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:616cc256e78117e1d724807d452a1d577faa02f4716d1b2056d0b23d4781742d
+size 498654833
mrpc/roberta-base_lr5e-05/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
+}
mrpc/roberta-base_lr5e-05/tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
mrpc/roberta-base_lr5e-05/tokenizer_config.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "errors": "replace",
+  "mask_token": "<mask>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "tokenizer_class": "RobertaTokenizer",
+  "trim_offsets": true,
+  "unk_token": "<unk>"
+}
mrpc/roberta-base_lr5e-05/trainer_state.json
ADDED
@@ -0,0 +1,162 @@
+{
+  "best_metric": 0.8596623807901907,
+  "best_model_checkpoint": "./save_models/mrpc/roberta-base_lr5e-05_run0/checkpoint-1449",
+  "epoch": 10.0,
+  "eval_steps": 500,
+  "global_step": 2070,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 1.0,
+      "eval_accuracy": 0.8010899182561307,
+      "eval_averaged_scores": 0.8262068735883505,
+      "eval_f1": 0.8513238289205703,
+      "eval_loss": 0.4088594615459442,
+      "eval_runtime": 0.8357,
+      "eval_samples_per_second": 439.143,
+      "eval_steps_per_second": 27.521,
+      "step": 207
+    },
+    {
+      "epoch": 2.0,
+      "eval_accuracy": 0.7602179836512262,
+      "eval_averaged_scores": 0.781009892726514,
+      "eval_f1": 0.8018018018018018,
+      "eval_loss": 0.4726070165634155,
+      "eval_runtime": 0.4955,
+      "eval_samples_per_second": 740.675,
+      "eval_steps_per_second": 46.418,
+      "step": 414
+    },
+    {
+      "epoch": 2.42,
+      "learning_rate": 4.0359897172236504e-05,
+      "loss": 0.4481,
+      "step": 500
+    },
+    {
+      "epoch": 3.0,
+      "eval_accuracy": 0.8283378746594006,
+      "eval_averaged_scores": 0.8527654285577706,
+      "eval_f1": 0.8771929824561405,
+      "eval_loss": 0.46944934129714966,
+      "eval_runtime": 0.4861,
+      "eval_samples_per_second": 754.978,
+      "eval_steps_per_second": 47.315,
+      "step": 621
+    },
+    {
+      "epoch": 4.0,
+      "eval_accuracy": 0.8365122615803815,
+      "eval_averaged_scores": 0.8587323212663813,
+      "eval_f1": 0.880952380952381,
+      "eval_loss": 0.716332197189331,
+      "eval_runtime": 0.4857,
+      "eval_samples_per_second": 755.614,
+      "eval_steps_per_second": 47.355,
+      "step": 828
+    },
+    {
+      "epoch": 4.83,
+      "learning_rate": 2.750642673521851e-05,
+      "loss": 0.2017,
+      "step": 1000
+    },
+    {
+      "epoch": 5.0,
+      "eval_accuracy": 0.8283378746594006,
+      "eval_averaged_scores": 0.8510426848246903,
+      "eval_f1": 0.87374749498998,
+      "eval_loss": 0.9263336658477783,
+      "eval_runtime": 0.4853,
+      "eval_samples_per_second": 756.307,
+      "eval_steps_per_second": 47.398,
+      "step": 1035
+    },
+    {
+      "epoch": 6.0,
+      "eval_accuracy": 0.8283378746594006,
+      "eval_averaged_scores": 0.8510426848246903,
+      "eval_f1": 0.87374749498998,
+      "eval_loss": 1.0142543315887451,
+      "eval_runtime": 0.4866,
+      "eval_samples_per_second": 754.169,
+      "eval_steps_per_second": 47.264,
+      "step": 1242
+    },
+    {
+      "epoch": 7.0,
+      "eval_accuracy": 0.8365122615803815,
+      "eval_averaged_scores": 0.8596623807901907,
+      "eval_f1": 0.8828125,
+      "eval_loss": 1.0171551704406738,
+      "eval_runtime": 0.4871,
+      "eval_samples_per_second": 753.373,
+      "eval_steps_per_second": 47.214,
+      "step": 1449
+    },
+    {
+      "epoch": 7.25,
+      "learning_rate": 1.4652956298200515e-05,
+      "loss": 0.0593,
+      "step": 1500
+    },
+    {
+      "epoch": 8.0,
+      "eval_accuracy": 0.8337874659400545,
+      "eval_averaged_scores": 0.8557714884810492,
+      "eval_f1": 0.8777555110220441,
+      "eval_loss": 1.0489568710327148,
+      "eval_runtime": 0.4847,
+      "eval_samples_per_second": 757.236,
+      "eval_steps_per_second": 47.456,
+      "step": 1656
+    },
+    {
+      "epoch": 9.0,
+      "eval_accuracy": 0.8337874659400545,
+      "eval_averaged_scores": 0.8569723184317168,
+      "eval_f1": 0.8801571709233792,
+      "eval_loss": 1.2099965810775757,
+      "eval_runtime": 0.4891,
+      "eval_samples_per_second": 750.402,
+      "eval_steps_per_second": 47.028,
+      "step": 1863
+    },
+    {
+      "epoch": 9.66,
+      "learning_rate": 1.7994858611825194e-06,
+      "loss": 0.0183,
+      "step": 2000
+    },
+    {
+      "epoch": 10.0,
+      "eval_accuracy": 0.8310626702997275,
+      "eval_averaged_scores": 0.8542665130154764,
+      "eval_f1": 0.8774703557312253,
+      "eval_loss": 1.2079670429229736,
+      "eval_runtime": 0.4843,
+      "eval_samples_per_second": 757.767,
+      "eval_steps_per_second": 47.489,
+      "step": 2070
+    },
+    {
+      "epoch": 10.0,
+      "step": 2070,
+      "total_flos": 1295111753354040.0,
+      "train_loss": 0.17568344080523737,
+      "train_runtime": 279.1119,
+      "train_samples_per_second": 118.268,
+      "train_steps_per_second": 7.416
+    }
+  ],
+  "logging_steps": 500,
+  "max_steps": 2070,
+  "num_train_epochs": 10,
+  "save_steps": 500,
+  "total_flos": 1295111753354040.0,
+  "trial_name": null,
+  "trial_params": null
+}
mrpc/roberta-base_lr5e-05/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ebc37108d69bdfafb90cbb1f1440787ba17d426b7bf64d0a5ed964b3056dbae
+size 4091
mrpc/roberta-base_lr5e-05/vocab.json
ADDED
The diff for this file is too large to render. See raw diff.
qnli/roberta-base_lr1e-05/config.json
ADDED
@@ -0,0 +1,27 @@
+{
+  "_name_or_path": "/mnt/data/yule/.cache/roberta-base",
+  "architectures": [
+    "RobertaForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "classifier_dropout": null,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "roberta",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.33.1",
+  "type_vocab_size": 1,
+  "use_cache": true,
+  "vocab_size": 50265
+}
qnli/roberta-base_lr1e-05/merges.txt
ADDED
The diff for this file is too large to render. See raw diff.
qnli/roberta-base_lr1e-05/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7fc7499aa0875452ec8650dfde086ee9810b898fe594d3ed664bb445b4a83591
+size 498654833
qnli/roberta-base_lr1e-05/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
+}
qnli/roberta-base_lr1e-05/tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
qnli/roberta-base_lr1e-05/tokenizer_config.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "errors": "replace",
+  "mask_token": "<mask>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "tokenizer_class": "RobertaTokenizer",
+  "trim_offsets": true,
+  "unk_token": "<unk>"
+}
qnli/roberta-base_lr1e-05/trainer_state.json
ADDED
@@ -0,0 +1,820 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
+{
+  "best_metric": 0.9143675417661098,
+  "best_model_checkpoint": "./save_models/qnli/roberta-base_lr1e-05_run0/checkpoint-17676",
+  "epoch": 10.0,
+  "eval_steps": 500,
+  "global_step": 58920,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.08,
+      "learning_rate": 1.4140271493212672e-06,
+      "loss": 0.6933,
+      "step": 500
+    },
+    {
+      "epoch": 0.17,
+      "learning_rate": 2.8280542986425343e-06,
+      "loss": 0.542,
+      "step": 1000
+    },
+    {
+      "epoch": 0.25,
+      "learning_rate": 4.242081447963801e-06,
+      "loss": 0.4328,
+      "step": 1500
+    },
+    {
+      "epoch": 0.34,
+      "learning_rate": 5.656108597285069e-06,
+      "loss": 0.3932,
+      "step": 2000
+    },
+    {
+      "epoch": 0.42,
+      "learning_rate": 7.070135746606336e-06,
+      "loss": 0.3613,
+      "step": 2500
+    },
+    {
+      "epoch": 0.51,
+      "learning_rate": 8.484162895927603e-06,
+      "loss": 0.3553,
+      "step": 3000
+    },
+    {
+      "epoch": 0.59,
+      "learning_rate": 9.89819004524887e-06,
+      "loss": 0.3362,
+      "step": 3500
+    },
+    {
+      "epoch": 0.68,
+      "learning_rate": 9.916221291347683e-06,
+      "loss": 0.3362,
+      "step": 4000
+    },
+    {
+      "epoch": 0.76,
+      "learning_rate": 9.825942510472339e-06,
+      "loss": 0.3298,
+      "step": 4500
+    },
+    {
+      "epoch": 0.85,
+      "learning_rate": 9.735663729596995e-06,
+      "loss": 0.3075,
+      "step": 5000
+    },
+    {
+      "epoch": 0.93,
+      "learning_rate": 9.645384948721653e-06,
+      "loss": 0.3071,
+      "step": 5500
+    },
+    {
+      "epoch": 1.0,
+      "eval_accuracy": 0.8982338902147972,
+      "eval_loss": 0.273178368806839,
+      "eval_runtime": 14.5434,
+      "eval_samples_per_second": 720.258,
+      "eval_steps_per_second": 45.038,
+      "step": 5892
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 9.55510616784631e-06,
+      "loss": 0.2836,
+      "step": 6000
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 9.464827386970968e-06,
+      "loss": 0.2656,
+      "step": 6500
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 9.374548606095624e-06,
+      "loss": 0.2548,
+      "step": 7000
+    },
+    {
+      "epoch": 1.27,
+      "learning_rate": 9.28426982522028e-06,
+      "loss": 0.2535,
+      "step": 7500
+    },
+    {
+      "epoch": 1.36,
+      "learning_rate": 9.193991044344937e-06,
+      "loss": 0.258,
+      "step": 8000
+    },
+    {
+      "epoch": 1.44,
+      "learning_rate": 9.103712263469595e-06,
+      "loss": 0.2539,
+      "step": 8500
+    },
+    {
+      "epoch": 1.53,
+      "learning_rate": 9.013433482594251e-06,
+      "loss": 0.2465,
+      "step": 9000
+    },
+    {
+      "epoch": 1.61,
+      "learning_rate": 8.92315470171891e-06,
+      "loss": 0.2586,
+      "step": 9500
+    },
+    {
+      "epoch": 1.7,
+      "learning_rate": 8.832875920843566e-06,
+      "loss": 0.2407,
+      "step": 10000
+    },
+    {
+      "epoch": 1.78,
+      "learning_rate": 8.742597139968224e-06,
+      "loss": 0.2453,
+      "step": 10500
+    },
+    {
+      "epoch": 1.87,
+      "learning_rate": 8.65231835909288e-06,
+      "loss": 0.2506,
+      "step": 11000
+    },
+    {
+      "epoch": 1.95,
+      "learning_rate": 8.562039578217536e-06,
+      "loss": 0.2426,
+      "step": 11500
+    },
+    {
+      "epoch": 2.0,
+      "eval_accuracy": 0.911217183770883,
+      "eval_loss": 0.24326153099536896,
+      "eval_runtime": 14.4104,
+      "eval_samples_per_second": 726.905,
+      "eval_steps_per_second": 45.453,
+      "step": 11784
+    },
+    {
+      "epoch": 2.04,
+      "learning_rate": 8.471760797342193e-06,
+      "loss": 0.2253,
+      "step": 12000
+    },
+    {
+      "epoch": 2.12,
+      "learning_rate": 8.381482016466849e-06,
+      "loss": 0.2004,
+      "step": 12500
+    },
+    {
+      "epoch": 2.21,
+      "learning_rate": 8.291203235591507e-06,
+      "loss": 0.2032,
+      "step": 13000
+    },
+    {
+      "epoch": 2.29,
+      "learning_rate": 8.200924454716163e-06,
+      "loss": 0.2098,
+      "step": 13500
+    },
+    {
+      "epoch": 2.38,
+      "learning_rate": 8.110645673840822e-06,
+      "loss": 0.1996,
+      "step": 14000
+    },
+    {
+      "epoch": 2.46,
+      "learning_rate": 8.020366892965478e-06,
+      "loss": 0.2065,
+      "step": 14500
+    },
+    {
+      "epoch": 2.55,
+      "learning_rate": 7.930088112090136e-06,
+      "loss": 0.2011,
+      "step": 15000
+    },
+    {
+      "epoch": 2.63,
+      "learning_rate": 7.839809331214792e-06,
+      "loss": 0.2135,
+      "step": 15500
+    },
+    {
+      "epoch": 2.72,
+      "learning_rate": 7.749530550339449e-06,
+      "loss": 0.2116,
+      "step": 16000
+    },
+    {
+      "epoch": 2.8,
+      "learning_rate": 7.659251769464105e-06,
+      "loss": 0.221,
+      "step": 16500
+    },
+    {
+      "epoch": 2.89,
+      "learning_rate": 7.568972988588763e-06,
+      "loss": 0.2179,
+      "step": 17000
+    },
+    {
+      "epoch": 2.97,
+      "learning_rate": 7.478694207713419e-06,
+      "loss": 0.2141,
+      "step": 17500
+    },
+    {
+      "epoch": 3.0,
+      "eval_accuracy": 0.9143675417661098,
+      "eval_loss": 0.27570974826812744,
+      "eval_runtime": 14.419,
+      "eval_samples_per_second": 726.472,
+      "eval_steps_per_second": 45.426,
+      "step": 17676
+    },
+    {
+      "epoch": 3.05,
+      "learning_rate": 7.388415426838077e-06,
+      "loss": 0.1871,
+      "step": 18000
+    },
+    {
+      "epoch": 3.14,
+      "learning_rate": 7.298136645962734e-06,
+      "loss": 0.183,
+      "step": 18500
+    },
+    {
+      "epoch": 3.22,
+      "learning_rate": 7.207857865087391e-06,
+      "loss": 0.1757,
+      "step": 19000
+    },
+    {
+      "epoch": 3.31,
+      "learning_rate": 7.117579084212047e-06,
+      "loss": 0.1783,
+      "step": 19500
+    },
+    {
+      "epoch": 3.39,
+      "learning_rate": 7.027300303336704e-06,
+      "loss": 0.1787,
+      "step": 20000
+    },
+    {
+      "epoch": 3.48,
+      "learning_rate": 6.937021522461362e-06,
+      "loss": 0.1834,
+      "step": 20500
+    },
+    {
+      "epoch": 3.56,
+      "learning_rate": 6.846742741586018e-06,
+      "loss": 0.1722,
+      "step": 21000
+    },
+    {
+      "epoch": 3.65,
+      "learning_rate": 6.756463960710675e-06,
+      "loss": 0.1894,
+      "step": 21500
+    },
+    {
+      "epoch": 3.73,
+      "learning_rate": 6.6661851798353315e-06,
+      "loss": 0.1708,
+      "step": 22000
+    },
+    {
+      "epoch": 3.82,
+      "learning_rate": 6.57590639895999e-06,
+      "loss": 0.1962,
+      "step": 22500
+    },
+    {
+      "epoch": 3.9,
+      "learning_rate": 6.485627618084646e-06,
+      "loss": 0.1831,
+      "step": 23000
+    },
+    {
+      "epoch": 3.99,
+      "learning_rate": 6.395348837209303e-06,
+      "loss": 0.1893,
+      "step": 23500
+    },
+    {
+      "epoch": 4.0,
+      "eval_accuracy": 0.9129355608591886,
+      "eval_loss": 0.39158299565315247,
+      "eval_runtime": 14.3752,
+      "eval_samples_per_second": 728.688,
+      "eval_steps_per_second": 45.565,
+      "step": 23568
+    },
+    {
+      "epoch": 4.07,
+      "learning_rate": 6.3050700563339595e-06,
+      "loss": 0.1408,
+      "step": 24000
+    },
+    {
+      "epoch": 4.16,
+      "learning_rate": 6.214791275458617e-06,
+      "loss": 0.1505,
+      "step": 24500
+    },
+    {
+      "epoch": 4.24,
+      "learning_rate": 6.124512494583274e-06,
+      "loss": 0.1364,
+      "step": 25000
+    },
+    {
+      "epoch": 4.33,
+      "learning_rate": 6.034233713707931e-06,
+      "loss": 0.1519,
+      "step": 25500
+    },
+    {
+      "epoch": 4.41,
+      "learning_rate": 5.943954932832587e-06,
+      "loss": 0.1367,
+      "step": 26000
+    },
+    {
+      "epoch": 4.5,
+      "learning_rate": 5.853676151957245e-06,
+      "loss": 0.154,
+      "step": 26500
+    },
+    {
+      "epoch": 4.58,
+      "learning_rate": 5.763397371081901e-06,
+      "loss": 0.1437,
+      "step": 27000
+    },
+    {
+      "epoch": 4.67,
+      "learning_rate": 5.673118590206559e-06,
+      "loss": 0.1496,
+      "step": 27500
+    },
+    {
+      "epoch": 4.75,
+      "learning_rate": 5.582839809331215e-06,
+      "loss": 0.1668,
+      "step": 28000
+    },
+    {
+      "epoch": 4.84,
+      "learning_rate": 5.492561028455872e-06,
+      "loss": 0.1448,
+      "step": 28500
+    },
+    {
+      "epoch": 4.92,
+      "learning_rate": 5.402282247580529e-06,
+      "loss": 0.1638,
+      "step": 29000
+    },
+    {
+      "epoch": 5.0,
+      "eval_accuracy": 0.9121718377088306,
+      "eval_loss": 0.4099659323692322,
+      "eval_runtime": 14.3864,
+      "eval_samples_per_second": 728.119,
+      "eval_steps_per_second": 45.529,
+      "step": 29460
+    },
+    {
+      "epoch": 5.01,
+      "learning_rate": 5.312003466705185e-06,
+      "loss": 0.1375,
+      "step": 29500
+    },
+    {
+      "epoch": 5.09,
+      "learning_rate": 5.221724685829843e-06,
+      "loss": 0.1032,
+      "step": 30000
+    },
+    {
+      "epoch": 5.18,
+      "learning_rate": 5.1314459049545e-06,
+      "loss": 0.1106,
+      "step": 30500
+    },
+    {
+      "epoch": 5.26,
+      "learning_rate": 5.041167124079157e-06,
+      "loss": 0.1091,
+      "step": 31000
+    },
+    {
+      "epoch": 5.35,
+      "learning_rate": 4.950888343203813e-06,
+      "loss": 0.1094,
+      "step": 31500
+    },
+    {
+      "epoch": 5.43,
+      "learning_rate": 4.86060956232847e-06,
+      "loss": 0.1223,
+      "step": 32000
+    },
+    {
+      "epoch": 5.52,
+      "learning_rate": 4.7703307814531275e-06,
+      "loss": 0.1084,
+      "step": 32500
+    },
+    {
+      "epoch": 5.6,
+      "learning_rate": 4.680052000577785e-06,
+      "loss": 0.1097,
+      "step": 33000
+    },
+    {
+      "epoch": 5.69,
+      "learning_rate": 4.589773219702441e-06,
+      "loss": 0.1216,
+      "step": 33500
+    },
+    {
+      "epoch": 5.77,
+      "learning_rate": 4.499494438827098e-06,
+      "loss": 0.1191,
+      "step": 34000
+    },
+    {
+      "epoch": 5.86,
+      "learning_rate": 4.4092156579517554e-06,
+      "loss": 0.1215,
+      "step": 34500
+    },
+    {
+      "epoch": 5.94,
+      "learning_rate": 4.318936877076413e-06,
+      "loss": 0.1356,
+      "step": 35000
+    },
+    {
+      "epoch": 6.0,
+      "eval_accuracy": 0.9077804295942721,
+      "eval_loss": 0.484077125787735,
+      "eval_runtime": 14.4023,
+      "eval_samples_per_second": 727.317,
+      "eval_steps_per_second": 45.479,
+      "step": 35352
+    },
+    {
+      "epoch": 6.03,
+      "learning_rate": 4.228658096201069e-06,
+      "loss": 0.1124,
+      "step": 35500
+    },
+    {
+      "epoch": 6.11,
+      "learning_rate": 4.138379315325726e-06,
+      "loss": 0.0893,
+      "step": 36000
+    },
+    {
+      "epoch": 6.19,
+      "learning_rate": 4.048100534450383e-06,
+      "loss": 0.0763,
+      "step": 36500
+    },
+    {
+      "epoch": 6.28,
+      "learning_rate": 3.9578217535750406e-06,
+      "loss": 0.0889,
+      "step": 37000
+    },
+    {
+      "epoch": 6.36,
+      "learning_rate": 3.867542972699697e-06,
+      "loss": 0.0941,
+      "step": 37500
+    },
+    {
+      "epoch": 6.45,
+      "learning_rate": 3.777264191824354e-06,
+      "loss": 0.0931,
+      "step": 38000
+    },
+    {
+      "epoch": 6.53,
+      "learning_rate": 3.686985410949011e-06,
+      "loss": 0.0916,
+      "step": 38500
+    },
+    {
+      "epoch": 6.62,
+      "learning_rate": 3.596706630073668e-06,
+      "loss": 0.0916,
+      "step": 39000
+    },
+    {
+      "epoch": 6.7,
+      "learning_rate": 3.5064278491983244e-06,
+      "loss": 0.1038,
+      "step": 39500
+    },
+    {
+      "epoch": 6.79,
+      "learning_rate": 3.4161490683229816e-06,
+      "loss": 0.089,
+      "step": 40000
+    },
+    {
+      "epoch": 6.87,
+      "learning_rate": 3.3258702874476384e-06,
+      "loss": 0.1008,
+      "step": 40500
+    },
+    {
+      "epoch": 6.96,
+      "learning_rate": 3.2355915065722956e-06,
+      "loss": 0.0867,
+      "step": 41000
+    },
+    {
+      "epoch": 7.0,
+      "eval_accuracy": 0.9097852028639618,
+      "eval_loss": 0.5044658184051514,
+      "eval_runtime": 14.3742,
+      "eval_samples_per_second": 728.736,
+      "eval_steps_per_second": 45.568,
+      "step": 41244
+    },
+    {
+      "epoch": 7.04,
+      "learning_rate": 3.1453127256969523e-06,
+      "loss": 0.0746,
+      "step": 41500
+    },
+    {
+      "epoch": 7.13,
+      "learning_rate": 3.055033944821609e-06,
+      "loss": 0.0624,
+      "step": 42000
+    },
+    {
+      "epoch": 7.21,
+      "learning_rate": 2.9647551639462663e-06,
+      "loss": 0.0695,
+      "step": 42500
+    },
+    {
+      "epoch": 7.3,
+      "learning_rate": 2.874476383070923e-06,
+      "loss": 0.0804,
+      "step": 43000
+    },
+    {
+      "epoch": 7.38,
+      "learning_rate": 2.7841976021955803e-06,
+      "loss": 0.0695,
+      "step": 43500
+    },
+    {
+      "epoch": 7.47,
+      "learning_rate": 2.693918821320237e-06,
+      "loss": 0.0737,
+      "step": 44000
+    },
+    {
+      "epoch": 7.55,
+      "learning_rate": 2.6036400404448942e-06,
+      "loss": 0.0773,
+      "step": 44500
+    },
+    {
+      "epoch": 7.64,
+      "learning_rate": 2.513361259569551e-06,
+      "loss": 0.0756,
+      "step": 45000
+    },
+    {
+      "epoch": 7.72,
+      "learning_rate": 2.4230824786942078e-06,
+      "loss": 0.0697,
+      "step": 45500
+    },
+    {
+      "epoch": 7.81,
+      "learning_rate": 2.332803697818865e-06,
+      "loss": 0.0731,
+      "step": 46000
+    },
+    {
+      "epoch": 7.89,
+      "learning_rate": 2.2425249169435217e-06,
+      "loss": 0.0584,
+      "step": 46500
+    },
+    {
+      "epoch": 7.98,
+      "learning_rate": 2.152246136068179e-06,
+      "loss": 0.071,
+      "step": 47000
+    },
+    {
+      "epoch": 8.0,
+      "eval_accuracy": 0.9092124105011933,
+      "eval_loss": 0.5765216946601868,
+      "eval_runtime": 14.3597,
+      "eval_samples_per_second": 729.473,
+      "eval_steps_per_second": 45.614,
+      "step": 47136
+    },
+    {
+      "epoch": 8.06,
+      "learning_rate": 2.0619673551928357e-06,
+      "loss": 0.0614,
+      "step": 47500
+    },
+    {
+      "epoch": 8.15,
+      "learning_rate": 1.9716885743174925e-06,
+      "loss": 0.0588,
+      "step": 48000
+    },
+    {
+      "epoch": 8.23,
+      "learning_rate": 1.8814097934421497e-06,
+      "loss": 0.0575,
+      "step": 48500
+    },
+    {
+      "epoch": 8.32,
+      "learning_rate": 1.7911310125668062e-06,
+      "loss": 0.0454,
+      "step": 49000
+    },
+    {
+      "epoch": 8.4,
+      "learning_rate": 1.7008522316914632e-06,
+      "loss": 0.0554,
+      "step": 49500
+    },
+    {
+      "epoch": 8.49,
+      "learning_rate": 1.6105734508161202e-06,
+      "loss": 0.0559,
+      "step": 50000
+    },
+    {
+      "epoch": 8.57,
+      "learning_rate": 1.5202946699407772e-06,
+      "loss": 0.0561,
+      "step": 50500
+    },
+    {
+      "epoch": 8.66,
+      "learning_rate": 1.4300158890654341e-06,
+      "loss": 0.0505,
+      "step": 51000
+    },
+    {
+      "epoch": 8.74,
+      "learning_rate": 1.3397371081900911e-06,
+      "loss": 0.0552,
+      "step": 51500
+    },
+    {
+      "epoch": 8.83,
+      "learning_rate": 1.249458327314748e-06,
+      "loss": 0.0604,
+      "step": 52000
+    },
+    {
+      "epoch": 8.91,
+      "learning_rate": 1.1591795464394049e-06,
+      "loss": 0.0626,
+      "step": 52500
+    },
+    {
+      "epoch": 9.0,
+      "learning_rate": 1.0689007655640619e-06,
+      "loss": 0.0636,
+      "step": 53000
+    },
+    {
+      "epoch": 9.0,
+      "eval_accuracy": 0.9113126491646778,
+      "eval_loss": 0.5765402913093567,
+      "eval_runtime": 14.3825,
+      "eval_samples_per_second": 728.315,
+      "eval_steps_per_second": 45.541,
+      "step": 53028
+    },
+    {
+      "epoch": 9.08,
+      "learning_rate": 9.786219846887188e-07,
+      "loss": 0.0439,
+      "step": 53500
+    },
+    {
+      "epoch": 9.16,
+      "learning_rate": 8.883432038133757e-07,
+      "loss": 0.0447,
+      "step": 54000
+    },
+    {
+      "epoch": 9.25,
+      "learning_rate": 7.980644229380327e-07,
+      "loss": 0.0391,
+      "step": 54500
+    },
+    {
+      "epoch": 9.33,
+      "learning_rate": 7.077856420626897e-07,
+      "loss": 0.0404,
+      "step": 55000
+    },
+    {
+      "epoch": 9.42,
+      "learning_rate": 6.175068611873466e-07,
+      "loss": 0.0535,
+      "step": 55500
+    },
+    {
+      "epoch": 9.5,
+      "learning_rate": 5.272280803120034e-07,
+      "loss": 0.038,
+      "step": 56000
+    },
+    {
+      "epoch": 9.59,
+      "learning_rate": 4.3694929943666047e-07,
+      "loss": 0.047,
+      "step": 56500
+    },
+    {
+      "epoch": 9.67,
+      "learning_rate": 3.4667051856131735e-07,
+      "loss": 0.0414,
+      "step": 57000
+    },
+    {
+      "epoch": 9.76,
+      "learning_rate": 2.5639173768597433e-07,
+      "loss": 0.0447,
+      "step": 57500
+    },
+    {
+      "epoch": 9.84,
+      "learning_rate": 1.6611295681063126e-07,
+      "loss": 0.0364,
+      "step": 58000
+    },
+    {
+      "epoch": 9.93,
+      "learning_rate": 7.583417593528817e-08,
+      "loss": 0.0492,
+      "step": 58500
+    },
+    {
+      "epoch": 10.0,
+      "eval_accuracy": 0.9117899761336515,
+      "eval_loss": 0.6481573581695557,
+      "eval_runtime": 14.4339,
+      "eval_samples_per_second": 725.723,
+      "eval_steps_per_second": 45.379,
+      "step": 58920
+    },
+    {
+      "epoch": 10.0,
+      "step": 58920,
+      "total_flos": 4.325458183470288e+16,
+      "train_loss": 0.15599560614596883,
+      "train_runtime": 4172.8624,
+      "train_samples_per_second": 225.907,
+      "train_steps_per_second": 14.12
+    }
+  ],
+  "logging_steps": 500,
+  "max_steps": 58920,
+  "num_train_epochs": 10,
+  "save_steps": 500,
+  "total_flos": 4.325458183470288e+16,
+  "trial_name": null,
+  "trial_params": null
+}
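The trainer_state.json above is the complete Trainer log for the QNLI run: the best dev accuracy (0.9144) is reached at epoch 3 (checkpoint-17676), after which eval_loss climbs steadily. A minimal sketch of pulling those numbers back out of a downloaded copy of this file; the local path is illustrative, not something this diff guarantees:

```python
import json

# Assumes the file above was downloaded to this (hypothetical) local path.
with open("qnli/roberta-base_lr1e-05/trainer_state.json") as f:
    state = json.load(f)

print(state["best_metric"])            # 0.9143675417661098
print(state["best_model_checkpoint"])  # ./save_models/qnli/roberta-base_lr1e-05_run0/checkpoint-17676

# Per-epoch dev accuracy, read from the eval entries in log_history.
evals = [(e["epoch"], e["eval_accuracy"])
         for e in state["log_history"] if "eval_accuracy" in e]
print(evals)  # [(1.0, 0.8982...), (2.0, 0.9112...), ..., (10.0, 0.9118...)]
```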
qnli/roberta-base_lr1e-05/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:048e418f47a9d1a736e40d3e2588e096278449feefa566272f23bd2f56dd8802
+size 4091
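Note that the *.bin files in this commit are stored with git-lfs, so the three lines above (version / oid / size) are only the pointer text, not the payload. A hedged sketch of fetching the resolved file through the Hub API; the repo id is a placeholder, since it is not stated in this diff:

```python
from huggingface_hub import hf_hub_download

REPO_ID = "<user>/<this-repo>"  # placeholder: substitute this repository's id

# Downloads the resolved LFS payload (4091 bytes, per the pointer's "size").
path = hf_hub_download(
    repo_id=REPO_ID,
    filename="qnli/roberta-base_lr1e-05/training_args.bin",
)
print(path)
```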
qnli/roberta-base_lr1e-05/vocab.json
ADDED
The diff for this file is too large to render.
qqp/roberta-base_lr1e-05/config.json
ADDED
@@ -0,0 +1,27 @@
+{
+  "_name_or_path": "/mnt/data/yule/.cache/roberta-base",
+  "architectures": [
+    "RobertaForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "classifier_dropout": null,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "roberta",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.33.1",
+  "type_vocab_size": 1,
+  "use_cache": true,
+  "vocab_size": 50265
+}
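The config above is a stock roberta-base sequence-classification config (12 layers, hidden size 768, vocab 50265), saved with transformers 4.33.1. A minimal sketch of loading one of these per-task checkpoints straight from the Hub, assuming the "<task>/roberta-base_lr<lr>" directory layout shown in this commit; the repo id is a placeholder:

```python
from transformers import AutoModelForSequenceClassification, AutoTokenizer

REPO_ID = "<user>/<this-repo>"        # placeholder, not stated in this diff
SUBFOLDER = "qqp/roberta-base_lr1e-05"

tokenizer = AutoTokenizer.from_pretrained(REPO_ID, subfolder=SUBFOLDER)
model = AutoModelForSequenceClassification.from_pretrained(REPO_ID, subfolder=SUBFOLDER)

# Example: score a QQP-style question pair.
inputs = tokenizer(
    "How do I learn Python?",
    "What is the best way to learn Python?",
    return_tensors="pt",
)
logits = model(**inputs).logits
```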
qqp/roberta-base_lr1e-05/merges.txt
ADDED
The diff for this file is too large to render.
qqp/roberta-base_lr1e-05/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bbd975f1104f1988c62c26a7583a3781ba71a029b4c5a1b9b1436435ea105123
+size 498654833
qqp/roberta-base_lr1e-05/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
+}
qqp/roberta-base_lr1e-05/tokenizer.json
ADDED
The diff for this file is too large to render.
qqp/roberta-base_lr1e-05/tokenizer_config.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "errors": "replace",
+  "mask_token": "<mask>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "tokenizer_class": "RobertaTokenizer",
+  "trim_offsets": true,
+  "unk_token": "<unk>"
+}
qqp/roberta-base_lr1e-05/trainer_state.json
ADDED
@@ -0,0 +1,2592 @@
1 |
+
{
|
2 |
+
"best_metric": 0.8990160346845362,
|
3 |
+
"best_model_checkpoint": "./save_models/qqp/roberta-base_lr1e-05_run0/checkpoint-204670",
|
4 |
+
"epoch": 10.0,
|
5 |
+
"eval_steps": 500,
|
6 |
+
"global_step": 204670,
|
7 |
+
"is_hyper_param_search": false,
|
8 |
+
"is_local_process_zero": true,
|
9 |
+
"is_world_process_zero": true,
|
10 |
+
"log_history": [
|
11 |
+
{
|
12 |
+
"epoch": 0.02,
|
13 |
+
"learning_rate": 4.0713296962788053e-07,
|
14 |
+
"loss": 0.6914,
|
15 |
+
"step": 500
|
16 |
+
},
|
17 |
+
{
|
18 |
+
"epoch": 0.05,
|
19 |
+
"learning_rate": 8.142659392557611e-07,
|
20 |
+
"loss": 0.6479,
|
21 |
+
"step": 1000
|
22 |
+
},
|
23 |
+
{
|
24 |
+
"epoch": 0.07,
|
25 |
+
"learning_rate": 1.2213989088836414e-06,
|
26 |
+
"loss": 0.5224,
|
27 |
+
"step": 1500
|
28 |
+
},
|
29 |
+
{
|
30 |
+
"epoch": 0.1,
|
31 |
+
"learning_rate": 1.6285318785115221e-06,
|
32 |
+
"loss": 0.4532,
|
33 |
+
"step": 2000
|
34 |
+
},
|
35 |
+
{
|
36 |
+
"epoch": 0.12,
|
37 |
+
"learning_rate": 2.0356648481394024e-06,
|
38 |
+
"loss": 0.4179,
|
39 |
+
"step": 2500
|
40 |
+
},
|
41 |
+
{
|
42 |
+
"epoch": 0.15,
|
43 |
+
"learning_rate": 2.442797817767283e-06,
|
44 |
+
"loss": 0.4075,
|
45 |
+
"step": 3000
|
46 |
+
},
|
47 |
+
{
|
48 |
+
"epoch": 0.17,
|
49 |
+
"learning_rate": 2.8499307873951637e-06,
|
50 |
+
"loss": 0.3777,
|
51 |
+
"step": 3500
|
52 |
+
},
|
53 |
+
{
|
54 |
+
"epoch": 0.2,
|
55 |
+
"learning_rate": 3.2570637570230442e-06,
|
56 |
+
"loss": 0.3735,
|
57 |
+
"step": 4000
|
58 |
+
},
|
59 |
+
{
|
60 |
+
"epoch": 0.22,
|
61 |
+
"learning_rate": 3.6641967266509243e-06,
|
62 |
+
"loss": 0.3609,
|
63 |
+
"step": 4500
|
64 |
+
},
|
65 |
+
{
|
66 |
+
"epoch": 0.24,
|
67 |
+
"learning_rate": 4.071329696278805e-06,
|
68 |
+
"loss": 0.3624,
|
69 |
+
"step": 5000
|
70 |
+
},
|
71 |
+
{
|
72 |
+
"epoch": 0.27,
|
73 |
+
"learning_rate": 4.478462665906685e-06,
|
74 |
+
"loss": 0.3574,
|
75 |
+
"step": 5500
|
76 |
+
},
|
77 |
+
{
|
78 |
+
"epoch": 0.29,
|
79 |
+
"learning_rate": 4.885595635534566e-06,
|
80 |
+
"loss": 0.3509,
|
81 |
+
"step": 6000
|
82 |
+
},
|
83 |
+
{
|
84 |
+
"epoch": 0.32,
|
85 |
+
"learning_rate": 5.292728605162446e-06,
|
86 |
+
"loss": 0.3387,
|
87 |
+
"step": 6500
|
88 |
+
},
|
89 |
+
{
|
90 |
+
"epoch": 0.34,
|
91 |
+
"learning_rate": 5.6998615747903275e-06,
|
92 |
+
"loss": 0.335,
|
93 |
+
"step": 7000
|
94 |
+
},
|
95 |
+
{
|
96 |
+
"epoch": 0.37,
|
97 |
+
"learning_rate": 6.106994544418208e-06,
|
98 |
+
"loss": 0.3242,
|
99 |
+
"step": 7500
|
100 |
+
},
|
101 |
+
{
|
102 |
+
"epoch": 0.39,
|
103 |
+
"learning_rate": 6.5141275140460884e-06,
|
104 |
+
"loss": 0.3317,
|
105 |
+
"step": 8000
|
106 |
+
},
|
107 |
+
{
|
108 |
+
"epoch": 0.42,
|
109 |
+
"learning_rate": 6.921260483673968e-06,
|
110 |
+
"loss": 0.3182,
|
111 |
+
"step": 8500
|
112 |
+
},
|
113 |
+
{
|
114 |
+
"epoch": 0.44,
|
115 |
+
"learning_rate": 7.3283934533018485e-06,
|
116 |
+
"loss": 0.3197,
|
117 |
+
"step": 9000
|
118 |
+
},
|
119 |
+
{
|
120 |
+
"epoch": 0.46,
|
121 |
+
"learning_rate": 7.73552642292973e-06,
|
122 |
+
"loss": 0.3213,
|
123 |
+
"step": 9500
|
124 |
+
},
|
125 |
+
{
|
126 |
+
"epoch": 0.49,
|
127 |
+
"learning_rate": 8.14265939255761e-06,
|
128 |
+
"loss": 0.3204,
|
129 |
+
"step": 10000
|
130 |
+
},
|
131 |
+
{
|
132 |
+
"epoch": 0.51,
|
133 |
+
"learning_rate": 8.54979236218549e-06,
|
134 |
+
"loss": 0.3271,
|
135 |
+
"step": 10500
|
136 |
+
},
|
137 |
+
{
|
138 |
+
"epoch": 0.54,
|
139 |
+
"learning_rate": 8.95692533181337e-06,
|
140 |
+
"loss": 0.3107,
|
141 |
+
"step": 11000
|
142 |
+
},
|
143 |
+
{
|
144 |
+
"epoch": 0.56,
|
145 |
+
"learning_rate": 9.364058301441251e-06,
|
146 |
+
"loss": 0.31,
|
147 |
+
"step": 11500
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"epoch": 0.59,
|
151 |
+
"learning_rate": 9.771191271069131e-06,
|
152 |
+
"loss": 0.3089,
|
153 |
+
"step": 12000
|
154 |
+
},
|
155 |
+
{
|
156 |
+
"epoch": 0.61,
|
157 |
+
"learning_rate": 9.988616812811544e-06,
|
158 |
+
"loss": 0.2948,
|
159 |
+
"step": 12500
|
160 |
+
},
|
161 |
+
{
|
162 |
+
"epoch": 0.64,
|
163 |
+
"learning_rate": 9.962627800965753e-06,
|
164 |
+
"loss": 0.3019,
|
165 |
+
"step": 13000
|
166 |
+
},
|
167 |
+
{
|
168 |
+
"epoch": 0.66,
|
169 |
+
"learning_rate": 9.936638789119961e-06,
|
170 |
+
"loss": 0.3093,
|
171 |
+
"step": 13500
|
172 |
+
},
|
173 |
+
{
|
174 |
+
"epoch": 0.68,
|
175 |
+
"learning_rate": 9.91064977727417e-06,
|
176 |
+
"loss": 0.3032,
|
177 |
+
"step": 14000
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"epoch": 0.71,
|
181 |
+
"learning_rate": 9.884660765428378e-06,
|
182 |
+
"loss": 0.3014,
|
183 |
+
"step": 14500
|
184 |
+
},
|
185 |
+
{
|
186 |
+
"epoch": 0.73,
|
187 |
+
"learning_rate": 9.858671753582586e-06,
|
188 |
+
"loss": 0.3039,
|
189 |
+
"step": 15000
|
190 |
+
},
|
191 |
+
{
|
192 |
+
"epoch": 0.76,
|
193 |
+
"learning_rate": 9.832682741736795e-06,
|
194 |
+
"loss": 0.2994,
|
195 |
+
"step": 15500
|
196 |
+
},
|
197 |
+
{
|
198 |
+
"epoch": 0.78,
|
199 |
+
"learning_rate": 9.806693729891003e-06,
|
200 |
+
"loss": 0.2954,
|
201 |
+
"step": 16000
|
202 |
+
},
|
203 |
+
{
|
204 |
+
"epoch": 0.81,
|
205 |
+
"learning_rate": 9.78070471804521e-06,
|
206 |
+
"loss": 0.2907,
|
207 |
+
"step": 16500
|
208 |
+
},
|
209 |
+
{
|
210 |
+
"epoch": 0.83,
|
211 |
+
"learning_rate": 9.75471570619942e-06,
|
212 |
+
"loss": 0.2887,
|
213 |
+
"step": 17000
|
214 |
+
},
|
215 |
+
{
|
216 |
+
"epoch": 0.86,
|
217 |
+
"learning_rate": 9.728726694353629e-06,
|
218 |
+
"loss": 0.299,
|
219 |
+
"step": 17500
|
220 |
+
},
|
221 |
+
{
|
222 |
+
"epoch": 0.88,
|
223 |
+
"learning_rate": 9.702737682507836e-06,
|
224 |
+
"loss": 0.2789,
|
225 |
+
"step": 18000
|
226 |
+
},
|
227 |
+
{
|
228 |
+
"epoch": 0.9,
|
229 |
+
"learning_rate": 9.676748670662046e-06,
|
230 |
+
"loss": 0.2883,
|
231 |
+
"step": 18500
|
232 |
+
},
|
233 |
+
{
|
234 |
+
"epoch": 0.93,
|
235 |
+
"learning_rate": 9.650759658816253e-06,
|
236 |
+
"loss": 0.2873,
|
237 |
+
"step": 19000
|
238 |
+
},
|
239 |
+
{
|
240 |
+
"epoch": 0.95,
|
241 |
+
"learning_rate": 9.624770646970461e-06,
|
242 |
+
"loss": 0.2948,
|
243 |
+
"step": 19500
|
244 |
+
},
|
245 |
+
{
|
246 |
+
"epoch": 0.98,
|
247 |
+
"learning_rate": 9.59878163512467e-06,
|
248 |
+
"loss": 0.2746,
|
249 |
+
"step": 20000
|
250 |
+
},
|
251 |
+
{
|
252 |
+
"epoch": 1.0,
|
253 |
+
"eval_accuracy": 0.8817094956712931,
|
254 |
+
"eval_averaged_scores": 0.8643799788235642,
|
255 |
+
"eval_f1": 0.8470504619758352,
|
256 |
+
"eval_loss": 0.2860792577266693,
|
257 |
+
"eval_runtime": 36.8913,
|
258 |
+
"eval_samples_per_second": 986.277,
|
259 |
+
"eval_steps_per_second": 61.668,
|
260 |
+
"step": 20467
|
261 |
+
},
|
262 |
+
{
|
263 |
+
"epoch": 1.0,
|
264 |
+
"learning_rate": 9.572792623278878e-06,
|
265 |
+
"loss": 0.2906,
|
266 |
+
"step": 20500
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"epoch": 1.03,
|
270 |
+
"learning_rate": 9.546803611433086e-06,
|
271 |
+
"loss": 0.2525,
|
272 |
+
"step": 21000
|
273 |
+
},
|
274 |
+
{
|
275 |
+
"epoch": 1.05,
|
276 |
+
"learning_rate": 9.520814599587295e-06,
|
277 |
+
"loss": 0.2551,
|
278 |
+
"step": 21500
|
279 |
+
},
|
280 |
+
{
|
281 |
+
"epoch": 1.07,
|
282 |
+
"learning_rate": 9.494825587741504e-06,
|
283 |
+
"loss": 0.259,
|
284 |
+
"step": 22000
|
285 |
+
},
|
286 |
+
{
|
287 |
+
"epoch": 1.1,
|
288 |
+
"learning_rate": 9.468836575895712e-06,
|
289 |
+
"loss": 0.2459,
|
290 |
+
"step": 22500
|
291 |
+
},
|
292 |
+
{
|
293 |
+
"epoch": 1.12,
|
294 |
+
"learning_rate": 9.442847564049921e-06,
|
295 |
+
"loss": 0.256,
|
296 |
+
"step": 23000
|
297 |
+
},
|
298 |
+
{
|
299 |
+
"epoch": 1.15,
|
300 |
+
"learning_rate": 9.416858552204129e-06,
|
301 |
+
"loss": 0.2573,
|
302 |
+
"step": 23500
|
303 |
+
},
|
304 |
+
{
|
305 |
+
"epoch": 1.17,
|
306 |
+
"learning_rate": 9.390869540358337e-06,
|
307 |
+
"loss": 0.2465,
|
308 |
+
"step": 24000
|
309 |
+
},
|
310 |
+
{
|
311 |
+
"epoch": 1.2,
|
312 |
+
"learning_rate": 9.364880528512546e-06,
|
313 |
+
"loss": 0.2519,
|
314 |
+
"step": 24500
|
315 |
+
},
|
316 |
+
{
|
317 |
+
"epoch": 1.22,
|
318 |
+
"learning_rate": 9.338891516666754e-06,
|
319 |
+
"loss": 0.2457,
|
320 |
+
"step": 25000
|
321 |
+
},
|
322 |
+
{
|
323 |
+
"epoch": 1.25,
|
324 |
+
"learning_rate": 9.312902504820961e-06,
|
325 |
+
"loss": 0.2502,
|
326 |
+
"step": 25500
|
327 |
+
},
|
328 |
+
{
|
329 |
+
"epoch": 1.27,
|
330 |
+
"learning_rate": 9.28691349297517e-06,
|
331 |
+
"loss": 0.253,
|
332 |
+
"step": 26000
|
333 |
+
},
|
334 |
+
{
|
335 |
+
"epoch": 1.29,
|
336 |
+
"learning_rate": 9.26092448112938e-06,
|
337 |
+
"loss": 0.2453,
|
338 |
+
"step": 26500
|
339 |
+
},
|
340 |
+
{
|
341 |
+
"epoch": 1.32,
|
342 |
+
"learning_rate": 9.234935469283588e-06,
|
343 |
+
"loss": 0.2576,
|
344 |
+
"step": 27000
|
345 |
+
},
|
346 |
+
{
|
347 |
+
"epoch": 1.34,
|
348 |
+
"learning_rate": 9.208946457437797e-06,
|
349 |
+
"loss": 0.2505,
|
350 |
+
"step": 27500
|
351 |
+
},
|
352 |
+
{
|
353 |
+
"epoch": 1.37,
|
354 |
+
"learning_rate": 9.182957445592005e-06,
|
355 |
+
"loss": 0.252,
|
356 |
+
"step": 28000
|
357 |
+
},
|
358 |
+
{
|
359 |
+
"epoch": 1.39,
|
360 |
+
"learning_rate": 9.156968433746212e-06,
|
361 |
+
"loss": 0.2536,
|
362 |
+
"step": 28500
|
363 |
+
},
|
364 |
+
{
|
365 |
+
"epoch": 1.42,
|
366 |
+
"learning_rate": 9.130979421900422e-06,
|
367 |
+
"loss": 0.2369,
|
368 |
+
"step": 29000
|
369 |
+
},
|
370 |
+
{
|
371 |
+
"epoch": 1.44,
|
372 |
+
"learning_rate": 9.10499041005463e-06,
|
373 |
+
"loss": 0.2459,
|
374 |
+
"step": 29500
|
375 |
+
},
|
376 |
+
{
|
377 |
+
"epoch": 1.47,
|
378 |
+
"learning_rate": 9.079001398208837e-06,
|
379 |
+
"loss": 0.2446,
|
380 |
+
"step": 30000
|
381 |
+
},
|
382 |
+
{
|
383 |
+
"epoch": 1.49,
|
384 |
+
"learning_rate": 9.053012386363046e-06,
|
385 |
+
"loss": 0.2434,
|
386 |
+
"step": 30500
|
387 |
+
},
|
388 |
+
{
|
389 |
+
"epoch": 1.51,
|
390 |
+
"learning_rate": 9.027023374517256e-06,
|
391 |
+
"loss": 0.2493,
|
392 |
+
"step": 31000
|
393 |
+
},
|
394 |
+
{
|
395 |
+
"epoch": 1.54,
|
396 |
+
"learning_rate": 9.001034362671463e-06,
|
397 |
+
"loss": 0.2506,
|
398 |
+
"step": 31500
|
399 |
+
},
|
400 |
+
{
|
401 |
+
"epoch": 1.56,
|
402 |
+
"learning_rate": 8.975045350825672e-06,
|
403 |
+
"loss": 0.2421,
|
404 |
+
"step": 32000
|
405 |
+
},
|
406 |
+
{
|
407 |
+
"epoch": 1.59,
|
408 |
+
"learning_rate": 8.94905633897988e-06,
|
409 |
+
"loss": 0.2467,
|
410 |
+
"step": 32500
|
411 |
+
},
|
412 |
+
{
|
413 |
+
"epoch": 1.61,
|
414 |
+
"learning_rate": 8.923067327134088e-06,
|
415 |
+
"loss": 0.2439,
|
416 |
+
"step": 33000
|
417 |
+
},
|
418 |
+
{
|
419 |
+
"epoch": 1.64,
|
420 |
+
"learning_rate": 8.897078315288297e-06,
|
421 |
+
"loss": 0.2423,
|
422 |
+
"step": 33500
|
423 |
+
},
|
424 |
+
{
|
425 |
+
"epoch": 1.66,
|
426 |
+
"learning_rate": 8.871089303442505e-06,
|
427 |
+
"loss": 0.25,
|
428 |
+
"step": 34000
|
429 |
+
},
|
430 |
+
{
|
431 |
+
"epoch": 1.69,
|
432 |
+
"learning_rate": 8.845100291596712e-06,
|
433 |
+
"loss": 0.2367,
|
434 |
+
"step": 34500
|
435 |
+
},
|
436 |
+
{
|
437 |
+
"epoch": 1.71,
|
438 |
+
"learning_rate": 8.819111279750922e-06,
|
439 |
+
"loss": 0.2387,
|
440 |
+
"step": 35000
|
441 |
+
},
|
442 |
+
{
|
443 |
+
"epoch": 1.73,
|
444 |
+
"learning_rate": 8.793122267905131e-06,
|
445 |
+
"loss": 0.2455,
|
446 |
+
"step": 35500
|
447 |
+
},
|
448 |
+
{
|
449 |
+
"epoch": 1.76,
|
450 |
+
"learning_rate": 8.767133256059339e-06,
|
451 |
+
"loss": 0.2439,
|
452 |
+
"step": 36000
|
453 |
+
},
|
454 |
+
{
|
455 |
+
"epoch": 1.78,
|
456 |
+
"learning_rate": 8.741144244213548e-06,
|
457 |
+
"loss": 0.2332,
|
458 |
+
"step": 36500
|
459 |
+
},
|
460 |
+
{
|
461 |
+
"epoch": 1.81,
|
462 |
+
"learning_rate": 8.715155232367756e-06,
|
463 |
+
"loss": 0.2446,
|
464 |
+
"step": 37000
|
465 |
+
},
|
466 |
+
{
|
467 |
+
"epoch": 1.83,
|
468 |
+
"learning_rate": 8.689166220521963e-06,
|
469 |
+
"loss": 0.2389,
|
470 |
+
"step": 37500
|
471 |
+
},
|
472 |
+
{
|
473 |
+
"epoch": 1.86,
|
474 |
+
"learning_rate": 8.663177208676173e-06,
|
475 |
+
"loss": 0.2346,
|
476 |
+
"step": 38000
|
477 |
+
},
|
478 |
+
{
|
479 |
+
"epoch": 1.88,
|
480 |
+
"learning_rate": 8.63718819683038e-06,
|
481 |
+
"loss": 0.2305,
|
482 |
+
"step": 38500
|
483 |
+
},
|
484 |
+
{
|
485 |
+
"epoch": 1.91,
|
486 |
+
"learning_rate": 8.61119918498459e-06,
|
487 |
+
"loss": 0.2331,
|
488 |
+
"step": 39000
|
489 |
+
},
|
490 |
+
{
|
491 |
+
"epoch": 1.93,
|
492 |
+
"learning_rate": 8.585210173138797e-06,
|
493 |
+
"loss": 0.2393,
|
494 |
+
"step": 39500
|
495 |
+
},
|
496 |
+
{
|
497 |
+
"epoch": 1.95,
|
498 |
+
"learning_rate": 8.559221161293007e-06,
|
499 |
+
"loss": 0.2371,
|
500 |
+
"step": 40000
|
501 |
+
},
|
502 |
+
{
|
503 |
+
"epoch": 1.98,
|
504 |
+
"learning_rate": 8.533232149447214e-06,
|
505 |
+
"loss": 0.2291,
|
506 |
+
"step": 40500
|
507 |
+
},
|
508 |
+
{
|
509 |
+
"epoch": 2.0,
|
510 |
+
"eval_accuracy": 0.8982272914662636,
|
511 |
+
"eval_averaged_scores": 0.8808907930119556,
|
512 |
+
"eval_f1": 0.8635542945576477,
|
513 |
+
"eval_loss": 0.27606385946273804,
|
514 |
+
"eval_runtime": 36.4573,
|
515 |
+
"eval_samples_per_second": 998.018,
|
516 |
+
"eval_steps_per_second": 62.402,
|
517 |
+
"step": 40934
|
518 |
+
},
|
519 |
+
{
|
520 |
+
"epoch": 2.0,
|
521 |
+
"learning_rate": 8.507243137601424e-06,
|
522 |
+
"loss": 0.2293,
|
523 |
+
"step": 41000
|
524 |
+
},
|
525 |
+
{
|
526 |
+
"epoch": 2.03,
|
527 |
+
"learning_rate": 8.481254125755631e-06,
|
528 |
+
"loss": 0.1943,
|
529 |
+
"step": 41500
|
530 |
+
},
|
531 |
+
{
|
532 |
+
"epoch": 2.05,
|
533 |
+
"learning_rate": 8.455265113909839e-06,
|
534 |
+
"loss": 0.2058,
|
535 |
+
"step": 42000
|
536 |
+
},
|
537 |
+
{
|
538 |
+
"epoch": 2.08,
|
539 |
+
"learning_rate": 8.429276102064048e-06,
|
540 |
+
"loss": 0.2089,
|
541 |
+
"step": 42500
|
542 |
+
},
|
543 |
+
{
|
544 |
+
"epoch": 2.1,
|
545 |
+
"learning_rate": 8.403287090218256e-06,
|
546 |
+
"loss": 0.2043,
|
547 |
+
"step": 43000
|
548 |
+
},
|
549 |
+
{
|
550 |
+
"epoch": 2.13,
|
551 |
+
"learning_rate": 8.377298078372465e-06,
|
552 |
+
"loss": 0.2005,
|
553 |
+
"step": 43500
|
554 |
+
},
|
555 |
+
{
|
556 |
+
"epoch": 2.15,
|
557 |
+
"learning_rate": 8.351309066526673e-06,
|
558 |
+
"loss": 0.2061,
|
559 |
+
"step": 44000
|
560 |
+
},
|
561 |
+
{
|
562 |
+
"epoch": 2.17,
|
563 |
+
"learning_rate": 8.325320054680882e-06,
|
564 |
+
"loss": 0.2058,
|
565 |
+
"step": 44500
|
566 |
+
},
|
567 |
+
{
|
568 |
+
"epoch": 2.2,
|
569 |
+
"learning_rate": 8.29933104283509e-06,
|
570 |
+
"loss": 0.2114,
|
571 |
+
"step": 45000
|
572 |
+
},
|
573 |
+
{
|
574 |
+
"epoch": 2.22,
|
575 |
+
"learning_rate": 8.2733420309893e-06,
|
576 |
+
"loss": 0.2008,
|
577 |
+
"step": 45500
|
578 |
+
},
|
579 |
+
{
|
580 |
+
"epoch": 2.25,
|
581 |
+
"learning_rate": 8.247353019143507e-06,
|
582 |
+
"loss": 0.2198,
|
583 |
+
"step": 46000
|
584 |
+
},
|
585 |
+
{
|
586 |
+
"epoch": 2.27,
|
587 |
+
"learning_rate": 8.221364007297714e-06,
|
588 |
+
"loss": 0.2153,
|
589 |
+
"step": 46500
|
590 |
+
},
|
591 |
+
{
|
592 |
+
"epoch": 2.3,
|
593 |
+
"learning_rate": 8.195374995451924e-06,
|
594 |
+
"loss": 0.2116,
|
595 |
+
"step": 47000
|
596 |
+
},
|
597 |
+
{
|
598 |
+
"epoch": 2.32,
|
599 |
+
"learning_rate": 8.169385983606131e-06,
|
600 |
+
"loss": 0.1998,
|
601 |
+
"step": 47500
|
602 |
+
},
|
603 |
+
{
|
604 |
+
"epoch": 2.35,
|
605 |
+
"learning_rate": 8.14339697176034e-06,
|
606 |
+
"loss": 0.2122,
|
607 |
+
"step": 48000
|
608 |
+
},
|
609 |
+
{
|
610 |
+
"epoch": 2.37,
|
611 |
+
"learning_rate": 8.117407959914548e-06,
|
612 |
+
"loss": 0.2036,
|
613 |
+
"step": 48500
|
614 |
+
},
|
615 |
+
{
|
616 |
+
"epoch": 2.39,
|
617 |
+
"learning_rate": 8.091418948068758e-06,
|
618 |
+
"loss": 0.2065,
|
619 |
+
"step": 49000
|
620 |
+
},
|
621 |
+
{
|
622 |
+
"epoch": 2.42,
|
623 |
+
"learning_rate": 8.065429936222965e-06,
|
624 |
+
"loss": 0.2038,
|
625 |
+
"step": 49500
|
626 |
+
},
|
627 |
+
{
|
628 |
+
"epoch": 2.44,
|
629 |
+
"learning_rate": 8.039440924377175e-06,
|
630 |
+
"loss": 0.207,
|
631 |
+
"step": 50000
|
632 |
+
},
|
633 |
+
{
|
634 |
+
"epoch": 2.47,
|
635 |
+
"learning_rate": 8.013451912531382e-06,
|
636 |
+
"loss": 0.2046,
|
637 |
+
"step": 50500
|
638 |
+
},
|
639 |
+
{
|
640 |
+
"epoch": 2.49,
|
641 |
+
"learning_rate": 7.98746290068559e-06,
|
642 |
+
"loss": 0.2116,
|
643 |
+
"step": 51000
|
644 |
+
},
|
645 |
+
{
|
646 |
+
"epoch": 2.52,
|
647 |
+
"learning_rate": 7.9614738888398e-06,
|
648 |
+
"loss": 0.2072,
|
649 |
+
"step": 51500
|
650 |
+
},
|
651 |
+
{
|
652 |
+
"epoch": 2.54,
|
653 |
+
"learning_rate": 7.935484876994007e-06,
|
654 |
+
"loss": 0.2117,
|
655 |
+
"step": 52000
|
656 |
+
},
|
657 |
+
{
|
658 |
+
"epoch": 2.57,
|
659 |
+
"learning_rate": 7.909495865148216e-06,
|
660 |
+
"loss": 0.2197,
|
661 |
+
"step": 52500
|
662 |
+
},
|
663 |
+
{
|
664 |
+
"epoch": 2.59,
|
665 |
+
"learning_rate": 7.883506853302424e-06,
|
666 |
+
"loss": 0.2037,
|
667 |
+
"step": 53000
|
668 |
+
},
|
669 |
+
{
|
670 |
+
"epoch": 2.61,
|
671 |
+
"learning_rate": 7.857517841456633e-06,
|
672 |
+
"loss": 0.207,
|
673 |
+
"step": 53500
|
674 |
+
},
|
675 |
+
{
|
676 |
+
"epoch": 2.64,
|
677 |
+
"learning_rate": 7.831528829610841e-06,
|
678 |
+
"loss": 0.219,
|
679 |
+
"step": 54000
|
680 |
+
},
|
681 |
+
{
|
682 |
+
"epoch": 2.66,
|
683 |
+
"learning_rate": 7.80553981776505e-06,
|
684 |
+
"loss": 0.2082,
|
685 |
+
"step": 54500
|
686 |
+
},
|
687 |
+
{
|
688 |
+
"epoch": 2.69,
|
689 |
+
"learning_rate": 7.779550805919258e-06,
|
690 |
+
"loss": 0.201,
|
691 |
+
"step": 55000
|
692 |
+
},
|
693 |
+
{
|
694 |
+
"epoch": 2.71,
|
695 |
+
"learning_rate": 7.753561794073467e-06,
|
696 |
+
"loss": 0.2127,
|
697 |
+
"step": 55500
|
698 |
+
},
|
699 |
+
{
|
700 |
+
"epoch": 2.74,
|
701 |
+
"learning_rate": 7.727572782227675e-06,
|
702 |
+
"loss": 0.2165,
|
703 |
+
"step": 56000
|
704 |
+
},
|
705 |
+
{
|
706 |
+
"epoch": 2.76,
|
707 |
+
"learning_rate": 7.701583770381883e-06,
|
708 |
+
"loss": 0.203,
|
709 |
+
"step": 56500
|
710 |
+
},
|
711 |
+
{
|
712 |
+
"epoch": 2.78,
|
713 |
+
"learning_rate": 7.675594758536092e-06,
|
714 |
+
"loss": 0.2147,
|
715 |
+
"step": 57000
|
716 |
+
},
|
717 |
+
{
|
718 |
+
"epoch": 2.81,
|
719 |
+
"learning_rate": 7.6496057466903e-06,
|
720 |
+
"loss": 0.2006,
|
721 |
+
"step": 57500
|
722 |
+
},
|
723 |
+
{
|
724 |
+
"epoch": 2.83,
|
725 |
+
"learning_rate": 7.623616734844508e-06,
|
726 |
+
"loss": 0.2063,
|
727 |
+
"step": 58000
|
728 |
+
},
|
729 |
+
{
|
730 |
+
"epoch": 2.86,
|
731 |
+
"learning_rate": 7.597627722998717e-06,
|
732 |
+
"loss": 0.2046,
|
733 |
+
"step": 58500
|
734 |
+
},
|
735 |
+
{
|
736 |
+
"epoch": 2.88,
|
737 |
+
"learning_rate": 7.571638711152926e-06,
|
738 |
+
"loss": 0.1942,
|
739 |
+
"step": 59000
|
740 |
+
},
|
741 |
+
{
|
742 |
+
"epoch": 2.91,
|
743 |
+
"learning_rate": 7.5456496993071335e-06,
|
744 |
+
"loss": 0.1976,
|
745 |
+
"step": 59500
|
746 |
+
},
|
747 |
+
{
|
748 |
+
"epoch": 2.93,
|
749 |
+
"learning_rate": 7.519660687461342e-06,
|
750 |
+
"loss": 0.2047,
|
751 |
+
"step": 60000
|
752 |
+
},
|
753 |
+
{
|
754 |
+
"epoch": 2.96,
|
755 |
+
"learning_rate": 7.4936716756155505e-06,
|
756 |
+
"loss": 0.2047,
|
757 |
+
"step": 60500
|
758 |
+
},
|
759 |
+
{
|
760 |
+
"epoch": 2.98,
|
761 |
+
"learning_rate": 7.467682663769759e-06,
|
762 |
+
"loss": 0.2035,
|
763 |
+
"step": 61000
|
764 |
+
},
|
765 |
+
{
|
766 |
+
"epoch": 3.0,
|
767 |
+
"eval_accuracy": 0.9074618661536348,
|
768 |
+
"eval_averaged_scores": 0.8925016186560136,
|
769 |
+
"eval_f1": 0.8775413711583925,
|
770 |
+
"eval_loss": 0.2975204288959503,
|
771 |
+
"eval_runtime": 36.3008,
|
772 |
+
"eval_samples_per_second": 1002.319,
|
773 |
+
"eval_steps_per_second": 62.671,
|
774 |
+
"step": 61401
|
775 |
+
},
|
+    [41 per-step logging entries elided: one every 500 steps from step 61500 to step 81500 (epoch 3.0–3.98), "loss" fluctuating between 0.1738 and 0.1969 while "learning_rate" decays linearly from 7.441693651923967e-06 to 6.402133178092304e-06; too long to render here, see raw diff]
+    {
+      "epoch": 4.0,
+      "eval_accuracy": 0.9094407035866429,
+      "eval_averaged_scores": 0.8949438560203089,
+      "eval_f1": 0.8804470084539749,
+      "eval_loss": 0.3116997480392456,
+      "eval_runtime": 36.2351,
+      "eval_samples_per_second": 1004.137,
+      "eval_steps_per_second": 62.784,
+      "step": 81868
+    },
+    [41 per-step logging entries elided: steps 82000–102000 (epoch 4.01–4.98), "loss" between 0.1467 and 0.1813, "learning_rate" decaying linearly from 6.376144166246512e-06 to 5.3365836924148475e-06; see raw diff]
+    {
+      "epoch": 5.0,
+      "eval_accuracy": 0.9078741239521781,
+      "eval_averaged_scores": 0.893031472907116,
+      "eval_f1": 0.8781888218620539,
+      "eval_loss": 0.3768274188041687,
+      "eval_runtime": 36.2704,
+      "eval_samples_per_second": 1003.16,
+      "eval_steps_per_second": 62.723,
+      "step": 102335
+    },
+    [41 per-step logging entries elided: steps 102500–122500 (epoch 5.01–5.99), "loss" between 0.1281 and 0.1649, "learning_rate" decaying linearly from 5.310594680569056e-06 to 4.271034206737391e-06; see raw diff]
+    {
+      "epoch": 6.0,
+      "eval_accuracy": 0.9117768311117218,
+      "eval_averaged_scores": 0.8970713531108301,
+      "eval_f1": 0.8823658751099384,
+      "eval_loss": 0.44876691699028015,
+      "eval_runtime": 36.3498,
+      "eval_samples_per_second": 1000.969,
+      "eval_steps_per_second": 62.586,
+      "step": 122802
+    },
+    [41 per-step logging entries elided: steps 123000–143000 (epoch 6.01–6.99), "loss" between 0.1076 and 0.1314, "learning_rate" decaying linearly from 4.2450451948916e-06 to 3.205484721059936e-06; see raw diff]
+    {
+      "epoch": 7.0,
+      "eval_accuracy": 0.9125738628555724,
+      "eval_averaged_scores": 0.8981269870097512,
+      "eval_f1": 0.8836801111639301,
+      "eval_loss": 0.48590707778930664,
+      "eval_runtime": 36.5067,
+      "eval_samples_per_second": 996.667,
+      "eval_steps_per_second": 62.317,
+      "step": 143269
+    },
+    [41 per-step logging entries elided: steps 143500–163500 (epoch 7.01–7.99), "loss" between 0.0806 and 0.1125, "learning_rate" decaying linearly from 3.179495709214144e-06 to 2.1399352353824803e-06; see raw diff]
+    {
+      "epoch": 8.0,
+      "eval_accuracy": 0.9120241857908479,
+      "eval_averaged_scores": 0.897542067688103,
+      "eval_f1": 0.8830599495853579,
+      "eval_loss": 0.543989360332489,
+      "eval_runtime": 36.3255,
+      "eval_samples_per_second": 1001.637,
+      "eval_steps_per_second": 62.628,
+      "step": 163736
+    },
+    [41 per-step logging entries elided: steps 164000–184000 (epoch 8.01–8.99), "loss" between 0.0656 and 0.0881, "learning_rate" decaying linearly from 2.113946223536689e-06 to 1.0743857497050248e-06; see raw diff]
+    {
+      "epoch": 9.0,
+      "eval_accuracy": 0.9127112821217535,
+      "eval_averaged_scores": 0.8981017892633713,
+      "eval_f1": 0.883492296404989,
+      "eval_loss": 0.5674276351928711,
+      "eval_runtime": 36.287,
+      "eval_samples_per_second": 1002.701,
+      "eval_steps_per_second": 62.695,
+      "step": 184203
+    },
+    [41 per-step logging entries elided: steps 184500–204500 (epoch 9.01–9.99), "loss" between 0.0507 and 0.0783, "learning_rate" decaying linearly from 1.048396737859233e-06 to 8.836264027569144e-09; see raw diff]
+    {
+      "epoch": 10.0,
+      "eval_accuracy": 0.913398378452659,
+      "eval_averaged_scores": 0.8990160346845362,
+      "eval_f1": 0.8846336909164134,
+      "eval_loss": 0.611711859703064,
+      "eval_runtime": 36.3229,
+      "eval_samples_per_second": 1001.71,
+      "eval_steps_per_second": 62.633,
+      "step": 204670
+    },
+    {
+      "epoch": 10.0,
+      "step": 204670,
+      "total_flos": 1.0022032836134272e+17,
+      "train_loss": 0.16490516577327513,
+      "train_runtime": 10899.0474,
+      "train_samples_per_second": 300.449,
+      "train_steps_per_second": 18.779
+    }
+  ],
+  "logging_steps": 500,
+  "max_steps": 204670,
+  "num_train_epochs": 10,
+  "save_steps": 500,
+  "total_flos": 1.0022032836134272e+17,
+  "trial_name": null,
+  "trial_params": null
+}
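A note on the metrics above: in every evaluation entry shown here, "eval_averaged_scores" is the arithmetic mean of "eval_accuracy" and "eval_f1" (e.g. (0.9074618661536348 + 0.8775413711583925) / 2 = 0.8925016186560136 at epoch 3). A minimal sketch for extracting these per-epoch QQP dev metrics, assuming only that the checkpoint has been downloaded to the path shown in this diff:

```python
import json

# "log_history" mixes per-step loss entries with per-epoch evaluation entries;
# the evaluation entries are the ones carrying "eval_*" keys.
with open("qqp/roberta-base_lr1e-05/trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    if "eval_accuracy" in entry:
        # Sanity check: the averaged score is the mean of accuracy and F1.
        avg = (entry["eval_accuracy"] + entry["eval_f1"]) / 2
        assert abs(avg - entry["eval_averaged_scores"]) < 1e-9
        print(entry["epoch"], entry["step"], entry["eval_accuracy"], entry["eval_f1"])
```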
qqp/roberta-base_lr1e-05/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:01997f308ff89f5876f115629fa98b97af4516c164b8c29077f20149379ef80d
+size 4091
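training_args.bin above is a small (4091 bytes per the LFS pointer) pickled transformers.TrainingArguments object rather than model weights. A hedged sketch for inspecting it: torch.load needs weights_only=False on recent PyTorch because the file is a pickled Python object, and the printed attributes are standard TrainingArguments fields, not values confirmed by this diff.

```python
import torch

# Unpickle the saved TrainingArguments (this is not a tensor state dict).
args = torch.load("qqp/roberta-base_lr1e-05/training_args.bin", weights_only=False)

print(args.learning_rate)             # should match the 1e-05 in the directory name
print(args.num_train_epochs)          # should match "num_train_epochs": 10 above
print(args.per_device_train_batch_size)
```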
qqp/roberta-base_lr1e-05/vocab.json
ADDED
The diff for this file is too large to render.
See raw diff
rte/roberta-base_lr1e-05/config.json
ADDED
@@ -0,0 +1,27 @@
+{
+  "_name_or_path": "/mnt/data/yule/.cache/roberta-base",
+  "architectures": [
+    "RobertaForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "classifier_dropout": null,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "roberta",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.33.1",
+  "type_vocab_size": 1,
+  "use_cache": true,
+  "vocab_size": 50265
+}
rte/roberta-base_lr1e-05/merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
rte/roberta-base_lr1e-05/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc62f9febb6e34f6a9209213f14a89ac2056f08d8a83c4561136d56e1d61e027
+size 498654833
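pytorch_model.bin is stored as a Git LFS pointer (the three lines above), so the repository needs to be fetched with git-lfs enabled before the roughly 500 MB weight file is actually materialized. Once it is, a minimal loading sketch using the standard transformers API, with the directory path taken from this diff:

```python
from transformers import AutoModelForSequenceClassification

# config.json in this directory declares RobertaForSequenceClassification,
# so the Auto class resolves to that architecture.
model = AutoModelForSequenceClassification.from_pretrained("rte/roberta-base_lr1e-05")
model.eval()  # inference mode
```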
rte/roberta-base_lr1e-05/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
+}
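The special-token map above is the stock RoBERTa one. As a quick check, assuming the usual AutoTokenizer entry point, the tokenizer loaded from this directory resolves to exactly these tokens:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("rte/roberta-base_lr1e-05")
# These values come straight from special_tokens_map.json above.
print(tok.bos_token, tok.cls_token, tok.sep_token, tok.mask_token, tok.pad_token)
# -> <s> <s> </s> <mask> <pad>
```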