haryoaw committed
Commit 211cf08
1 parent: 1991aec

Initial Commit

Files changed (4)
  1. README.md +111 -111
  2. eval_result_ner.json +1 -1
  3. model.safetensors +1 -1
  4. training_args.bin +1 -1
README.md CHANGED
@@ -1,14 +1,14 @@
  ---
- base_model: FacebookAI/xlm-roberta-base
  library_name: transformers
  license: mit
  metrics:
  - precision
  - recall
  - f1
  - accuracy
- tags:
- - generated_from_trainer
  model-index:
  - name: scenario-non-kd-scr-ner-half-xlmr_data-univner_full44
    results: []
@@ -21,11 +21,11 @@ should probably proofread and complete it, then remove this comment. -->

  This model is a fine-tuned version of [FacebookAI/xlm-roberta-base](https://huggingface.co/FacebookAI/xlm-roberta-base) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.3478
- - Precision: 0.5257
- - Recall: 0.5320
- - F1: 0.5288
- - Accuracy: 0.9561

  ## Model description

@@ -56,109 +56,109 @@ The following hyperparameters were used during training:

  | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
  |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
- | 0.3554 | 0.2910 | 500 | 0.2916 | 0.4168 | 0.0908 | 0.1491 | 0.9279 |
- | 0.2763 | 0.5821 | 1000 | 0.2493 | 0.3002 | 0.1902 | 0.2328 | 0.9322 |
- | 0.2425 | 0.8731 | 1500 | 0.2351 | 0.3275 | 0.2192 | 0.2626 | 0.9356 |
- | 0.2192 | 1.1641 | 2000 | 0.2244 | 0.3158 | 0.2554 | 0.2824 | 0.9368 |
- | 0.2082 | 1.4552 | 2500 | 0.2177 | 0.3400 | 0.2901 | 0.3131 | 0.9380 |
- | 0.1953 | 1.7462 | 3000 | 0.2113 | 0.3686 | 0.2952 | 0.3279 | 0.9405 |
- | 0.1835 | 2.0373 | 3500 | 0.2088 | 0.3312 | 0.3500 | 0.3404 | 0.9382 |
- | 0.1617 | 2.3283 | 4000 | 0.2108 | 0.3719 | 0.3216 | 0.3449 | 0.9412 |
- | 0.1654 | 2.6193 | 4500 | 0.2043 | 0.3688 | 0.3471 | 0.3577 | 0.9419 |
- | 0.1597 | 2.9104 | 5000 | 0.2023 | 0.3730 | 0.3584 | 0.3655 | 0.9421 |
- | 0.1488 | 3.2014 | 5500 | 0.2067 | 0.3939 | 0.3473 | 0.3691 | 0.9433 |
- | 0.1425 | 3.4924 | 6000 | 0.2065 | 0.4095 | 0.3556 | 0.3807 | 0.9436 |
- | 0.1378 | 3.7835 | 6500 | 0.2033 | 0.4159 | 0.3669 | 0.3899 | 0.9448 |
- | 0.1343 | 4.0745 | 7000 | 0.2028 | 0.3799 | 0.3907 | 0.3852 | 0.9436 |
- | 0.1231 | 4.3655 | 7500 | 0.2058 | 0.4193 | 0.3926 | 0.4055 | 0.9451 |
- | 0.1217 | 4.6566 | 8000 | 0.1968 | 0.4181 | 0.4053 | 0.4116 | 0.9456 |
- | 0.1182 | 4.9476 | 8500 | 0.1929 | 0.4311 | 0.4119 | 0.4213 | 0.9470 |
- | 0.1045 | 5.2386 | 9000 | 0.2040 | 0.4373 | 0.4124 | 0.4244 | 0.9473 |
- | 0.1006 | 5.5297 | 9500 | 0.1949 | 0.4527 | 0.4313 | 0.4417 | 0.9478 |
- | 0.1001 | 5.8207 | 10000 | 0.1965 | 0.4267 | 0.4441 | 0.4352 | 0.9475 |
- | 0.0946 | 6.1118 | 10500 | 0.1985 | 0.4480 | 0.4650 | 0.4564 | 0.9492 |
- | 0.0835 | 6.4028 | 11000 | 0.2020 | 0.4538 | 0.4601 | 0.4569 | 0.9499 |
- | 0.0831 | 6.6938 | 11500 | 0.1947 | 0.4471 | 0.4727 | 0.4595 | 0.9491 |
- | 0.0795 | 6.9849 | 12000 | 0.2028 | 0.4554 | 0.4497 | 0.4526 | 0.9505 |
- | 0.0714 | 7.2759 | 12500 | 0.2064 | 0.4607 | 0.4604 | 0.4606 | 0.9498 |
- | 0.0697 | 7.5669 | 13000 | 0.2124 | 0.4511 | 0.4640 | 0.4575 | 0.9505 |
- | 0.0686 | 7.8580 | 13500 | 0.2051 | 0.4680 | 0.4869 | 0.4773 | 0.9515 |
- | 0.0616 | 8.1490 | 14000 | 0.2194 | 0.4849 | 0.4675 | 0.4760 | 0.9516 |
- | 0.0581 | 8.4400 | 14500 | 0.2106 | 0.4639 | 0.5009 | 0.4817 | 0.9508 |
- | 0.0603 | 8.7311 | 15000 | 0.2100 | 0.4767 | 0.5044 | 0.4902 | 0.9519 |
- | 0.0549 | 9.0221 | 15500 | 0.2195 | 0.4844 | 0.4869 | 0.4856 | 0.9523 |
- | 0.048 | 9.3132 | 16000 | 0.2180 | 0.4688 | 0.5103 | 0.4887 | 0.9521 |
- | 0.048 | 9.6042 | 16500 | 0.2249 | 0.4780 | 0.4839 | 0.4809 | 0.9528 |
- | 0.0468 | 9.8952 | 17000 | 0.2286 | 0.4935 | 0.4890 | 0.4912 | 0.9538 |
- | 0.0416 | 10.1863 | 17500 | 0.2258 | 0.4938 | 0.5135 | 0.5035 | 0.9537 |
- | 0.0385 | 10.4773 | 18000 | 0.2345 | 0.4984 | 0.4893 | 0.4938 | 0.9536 |
- | 0.0409 | 10.7683 | 18500 | 0.2397 | 0.4958 | 0.5073 | 0.5015 | 0.9533 |
- | 0.0363 | 11.0594 | 19000 | 0.2347 | 0.4914 | 0.5243 | 0.5073 | 0.9536 |
- | 0.0328 | 11.3504 | 19500 | 0.2410 | 0.4951 | 0.5220 | 0.5082 | 0.9540 |
- | 0.0325 | 11.6414 | 20000 | 0.2507 | 0.4863 | 0.5024 | 0.4942 | 0.9539 |
- | 0.0338 | 11.9325 | 20500 | 0.2406 | 0.4981 | 0.5242 | 0.5108 | 0.9541 |
- | 0.0287 | 12.2235 | 21000 | 0.2499 | 0.4945 | 0.5146 | 0.5043 | 0.9541 |
- | 0.0279 | 12.5146 | 21500 | 0.2638 | 0.5075 | 0.4959 | 0.5016 | 0.9543 |
- | 0.0279 | 12.8056 | 22000 | 0.2546 | 0.5087 | 0.5024 | 0.5055 | 0.9541 |
- | 0.0276 | 13.0966 | 22500 | 0.2601 | 0.5141 | 0.5131 | 0.5136 | 0.9550 |
- | 0.0249 | 13.3877 | 23000 | 0.2611 | 0.5102 | 0.5170 | 0.5135 | 0.9550 |
- | 0.0234 | 13.6787 | 23500 | 0.2614 | 0.5116 | 0.5159 | 0.5138 | 0.9551 |
- | 0.0235 | 13.9697 | 24000 | 0.2687 | 0.5183 | 0.5083 | 0.5133 | 0.9553 |
- | 0.0203 | 14.2608 | 24500 | 0.2648 | 0.4984 | 0.5340 | 0.5156 | 0.9543 |
- | 0.0199 | 14.5518 | 25000 | 0.2713 | 0.5139 | 0.5109 | 0.5124 | 0.9552 |
- | 0.0214 | 14.8428 | 25500 | 0.2741 | 0.5259 | 0.5141 | 0.5199 | 0.9557 |
- | 0.0193 | 15.1339 | 26000 | 0.2790 | 0.5207 | 0.4966 | 0.5084 | 0.9549 |
- | 0.0183 | 15.4249 | 26500 | 0.2888 | 0.5184 | 0.4947 | 0.5063 | 0.9553 |
- | 0.0177 | 15.7159 | 27000 | 0.2812 | 0.5134 | 0.5343 | 0.5237 | 0.9554 |
- | 0.0175 | 16.0070 | 27500 | 0.2809 | 0.5132 | 0.5360 | 0.5243 | 0.9551 |
- | 0.0149 | 16.2980 | 28000 | 0.2875 | 0.5074 | 0.5322 | 0.5195 | 0.9550 |
- | 0.016 | 16.5891 | 28500 | 0.2865 | 0.5092 | 0.5263 | 0.5176 | 0.9553 |
- | 0.0157 | 16.8801 | 29000 | 0.2915 | 0.5090 | 0.5295 | 0.5191 | 0.9551 |
- | 0.0154 | 17.1711 | 29500 | 0.2938 | 0.5068 | 0.5335 | 0.5198 | 0.9549 |
- | 0.0138 | 17.4622 | 30000 | 0.3023 | 0.5299 | 0.5110 | 0.5203 | 0.9555 |
- | 0.014 | 17.7532 | 30500 | 0.3004 | 0.5190 | 0.5308 | 0.5248 | 0.9556 |
- | 0.0126 | 18.0442 | 31000 | 0.2922 | 0.5071 | 0.5413 | 0.5237 | 0.9545 |
- | 0.0119 | 18.3353 | 31500 | 0.3025 | 0.4995 | 0.5406 | 0.5192 | 0.9551 |
- | 0.012 | 18.6263 | 32000 | 0.3011 | 0.5145 | 0.5330 | 0.5236 | 0.9554 |
- | 0.0119 | 18.9173 | 32500 | 0.3092 | 0.5191 | 0.5243 | 0.5217 | 0.9558 |
- | 0.011 | 19.2084 | 33000 | 0.3110 | 0.5086 | 0.5308 | 0.5194 | 0.9555 |
- | 0.0113 | 19.4994 | 33500 | 0.3055 | 0.5235 | 0.5258 | 0.5246 | 0.9556 |
- | 0.0105 | 19.7905 | 34000 | 0.3103 | 0.5226 | 0.5286 | 0.5256 | 0.9556 |
- | 0.0092 | 20.0815 | 34500 | 0.3109 | 0.5232 | 0.5346 | 0.5288 | 0.9560 |
- | 0.0102 | 20.3725 | 35000 | 0.3121 | 0.5184 | 0.5341 | 0.5262 | 0.9561 |
- | 0.0097 | 20.6636 | 35500 | 0.3115 | 0.5283 | 0.5243 | 0.5263 | 0.9557 |
- | 0.0091 | 20.9546 | 36000 | 0.3145 | 0.5104 | 0.5380 | 0.5238 | 0.9556 |
- | 0.0087 | 21.2456 | 36500 | 0.3167 | 0.5120 | 0.5363 | 0.5239 | 0.9553 |
- | 0.0081 | 21.5367 | 37000 | 0.3181 | 0.5150 | 0.5360 | 0.5253 | 0.9555 |
- | 0.0087 | 21.8277 | 37500 | 0.3172 | 0.5281 | 0.5265 | 0.5273 | 0.9562 |
- | 0.0091 | 22.1187 | 38000 | 0.3181 | 0.5168 | 0.5348 | 0.5257 | 0.9559 |
- | 0.0068 | 22.4098 | 38500 | 0.3242 | 0.5238 | 0.5334 | 0.5286 | 0.9564 |
- | 0.008 | 22.7008 | 39000 | 0.3280 | 0.5197 | 0.5317 | 0.5256 | 0.9559 |
- | 0.0078 | 22.9919 | 39500 | 0.3255 | 0.5175 | 0.5343 | 0.5258 | 0.9559 |
- | 0.0076 | 23.2829 | 40000 | 0.3243 | 0.5117 | 0.5457 | 0.5281 | 0.9555 |
- | 0.0062 | 23.5739 | 40500 | 0.3264 | 0.5253 | 0.5296 | 0.5274 | 0.9562 |
- | 0.0076 | 23.8650 | 41000 | 0.3294 | 0.5316 | 0.5250 | 0.5283 | 0.9562 |
- | 0.0063 | 24.1560 | 41500 | 0.3366 | 0.5266 | 0.5282 | 0.5274 | 0.9563 |
- | 0.0064 | 24.4470 | 42000 | 0.3331 | 0.5224 | 0.5273 | 0.5249 | 0.9559 |
- | 0.0068 | 24.7381 | 42500 | 0.3335 | 0.5231 | 0.5397 | 0.5313 | 0.9561 |
- | 0.0061 | 25.0291 | 43000 | 0.3363 | 0.5285 | 0.5338 | 0.5312 | 0.9562 |
- | 0.0059 | 25.3201 | 43500 | 0.3380 | 0.5183 | 0.5387 | 0.5283 | 0.9560 |
- | 0.006 | 25.6112 | 44000 | 0.3453 | 0.5289 | 0.5126 | 0.5206 | 0.9560 |
- | 0.0056 | 25.9022 | 44500 | 0.3372 | 0.5257 | 0.5344 | 0.5300 | 0.9558 |
- | 0.0056 | 26.1932 | 45000 | 0.3329 | 0.5220 | 0.5478 | 0.5346 | 0.9557 |
- | 0.0054 | 26.4843 | 45500 | 0.3430 | 0.5232 | 0.5289 | 0.5260 | 0.9561 |
- | 0.0056 | 26.7753 | 46000 | 0.3417 | 0.5224 | 0.5318 | 0.5271 | 0.9559 |
- | 0.0051 | 27.0664 | 46500 | 0.3407 | 0.5208 | 0.5380 | 0.5293 | 0.9560 |
- | 0.0046 | 27.3574 | 47000 | 0.3439 | 0.5228 | 0.5382 | 0.5304 | 0.9559 |
- | 0.0056 | 27.6484 | 47500 | 0.3452 | 0.5253 | 0.5348 | 0.5300 | 0.9561 |
- | 0.0049 | 27.9395 | 48000 | 0.3484 | 0.5201 | 0.5317 | 0.5258 | 0.9561 |
- | 0.005 | 28.2305 | 48500 | 0.3473 | 0.5278 | 0.5304 | 0.5291 | 0.9561 |
- | 0.0047 | 28.5215 | 49000 | 0.3472 | 0.5265 | 0.5361 | 0.5313 | 0.9560 |
- | 0.0044 | 28.8126 | 49500 | 0.3470 | 0.5223 | 0.5364 | 0.5293 | 0.9560 |
- | 0.0047 | 29.1036 | 50000 | 0.3487 | 0.5243 | 0.5298 | 0.5271 | 0.9560 |
- | 0.0046 | 29.3946 | 50500 | 0.3479 | 0.5264 | 0.5314 | 0.5289 | 0.9560 |
- | 0.0044 | 29.6857 | 51000 | 0.3467 | 0.5259 | 0.5354 | 0.5306 | 0.9560 |
- | 0.0047 | 29.9767 | 51500 | 0.3478 | 0.5257 | 0.5320 | 0.5288 | 0.9561 |


  ### Framework versions
 
  ---
  library_name: transformers
  license: mit
+ base_model: FacebookAI/xlm-roberta-base
+ tags:
+ - generated_from_trainer
  metrics:
  - precision
  - recall
  - f1
  - accuracy
  model-index:
  - name: scenario-non-kd-scr-ner-half-xlmr_data-univner_full44
    results: []
 

  This model is a fine-tuned version of [FacebookAI/xlm-roberta-base](https://huggingface.co/FacebookAI/xlm-roberta-base) on the None dataset.
  It achieves the following results on the evaluation set:
+ - Loss: 0.3603
+ - Precision: 0.5290
+ - Recall: 0.5350
+ - F1: 0.5320
+ - Accuracy: 0.9557

  ## Model description
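The card itself does not include a usage snippet, so here is a minimal, hedged inference sketch: it loads this checkpoint with the `transformers` token-classification pipeline. The repo id below is an assumption pieced together from the committer name and the `model-index` name above; adjust it if the model is hosted elsewhere.

```python
# Minimal inference sketch (not part of the original card).
# Assumption: the checkpoint is published under the committer's namespace.
from transformers import pipeline

repo_id = "haryoaw/scenario-non-kd-scr-ner-half-xlmr_data-univner_full44"  # assumed repo id

ner = pipeline(
    "token-classification",
    model=repo_id,
    aggregation_strategy="simple",  # merge sub-word pieces into whole entity spans
)

print(ner("Barack Obama visited Jakarta last week."))
```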
 

  | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
  |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
+ | 0.3576 | 0.2910 | 500 | 0.2944 | 0.4546 | 0.0903 | 0.1507 | 0.9285 |
+ | 0.2752 | 0.5821 | 1000 | 0.2516 | 0.2788 | 0.1799 | 0.2187 | 0.9317 |
+ | 0.2421 | 0.8731 | 1500 | 0.2433 | 0.3406 | 0.2018 | 0.2535 | 0.9349 |
+ | 0.2184 | 1.1641 | 2000 | 0.2271 | 0.3200 | 0.2548 | 0.2837 | 0.9368 |
+ | 0.2079 | 1.4552 | 2500 | 0.2195 | 0.3445 | 0.2835 | 0.3110 | 0.9378 |
+ | 0.1943 | 1.7462 | 3000 | 0.2126 | 0.3641 | 0.2946 | 0.3257 | 0.9400 |
+ | 0.183 | 2.0373 | 3500 | 0.2067 | 0.3467 | 0.3354 | 0.3410 | 0.9394 |
+ | 0.1609 | 2.3283 | 4000 | 0.2107 | 0.3788 | 0.3251 | 0.3499 | 0.9414 |
+ | 0.1644 | 2.6193 | 4500 | 0.2114 | 0.3478 | 0.3389 | 0.3433 | 0.9395 |
+ | 0.1586 | 2.9104 | 5000 | 0.2053 | 0.3758 | 0.3559 | 0.3656 | 0.9414 |
+ | 0.147 | 3.2014 | 5500 | 0.2029 | 0.4010 | 0.3751 | 0.3877 | 0.9430 |
+ | 0.1423 | 3.4924 | 6000 | 0.2043 | 0.4021 | 0.3747 | 0.3879 | 0.9437 |
+ | 0.1363 | 3.7835 | 6500 | 0.2080 | 0.4074 | 0.3496 | 0.3763 | 0.9436 |
+ | 0.133 | 4.0745 | 7000 | 0.2049 | 0.4037 | 0.3919 | 0.3977 | 0.9439 |
+ | 0.1227 | 4.3655 | 7500 | 0.2048 | 0.4163 | 0.3976 | 0.4068 | 0.9441 |
+ | 0.1214 | 4.6566 | 8000 | 0.2034 | 0.4082 | 0.3984 | 0.4032 | 0.9439 |
+ | 0.1198 | 4.9476 | 8500 | 0.1960 | 0.4205 | 0.4249 | 0.4227 | 0.9452 |
+ | 0.1058 | 5.2386 | 9000 | 0.2107 | 0.4228 | 0.4048 | 0.4137 | 0.9459 |
+ | 0.103 | 5.5297 | 9500 | 0.1999 | 0.4451 | 0.4268 | 0.4358 | 0.9461 |
+ | 0.1017 | 5.8207 | 10000 | 0.2005 | 0.4308 | 0.4313 | 0.4310 | 0.9468 |
+ | 0.0956 | 6.1118 | 10500 | 0.2008 | 0.4388 | 0.4535 | 0.4460 | 0.9473 |
+ | 0.0848 | 6.4028 | 11000 | 0.2039 | 0.4301 | 0.4711 | 0.4496 | 0.9467 |
+ | 0.0834 | 6.6938 | 11500 | 0.2040 | 0.4535 | 0.4669 | 0.4601 | 0.9490 |
+ | 0.08 | 6.9849 | 12000 | 0.2021 | 0.4517 | 0.4753 | 0.4632 | 0.9495 |
+ | 0.0714 | 7.2759 | 12500 | 0.2095 | 0.4691 | 0.4604 | 0.4647 | 0.9503 |
+ | 0.0691 | 7.5669 | 13000 | 0.2165 | 0.4749 | 0.4425 | 0.4581 | 0.9509 |
+ | 0.0676 | 7.8580 | 13500 | 0.2174 | 0.4795 | 0.4513 | 0.4650 | 0.9512 |
+ | 0.0608 | 8.1490 | 14000 | 0.2157 | 0.4833 | 0.4812 | 0.4822 | 0.9516 |
+ | 0.0574 | 8.4400 | 14500 | 0.2145 | 0.4707 | 0.4937 | 0.4819 | 0.9512 |
+ | 0.06 | 8.7311 | 15000 | 0.2190 | 0.4862 | 0.4969 | 0.4915 | 0.9514 |
+ | 0.0555 | 9.0221 | 15500 | 0.2291 | 0.4840 | 0.4679 | 0.4758 | 0.9519 |
+ | 0.0491 | 9.3132 | 16000 | 0.2285 | 0.4845 | 0.4888 | 0.4866 | 0.9519 |
+ | 0.0481 | 9.6042 | 16500 | 0.2325 | 0.4934 | 0.4799 | 0.4865 | 0.9523 |
+ | 0.0476 | 9.8952 | 17000 | 0.2297 | 0.4678 | 0.5099 | 0.4879 | 0.9516 |
+ | 0.042 | 10.1863 | 17500 | 0.2336 | 0.4962 | 0.5012 | 0.4987 | 0.9524 |
+ | 0.0399 | 10.4773 | 18000 | 0.2368 | 0.5054 | 0.5024 | 0.5039 | 0.9530 |
+ | 0.0432 | 10.7683 | 18500 | 0.2408 | 0.5064 | 0.4885 | 0.4973 | 0.9529 |
+ | 0.0371 | 11.0594 | 19000 | 0.2471 | 0.4870 | 0.5122 | 0.4993 | 0.9530 |
+ | 0.0345 | 11.3504 | 19500 | 0.2485 | 0.4988 | 0.5077 | 0.5032 | 0.9532 |
+ | 0.0345 | 11.6414 | 20000 | 0.2533 | 0.5035 | 0.4918 | 0.4976 | 0.9533 |
+ | 0.0342 | 11.9325 | 20500 | 0.2524 | 0.4906 | 0.5115 | 0.5008 | 0.9528 |
+ | 0.0301 | 12.2235 | 21000 | 0.2562 | 0.4987 | 0.5014 | 0.5000 | 0.9531 |
+ | 0.0293 | 12.5146 | 21500 | 0.2572 | 0.5046 | 0.5113 | 0.5080 | 0.9532 |
+ | 0.0282 | 12.8056 | 22000 | 0.2633 | 0.5034 | 0.4972 | 0.5003 | 0.9537 |
+ | 0.0283 | 13.0966 | 22500 | 0.2680 | 0.5014 | 0.5119 | 0.5066 | 0.9532 |
+ | 0.0257 | 13.3877 | 23000 | 0.2685 | 0.4985 | 0.5185 | 0.5083 | 0.9537 |
+ | 0.0245 | 13.6787 | 23500 | 0.2735 | 0.5104 | 0.5165 | 0.5134 | 0.9541 |
+ | 0.0243 | 13.9697 | 24000 | 0.2811 | 0.4987 | 0.5157 | 0.5070 | 0.9534 |
+ | 0.0215 | 14.2608 | 24500 | 0.2769 | 0.5021 | 0.5299 | 0.5156 | 0.9538 |
+ | 0.0207 | 14.5518 | 25000 | 0.2748 | 0.4976 | 0.5425 | 0.5191 | 0.9532 |
+ | 0.0224 | 14.8428 | 25500 | 0.2835 | 0.5190 | 0.5151 | 0.5170 | 0.9545 |
+ | 0.0204 | 15.1339 | 26000 | 0.2845 | 0.5022 | 0.5195 | 0.5107 | 0.9541 |
+ | 0.0186 | 15.4249 | 26500 | 0.2922 | 0.5177 | 0.5057 | 0.5116 | 0.9541 |
+ | 0.0185 | 15.7159 | 27000 | 0.2888 | 0.5236 | 0.5210 | 0.5223 | 0.9546 |
+ | 0.018 | 16.0070 | 27500 | 0.2892 | 0.5029 | 0.5354 | 0.5187 | 0.9540 |
+ | 0.0152 | 16.2980 | 28000 | 0.2992 | 0.5166 | 0.5219 | 0.5192 | 0.9547 |
+ | 0.0159 | 16.5891 | 28500 | 0.3011 | 0.5127 | 0.5232 | 0.5179 | 0.9545 |
+ | 0.0159 | 16.8801 | 29000 | 0.3051 | 0.5135 | 0.5172 | 0.5153 | 0.9545 |
+ | 0.015 | 17.1711 | 29500 | 0.3000 | 0.5170 | 0.5233 | 0.5201 | 0.9544 |
+ | 0.0144 | 17.4622 | 30000 | 0.3049 | 0.5045 | 0.5180 | 0.5111 | 0.9543 |
+ | 0.0144 | 17.7532 | 30500 | 0.3040 | 0.5066 | 0.5470 | 0.5260 | 0.9545 |
+ | 0.0131 | 18.0442 | 31000 | 0.3145 | 0.5144 | 0.5214 | 0.5179 | 0.9547 |
+ | 0.0119 | 18.3353 | 31500 | 0.3129 | 0.5150 | 0.5351 | 0.5249 | 0.9546 |
+ | 0.013 | 18.6263 | 32000 | 0.3156 | 0.5113 | 0.5308 | 0.5208 | 0.9544 |
+ | 0.0121 | 18.9173 | 32500 | 0.3242 | 0.5334 | 0.5129 | 0.5229 | 0.9551 |
+ | 0.0115 | 19.2084 | 33000 | 0.3194 | 0.5244 | 0.5327 | 0.5285 | 0.9554 |
+ | 0.0118 | 19.4994 | 33500 | 0.3195 | 0.5137 | 0.5396 | 0.5263 | 0.9546 |
+ | 0.0107 | 19.7905 | 34000 | 0.3209 | 0.5107 | 0.5416 | 0.5257 | 0.9544 |
+ | 0.0095 | 20.0815 | 34500 | 0.3266 | 0.5235 | 0.5361 | 0.5298 | 0.9550 |
+ | 0.01 | 20.3725 | 35000 | 0.3252 | 0.5167 | 0.5370 | 0.5267 | 0.9549 |
+ | 0.01 | 20.6636 | 35500 | 0.3272 | 0.5177 | 0.5354 | 0.5264 | 0.9549 |
+ | 0.0095 | 20.9546 | 36000 | 0.3290 | 0.5171 | 0.5328 | 0.5248 | 0.9550 |
+ | 0.0089 | 21.2456 | 36500 | 0.3316 | 0.5255 | 0.5348 | 0.5301 | 0.9556 |
+ | 0.0085 | 21.5367 | 37000 | 0.3361 | 0.5258 | 0.5333 | 0.5295 | 0.9553 |
+ | 0.0089 | 21.8277 | 37500 | 0.3336 | 0.5320 | 0.5227 | 0.5273 | 0.9553 |
+ | 0.009 | 22.1187 | 38000 | 0.3368 | 0.5287 | 0.5263 | 0.5275 | 0.9555 |
+ | 0.0069 | 22.4098 | 38500 | 0.3374 | 0.5194 | 0.5325 | 0.5259 | 0.9553 |
+ | 0.0082 | 22.7008 | 39000 | 0.3367 | 0.5161 | 0.5376 | 0.5266 | 0.9552 |
+ | 0.0079 | 22.9919 | 39500 | 0.3362 | 0.5170 | 0.5385 | 0.5275 | 0.9549 |
+ | 0.0075 | 23.2829 | 40000 | 0.3408 | 0.5204 | 0.5309 | 0.5256 | 0.9552 |
+ | 0.0062 | 23.5739 | 40500 | 0.3469 | 0.5194 | 0.5284 | 0.5239 | 0.9551 |
+ | 0.0074 | 23.8650 | 41000 | 0.3394 | 0.5206 | 0.5409 | 0.5306 | 0.9553 |
+ | 0.0065 | 24.1560 | 41500 | 0.3424 | 0.5163 | 0.5429 | 0.5293 | 0.9553 |
+ | 0.0061 | 24.4470 | 42000 | 0.3466 | 0.5263 | 0.5330 | 0.5296 | 0.9550 |
+ | 0.0065 | 24.7381 | 42500 | 0.3490 | 0.5297 | 0.5318 | 0.5308 | 0.9556 |
+ | 0.0065 | 25.0291 | 43000 | 0.3523 | 0.5241 | 0.5344 | 0.5292 | 0.9553 |
+ | 0.0056 | 25.3201 | 43500 | 0.3485 | 0.5287 | 0.5348 | 0.5317 | 0.9551 |
+ | 0.0061 | 25.6112 | 44000 | 0.3515 | 0.5187 | 0.5490 | 0.5334 | 0.9552 |
+ | 0.0059 | 25.9022 | 44500 | 0.3559 | 0.5254 | 0.5292 | 0.5273 | 0.9554 |
+ | 0.0052 | 26.1932 | 45000 | 0.3521 | 0.5292 | 0.5340 | 0.5316 | 0.9555 |
+ | 0.0051 | 26.4843 | 45500 | 0.3545 | 0.5222 | 0.5403 | 0.5311 | 0.9556 |
+ | 0.0057 | 26.7753 | 46000 | 0.3551 | 0.5249 | 0.5374 | 0.5311 | 0.9554 |
+ | 0.0055 | 27.0664 | 46500 | 0.3534 | 0.5198 | 0.5491 | 0.5341 | 0.9554 |
+ | 0.0047 | 27.3574 | 47000 | 0.3572 | 0.5245 | 0.5318 | 0.5281 | 0.9556 |
+ | 0.0054 | 27.6484 | 47500 | 0.3566 | 0.5290 | 0.5374 | 0.5332 | 0.9557 |
+ | 0.0045 | 27.9395 | 48000 | 0.3560 | 0.5269 | 0.5405 | 0.5336 | 0.9554 |
+ | 0.0053 | 28.2305 | 48500 | 0.3587 | 0.5244 | 0.5373 | 0.5308 | 0.9556 |
+ | 0.0048 | 28.5215 | 49000 | 0.3605 | 0.5268 | 0.5353 | 0.5310 | 0.9555 |
+ | 0.0043 | 28.8126 | 49500 | 0.3569 | 0.5243 | 0.5385 | 0.5313 | 0.9553 |
+ | 0.0049 | 29.1036 | 50000 | 0.3585 | 0.5251 | 0.5370 | 0.5310 | 0.9555 |
+ | 0.0045 | 29.3946 | 50500 | 0.3591 | 0.5277 | 0.5359 | 0.5317 | 0.9555 |
+ | 0.0046 | 29.6857 | 51000 | 0.3598 | 0.5260 | 0.5347 | 0.5303 | 0.9555 |
+ | 0.0045 | 29.9767 | 51500 | 0.3603 | 0.5290 | 0.5350 | 0.5320 | 0.9557 |


  ### Framework versions
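The Precision, Recall and F1 columns above are entity-level NER scores. The card does not say which scorer produced them; such numbers are commonly computed with `seqeval` over BIO-tagged label sequences, as in the illustrative sketch below (the toy labels are made up, not outputs of this model).

```python
# Illustrative only: entity-level NER metrics from BIO tag sequences with seqeval.
# y_true / y_pred below are toy examples, not predictions of this checkpoint.
from seqeval.metrics import accuracy_score, f1_score, precision_score, recall_score

y_true = [["B-PER", "I-PER", "O", "B-LOC"], ["O", "B-ORG", "O"]]
y_pred = [["B-PER", "I-PER", "O", "O"], ["O", "B-ORG", "O"]]

print("precision:", precision_score(y_true, y_pred))
print("recall:", recall_score(y_true, y_pred))
print("f1:", f1_score(y_true, y_pred))
print("accuracy:", accuracy_score(y_true, y_pred))
```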
eval_result_ner.json CHANGED
@@ -1 +1 @@
- {"ceb_gja": {"precision": 0.2619047619047619, "recall": 0.673469387755102, "f1": 0.37714285714285717, "accuracy": 0.9104247104247104}, "en_pud": {"precision": 0.4165009940357853, "recall": 0.38976744186046514, "f1": 0.4026910139356079, "accuracy": 0.944087646392142}, "de_pud": {"precision": 0.082502022108385, "recall": 0.2945139557266602, "f1": 0.12889637742207244, "accuracy": 0.77253762130233}, "pt_pud": {"precision": 0.4730902777777778, "recall": 0.4959053685168335, "f1": 0.4842292314526877, "accuracy": 0.9560387918144145}, "ru_pud": {"precision": 0.017528008673653776, "recall": 0.09362934362934362, "f1": 0.029528158295281586, "accuracy": 0.47346938775510206}, "sv_pud": {"precision": 0.4527027027027027, "recall": 0.32555879494655005, "f1": 0.3787450537026568, "accuracy": 0.9434892010903754}, "tl_trg": {"precision": 0.21875, "recall": 0.6086956521739131, "f1": 0.3218390804597701, "accuracy": 0.9209809264305178}, "tl_ugnayan": {"precision": 0.06862745098039216, "recall": 0.21212121212121213, "f1": 0.10370370370370371, "accuracy": 0.8842297174111212}, "zh_gsd": {"precision": 0.42035928143712575, "recall": 0.4576271186440678, "f1": 0.43820224719101125, "accuracy": 0.9217449217449217}, "zh_gsdsimp": {"precision": 0.4292929292929293, "recall": 0.4456094364351245, "f1": 0.43729903536977494, "accuracy": 0.921911421911422}, "hr_set": {"precision": 0.6520314547837484, "recall": 0.7091945830363506, "f1": 0.6794127688630932, "accuracy": 0.9648392415498763}, "da_ddt": {"precision": 0.5426008968609866, "recall": 0.5413870246085011, "f1": 0.541993281075028, "accuracy": 0.9668761847750175}, "en_ewt": {"precision": 0.4939870490286771, "recall": 0.49080882352941174, "f1": 0.4923928077455048, "accuracy": 0.954217635573973}, "pt_bosque": {"precision": 0.496028880866426, "recall": 0.5654320987654321, "f1": 0.5284615384615384, "accuracy": 0.9594623967540936}, "sr_set": {"precision": 0.7150900900900901, "recall": 0.7497048406139315, "f1": 0.7319884726224783, "accuracy": 0.963313194991682}, "sk_snk": {"precision": 0.344, "recall": 0.2819672131147541, "f1": 0.3099099099099099, "accuracy": 0.9119817839195979}, "sv_talbanken": {"precision": 0.5767195767195767, "recall": 0.5561224489795918, "f1": 0.5662337662337662, "accuracy": 0.9926387593855818}}
 
+ {"ceb_gja": {"precision": 0.2543859649122807, "recall": 0.5918367346938775, "f1": 0.3558282208588957, "accuracy": 0.9196911196911197}, "en_pud": {"precision": 0.4077212806026365, "recall": 0.4027906976744186, "f1": 0.40524099204492275, "accuracy": 0.9446071023800529}, "de_pud": {"precision": 0.08227176220806794, "recall": 0.2983638113570741, "f1": 0.12897857291449968, "accuracy": 0.7782101167315175}, "pt_pud": {"precision": 0.4693708609271523, "recall": 0.5159235668789809, "f1": 0.49154746423927176, "accuracy": 0.9561242363395566}, "ru_pud": {"precision": 0.014287982219399904, "recall": 0.08687258687258688, "f1": 0.024539877300613498, "accuracy": 0.49031258072849393}, "sv_pud": {"precision": 0.4292866082603254, "recall": 0.3333333333333333, "f1": 0.3752735229759299, "accuracy": 0.9441706856783393}, "tl_trg": {"precision": 0.17647058823529413, "recall": 0.5217391304347826, "f1": 0.26373626373626374, "accuracy": 0.9073569482288828}, "tl_ugnayan": {"precision": 0.11, "recall": 0.3333333333333333, "f1": 0.16541353383458648, "accuracy": 0.8787602552415679}, "zh_gsd": {"precision": 0.4070904645476773, "recall": 0.43415906127770537, "f1": 0.4201892744479495, "accuracy": 0.9204961704961705}, "zh_gsdsimp": {"precision": 0.41836734693877553, "recall": 0.42988204456094364, "f1": 0.42404654169360056, "accuracy": 0.924908424908425}, "hr_set": {"precision": 0.6501628664495114, "recall": 0.7113328581610834, "f1": 0.6793737236215113, "accuracy": 0.9651690024732069}, "da_ddt": {"precision": 0.5087719298245614, "recall": 0.5190156599552572, "f1": 0.5138427464008859, "accuracy": 0.9654793973860122}, "en_ewt": {"precision": 0.4832881662149955, "recall": 0.49172794117647056, "f1": 0.4874715261958998, "accuracy": 0.9524644379806352}, "pt_bosque": {"precision": 0.48723559445660103, "recall": 0.5497942386831276, "f1": 0.5166279969064191, "accuracy": 0.9587740907114911}, "sr_set": {"precision": 0.7037861915367484, "recall": 0.7461629279811098, "f1": 0.7243553008595989, "accuracy": 0.9626127309342439}, "sk_snk": {"precision": 0.35142118863049093, "recall": 0.2972677595628415, "f1": 0.32208407341622264, "accuracy": 0.9163002512562815}, "sv_talbanken": {"precision": 0.5561224489795918, "recall": 0.5561224489795918, "f1": 0.5561224489795918, "accuracy": 0.9919026353241399}}
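`eval_result_ner.json` stores one precision/recall/F1/accuracy block per evaluation treebank. A small sketch for ranking the treebanks by F1; it assumes the file has been downloaded into the working directory.

```python
# Rank the per-treebank NER results in eval_result_ner.json by F1.
# Assumes the file sits in the current working directory.
import json

with open("eval_result_ner.json") as f:
    results = json.load(f)

for name, scores in sorted(results.items(), key=lambda kv: kv[1]["f1"], reverse=True):
    print(f"{name:15s} f1={scores['f1']:.4f} "
          f"p={scores['precision']:.4f} r={scores['recall']:.4f} "
          f"acc={scores['accuracy']:.4f}")
```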
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ceb5127bd514ac5c6e246e698d470517d6ff57ef52f079648f52aaa40dd591a9
  size 427407404

  version https://git-lfs.github.com/spec/v1
+ oid sha256:7a8ec7b4b1a2f91a769f096babeb92422978e6f660bf97e275e9b92e68e78940
  size 427407404
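`model.safetensors` is tracked with Git LFS, so the diff only shows the pointer file; the `oid` line is the SHA-256 of the actual weights. A hedged verification sketch (the local path is an assumption):

```python
# Check a locally downloaded model.safetensors against the sha256 oid recorded
# in the LFS pointer above. "model.safetensors" is an assumed local path.
import hashlib

EXPECTED_OID = "7a8ec7b4b1a2f91a769f096babeb92422978e6f660bf97e275e9b92e68e78940"

sha256 = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        sha256.update(chunk)

print("match" if sha256.hexdigest() == EXPECTED_OID else "mismatch")
```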
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3ea653b7e1b9042709b6a10c43aa7368167dc5860cd3ad60e779cf15007df743
  size 5304

  version https://git-lfs.github.com/spec/v1
+ oid sha256:b96b3c7bcb2846705cbcb0bfd9a8c6b505e5adb370116a77969c393bf8162fae
  size 5304
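`training_args.bin` is the pickled `TrainingArguments` object the `Trainer` saves next to a checkpoint. A sketch for inspecting it; it assumes a compatible `transformers` install, and on recent PyTorch versions `weights_only=False` is needed to unpickle arbitrary objects.

```python
# Inspect the pickled TrainingArguments saved alongside the checkpoint.
# Assumes transformers is installed so the object can be unpickled.
import torch

args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)
print("learning_rate:", args.learning_rate)
print("num_train_epochs:", args.num_train_epochs)
print("per_device_train_batch_size:", args.per_device_train_batch_size)
```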