Initial Commit
Browse files
- README.md +111 -111
- eval_result_ner.json +1 -1
- model.safetensors +1 -1
- training_args.bin +1 -1
README.md
CHANGED
@@ -1,14 +1,14 @@
 ---
-base_model: FacebookAI/xlm-roberta-base
 library_name: transformers
 license: mit
+base_model: FacebookAI/xlm-roberta-base
+tags:
+- generated_from_trainer
 metrics:
 - precision
 - recall
 - f1
 - accuracy
-tags:
-- generated_from_trainer
 model-index:
 - name: scenario-non-kd-scr-ner-half-xlmr_data-univner_full66
   results: []
@@ -21,11 +21,11 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [FacebookAI/xlm-roberta-base](https://huggingface.co/FacebookAI/xlm-roberta-base) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
-- Precision: 0.
-- Recall: 0.
-- F1: 0.
-- Accuracy: 0.
+- Loss: 0.3518
+- Precision: 0.5316
+- Recall: 0.5425
+- F1: 0.5370
+- Accuracy: 0.9565
 
 ## Model description
 
@@ -56,109 +56,109 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
 |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
-[103 deleted table rows; the diff capture preserved only each old row's unchanged prefix, e.g. "| 0.0289 | 12.2235 | 21000 | 0."]
+| 0.3589 | 0.2910 | 500 | 0.2912 | 0.4556 | 0.0998 | 0.1638 | 0.9285 |
+| 0.2772 | 0.5821 | 1000 | 0.2813 | 0.4297 | 0.1296 | 0.1991 | 0.9314 |
+| 0.2452 | 0.8731 | 1500 | 0.2362 | 0.3233 | 0.2021 | 0.2487 | 0.9349 |
+| 0.2225 | 1.1641 | 2000 | 0.2304 | 0.3518 | 0.2447 | 0.2886 | 0.9377 |
+| 0.2028 | 1.4552 | 2500 | 0.2316 | 0.3541 | 0.2378 | 0.2845 | 0.9385 |
+| 0.1966 | 1.7462 | 3000 | 0.2155 | 0.3847 | 0.2730 | 0.3194 | 0.9401 |
+| 0.1841 | 2.0373 | 3500 | 0.2109 | 0.3783 | 0.3202 | 0.3468 | 0.9408 |
+| 0.1651 | 2.3283 | 4000 | 0.2105 | 0.3801 | 0.3388 | 0.3582 | 0.9404 |
+| 0.1644 | 2.6193 | 4500 | 0.2056 | 0.3732 | 0.3506 | 0.3616 | 0.9417 |
+| 0.1635 | 2.9104 | 5000 | 0.1966 | 0.3919 | 0.3705 | 0.3809 | 0.9429 |
+| 0.1441 | 3.2014 | 5500 | 0.2022 | 0.4122 | 0.3985 | 0.4052 | 0.9432 |
+| 0.1418 | 3.4924 | 6000 | 0.2040 | 0.4160 | 0.3760 | 0.3950 | 0.9443 |
+| 0.1363 | 3.7835 | 6500 | 0.2053 | 0.4157 | 0.3682 | 0.3905 | 0.9446 |
+| 0.1367 | 4.0745 | 7000 | 0.2032 | 0.4182 | 0.3929 | 0.4051 | 0.9448 |
+| 0.1203 | 4.3655 | 7500 | 0.2053 | 0.4173 | 0.4007 | 0.4088 | 0.9448 |
+| 0.1222 | 4.6566 | 8000 | 0.2040 | 0.4337 | 0.3936 | 0.4127 | 0.9459 |
+| 0.1215 | 4.9476 | 8500 | 0.1982 | 0.4271 | 0.4227 | 0.4249 | 0.9455 |
+| 0.1072 | 5.2386 | 9000 | 0.2090 | 0.4375 | 0.4162 | 0.4266 | 0.9470 |
+| 0.1035 | 5.5297 | 9500 | 0.2049 | 0.4449 | 0.4233 | 0.4338 | 0.9477 |
+| 0.0988 | 5.8207 | 10000 | 0.1970 | 0.4446 | 0.4558 | 0.4501 | 0.9469 |
+| 0.0935 | 6.1118 | 10500 | 0.2087 | 0.4576 | 0.4489 | 0.4532 | 0.9485 |
+| 0.0847 | 6.4028 | 11000 | 0.2098 | 0.4410 | 0.4639 | 0.4521 | 0.9487 |
+| 0.0811 | 6.6938 | 11500 | 0.2072 | 0.4662 | 0.4699 | 0.4680 | 0.9506 |
+| 0.0828 | 6.9849 | 12000 | 0.1986 | 0.4947 | 0.4657 | 0.4798 | 0.9510 |
+| 0.0681 | 7.2759 | 12500 | 0.2098 | 0.4742 | 0.4797 | 0.4769 | 0.9515 |
+| 0.0664 | 7.5669 | 13000 | 0.2018 | 0.4830 | 0.4887 | 0.4858 | 0.9511 |
+| 0.0674 | 7.8580 | 13500 | 0.2066 | 0.4954 | 0.5084 | 0.5018 | 0.9532 |
+| 0.0621 | 8.1490 | 14000 | 0.2088 | 0.4737 | 0.5086 | 0.4905 | 0.9513 |
+| 0.0532 | 8.4400 | 14500 | 0.2197 | 0.4995 | 0.4777 | 0.4883 | 0.9528 |
+| 0.0544 | 8.7311 | 15000 | 0.2195 | 0.5120 | 0.4793 | 0.4951 | 0.9528 |
+| 0.0558 | 9.0221 | 15500 | 0.2174 | 0.4953 | 0.5044 | 0.4998 | 0.9533 |
+| 0.0454 | 9.3132 | 16000 | 0.2241 | 0.5061 | 0.5095 | 0.5078 | 0.9536 |
+| 0.0458 | 9.6042 | 16500 | 0.2215 | 0.5058 | 0.5227 | 0.5141 | 0.9540 |
+| 0.0451 | 9.8952 | 17000 | 0.2181 | 0.4940 | 0.5200 | 0.5066 | 0.9525 |
+| 0.0399 | 10.1863 | 17500 | 0.2318 | 0.5085 | 0.5194 | 0.5139 | 0.9538 |
+| 0.0375 | 10.4773 | 18000 | 0.2378 | 0.5108 | 0.5240 | 0.5173 | 0.9541 |
+| 0.0378 | 10.7683 | 18500 | 0.2312 | 0.5118 | 0.5255 | 0.5185 | 0.9543 |
+| 0.0376 | 11.0594 | 19000 | 0.2445 | 0.5006 | 0.5074 | 0.5040 | 0.9540 |
+| 0.0338 | 11.3504 | 19500 | 0.2455 | 0.5081 | 0.5120 | 0.5101 | 0.9543 |
+| 0.0326 | 11.6414 | 20000 | 0.2442 | 0.5108 | 0.5321 | 0.5212 | 0.9546 |
+| 0.0318 | 11.9325 | 20500 | 0.2495 | 0.5168 | 0.5171 | 0.5169 | 0.9550 |
+| 0.0289 | 12.2235 | 21000 | 0.2487 | 0.5113 | 0.5350 | 0.5229 | 0.9550 |
+| 0.0278 | 12.5146 | 21500 | 0.2522 | 0.5050 | 0.5263 | 0.5154 | 0.9543 |
+| 0.0277 | 12.8056 | 22000 | 0.2608 | 0.5221 | 0.5138 | 0.5179 | 0.9548 |
+| 0.0263 | 13.0966 | 22500 | 0.2561 | 0.5133 | 0.5269 | 0.5200 | 0.9551 |
+| 0.024 | 13.3877 | 23000 | 0.2631 | 0.5196 | 0.5258 | 0.5227 | 0.9547 |
+| 0.0246 | 13.6787 | 23500 | 0.2628 | 0.5110 | 0.5527 | 0.5311 | 0.9551 |
+| 0.0241 | 13.9697 | 24000 | 0.2735 | 0.5161 | 0.5260 | 0.5210 | 0.9552 |
+| 0.021 | 14.2608 | 24500 | 0.2737 | 0.5224 | 0.5256 | 0.5240 | 0.9551 |
+| 0.0201 | 14.5518 | 25000 | 0.2743 | 0.5246 | 0.5360 | 0.5302 | 0.9554 |
+| 0.0208 | 14.8428 | 25500 | 0.2776 | 0.5180 | 0.5266 | 0.5222 | 0.9552 |
+| 0.0201 | 15.1339 | 26000 | 0.2801 | 0.5065 | 0.5370 | 0.5213 | 0.9549 |
+| 0.018 | 15.4249 | 26500 | 0.2770 | 0.5168 | 0.5335 | 0.5250 | 0.9550 |
+| 0.0176 | 15.7159 | 27000 | 0.2875 | 0.5185 | 0.5324 | 0.5253 | 0.9551 |
+| 0.0177 | 16.0070 | 27500 | 0.2861 | 0.5267 | 0.5321 | 0.5294 | 0.9556 |
+| 0.0148 | 16.2980 | 28000 | 0.2860 | 0.5079 | 0.5442 | 0.5254 | 0.9549 |
+| 0.0156 | 16.5891 | 28500 | 0.2953 | 0.5188 | 0.5380 | 0.5282 | 0.9552 |
+| 0.0165 | 16.8801 | 29000 | 0.2928 | 0.5261 | 0.5333 | 0.5297 | 0.9557 |
+| 0.0135 | 17.1711 | 29500 | 0.2981 | 0.5171 | 0.5396 | 0.5281 | 0.9554 |
+| 0.0142 | 17.4622 | 30000 | 0.3062 | 0.5269 | 0.5164 | 0.5216 | 0.9554 |
+| 0.0134 | 17.7532 | 30500 | 0.2947 | 0.5211 | 0.5418 | 0.5312 | 0.9555 |
+| 0.0134 | 18.0442 | 31000 | 0.3045 | 0.5188 | 0.5426 | 0.5305 | 0.9559 |
+| 0.012 | 18.3353 | 31500 | 0.3070 | 0.5236 | 0.5380 | 0.5307 | 0.9558 |
+| 0.0123 | 18.6263 | 32000 | 0.3071 | 0.5409 | 0.5328 | 0.5368 | 0.9567 |
+| 0.0117 | 18.9173 | 32500 | 0.3094 | 0.5265 | 0.5357 | 0.5311 | 0.9560 |
+| 0.0108 | 19.2084 | 33000 | 0.3167 | 0.5344 | 0.5305 | 0.5325 | 0.9565 |
+| 0.0111 | 19.4994 | 33500 | 0.3162 | 0.5182 | 0.5302 | 0.5241 | 0.9556 |
+| 0.011 | 19.7905 | 34000 | 0.3152 | 0.5243 | 0.5377 | 0.5309 | 0.9557 |
+| 0.0106 | 20.0815 | 34500 | 0.3241 | 0.5354 | 0.5200 | 0.5276 | 0.9562 |
+| 0.0094 | 20.3725 | 35000 | 0.3240 | 0.5223 | 0.5288 | 0.5255 | 0.9560 |
+| 0.0094 | 20.6636 | 35500 | 0.3271 | 0.5293 | 0.5322 | 0.5308 | 0.9563 |
+| 0.0099 | 20.9546 | 36000 | 0.3219 | 0.5256 | 0.5334 | 0.5295 | 0.9559 |
+| 0.0085 | 21.2456 | 36500 | 0.3223 | 0.5245 | 0.5429 | 0.5335 | 0.9560 |
+| 0.0081 | 21.5367 | 37000 | 0.3308 | 0.5170 | 0.5340 | 0.5254 | 0.9558 |
+| 0.0095 | 21.8277 | 37500 | 0.3292 | 0.5333 | 0.5294 | 0.5313 | 0.9564 |
+| 0.008 | 22.1187 | 38000 | 0.3326 | 0.5270 | 0.5416 | 0.5342 | 0.9563 |
+| 0.007 | 22.4098 | 38500 | 0.3306 | 0.5252 | 0.5473 | 0.5360 | 0.9563 |
+| 0.0083 | 22.7008 | 39000 | 0.3301 | 0.5354 | 0.5396 | 0.5375 | 0.9565 |
+| 0.0079 | 22.9919 | 39500 | 0.3268 | 0.5357 | 0.5421 | 0.5389 | 0.9562 |
+| 0.0072 | 23.2829 | 40000 | 0.3383 | 0.5367 | 0.5311 | 0.5339 | 0.9563 |
+| 0.0068 | 23.5739 | 40500 | 0.3349 | 0.5281 | 0.5392 | 0.5336 | 0.9562 |
+| 0.0069 | 23.8650 | 41000 | 0.3383 | 0.5280 | 0.5408 | 0.5343 | 0.9563 |
+| 0.0073 | 24.1560 | 41500 | 0.3390 | 0.5217 | 0.5436 | 0.5324 | 0.9563 |
+| 0.0057 | 24.4470 | 42000 | 0.3395 | 0.5279 | 0.5311 | 0.5295 | 0.9560 |
+| 0.0064 | 24.7381 | 42500 | 0.3420 | 0.5403 | 0.5295 | 0.5349 | 0.9563 |
+| 0.0065 | 25.0291 | 43000 | 0.3436 | 0.5372 | 0.5348 | 0.5360 | 0.9565 |
+| 0.0053 | 25.3201 | 43500 | 0.3444 | 0.5259 | 0.5399 | 0.5328 | 0.9562 |
+| 0.0058 | 25.6112 | 44000 | 0.3475 | 0.5160 | 0.5367 | 0.5261 | 0.9556 |
+| 0.0061 | 25.9022 | 44500 | 0.3479 | 0.5393 | 0.5344 | 0.5369 | 0.9566 |
+| 0.0051 | 26.1932 | 45000 | 0.3435 | 0.5266 | 0.5418 | 0.5341 | 0.9559 |
+| 0.0055 | 26.4843 | 45500 | 0.3440 | 0.5282 | 0.5419 | 0.5350 | 0.9562 |
+| 0.005 | 26.7753 | 46000 | 0.3466 | 0.5287 | 0.5423 | 0.5354 | 0.9564 |
+| 0.0058 | 27.0664 | 46500 | 0.3470 | 0.5308 | 0.5490 | 0.5398 | 0.9565 |
+| 0.0052 | 27.3574 | 47000 | 0.3506 | 0.5343 | 0.5379 | 0.5361 | 0.9564 |
+| 0.0049 | 27.6484 | 47500 | 0.3475 | 0.5276 | 0.5473 | 0.5373 | 0.9563 |
+| 0.0052 | 27.9395 | 48000 | 0.3496 | 0.5276 | 0.5483 | 0.5377 | 0.9565 |
+| 0.0049 | 28.2305 | 48500 | 0.3507 | 0.5327 | 0.5422 | 0.5374 | 0.9564 |
+| 0.0049 | 28.5215 | 49000 | 0.3528 | 0.5363 | 0.5399 | 0.5381 | 0.9565 |
+| 0.0052 | 28.8126 | 49500 | 0.3516 | 0.5382 | 0.5385 | 0.5383 | 0.9565 |
+| 0.0042 | 29.1036 | 50000 | 0.3499 | 0.5330 | 0.5454 | 0.5391 | 0.9565 |
+| 0.0045 | 29.3946 | 50500 | 0.3514 | 0.5343 | 0.5389 | 0.5366 | 0.9565 |
+| 0.0048 | 29.6857 | 51000 | 0.3517 | 0.5316 | 0.5418 | 0.5367 | 0.9564 |
+| 0.0043 | 29.9767 | 51500 | 0.3518 | 0.5316 | 0.5425 | 0.5370 | 0.9565 |
 
 
 ### Framework versions
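The card above describes a token-classification (NER) fine-tune of xlm-roberta-base, evaluated with span precision/recall/F1 plus token accuracy. A minimal inference sketch, assuming the files from this commit sit in a local directory (the directory name is hypothetical) and that `transformers` with a PyTorch backend is installed:

```python
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

# Hypothetical local clone of this repo; any path containing config.json,
# model.safetensors, and the tokenizer files would work the same way.
model_dir = "./scenario-non-kd-scr-ner-half-xlmr_data-univner_full66"

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# aggregation_strategy="simple" merges sub-word pieces into whole entity spans,
# matching how span-level precision/recall/F1 are usually reported.
ner = pipeline("token-classification", model=model, tokenizer=tokenizer,
               aggregation_strategy="simple")
print(ner("Angela Merkel visited Copenhagen in 2019."))
```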
eval_result_ner.json
CHANGED
@@ -1 +1 @@
-{"ceb_gja": {"precision": 0.2905982905982906, "recall": 0.6938775510204082, "f1": 0.4096385542168675, "accuracy": 0.9181467181467181}, "en_pud": {"precision": 0.
+{"ceb_gja": {"precision": 0.2905982905982906, "recall": 0.6938775510204082, "f1": 0.4096385542168675, "accuracy": 0.9181467181467181}, "en_pud": {"precision": 0.408876298394712, "recall": 0.4027906976744186, "f1": 0.40581068416119964, "accuracy": 0.9448432187381942}, "de_pud": {"precision": 0.09606986899563319, "recall": 0.3176130895091434, "f1": 0.1475189986589182, "accuracy": 0.7986967324551123}, "pt_pud": {"precision": 0.4526226734348562, "recall": 0.48680618744313015, "f1": 0.4690925032880316, "accuracy": 0.9546716794121417}, "ru_pud": {"precision": 0.014307355664029625, "recall": 0.08204633204633205, "f1": 0.02436577325498065, "accuracy": 0.5190390080082666}, "sv_pud": {"precision": 0.48132780082987553, "recall": 0.33819241982507287, "f1": 0.3972602739726028, "accuracy": 0.9451142797232124}, "tl_trg": {"precision": 0.22058823529411764, "recall": 0.6521739130434783, "f1": 0.32967032967032966, "accuracy": 0.9141689373297003}, "tl_ugnayan": {"precision": 0.03305785123966942, "recall": 0.12121212121212122, "f1": 0.05194805194805195, "accuracy": 0.8641750227894257}, "zh_gsd": {"precision": 0.4342105263157895, "recall": 0.4302477183833116, "f1": 0.43222003929273084, "accuracy": 0.9227439227439227}, "zh_gsdsimp": {"precision": 0.4262948207171315, "recall": 0.42070773263433814, "f1": 0.4234828496042217, "accuracy": 0.9244921744921745}, "hr_set": {"precision": 0.6521452145214521, "recall": 0.7042052744119743, "f1": 0.6771761480466072, "accuracy": 0.96479802143446}, "da_ddt": {"precision": 0.5563380281690141, "recall": 0.5302013422818792, "f1": 0.5429553264604812, "accuracy": 0.9667764142472314}, "en_ewt": {"precision": 0.5095969289827256, "recall": 0.4880514705882353, "f1": 0.49859154929577465, "accuracy": 0.9547754711718532}, "pt_bosque": {"precision": 0.5263559969442322, "recall": 0.5670781893004115, "f1": 0.545958795562599, "accuracy": 0.9606941023040139}, "sr_set": {"precision": 0.7265536723163842, "recall": 0.7591499409681228, "f1": 0.7424942263279446, "accuracy": 0.9640136590491201}, "sk_snk": {"precision": 0.35027472527472525, "recall": 0.2786885245901639, "f1": 0.310407790626902, "accuracy": 0.913552135678392}, "sv_talbanken": {"precision": 0.6610169491525424, "recall": 0.5969387755102041, "f1": 0.6273458445040214, "accuracy": 0.9934239583844531}}
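eval_result_ner.json stores per-treebank NER scores keyed by UD-style dataset codes (en_pud, zh_gsd, ...). A short sketch, assuming the file is read from the repo root, that macro-averages those scores:

```python
import json

with open("eval_result_ner.json") as f:
    results = json.load(f)

# Macro-average each metric across the 17 evaluation treebanks.
for metric in ("precision", "recall", "f1", "accuracy"):
    avg = sum(scores[metric] for scores in results.values()) / len(results)
    print(f"macro {metric}: {avg:.4f}")
```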
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:daa409ed2cf9aede47bb21af04ac72175edbaa2c105ff5e59887b8aa2e783d83
 size 427407404
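What is versioned here is a Git LFS pointer; the ~427 MB weights file it names is fetched on checkout. Once resolved, the checkpoint can be inspected without instantiating the model class, for example with the `safetensors` package (a sketch; assumes `safetensors` and `torch` are installed):

```python
from safetensors import safe_open

# Lazily open the checkpoint and list a few tensor names and shapes
# without loading the whole model into memory.
with safe_open("model.safetensors", framework="pt", device="cpu") as f:
    for name in list(f.keys())[:5]:
        print(name, tuple(f.get_tensor(name).shape))
```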
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:627c1ea359fe2c69a16a37c529c480e3b87e191247eaa25a1dd71815c5b8534c
 size 5304
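Both binary files are stored as Git LFS pointers: `oid sha256:` carries the SHA-256 digest of the real payload and `size` its byte length. A sketch, using only the standard library, for verifying a downloaded file against its pointer:

```python
import hashlib
import os

def verify_lfs_pointer(path: str, oid: str, size: int) -> bool:
    """Check a downloaded file against the oid/size from its LFS pointer."""
    if os.path.getsize(path) != size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(1 << 20):  # hash in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == oid

# Values taken from the training_args.bin pointer above.
ok = verify_lfs_pointer(
    "training_args.bin",
    "627c1ea359fe2c69a16a37c529c480e3b87e191247eaa25a1dd71815c5b8534c",
    5304,
)
print("training_args.bin intact:", ok)
```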