Initial Commit
- README.md +111 -111
- eval_result_ner.json +1 -1
- model.safetensors +1 -1
- training_args.bin +1 -1
README.md CHANGED
@@ -1,14 +1,14 @@
 ---
-base_model: FacebookAI/xlm-roberta-base
 library_name: transformers
 license: mit
+base_model: FacebookAI/xlm-roberta-base
+tags:
+- generated_from_trainer
 metrics:
 - precision
 - recall
 - f1
 - accuracy
-tags:
-- generated_from_trainer
 model-index:
 - name: scenario-non-kd-scr-ner-full-xlmr_data-univner_full44
   results: []
@@ -21,11 +21,11 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [FacebookAI/xlm-roberta-base](https://huggingface.co/FacebookAI/xlm-roberta-base) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
-- Precision: 0.
-- Recall: 0.
-- F1: 0.
-- Accuracy: 0.
+- Loss: 0.3770
+- Precision: 0.5737
+- Recall: 0.5773
+- F1: 0.5755
+- Accuracy: 0.9597
 
 ## Model description
 
@@ -56,109 +56,109 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
 |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0128 | 9.3132 | 16000 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.008 | 11.3504 | 19500 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0028 | 16.2980 | 28000 | 0.
-| 0.
-| 0.003 | 16.8801 | 29000 | 0.
-| 0.
-| 0.
-| 0.
-| 0.0025 | 18.0442 | 31000 | 0.
-| 0.
-| 0.
-| 0.0021 | 18.9173 | 32500 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0012 | 21.5367 | 37000 | 0.
-| 0.
-| 0.
-| 0.
-| 0.001 | 22.7008 | 39000 | 0.
-| 0.
-| 0.
-| 0.0008 | 23.5739 | 40500 | 0.
-| 0.001 | 23.8650 | 41000 | 0.
-| 0.
-| 0.
-| 0.0007 | 24.7381 | 42500 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0004 | 26.7753 | 46000 | 0.
-| 0.0006 | 27.0664 | 46500 | 0.
-| 0.
-| 0.0004 | 27.6484 | 47500 | 0.
-| 0.0005 | 27.9395 | 48000 | 0.
-| 0.0003 | 28.2305 | 48500 | 0.
-| 0.
-| 0.0004 | 28.8126 | 49500 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
+| 0.3376 | 0.2910 | 500 | 0.2801 | 0.4349 | 0.1412 | 0.2132 | 0.9307 |
+| 0.2635 | 0.5821 | 1000 | 0.2441 | 0.3323 | 0.2249 | 0.2683 | 0.9343 |
+| 0.2296 | 0.8731 | 1500 | 0.2240 | 0.3293 | 0.2759 | 0.3002 | 0.9364 |
+| 0.2009 | 1.1641 | 2000 | 0.2151 | 0.3590 | 0.3569 | 0.3580 | 0.9372 |
+| 0.1874 | 1.4552 | 2500 | 0.2115 | 0.3755 | 0.3354 | 0.3543 | 0.9398 |
+| 0.1754 | 1.7462 | 3000 | 0.2126 | 0.3774 | 0.3173 | 0.3448 | 0.9420 |
+| 0.1643 | 2.0373 | 3500 | 0.2050 | 0.3776 | 0.3711 | 0.3743 | 0.9412 |
+| 0.1368 | 2.3283 | 4000 | 0.2107 | 0.4135 | 0.3636 | 0.3869 | 0.9435 |
+| 0.1361 | 2.6193 | 4500 | 0.1976 | 0.4033 | 0.4125 | 0.4078 | 0.9449 |
+| 0.1255 | 2.9104 | 5000 | 0.1890 | 0.4470 | 0.4359 | 0.4413 | 0.9470 |
+| 0.105 | 3.2014 | 5500 | 0.1942 | 0.4691 | 0.4675 | 0.4683 | 0.9494 |
+| 0.0946 | 3.4924 | 6000 | 0.1912 | 0.4519 | 0.4515 | 0.4517 | 0.9491 |
+| 0.0875 | 3.7835 | 6500 | 0.1861 | 0.4874 | 0.4696 | 0.4784 | 0.9516 |
+| 0.079 | 4.0745 | 7000 | 0.1950 | 0.4950 | 0.5017 | 0.4983 | 0.9535 |
+| 0.0641 | 4.3655 | 7500 | 0.1965 | 0.5102 | 0.5058 | 0.5080 | 0.9545 |
+| 0.0627 | 4.6566 | 8000 | 0.1870 | 0.4881 | 0.5327 | 0.5094 | 0.9535 |
+| 0.06 | 4.9476 | 8500 | 0.1986 | 0.5151 | 0.5056 | 0.5103 | 0.9550 |
+| 0.0467 | 5.2386 | 9000 | 0.2012 | 0.5317 | 0.5217 | 0.5267 | 0.9552 |
+| 0.0443 | 5.5297 | 9500 | 0.2116 | 0.5339 | 0.5125 | 0.5230 | 0.9552 |
+| 0.0446 | 5.8207 | 10000 | 0.2037 | 0.5268 | 0.5232 | 0.5250 | 0.9556 |
+| 0.0401 | 6.1118 | 10500 | 0.2191 | 0.5186 | 0.5510 | 0.5343 | 0.9563 |
+| 0.0313 | 6.4028 | 11000 | 0.2224 | 0.5503 | 0.5269 | 0.5384 | 0.9569 |
+| 0.0318 | 6.6938 | 11500 | 0.2233 | 0.5035 | 0.5485 | 0.5251 | 0.9546 |
+| 0.0299 | 6.9849 | 12000 | 0.2302 | 0.5646 | 0.5056 | 0.5335 | 0.9566 |
+| 0.0237 | 7.2759 | 12500 | 0.2427 | 0.5342 | 0.5403 | 0.5372 | 0.9564 |
+| 0.0235 | 7.5669 | 13000 | 0.2487 | 0.5049 | 0.5425 | 0.5230 | 0.9557 |
+| 0.0226 | 7.8580 | 13500 | 0.2501 | 0.5431 | 0.5325 | 0.5378 | 0.9569 |
+| 0.0193 | 8.1490 | 14000 | 0.2425 | 0.5252 | 0.5604 | 0.5422 | 0.9567 |
+| 0.0169 | 8.4400 | 14500 | 0.2520 | 0.5446 | 0.5423 | 0.5435 | 0.9568 |
+| 0.0174 | 8.7311 | 15000 | 0.2516 | 0.5351 | 0.5725 | 0.5532 | 0.9570 |
+| 0.0167 | 9.0221 | 15500 | 0.2772 | 0.5618 | 0.5335 | 0.5473 | 0.9581 |
+| 0.0128 | 9.3132 | 16000 | 0.2577 | 0.5349 | 0.5754 | 0.5544 | 0.9575 |
+| 0.013 | 9.6042 | 16500 | 0.2834 | 0.5483 | 0.5392 | 0.5437 | 0.9579 |
+| 0.0129 | 9.8952 | 17000 | 0.2734 | 0.5573 | 0.5659 | 0.5615 | 0.9583 |
+| 0.0108 | 10.1863 | 17500 | 0.2819 | 0.5346 | 0.5804 | 0.5566 | 0.9575 |
+| 0.0096 | 10.4773 | 18000 | 0.3010 | 0.5129 | 0.5861 | 0.5471 | 0.9558 |
+| 0.011 | 10.7683 | 18500 | 0.2832 | 0.5315 | 0.5859 | 0.5574 | 0.9576 |
+| 0.009 | 11.0594 | 19000 | 0.3044 | 0.5479 | 0.5636 | 0.5556 | 0.9581 |
+| 0.008 | 11.3504 | 19500 | 0.2994 | 0.5418 | 0.5800 | 0.5602 | 0.9578 |
+| 0.0082 | 11.6414 | 20000 | 0.2879 | 0.5529 | 0.5640 | 0.5584 | 0.9579 |
+| 0.0086 | 11.9325 | 20500 | 0.3122 | 0.5410 | 0.5467 | 0.5438 | 0.9578 |
+| 0.0067 | 12.2235 | 21000 | 0.3093 | 0.5531 | 0.5676 | 0.5603 | 0.9586 |
+| 0.0067 | 12.5146 | 21500 | 0.3113 | 0.5446 | 0.5644 | 0.5543 | 0.9580 |
+| 0.0063 | 12.8056 | 22000 | 0.3014 | 0.5501 | 0.5813 | 0.5653 | 0.9580 |
+| 0.0061 | 13.0966 | 22500 | 0.3200 | 0.5451 | 0.5610 | 0.5529 | 0.9582 |
+| 0.0052 | 13.3877 | 23000 | 0.3071 | 0.5495 | 0.5659 | 0.5576 | 0.9582 |
+| 0.0052 | 13.6787 | 23500 | 0.3079 | 0.5647 | 0.5640 | 0.5644 | 0.9586 |
+| 0.0052 | 13.9697 | 24000 | 0.3142 | 0.5406 | 0.5750 | 0.5572 | 0.9583 |
+| 0.004 | 14.2608 | 24500 | 0.3146 | 0.5610 | 0.5719 | 0.5664 | 0.9588 |
+| 0.0039 | 14.5518 | 25000 | 0.3268 | 0.5504 | 0.5712 | 0.5606 | 0.9587 |
+| 0.0045 | 14.8428 | 25500 | 0.3133 | 0.5569 | 0.5713 | 0.5640 | 0.9588 |
+| 0.0043 | 15.1339 | 26000 | 0.3308 | 0.5599 | 0.5575 | 0.5587 | 0.9587 |
+| 0.0031 | 15.4249 | 26500 | 0.3380 | 0.5493 | 0.5638 | 0.5565 | 0.9580 |
+| 0.0035 | 15.7159 | 27000 | 0.3410 | 0.5559 | 0.5462 | 0.5510 | 0.9579 |
+| 0.0033 | 16.0070 | 27500 | 0.3326 | 0.5550 | 0.5709 | 0.5628 | 0.9585 |
+| 0.0028 | 16.2980 | 28000 | 0.3400 | 0.5580 | 0.5751 | 0.5664 | 0.9590 |
+| 0.003 | 16.5891 | 28500 | 0.3418 | 0.5601 | 0.5624 | 0.5612 | 0.9586 |
+| 0.003 | 16.8801 | 29000 | 0.3340 | 0.5394 | 0.5874 | 0.5624 | 0.9577 |
+| 0.0029 | 17.1711 | 29500 | 0.3431 | 0.5511 | 0.5915 | 0.5706 | 0.9589 |
+| 0.0025 | 17.4622 | 30000 | 0.3326 | 0.5545 | 0.5832 | 0.5685 | 0.9583 |
+| 0.0023 | 17.7532 | 30500 | 0.3492 | 0.5364 | 0.5892 | 0.5616 | 0.9581 |
+| 0.0025 | 18.0442 | 31000 | 0.3481 | 0.5655 | 0.5695 | 0.5675 | 0.9586 |
+| 0.002 | 18.3353 | 31500 | 0.3450 | 0.5542 | 0.5768 | 0.5653 | 0.9585 |
+| 0.0023 | 18.6263 | 32000 | 0.3443 | 0.5611 | 0.5700 | 0.5655 | 0.9589 |
+| 0.0021 | 18.9173 | 32500 | 0.3484 | 0.5567 | 0.5871 | 0.5715 | 0.9588 |
+| 0.002 | 19.2084 | 33000 | 0.3528 | 0.5675 | 0.5611 | 0.5643 | 0.9584 |
+| 0.0018 | 19.4994 | 33500 | 0.3496 | 0.5511 | 0.5765 | 0.5635 | 0.9583 |
+| 0.0019 | 19.7905 | 34000 | 0.3608 | 0.5666 | 0.5692 | 0.5679 | 0.9586 |
+| 0.0015 | 20.0815 | 34500 | 0.3517 | 0.5674 | 0.5819 | 0.5745 | 0.9591 |
+| 0.0015 | 20.3725 | 35000 | 0.3551 | 0.5603 | 0.5764 | 0.5682 | 0.9585 |
+| 0.0015 | 20.6636 | 35500 | 0.3604 | 0.5724 | 0.5563 | 0.5642 | 0.9592 |
+| 0.0015 | 20.9546 | 36000 | 0.3649 | 0.5709 | 0.5742 | 0.5726 | 0.9594 |
+| 0.0013 | 21.2456 | 36500 | 0.3597 | 0.5482 | 0.5924 | 0.5694 | 0.9590 |
+| 0.0012 | 21.5367 | 37000 | 0.3557 | 0.5710 | 0.5715 | 0.5712 | 0.9594 |
+| 0.0013 | 21.8277 | 37500 | 0.3591 | 0.5774 | 0.5605 | 0.5688 | 0.9597 |
+| 0.0013 | 22.1187 | 38000 | 0.3563 | 0.5679 | 0.5813 | 0.5745 | 0.9594 |
+| 0.0007 | 22.4098 | 38500 | 0.3520 | 0.5590 | 0.5838 | 0.5711 | 0.9589 |
+| 0.001 | 22.7008 | 39000 | 0.3606 | 0.5703 | 0.5711 | 0.5707 | 0.9593 |
+| 0.001 | 22.9919 | 39500 | 0.3600 | 0.5618 | 0.5920 | 0.5765 | 0.9593 |
+| 0.001 | 23.2829 | 40000 | 0.3595 | 0.5635 | 0.5709 | 0.5672 | 0.9586 |
+| 0.0008 | 23.5739 | 40500 | 0.3658 | 0.5735 | 0.5689 | 0.5712 | 0.9592 |
+| 0.001 | 23.8650 | 41000 | 0.3589 | 0.5677 | 0.5793 | 0.5734 | 0.9594 |
+| 0.0007 | 24.1560 | 41500 | 0.3704 | 0.5819 | 0.5708 | 0.5763 | 0.9600 |
+| 0.0008 | 24.4470 | 42000 | 0.3656 | 0.5779 | 0.5702 | 0.5740 | 0.9595 |
+| 0.0007 | 24.7381 | 42500 | 0.3683 | 0.5647 | 0.5791 | 0.5718 | 0.9594 |
+| 0.0008 | 25.0291 | 43000 | 0.3781 | 0.5766 | 0.5638 | 0.5701 | 0.9596 |
+| 0.0007 | 25.3201 | 43500 | 0.3782 | 0.5738 | 0.5602 | 0.5669 | 0.9593 |
+| 0.0006 | 25.6112 | 44000 | 0.3658 | 0.5736 | 0.5750 | 0.5743 | 0.9593 |
+| 0.0006 | 25.9022 | 44500 | 0.3688 | 0.5788 | 0.5610 | 0.5698 | 0.9595 |
+| 0.0005 | 26.1932 | 45000 | 0.3696 | 0.5769 | 0.5842 | 0.5805 | 0.9599 |
+| 0.0004 | 26.4843 | 45500 | 0.3710 | 0.5781 | 0.5797 | 0.5789 | 0.9599 |
+| 0.0004 | 26.7753 | 46000 | 0.3742 | 0.5772 | 0.5670 | 0.5721 | 0.9595 |
+| 0.0006 | 27.0664 | 46500 | 0.3725 | 0.5672 | 0.5869 | 0.5769 | 0.9595 |
+| 0.0003 | 27.3574 | 47000 | 0.3750 | 0.5613 | 0.5874 | 0.5740 | 0.9595 |
+| 0.0004 | 27.6484 | 47500 | 0.3728 | 0.5633 | 0.5887 | 0.5757 | 0.9596 |
+| 0.0005 | 27.9395 | 48000 | 0.3728 | 0.5645 | 0.5793 | 0.5718 | 0.9596 |
+| 0.0003 | 28.2305 | 48500 | 0.3766 | 0.5719 | 0.5793 | 0.5756 | 0.9598 |
+| 0.0003 | 28.5215 | 49000 | 0.3821 | 0.5697 | 0.5793 | 0.5744 | 0.9598 |
+| 0.0004 | 28.8126 | 49500 | 0.3774 | 0.5726 | 0.5735 | 0.5731 | 0.9596 |
+| 0.0004 | 29.1036 | 50000 | 0.3798 | 0.5779 | 0.5734 | 0.5756 | 0.9598 |
+| 0.0004 | 29.3946 | 50500 | 0.3764 | 0.5721 | 0.5786 | 0.5753 | 0.9596 |
+| 0.0003 | 29.6857 | 51000 | 0.3763 | 0.5731 | 0.5800 | 0.5766 | 0.9597 |
+| 0.0002 | 29.9767 | 51500 | 0.3770 | 0.5737 | 0.5773 | 0.5755 | 0.9597 |
 
 
 ### Framework versions
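The card above describes a token-classification (NER) fine-tune of xlm-roberta-base. As a minimal inference sketch, hedged because this commit does not show the Hub namespace of the repo (so `owner/` below is a placeholder; a local clone of the repo directory works identically), the checkpoint can be loaded with the standard transformers token-classification API:

```python
# Minimal sketch of NER inference with this checkpoint (assumed repo path).
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

# Placeholder id: the actual Hub owner is not visible in this commit.
model_id = "owner/scenario-non-kd-scr-ner-full-xlmr_data-univner_full44"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# aggregation_strategy="simple" merges subword pieces back into word-level
# entity spans, matching how span-level precision/recall/F1 are scored.
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)

print(ner("Barack Obama visited Jakarta in 2010."))
```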
eval_result_ner.json CHANGED
@@ -1 +1 @@
-{"ceb_gja": {"precision": 0.
+{"ceb_gja": {"precision": 0.32, "recall": 0.6530612244897959, "f1": 0.42953020134228187, "accuracy": 0.9343629343629344}, "en_pud": {"precision": 0.4531914893617021, "recall": 0.39627906976744187, "f1": 0.4228287841191067, "accuracy": 0.9472043823196071}, "de_pud": {"precision": 0.11494688922610015, "recall": 0.29162656400384984, "f1": 0.16489795918367348, "accuracy": 0.8384979607144531}, "pt_pud": {"precision": 0.538961038961039, "recall": 0.5286624203821656, "f1": 0.5337620578778135, "accuracy": 0.9591575169820994}, "ru_pud": {"precision": 0.015295815295815297, "recall": 0.05115830115830116, "f1": 0.023550322150633192, "accuracy": 0.6840092999225006}, "sv_pud": {"precision": 0.4992526158445441, "recall": 0.32458697764820216, "f1": 0.3934040047114252, "accuracy": 0.9457957643111764}, "tl_trg": {"precision": 0.22448979591836735, "recall": 0.4782608695652174, "f1": 0.3055555555555556, "accuracy": 0.9400544959128065}, "tl_ugnayan": {"precision": 0.06172839506172839, "recall": 0.15151515151515152, "f1": 0.08771929824561403, "accuracy": 0.8960802187784868}, "zh_gsd": {"precision": 0.45828144458281445, "recall": 0.47979139504563234, "f1": 0.46878980891719746, "accuracy": 0.9306526806526807}, "zh_gsdsimp": {"precision": 0.4910941475826972, "recall": 0.5058977719528178, "f1": 0.4983860555196902, "accuracy": 0.9314019314019314}, "hr_set": {"precision": 0.7253371185237757, "recall": 0.7284390591589451, "f1": 0.7268847795163584, "accuracy": 0.9685902720527617}, "da_ddt": {"precision": 0.6297229219143576, "recall": 0.5592841163310962, "f1": 0.5924170616113743, "accuracy": 0.9701686121919585}, "en_ewt": {"precision": 0.569055036344756, "recall": 0.5036764705882353, "f1": 0.5343734763529986, "accuracy": 0.956927122763677}, "pt_bosque": {"precision": 0.5757085020242915, "recall": 0.5851851851851851, "f1": 0.5804081632653061, "accuracy": 0.963048833502391}, "sr_set": {"precision": 0.7763466042154566, "recall": 0.7827626918536009, "f1": 0.7795414462081128, "accuracy": 0.9688293494440067}, "sk_snk": {"precision": 0.39939485627836613, "recall": 0.28852459016393445, "f1": 0.3350253807106599, "accuracy": 0.9159076633165829}, "sv_talbanken": {"precision": 0.672514619883041, "recall": 0.5867346938775511, "f1": 0.6267029972752044, "accuracy": 0.9936693330716003}}
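The file stores one object per evaluation treebank, each with span-level precision, recall, and F1 (the harmonic mean 2PR/(P+R) of precision and recall) plus token-level accuracy. A small sketch, assuming the file sits in the working directory as in this repo, that sorts the treebanks by F1 and macro-averages the scores:

```python
# Sketch: summarize eval_result_ner.json. Each entry holds span-level
# precision/recall/F1 and token-level accuracy for one treebank; F1 is
# the harmonic mean 2*p*r / (p + r) of precision and recall.
import json

with open("eval_result_ner.json") as f:
    results = json.load(f)

# Treebanks sorted from strongest to weakest F1.
for name, m in sorted(results.items(), key=lambda kv: -kv[1]["f1"]):
    print(f"{name:14s} P={m['precision']:.4f} R={m['recall']:.4f} F1={m['f1']:.4f}")

macro_f1 = sum(m["f1"] for m in results.values()) / len(results)
print(f"macro F1 over {len(results)} treebanks: {macro_f1:.4f}")
```

On these numbers, sr_set (F1 0.7795) and hr_set (0.7269) top the list, while ru_pud collapses to 0.0236.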
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1908aa29a9e764da623cd0bb045d6f4a5fde2725e103cb029ed4ebcefe4ed6a1
 size 939737140
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9a200c39ad919b8c06db9ecd09e865c59c941095af4a96ada9443bce42689fd0
 size 5304
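Both binary files are stored as Git LFS pointers: a spec version line, the SHA-256 digest (oid) of the real content, and its exact size in bytes. A small sketch, assuming the artifacts have been downloaded locally, that re-checks a file against its pointer:

```python
# Sketch: verify a downloaded artifact against its Git LFS pointer, which
# records the content's SHA-256 digest (oid) and exact byte size.
import hashlib
import os

def matches_lfs_pointer(path: str, oid: str, size: int) -> bool:
    # Cheap size check first, then a streaming SHA-256 over the file.
    if os.path.getsize(path) != size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == oid

# Pointer values taken from training_args.bin in this commit.
print(matches_lfs_pointer(
    "training_args.bin",
    "9a200c39ad919b8c06db9ecd09e865c59c941095af4a96ada9443bce42689fd0",
    5304,
))
```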