Initial Commit

Files changed:
- README.md (+111 -99)
- eval_result_ner.json (+1 -1)
- model.safetensors (+1 -1)
- training_args.bin (+1 -1)

README.md CHANGED
@@ -1,14 +1,14 @@
 ---
-base_model: FacebookAI/xlm-roberta-base
 library_name: transformers
 license: mit
+base_model: FacebookAI/xlm-roberta-base
+tags:
+- generated_from_trainer
 metrics:
 - precision
 - recall
 - f1
 - accuracy
-tags:
-- generated_from_trainer
 model-index:
 - name: scenario-non-kd-scr-ner-full-xlmr_data-univner_full55
   results: []
@@ -21,11 +21,11 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [FacebookAI/xlm-roberta-base](https://huggingface.co/FacebookAI/xlm-roberta-base) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
-- Precision: 0.
-- Recall: 0.
-- F1: 0.
-- Accuracy: 0.
+- Loss: 0.3759
+- Precision: 0.5780
+- Recall: 0.5852
+- F1: 0.5816
+- Accuracy: 0.9601
 
 ## Model description
 
@@ -56,97 +56,109 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
 |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
-| 0.
-| 0.
-| 0.2311 | 0.8731 | 1500 | 0.
-| 0.2056 | 1.1641 | 2000 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0043 | 14.8428 | 25500 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0022 | 17.4622 | 30000 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0019 | 19.2084 | 33000 | 0.
-| 0.
-| 0.0019 | 19.7905 | 34000 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0012 | 21.5367 | 37000 | 0.
-| 0.0012 | 21.8277 | 37500 | 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.0008 | 24.7381 | 42500 | 0.
-| 0.0007 | 25.0291 | 43000 | 0.
-| 0.
-| 0.0005 | 25.6112 | 44000 | 0.
-| 0.
-| 0.
-| 0.0005 | 26.4843 | 45500 | 0.
+| 0.3355 | 0.2910 | 500 | 0.2828 | 0.3844 | 0.1433 | 0.2087 | 0.9298 |
+| 0.2605 | 0.5821 | 1000 | 0.2558 | 0.3871 | 0.1868 | 0.2520 | 0.9344 |
+| 0.2311 | 0.8731 | 1500 | 0.2241 | 0.3571 | 0.2806 | 0.3143 | 0.9378 |
+| 0.2056 | 1.1641 | 2000 | 0.2276 | 0.3605 | 0.2643 | 0.3050 | 0.9393 |
+| 0.1852 | 1.4552 | 2500 | 0.2054 | 0.3487 | 0.3444 | 0.3465 | 0.9387 |
+| 0.1757 | 1.7462 | 3000 | 0.1992 | 0.3871 | 0.3546 | 0.3702 | 0.9420 |
+| 0.1625 | 2.0373 | 3500 | 0.2113 | 0.4095 | 0.3499 | 0.3773 | 0.9433 |
+| 0.1434 | 2.3283 | 4000 | 0.1937 | 0.4170 | 0.3934 | 0.4049 | 0.9443 |
+| 0.1315 | 2.6193 | 4500 | 0.1924 | 0.4105 | 0.4115 | 0.4110 | 0.9450 |
+| 0.1241 | 2.9104 | 5000 | 0.1836 | 0.4421 | 0.4601 | 0.4509 | 0.9486 |
+| 0.104 | 3.2014 | 5500 | 0.1876 | 0.4825 | 0.4428 | 0.4618 | 0.9504 |
+| 0.0915 | 3.4924 | 6000 | 0.1854 | 0.4810 | 0.4807 | 0.4809 | 0.9517 |
+| 0.0897 | 3.7835 | 6500 | 0.1792 | 0.4946 | 0.4728 | 0.4834 | 0.9527 |
+| 0.0775 | 4.0745 | 7000 | 0.1919 | 0.4944 | 0.5053 | 0.4998 | 0.9532 |
+| 0.0633 | 4.3655 | 7500 | 0.2008 | 0.4805 | 0.4996 | 0.4899 | 0.9531 |
+| 0.0631 | 4.6566 | 8000 | 0.1886 | 0.5087 | 0.4620 | 0.4842 | 0.9532 |
+| 0.0604 | 4.9476 | 8500 | 0.1795 | 0.5262 | 0.5372 | 0.5316 | 0.9562 |
+| 0.0471 | 5.2386 | 9000 | 0.1945 | 0.5189 | 0.5359 | 0.5273 | 0.9554 |
+| 0.042 | 5.5297 | 9500 | 0.2005 | 0.5047 | 0.5656 | 0.5334 | 0.9553 |
+| 0.0434 | 5.8207 | 10000 | 0.2155 | 0.5327 | 0.5162 | 0.5243 | 0.9568 |
+| 0.04 | 6.1118 | 10500 | 0.2309 | 0.4978 | 0.5470 | 0.5212 | 0.9552 |
+| 0.0327 | 6.4028 | 11000 | 0.2202 | 0.5152 | 0.5675 | 0.5401 | 0.9562 |
+| 0.0312 | 6.6938 | 11500 | 0.2149 | 0.4986 | 0.5849 | 0.5383 | 0.9548 |
+| 0.0307 | 6.9849 | 12000 | 0.2269 | 0.5241 | 0.5321 | 0.5281 | 0.9568 |
+| 0.022 | 7.2759 | 12500 | 0.2424 | 0.5394 | 0.5247 | 0.5320 | 0.9562 |
+| 0.0236 | 7.5669 | 13000 | 0.2423 | 0.5469 | 0.5444 | 0.5456 | 0.9580 |
+| 0.0226 | 7.8580 | 13500 | 0.2340 | 0.5453 | 0.5555 | 0.5504 | 0.9578 |
+| 0.0202 | 8.1490 | 14000 | 0.2501 | 0.5650 | 0.5291 | 0.5465 | 0.9581 |
+| 0.0174 | 8.4400 | 14500 | 0.2597 | 0.5297 | 0.5572 | 0.5431 | 0.9576 |
+| 0.0174 | 8.7311 | 15000 | 0.2535 | 0.5508 | 0.5507 | 0.5508 | 0.9579 |
+| 0.0164 | 9.0221 | 15500 | 0.2607 | 0.5234 | 0.5768 | 0.5488 | 0.9569 |
+| 0.0129 | 9.3132 | 16000 | 0.2679 | 0.5232 | 0.5669 | 0.5441 | 0.9563 |
+| 0.0133 | 9.6042 | 16500 | 0.2643 | 0.5308 | 0.5712 | 0.5502 | 0.9571 |
+| 0.0135 | 9.8952 | 17000 | 0.2712 | 0.5409 | 0.5721 | 0.5561 | 0.9578 |
+| 0.0116 | 10.1863 | 17500 | 0.2710 | 0.5568 | 0.5448 | 0.5507 | 0.9582 |
+| 0.0099 | 10.4773 | 18000 | 0.2728 | 0.5304 | 0.5726 | 0.5507 | 0.9573 |
+| 0.0106 | 10.7683 | 18500 | 0.2941 | 0.5576 | 0.5438 | 0.5506 | 0.9583 |
+| 0.0113 | 11.0594 | 19000 | 0.2930 | 0.5597 | 0.5569 | 0.5583 | 0.9585 |
+| 0.0077 | 11.3504 | 19500 | 0.2816 | 0.5526 | 0.5719 | 0.5621 | 0.9586 |
+| 0.0086 | 11.6414 | 20000 | 0.2954 | 0.5673 | 0.5447 | 0.5558 | 0.9585 |
+| 0.0089 | 11.9325 | 20500 | 0.2833 | 0.5848 | 0.5423 | 0.5628 | 0.9593 |
+| 0.0077 | 12.2235 | 21000 | 0.2880 | 0.5434 | 0.5826 | 0.5623 | 0.9581 |
+| 0.0059 | 12.5146 | 21500 | 0.3104 | 0.5639 | 0.5409 | 0.5522 | 0.9584 |
+| 0.0073 | 12.8056 | 22000 | 0.2894 | 0.5675 | 0.5578 | 0.5626 | 0.9591 |
+| 0.0059 | 13.0966 | 22500 | 0.3091 | 0.5471 | 0.5786 | 0.5624 | 0.9581 |
+| 0.0056 | 13.3877 | 23000 | 0.3030 | 0.5525 | 0.5724 | 0.5623 | 0.9577 |
+| 0.006 | 13.6787 | 23500 | 0.2938 | 0.5554 | 0.5767 | 0.5659 | 0.9575 |
+| 0.0052 | 13.9697 | 24000 | 0.3198 | 0.5519 | 0.5780 | 0.5647 | 0.9587 |
+| 0.0048 | 14.2608 | 24500 | 0.3026 | 0.5562 | 0.5826 | 0.5691 | 0.9589 |
+| 0.0046 | 14.5518 | 25000 | 0.3129 | 0.5488 | 0.5788 | 0.5634 | 0.9588 |
+| 0.0043 | 14.8428 | 25500 | 0.3114 | 0.5622 | 0.5689 | 0.5655 | 0.9590 |
+| 0.0037 | 15.1339 | 26000 | 0.3201 | 0.5652 | 0.5739 | 0.5695 | 0.9591 |
+| 0.0038 | 15.4249 | 26500 | 0.3291 | 0.5575 | 0.5685 | 0.5629 | 0.9590 |
+| 0.004 | 15.7159 | 27000 | 0.3273 | 0.5615 | 0.5754 | 0.5684 | 0.9592 |
+| 0.004 | 16.0070 | 27500 | 0.3250 | 0.5847 | 0.5291 | 0.5555 | 0.9584 |
+| 0.0031 | 16.2980 | 28000 | 0.3263 | 0.5560 | 0.5783 | 0.5669 | 0.9584 |
+| 0.0029 | 16.5891 | 28500 | 0.3374 | 0.5577 | 0.5799 | 0.5685 | 0.9591 |
+| 0.0031 | 16.8801 | 29000 | 0.3300 | 0.5492 | 0.5858 | 0.5669 | 0.9586 |
+| 0.0032 | 17.1711 | 29500 | 0.3334 | 0.5554 | 0.5804 | 0.5676 | 0.9588 |
+| 0.0022 | 17.4622 | 30000 | 0.3447 | 0.5689 | 0.5780 | 0.5734 | 0.9594 |
+| 0.0026 | 17.7532 | 30500 | 0.3441 | 0.5632 | 0.5607 | 0.5619 | 0.9591 |
+| 0.0029 | 18.0442 | 31000 | 0.3405 | 0.5559 | 0.5894 | 0.5722 | 0.9591 |
+| 0.002 | 18.3353 | 31500 | 0.3388 | 0.5406 | 0.5872 | 0.5629 | 0.9579 |
+| 0.0025 | 18.6263 | 32000 | 0.3423 | 0.5415 | 0.5963 | 0.5676 | 0.9582 |
+| 0.0024 | 18.9173 | 32500 | 0.3430 | 0.5574 | 0.5545 | 0.5559 | 0.9585 |
+| 0.0019 | 19.2084 | 33000 | 0.3432 | 0.5440 | 0.5881 | 0.5652 | 0.9581 |
+| 0.0019 | 19.4994 | 33500 | 0.3476 | 0.5577 | 0.5839 | 0.5705 | 0.9592 |
+| 0.0019 | 19.7905 | 34000 | 0.3479 | 0.5583 | 0.5853 | 0.5715 | 0.9595 |
+| 0.0019 | 20.0815 | 34500 | 0.3628 | 0.5721 | 0.5644 | 0.5682 | 0.9598 |
+| 0.0016 | 20.3725 | 35000 | 0.3552 | 0.5896 | 0.5537 | 0.5711 | 0.9595 |
+| 0.0016 | 20.6636 | 35500 | 0.3632 | 0.5498 | 0.5679 | 0.5587 | 0.9586 |
+| 0.0017 | 20.9546 | 36000 | 0.3484 | 0.5727 | 0.5682 | 0.5704 | 0.9592 |
+| 0.0016 | 21.2456 | 36500 | 0.3647 | 0.5698 | 0.5784 | 0.5741 | 0.9597 |
+| 0.0012 | 21.5367 | 37000 | 0.3680 | 0.5697 | 0.5663 | 0.5680 | 0.9597 |
+| 0.0012 | 21.8277 | 37500 | 0.3599 | 0.5823 | 0.5702 | 0.5762 | 0.9593 |
+| 0.0013 | 22.1187 | 38000 | 0.3634 | 0.5771 | 0.5735 | 0.5753 | 0.9596 |
+| 0.0011 | 22.4098 | 38500 | 0.3678 | 0.5831 | 0.5563 | 0.5694 | 0.9594 |
+| 0.0011 | 22.7008 | 39000 | 0.3684 | 0.5666 | 0.5640 | 0.5653 | 0.9594 |
+| 0.0012 | 22.9919 | 39500 | 0.3588 | 0.5696 | 0.5848 | 0.5771 | 0.9595 |
+| 0.0009 | 23.2829 | 40000 | 0.3711 | 0.5794 | 0.5654 | 0.5723 | 0.9595 |
+| 0.001 | 23.5739 | 40500 | 0.3736 | 0.5585 | 0.5810 | 0.5695 | 0.9592 |
+| 0.001 | 23.8650 | 41000 | 0.3715 | 0.5729 | 0.5728 | 0.5728 | 0.9595 |
+| 0.001 | 24.1560 | 41500 | 0.3625 | 0.5689 | 0.5768 | 0.5728 | 0.9594 |
+| 0.0007 | 24.4470 | 42000 | 0.3724 | 0.5704 | 0.5758 | 0.5731 | 0.9598 |
+| 0.0008 | 24.7381 | 42500 | 0.3690 | 0.5798 | 0.5608 | 0.5702 | 0.9598 |
+| 0.0007 | 25.0291 | 43000 | 0.3744 | 0.5831 | 0.5653 | 0.5741 | 0.9599 |
+| 0.0006 | 25.3201 | 43500 | 0.3704 | 0.5782 | 0.5721 | 0.5751 | 0.9598 |
+| 0.0005 | 25.6112 | 44000 | 0.3757 | 0.5770 | 0.5695 | 0.5732 | 0.9599 |
+| 0.0007 | 25.9022 | 44500 | 0.3650 | 0.5709 | 0.5806 | 0.5757 | 0.9595 |
+| 0.0005 | 26.1932 | 45000 | 0.3749 | 0.5728 | 0.5731 | 0.5730 | 0.9599 |
+| 0.0005 | 26.4843 | 45500 | 0.3741 | 0.5819 | 0.5682 | 0.5750 | 0.9601 |
+| 0.0006 | 26.7753 | 46000 | 0.3795 | 0.5832 | 0.5680 | 0.5755 | 0.9601 |
+| 0.0005 | 27.0664 | 46500 | 0.3718 | 0.5742 | 0.5820 | 0.5781 | 0.9601 |
+| 0.0005 | 27.3574 | 47000 | 0.3745 | 0.5771 | 0.5810 | 0.5790 | 0.9600 |
+| 0.0004 | 27.6484 | 47500 | 0.3839 | 0.5795 | 0.5578 | 0.5684 | 0.9596 |
+| 0.0006 | 27.9395 | 48000 | 0.3732 | 0.5760 | 0.5879 | 0.5819 | 0.9601 |
+| 0.0004 | 28.2305 | 48500 | 0.3752 | 0.5774 | 0.5853 | 0.5814 | 0.9600 |
+| 0.0004 | 28.5215 | 49000 | 0.3761 | 0.5727 | 0.5872 | 0.5799 | 0.9599 |
+| 0.0004 | 28.8126 | 49500 | 0.3765 | 0.5739 | 0.5832 | 0.5785 | 0.9599 |
+| 0.0003 | 29.1036 | 50000 | 0.3764 | 0.5784 | 0.5823 | 0.5803 | 0.9600 |
+| 0.0002 | 29.3946 | 50500 | 0.3776 | 0.5812 | 0.5806 | 0.5809 | 0.9602 |
+| 0.0003 | 29.6857 | 51000 | 0.3750 | 0.5742 | 0.5915 | 0.5828 | 0.9599 |
+| 0.0003 | 29.9767 | 51500 | 0.3759 | 0.5780 | 0.5852 | 0.5816 | 0.9601 |
 
 
 ### Framework versions
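The card above describes a token-classification (NER) checkpoint built on XLM-R. For orientation, here is a minimal inference sketch using the `transformers` pipeline API; the `your-org/...` repo id is a placeholder, since the commit view does not show which account hosts the model.

```python
# Minimal NER inference sketch. The repo id below is hypothetical:
# replace "your-org" with the namespace that actually hosts this checkpoint.
from transformers import pipeline

MODEL_ID = "your-org/scenario-non-kd-scr-ner-full-xlmr_data-univner_full55"

# aggregation_strategy="simple" merges sub-word pieces into entity spans.
ner = pipeline("token-classification", model=MODEL_ID,
               aggregation_strategy="simple")

print(ner("Barack Obama visited Jakarta in 2010."))
# -> a list of dicts with entity_group, score, word, start, end
```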
eval_result_ner.json CHANGED
@@ -1 +1 @@
-{"ceb_gja": {"precision": 0.3258426966292135, "recall": 0.5918367346938775, "f1": 0.42028985507246375, "accuracy": 0.
+{"ceb_gja": {"precision": 0.3258426966292135, "recall": 0.5918367346938775, "f1": 0.42028985507246375, "accuracy": 0.9374517374517375}, "en_pud": {"precision": 0.46381243628950053, "recall": 0.4232558139534884, "f1": 0.44260700389105057, "accuracy": 0.9476293917642614}, "de_pud": {"precision": 0.11134091692519821, "recall": 0.3108758421559192, "f1": 0.16395939086294417, "accuracy": 0.8230275186348507}, "pt_pud": {"precision": 0.5327433628318584, "recall": 0.5477707006369427, "f1": 0.5401525347689548, "accuracy": 0.9596701841329517}, "ru_pud": {"precision": 0.016121384542437174, "recall": 0.06563706563706563, "f1": 0.025885039969547007, "accuracy": 0.6260397830018083}, "sv_pud": {"precision": 0.5033921302578019, "recall": 0.36054421768707484, "f1": 0.42015855039637606, "accuracy": 0.9462151394422311}, "tl_trg": {"precision": 0.2909090909090909, "recall": 0.6956521739130435, "f1": 0.41025641025641024, "accuracy": 0.9373297002724795}, "tl_ugnayan": {"precision": 0.039603960396039604, "recall": 0.12121212121212122, "f1": 0.05970149253731343, "accuracy": 0.8696444849589791}, "zh_gsd": {"precision": 0.5056890012642224, "recall": 0.5215123859191656, "f1": 0.5134788189987163, "accuracy": 0.9345654345654346}, "zh_gsdsimp": {"precision": 0.48717948717948717, "recall": 0.4980340760157274, "f1": 0.49254698639014904, "accuracy": 0.9330669330669331}, "hr_set": {"precision": 0.7071330589849109, "recall": 0.7348538845331433, "f1": 0.7207270185249912, "accuracy": 0.9701154163231657}, "da_ddt": {"precision": 0.590047393364929, "recall": 0.5570469798657718, "f1": 0.5730724971231301, "accuracy": 0.9689713658585254}, "en_ewt": {"precision": 0.5788934426229508, "recall": 0.5193014705882353, "f1": 0.5474806201550387, "accuracy": 0.958560784157469}, "pt_bosque": {"precision": 0.5647149460708782, "recall": 0.6032921810699589, "f1": 0.583366494230004, "accuracy": 0.9634110998406028}, "sr_set": {"precision": 0.7679814385150812, "recall": 0.781582054309327, "f1": 0.7747220596840257, "accuracy": 0.9699676035373435}, "sk_snk": {"precision": 0.3611111111111111, "recall": 0.28415300546448086, "f1": 0.3180428134556575, "accuracy": 0.9156721105527639}, "sv_talbanken": {"precision": 0.6595744680851063, "recall": 0.6326530612244898, "f1": 0.6458333333333333, "accuracy": 0.9939147077587476}}
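eval_result_ner.json is a flat JSON object mapping UniversalNER test splits (e.g. en_pud, zh_gsd) to span-level precision/recall/F1 plus token accuracy. A small sketch, assuming a local copy of the file, that ranks the splits by F1:

```python
# Rank the per-dataset NER results stored in eval_result_ner.json.
import json

with open("eval_result_ner.json") as f:
    results = json.load(f)

for name, m in sorted(results.items(), key=lambda kv: -kv[1]["f1"]):
    print(f"{name:14s} P={m['precision']:.4f} R={m['recall']:.4f} "
          f"F1={m['f1']:.4f} Acc={m['accuracy']:.4f}")
```

On these numbers the strongest transfer targets are sr_set (F1 0.775) and hr_set (0.721), while ru_pud (0.026) and tl_ugnayan (0.060) are the weakest.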
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6a2d07c4cc2e3638b69757e623808e7a42a4ec929f1c0ee7ae52281f00a3561c
 size 939737140
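On the Hub, model.safetensors is stored via Git LFS, so the diff only touches the pointer file. Once the ~940 MB object itself has been downloaded (for example with `huggingface_hub.hf_hub_download`), the tensors can be inspected directly; a minimal sketch:

```python
# Inspect the checkpoint tensors without instantiating the model.
# Assumes model.safetensors has already been downloaded locally.
from safetensors.torch import load_file

state_dict = load_file("model.safetensors")
n_params = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {n_params:,} parameters")
```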
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5eed5584cabc6c38d6c245f58b09461818e09ef8b9eea5271d8a0e83bd8f69be
 size 5304
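As with the weights file, only the `oid sha256:` line of this LFS pointer changed: a pointer records the object's SHA-256 digest and byte size rather than its contents. A sketch for verifying a downloaded object against the oid recorded above:

```python
# Check a downloaded LFS object against the sha256 oid from its pointer.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "5eed5584cabc6c38d6c245f58b09461818e09ef8b9eea5271d8a0e83bd8f69be"
assert sha256_of("training_args.bin") == expected, "hash mismatch"
```

training_args.bin itself is the pickled `TrainingArguments` object saved by the `Trainer` and can typically be restored with `torch.load`.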