haryoaw committed on
Commit
b817a72
1 Parent(s): 4ba4e20

Initial Commit

Browse files
Files changed (4) hide show
  1. README.md +73 -87
  2. eval_result_ner.json +1 -1
  3. model.safetensors +1 -1
  4. training_args.bin +1 -1
README.md CHANGED
@@ -1,14 +1,14 @@
1
  ---
2
- base_model: microsoft/mdeberta-v3-base
3
  library_name: transformers
4
  license: mit
 
 
 
5
  metrics:
6
  - precision
7
  - recall
8
  - f1
9
  - accuracy
10
- tags:
11
- - generated_from_trainer
12
  model-index:
13
  - name: scenario-non-kd-scr-ner-half-mdeberta_data-univner_full44
14
  results: []
@@ -21,11 +21,11 @@ should probably proofread and complete it, then remove this comment. -->
21
 
22
  This model is a fine-tuned version of [microsoft/mdeberta-v3-base](https://huggingface.co/microsoft/mdeberta-v3-base) on the None dataset.
23
  It achieves the following results on the evaluation set:
24
- - Loss: 0.3378
25
- - Precision: 0.6195
26
- - Recall: 0.5879
27
- - F1: 0.6033
28
- - Accuracy: 0.9616
29
 
30
  ## Model description
31
 
@@ -56,85 +56,71 @@ The following hyperparameters were used during training:
56
 
57
  | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
58
  |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
59
- | 0.3563 | 0.2910 | 500 | 0.2861 | 0.3203 | 0.1180 | 0.1725 | 0.9287 |
60
- | 0.2401 | 0.5821 | 1000 | 0.2090 | 0.3505 | 0.2733 | 0.3071 | 0.9387 |
61
- | 0.174 | 0.8731 | 1500 | 0.1769 | 0.4394 | 0.3834 | 0.4095 | 0.9471 |
62
- | 0.1313 | 1.1641 | 2000 | 0.1604 | 0.4853 | 0.4826 | 0.4840 | 0.9521 |
63
- | 0.1095 | 1.4552 | 2500 | 0.1546 | 0.5202 | 0.5009 | 0.5104 | 0.9550 |
64
- | 0.1015 | 1.7462 | 3000 | 0.1461 | 0.5462 | 0.5275 | 0.5367 | 0.9573 |
65
- | 0.0906 | 2.0373 | 3500 | 0.1468 | 0.5191 | 0.5465 | 0.5325 | 0.9568 |
66
- | 0.0657 | 2.3283 | 4000 | 0.1519 | 0.5667 | 0.5585 | 0.5626 | 0.9595 |
67
- | 0.0646 | 2.6193 | 4500 | 0.1503 | 0.5462 | 0.5764 | 0.5609 | 0.9591 |
68
- | 0.064 | 2.9104 | 5000 | 0.1444 | 0.5672 | 0.5866 | 0.5767 | 0.9605 |
69
- | 0.0493 | 3.2014 | 5500 | 0.1597 | 0.5855 | 0.5685 | 0.5769 | 0.9605 |
70
- | 0.0433 | 3.4924 | 6000 | 0.1598 | 0.5787 | 0.5647 | 0.5716 | 0.9605 |
71
- | 0.0437 | 3.7835 | 6500 | 0.1612 | 0.5897 | 0.5797 | 0.5846 | 0.9608 |
72
- | 0.0405 | 4.0745 | 7000 | 0.1684 | 0.5856 | 0.5878 | 0.5867 | 0.9611 |
73
- | 0.0313 | 4.3655 | 7500 | 0.1723 | 0.5919 | 0.5934 | 0.5927 | 0.9613 |
74
- | 0.0307 | 4.6566 | 8000 | 0.1733 | 0.6067 | 0.5786 | 0.5923 | 0.9612 |
75
- | 0.0304 | 4.9476 | 8500 | 0.1748 | 0.5973 | 0.5840 | 0.5906 | 0.9614 |
76
- | 0.0216 | 5.2386 | 9000 | 0.1873 | 0.6119 | 0.5810 | 0.5961 | 0.9616 |
77
- | 0.0212 | 5.5297 | 9500 | 0.1899 | 0.6016 | 0.5848 | 0.5931 | 0.9612 |
78
- | 0.0227 | 5.8207 | 10000 | 0.1950 | 0.5911 | 0.5917 | 0.5914 | 0.9610 |
79
- | 0.0202 | 6.1118 | 10500 | 0.1940 | 0.5989 | 0.5976 | 0.5983 | 0.9614 |
80
- | 0.0149 | 6.4028 | 11000 | 0.2050 | 0.5884 | 0.5950 | 0.5917 | 0.9607 |
81
- | 0.0165 | 6.6938 | 11500 | 0.2098 | 0.6098 | 0.5823 | 0.5957 | 0.9612 |
82
- | 0.0166 | 6.9849 | 12000 | 0.2119 | 0.6263 | 0.5780 | 0.6012 | 0.9620 |
83
- | 0.0119 | 7.2759 | 12500 | 0.2172 | 0.6047 | 0.5855 | 0.5949 | 0.9615 |
84
- | 0.0119 | 7.5669 | 13000 | 0.2149 | 0.5934 | 0.6034 | 0.5984 | 0.9604 |
85
- | 0.0119 | 7.8580 | 13500 | 0.2251 | 0.6080 | 0.5980 | 0.6030 | 0.9617 |
86
- | 0.0102 | 8.1490 | 14000 | 0.2327 | 0.6112 | 0.5787 | 0.5945 | 0.9616 |
87
- | 0.0088 | 8.4400 | 14500 | 0.2338 | 0.6080 | 0.5979 | 0.6029 | 0.9611 |
88
- | 0.0096 | 8.7311 | 15000 | 0.2313 | 0.6128 | 0.5952 | 0.6039 | 0.9617 |
89
- | 0.0088 | 9.0221 | 15500 | 0.2380 | 0.6062 | 0.5947 | 0.6004 | 0.9614 |
90
- | 0.0068 | 9.3132 | 16000 | 0.2441 | 0.6051 | 0.5911 | 0.5980 | 0.9612 |
91
- | 0.0074 | 9.6042 | 16500 | 0.2476 | 0.6019 | 0.5823 | 0.5920 | 0.9606 |
92
- | 0.0068 | 9.8952 | 17000 | 0.2475 | 0.6062 | 0.6034 | 0.6048 | 0.9614 |
93
- | 0.0064 | 10.1863 | 17500 | 0.2572 | 0.6234 | 0.5813 | 0.6016 | 0.9616 |
94
- | 0.0049 | 10.4773 | 18000 | 0.2610 | 0.6253 | 0.5835 | 0.6037 | 0.9615 |
95
- | 0.006 | 10.7683 | 18500 | 0.2564 | 0.6156 | 0.5872 | 0.6011 | 0.9613 |
96
- | 0.0055 | 11.0594 | 19000 | 0.2615 | 0.6078 | 0.5954 | 0.6016 | 0.9616 |
97
- | 0.0039 | 11.3504 | 19500 | 0.2723 | 0.6091 | 0.5949 | 0.6019 | 0.9616 |
98
- | 0.0048 | 11.6414 | 20000 | 0.2689 | 0.6125 | 0.5934 | 0.6028 | 0.9617 |
99
- | 0.0043 | 11.9325 | 20500 | 0.2658 | 0.5989 | 0.6040 | 0.6014 | 0.9611 |
100
- | 0.0037 | 12.2235 | 21000 | 0.2737 | 0.6320 | 0.5816 | 0.6058 | 0.9618 |
101
- | 0.0029 | 12.5146 | 21500 | 0.2780 | 0.6253 | 0.5801 | 0.6019 | 0.9618 |
102
- | 0.0037 | 12.8056 | 22000 | 0.2790 | 0.6266 | 0.5882 | 0.6068 | 0.9619 |
103
- | 0.0039 | 13.0966 | 22500 | 0.2777 | 0.6176 | 0.5853 | 0.6010 | 0.9615 |
104
- | 0.003 | 13.3877 | 23000 | 0.2797 | 0.6363 | 0.5747 | 0.6039 | 0.9619 |
105
- | 0.0028 | 13.6787 | 23500 | 0.2866 | 0.6217 | 0.5838 | 0.6021 | 0.9618 |
106
- | 0.0031 | 13.9697 | 24000 | 0.2908 | 0.6365 | 0.5729 | 0.6030 | 0.9621 |
107
- | 0.0024 | 14.2608 | 24500 | 0.2903 | 0.6078 | 0.6003 | 0.6041 | 0.9615 |
108
- | 0.0023 | 14.5518 | 25000 | 0.2900 | 0.6224 | 0.5901 | 0.6058 | 0.9619 |
109
- | 0.0029 | 14.8428 | 25500 | 0.2982 | 0.6301 | 0.5754 | 0.6015 | 0.9618 |
110
- | 0.0025 | 15.1339 | 26000 | 0.2919 | 0.6172 | 0.5943 | 0.6055 | 0.9616 |
111
- | 0.0016 | 15.4249 | 26500 | 0.2958 | 0.6299 | 0.5799 | 0.6039 | 0.9619 |
112
- | 0.0025 | 15.7159 | 27000 | 0.2970 | 0.6207 | 0.5868 | 0.6033 | 0.9612 |
113
- | 0.0022 | 16.0070 | 27500 | 0.2983 | 0.6241 | 0.5858 | 0.6043 | 0.9616 |
114
- | 0.0016 | 16.2980 | 28000 | 0.3050 | 0.6342 | 0.5765 | 0.6040 | 0.9618 |
115
- | 0.0019 | 16.5891 | 28500 | 0.3049 | 0.6241 | 0.5921 | 0.6077 | 0.9619 |
116
- | 0.0016 | 16.8801 | 29000 | 0.3085 | 0.6222 | 0.5874 | 0.6043 | 0.9620 |
117
- | 0.0017 | 17.1711 | 29500 | 0.3049 | 0.6237 | 0.5989 | 0.6111 | 0.9620 |
118
- | 0.0013 | 17.4622 | 30000 | 0.3155 | 0.6333 | 0.5839 | 0.6076 | 0.9620 |
119
- | 0.0014 | 17.7532 | 30500 | 0.3114 | 0.6166 | 0.5879 | 0.6019 | 0.9613 |
120
- | 0.0012 | 18.0442 | 31000 | 0.3186 | 0.6207 | 0.5868 | 0.6033 | 0.9618 |
121
- | 0.0009 | 18.3353 | 31500 | 0.3182 | 0.6202 | 0.5944 | 0.6070 | 0.9619 |
122
- | 0.0012 | 18.6263 | 32000 | 0.3183 | 0.6150 | 0.5908 | 0.6026 | 0.9615 |
123
- | 0.0014 | 18.9173 | 32500 | 0.3284 | 0.6158 | 0.5807 | 0.5978 | 0.9613 |
124
- | 0.0013 | 19.2084 | 33000 | 0.3241 | 0.6114 | 0.5892 | 0.6001 | 0.9614 |
125
- | 0.0011 | 19.4994 | 33500 | 0.3270 | 0.6190 | 0.5858 | 0.6019 | 0.9614 |
126
- | 0.0011 | 19.7905 | 34000 | 0.3284 | 0.6254 | 0.5722 | 0.5976 | 0.9611 |
127
- | 0.0009 | 20.0815 | 34500 | 0.3245 | 0.6250 | 0.5884 | 0.6061 | 0.9618 |
128
- | 0.0012 | 20.3725 | 35000 | 0.3238 | 0.6221 | 0.5833 | 0.6021 | 0.9616 |
129
- | 0.0009 | 20.6636 | 35500 | 0.3231 | 0.6015 | 0.6029 | 0.6022 | 0.9612 |
130
- | 0.0009 | 20.9546 | 36000 | 0.3270 | 0.6204 | 0.5914 | 0.6056 | 0.9617 |
131
- | 0.0008 | 21.2456 | 36500 | 0.3288 | 0.6288 | 0.5807 | 0.6038 | 0.9615 |
132
- | 0.0006 | 21.5367 | 37000 | 0.3358 | 0.6196 | 0.5829 | 0.6007 | 0.9614 |
133
- | 0.0009 | 21.8277 | 37500 | 0.3353 | 0.6323 | 0.5784 | 0.6042 | 0.9618 |
134
- | 0.0008 | 22.1187 | 38000 | 0.3339 | 0.6295 | 0.5891 | 0.6086 | 0.9620 |
135
- | 0.0005 | 22.4098 | 38500 | 0.3380 | 0.6170 | 0.5923 | 0.6044 | 0.9617 |
136
- | 0.0007 | 22.7008 | 39000 | 0.3382 | 0.6183 | 0.5866 | 0.6021 | 0.9619 |
137
- | 0.0008 | 22.9919 | 39500 | 0.3378 | 0.6195 | 0.5879 | 0.6033 | 0.9616 |
138
 
139
 
140
  ### Framework versions
 
1
  ---
 
2
  library_name: transformers
3
  license: mit
4
+ base_model: microsoft/mdeberta-v3-base
5
+ tags:
6
+ - generated_from_trainer
7
  metrics:
8
  - precision
9
  - recall
10
  - f1
11
  - accuracy
 
 
12
  model-index:
13
  - name: scenario-non-kd-scr-ner-half-mdeberta_data-univner_full44
14
  results: []
 
21
 
22
  This model is a fine-tuned version of [microsoft/mdeberta-v3-base](https://huggingface.co/microsoft/mdeberta-v3-base) on the None dataset.
23
  It achieves the following results on the evaluation set:
24
+ - Loss: 0.3227
25
+ - Precision: 0.6152
26
+ - Recall: 0.5804
27
+ - F1: 0.5973
28
+ - Accuracy: 0.9617
29
 
30
  ## Model description
31
 
 
56
 
57
  | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
58
  |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
59
+ | 0.357 | 0.2910 | 500 | 0.2880 | 0.3006 | 0.1147 | 0.1660 | 0.9285 |
60
+ | 0.2425 | 0.5821 | 1000 | 0.2106 | 0.3468 | 0.2782 | 0.3087 | 0.9383 |
61
+ | 0.1767 | 0.8731 | 1500 | 0.1785 | 0.4322 | 0.3857 | 0.4076 | 0.9469 |
62
+ | 0.1352 | 1.1641 | 2000 | 0.1621 | 0.4749 | 0.4745 | 0.4747 | 0.9520 |
63
+ | 0.1107 | 1.4552 | 2500 | 0.1556 | 0.5238 | 0.4991 | 0.5111 | 0.9553 |
64
+ | 0.1031 | 1.7462 | 3000 | 0.1480 | 0.5536 | 0.5207 | 0.5367 | 0.9576 |
65
+ | 0.0912 | 2.0373 | 3500 | 0.1435 | 0.5286 | 0.5578 | 0.5428 | 0.9579 |
66
+ | 0.0661 | 2.3283 | 4000 | 0.1496 | 0.5510 | 0.5698 | 0.5602 | 0.9589 |
67
+ | 0.066 | 2.6193 | 4500 | 0.1502 | 0.5587 | 0.5742 | 0.5663 | 0.9594 |
68
+ | 0.0646 | 2.9104 | 5000 | 0.1440 | 0.5779 | 0.5803 | 0.5791 | 0.9609 |
69
+ | 0.0492 | 3.2014 | 5500 | 0.1590 | 0.5898 | 0.5656 | 0.5774 | 0.9608 |
70
+ | 0.0428 | 3.4924 | 6000 | 0.1613 | 0.5819 | 0.5634 | 0.5725 | 0.9603 |
71
+ | 0.0447 | 3.7835 | 6500 | 0.1602 | 0.5970 | 0.5742 | 0.5854 | 0.9615 |
72
+ | 0.0407 | 4.0745 | 7000 | 0.1667 | 0.5744 | 0.5995 | 0.5867 | 0.9611 |
73
+ | 0.0311 | 4.3655 | 7500 | 0.1762 | 0.5897 | 0.5754 | 0.5824 | 0.9610 |
74
+ | 0.0308 | 4.6566 | 8000 | 0.1707 | 0.5928 | 0.5862 | 0.5895 | 0.9609 |
75
+ | 0.0303 | 4.9476 | 8500 | 0.1717 | 0.5882 | 0.5915 | 0.5899 | 0.9610 |
76
+ | 0.0217 | 5.2386 | 9000 | 0.1826 | 0.5808 | 0.6025 | 0.5915 | 0.9611 |
77
+ | 0.0212 | 5.5297 | 9500 | 0.1827 | 0.5949 | 0.6006 | 0.5977 | 0.9613 |
78
+ | 0.0228 | 5.8207 | 10000 | 0.1942 | 0.5760 | 0.5809 | 0.5784 | 0.9601 |
79
+ | 0.02 | 6.1118 | 10500 | 0.1973 | 0.5982 | 0.5913 | 0.5947 | 0.9611 |
80
+ | 0.0146 | 6.4028 | 11000 | 0.2058 | 0.5938 | 0.5871 | 0.5904 | 0.9608 |
81
+ | 0.0161 | 6.6938 | 11500 | 0.2025 | 0.5973 | 0.5878 | 0.5925 | 0.9612 |
82
+ | 0.0166 | 6.9849 | 12000 | 0.2053 | 0.5972 | 0.5921 | 0.5947 | 0.9613 |
83
+ | 0.0115 | 7.2759 | 12500 | 0.2259 | 0.6083 | 0.5601 | 0.5832 | 0.9609 |
84
+ | 0.0116 | 7.5669 | 13000 | 0.2133 | 0.5944 | 0.6029 | 0.5986 | 0.9608 |
85
+ | 0.0114 | 7.8580 | 13500 | 0.2208 | 0.5883 | 0.5973 | 0.5928 | 0.9608 |
86
+ | 0.0098 | 8.1490 | 14000 | 0.2363 | 0.6118 | 0.5745 | 0.5926 | 0.9611 |
87
+ | 0.0084 | 8.4400 | 14500 | 0.2387 | 0.6094 | 0.5748 | 0.5916 | 0.9611 |
88
+ | 0.0097 | 8.7311 | 15000 | 0.2285 | 0.5819 | 0.5998 | 0.5907 | 0.9602 |
89
+ | 0.0083 | 9.0221 | 15500 | 0.2402 | 0.5992 | 0.5806 | 0.5897 | 0.9610 |
90
+ | 0.0064 | 9.3132 | 16000 | 0.2456 | 0.6297 | 0.5679 | 0.5972 | 0.9617 |
91
+ | 0.0068 | 9.6042 | 16500 | 0.2487 | 0.6035 | 0.5752 | 0.5890 | 0.9607 |
92
+ | 0.0072 | 9.8952 | 17000 | 0.2403 | 0.5910 | 0.6009 | 0.5959 | 0.9610 |
93
+ | 0.0062 | 10.1863 | 17500 | 0.2465 | 0.5981 | 0.5972 | 0.5976 | 0.9615 |
94
+ | 0.0045 | 10.4773 | 18000 | 0.2562 | 0.6062 | 0.5776 | 0.5915 | 0.9611 |
95
+ | 0.0055 | 10.7683 | 18500 | 0.2542 | 0.6139 | 0.5826 | 0.5978 | 0.9615 |
96
+ | 0.0054 | 11.0594 | 19000 | 0.2596 | 0.6128 | 0.5807 | 0.5963 | 0.9616 |
97
+ | 0.0037 | 11.3504 | 19500 | 0.2631 | 0.5872 | 0.6015 | 0.5943 | 0.9607 |
98
+ | 0.0048 | 11.6414 | 20000 | 0.2613 | 0.5998 | 0.6012 | 0.6005 | 0.9615 |
99
+ | 0.004 | 11.9325 | 20500 | 0.2576 | 0.6108 | 0.5892 | 0.5998 | 0.9616 |
100
+ | 0.0042 | 12.2235 | 21000 | 0.2647 | 0.5943 | 0.6027 | 0.5984 | 0.9611 |
101
+ | 0.0029 | 12.5146 | 21500 | 0.2773 | 0.6058 | 0.5819 | 0.5936 | 0.9613 |
102
+ | 0.0037 | 12.8056 | 22000 | 0.2785 | 0.6111 | 0.5874 | 0.5990 | 0.9612 |
103
+ | 0.0031 | 13.0966 | 22500 | 0.2819 | 0.6281 | 0.5790 | 0.6026 | 0.9618 |
104
+ | 0.0029 | 13.3877 | 23000 | 0.2794 | 0.6002 | 0.5915 | 0.5958 | 0.9609 |
105
+ | 0.0024 | 13.6787 | 23500 | 0.2842 | 0.6017 | 0.6019 | 0.6018 | 0.9615 |
106
+ | 0.0034 | 13.9697 | 24000 | 0.2889 | 0.6133 | 0.5806 | 0.5965 | 0.9616 |
107
+ | 0.0021 | 14.2608 | 24500 | 0.2876 | 0.6133 | 0.5803 | 0.5963 | 0.9616 |
108
+ | 0.0025 | 14.5518 | 25000 | 0.2871 | 0.6130 | 0.5845 | 0.5984 | 0.9614 |
109
+ | 0.0027 | 14.8428 | 25500 | 0.2921 | 0.6087 | 0.5835 | 0.5958 | 0.9613 |
110
+ | 0.0021 | 15.1339 | 26000 | 0.2888 | 0.5822 | 0.5998 | 0.5909 | 0.9607 |
111
+ | 0.0017 | 15.4249 | 26500 | 0.2899 | 0.6095 | 0.5911 | 0.6002 | 0.9613 |
112
+ | 0.0026 | 15.7159 | 27000 | 0.2968 | 0.6065 | 0.5839 | 0.5950 | 0.9613 |
113
+ | 0.002 | 16.0070 | 27500 | 0.3023 | 0.6158 | 0.5752 | 0.5949 | 0.9614 |
114
+ | 0.0015 | 16.2980 | 28000 | 0.2988 | 0.6006 | 0.5954 | 0.5980 | 0.9614 |
115
+ | 0.002 | 16.5891 | 28500 | 0.2983 | 0.5905 | 0.6045 | 0.5974 | 0.9611 |
116
+ | 0.0017 | 16.8801 | 29000 | 0.3006 | 0.6080 | 0.5838 | 0.5956 | 0.9614 |
117
+ | 0.0016 | 17.1711 | 29500 | 0.3078 | 0.5986 | 0.5921 | 0.5953 | 0.9612 |
118
+ | 0.0016 | 17.4622 | 30000 | 0.3066 | 0.6084 | 0.5892 | 0.5987 | 0.9617 |
119
+ | 0.0015 | 17.7532 | 30500 | 0.3153 | 0.6110 | 0.5786 | 0.5943 | 0.9617 |
120
+ | 0.0015 | 18.0442 | 31000 | 0.3134 | 0.5952 | 0.5954 | 0.5953 | 0.9611 |
121
+ | 0.0009 | 18.3353 | 31500 | 0.3201 | 0.6045 | 0.5904 | 0.5974 | 0.9615 |
122
+ | 0.0017 | 18.6263 | 32000 | 0.3149 | 0.6095 | 0.5875 | 0.5983 | 0.9614 |
123
+ | 0.0014 | 18.9173 | 32500 | 0.3227 | 0.6152 | 0.5804 | 0.5973 | 0.9617 |
 
 
 
 
 
 
 
 
 
 
 
 
 
 
124
 
125
 
126
  ### Framework versions
eval_result_ner.json CHANGED
@@ -1 +1 @@
1
- {"ceb_gja": {"precision": 0.29213483146067415, "recall": 0.5306122448979592, "f1": 0.37681159420289856, "accuracy": 0.9312741312741313}, "en_pud": {"precision": 0.47300215982721383, "recall": 0.40744186046511627, "f1": 0.43778110944527737, "accuracy": 0.9476293917642614}, "de_pud": {"precision": 0.13361072584373554, "recall": 0.2781520692974013, "f1": 0.18051217988757023, "accuracy": 0.8577656931226852}, "pt_pud": {"precision": 0.5815533980582525, "recall": 0.5450409463148317, "f1": 0.5627054955378111, "accuracy": 0.9598410731832357}, "ru_pud": {"precision": 0.0183049147442327, "recall": 0.07046332046332046, "f1": 0.029060509554140128, "accuracy": 0.6514595711702402}, "sv_pud": {"precision": 0.497737556561086, "recall": 0.3206997084548105, "f1": 0.3900709219858156, "accuracy": 0.9455336548542671}, "tl_trg": {"precision": 0.171875, "recall": 0.4782608695652174, "f1": 0.25287356321839083, "accuracy": 0.9168937329700273}, "tl_ugnayan": {"precision": 0.024390243902439025, "recall": 0.06060606060606061, "f1": 0.034782608695652174, "accuracy": 0.8960802187784868}, "zh_gsd": {"precision": 0.5587096774193548, "recall": 0.5645371577574967, "f1": 0.5616083009079117, "accuracy": 0.9429736929736929}, "zh_gsdsimp": {"precision": 0.5513307984790875, "recall": 0.5701179554390564, "f1": 0.5605670103092785, "accuracy": 0.9417249417249417}, "hr_set": {"precision": 0.7652987760979122, "recall": 0.7576621525302922, "f1": 0.761461318051576, "accuracy": 0.9717230008244023}, "da_ddt": {"precision": 0.6149870801033591, "recall": 0.5324384787472036, "f1": 0.5707434052757793, "accuracy": 0.9701686121919585}, "en_ewt": {"precision": 0.6163655685441021, "recall": 0.5330882352941176, "f1": 0.5717102020699852, "accuracy": 0.9601147547515639}, "pt_bosque": {"precision": 0.6460101867572157, "recall": 0.6263374485596708, "f1": 0.6360217300459673, "accuracy": 0.9665265903492247}, "sr_set": {"precision": 0.7976608187134503, "recall": 0.8051948051948052, "f1": 0.8014101057579318, "accuracy": 
0.9690920234655459}, "sk_snk": {"precision": 0.4172077922077922, "recall": 0.28087431693989073, "f1": 0.3357282821685173, "accuracy": 0.9151224874371859}, "sv_talbanken": {"precision": 0.6077348066298343, "recall": 0.5612244897959183, "f1": 0.583554376657825, "accuracy": 0.9932276586347353}}
 
1
+ {"ceb_gja": {"precision": 0.26, "recall": 0.5306122448979592, "f1": 0.348993288590604, "accuracy": 0.9196911196911197}, "en_pud": {"precision": 0.493491124260355, "recall": 0.38790697674418606, "f1": 0.434375, "accuracy": 0.9478655081224027}, "de_pud": {"precision": 0.128099173553719, "recall": 0.2685274302213667, "f1": 0.1734535281317998, "accuracy": 0.8564530495522947}, "pt_pud": {"precision": 0.554679802955665, "recall": 0.5122838944494995, "f1": 0.532639545884579, "accuracy": 0.9577904045798266}, "ru_pud": {"precision": 0.01598173515981735, "recall": 0.04054054054054054, "f1": 0.02292576419213974, "accuracy": 0.7532937225523121}, "sv_pud": {"precision": 0.5259938837920489, "recall": 0.33430515063168126, "f1": 0.4087938205585264, "accuracy": 0.9465296707905221}, "tl_trg": {"precision": 0.24193548387096775, "recall": 0.6521739130434783, "f1": 0.35294117647058826, "accuracy": 0.9277929155313351}, "tl_ugnayan": {"precision": 0.06666666666666667, "recall": 0.15151515151515152, "f1": 0.09259259259259259, "accuracy": 0.8997265268915223}, "zh_gsd": {"precision": 0.5648535564853556, "recall": 0.5280312907431551, "f1": 0.545822102425876, "accuracy": 0.9410589410589411}, "zh_gsdsimp": {"precision": 0.5839017735334243, "recall": 0.5609436435124509, "f1": 0.5721925133689839, "accuracy": 0.9446386946386947}, "hr_set": {"precision": 0.7600585223116313, "recall": 0.7405559515324305, "f1": 0.7501805054151623, "accuracy": 0.9703627370156637}, "da_ddt": {"precision": 0.6727748691099477, "recall": 0.5749440715883669, "f1": 0.6200241254523522, "accuracy": 0.9720642522198942}, "en_ewt": {"precision": 0.6070686070686071, "recall": 0.5367647058823529, "f1": 0.5697560975609756, "accuracy": 0.9593576921544408}, "pt_bosque": {"precision": 0.6539130434782608, "recall": 0.6189300411522634, "f1": 0.6359408033826638, "accuracy": 0.9660194174757282}, "sr_set": {"precision": 0.7801672640382318, "recall": 0.7709563164108618, "f1": 0.7755344418052256, "accuracy": 0.9667279572716925}, "sk_snk": 
{"precision": 0.39080459770114945, "recall": 0.26010928961748636, "f1": 0.3123359580052494, "accuracy": 0.914572864321608}, "sv_talbanken": {"precision": 0.7088607594936709, "recall": 0.5714285714285714, "f1": 0.632768361581921, "accuracy": 0.993963782696177}}
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:abb957699839d24053c05ba6f20077786fdf05a88df0bbd155b894143e68282b
3
  size 428939068
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:44344290029bdd862c294ff09e558ab6f3207276283256111bd7e74ab26bfbb9
3
  size 428939068
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:08c354ba89f21429ac330b863dc692960aca57fa63ab7140ab3615ed9efa627f
3
  size 5304
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3e8bd72e6d5081aa3461c60ff89546899aaf2ea31dee6c5683937c951e13801b
3
  size 5304