mihaimasala committed
Commit • e7c509f • 1 Parent(s): 30131bb
Update README.md

README.md CHANGED
@@ -116,6 +116,24 @@ model-index:
       - name: Average macro-f1
         type: macro-f1
         value: 62.49
+  - task:
+      type: text-generation
+    dataset:
+      name: LaRoSeDa_binary_finetuned
+      type: LaRoSeDa_binary_finetuned
+    metrics:
+      - name: Average macro-f1
+        type: macro-f1
+        value: 98.93
+  - task:
+      type: text-generation
+    dataset:
+      name: LaRoSeDa_multiclass_finetuned
+      type: LaRoSeDa_multiclass_finetuned
+    metrics:
+      - name: Average macro-f1
+        type: macro-f1
+        value: 88.33
   - task:
       type: text-generation
     dataset:
@@ -134,6 +152,24 @@ model-index:
       - name: Average bleu
         type: bleu
         value: 23.16
+  - task:
+      type: text-generation
+    dataset:
+      name: WMT_EN-RO_finetuned
+      type: WMT_EN-RO_finetuned
+    metrics:
+      - name: Average bleu
+        type: bleu
+        value: 28.43
+  - task:
+      type: text-generation
+    dataset:
+      name: WMT_RO-EN_finetuned
+      type: WMT_RO-EN_finetuned
+    metrics:
+      - name: Average bleu
+        type: bleu
+        value: 40.94
   - task:
       type: text-generation
     dataset:
@@ -152,6 +188,24 @@ model-index:
       - name: Average f1
         type: f1
         value: 70.74
+  - task:
+      type: text-generation
+    dataset:
+      name: XQuAD_finetuned
+      type: XQuAD_finetuned
+    metrics:
+      - name: Average exact_match
+        type: exact_match
+        value: 50.00
+  - task:
+      type: text-generation
+    dataset:
+      name: XQuAD_finetuned
+      type: XQuAD_finetuned
+    metrics:
+      - name: Average f1
+        type: f1
+        value: 64.10
   - task:
       type: text-generation
     dataset:
@@ -170,6 +224,24 @@ model-index:
       - name: Average pearson
         type: pearson
         value: 77.10
+  - task:
+      type: text-generation
+    dataset:
+      name: STS_finetuned
+      type: STS_finetuned
+    metrics:
+      - name: Average spearman
+        type: spearman
+        value: 89.45
+  - task:
+      type: text-generation
+    dataset:
+      name: STS_finetuned
+      type: STS_finetuned
+    metrics:
+      - name: Average pearson
+        type: pearson
+        value: 89.89
   - task:
       type: text-generation
     dataset:
@@ -417,6 +489,7 @@ model-index:
         type: pearson
         value: 71.33
 
+
 ---
 
 # Model Card for Model ID
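The added entries follow the Hugging Face model-index schema, so each finetuned score (task, dataset, metric type, value) becomes machine-readable card metadata rather than free text. Below is a minimal sketch of reading those scores back with the huggingface_hub ModelCard API; the repo id is a placeholder for illustration, not taken from this commit.

# Sketch only: read the model-index eval results back from the card metadata.
# Assumes the huggingface_hub package; "OpenLLM-Ro/example-model" is a placeholder repo id.
from huggingface_hub import ModelCard

card = ModelCard.load("OpenLLM-Ro/example-model")  # fetches README.md and parses its YAML front matter

# card.data.eval_results holds one EvalResult per metric entry in the model-index block
for result in card.data.eval_results or []:
    print(result.dataset_name, result.metric_type, result.metric_value)

ModelCard.load also accepts a local path to a README.md, which is a convenient way to validate the front matter before pushing a metadata update like this one.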