{
  "dataset_revision": "4672e20407010da34463acc759c162ca9734bca6",
  "task_name": "MassiveIntentClassification",
  "mteb_version": "1.25.1",
  "scores": {
    "test": [
      {
        "accuracy": 0.652354,
        "f1": 0.635945,
        "f1_weighted": 0.650395,
        "scores_per_experiment": [
          {
            "accuracy": 0.644923,
            "f1": 0.639513,
            "f1_weighted": 0.640635
          },
          {
            "accuracy": 0.677539,
            "f1": 0.652575,
            "f1_weighted": 0.67554
          },
          {
            "accuracy": 0.649294,
            "f1": 0.630992,
            "f1_weighted": 0.645129
          },
          {
            "accuracy": 0.680565,
            "f1": 0.659978,
            "f1_weighted": 0.675892
          },
          {
            "accuracy": 0.653329,
            "f1": 0.637184,
            "f1_weighted": 0.649634
          },
          {
            "accuracy": 0.641896,
            "f1": 0.624381,
            "f1_weighted": 0.640526
          },
          {
            "accuracy": 0.642233,
            "f1": 0.633383,
            "f1_weighted": 0.643923
          },
          {
            "accuracy": 0.638534,
            "f1": 0.618431,
            "f1_weighted": 0.633252
          },
          {
            "accuracy": 0.64694,
            "f1": 0.63405,
            "f1_weighted": 0.649656
          },
          {
            "accuracy": 0.648285,
            "f1": 0.628963,
            "f1_weighted": 0.649767
          }
        ],
        "main_score": 0.652354,
        "hf_subset": "en",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 11.81202483177185,
  "kg_co2_emissions": null
}