{ "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8", "task_name": "MassiveScenarioClassification", "mteb_version": "1.25.1", "scores": { "test": [ { "accuracy": 0.726126, "f1": 0.724117, "f1_weighted": 0.726333, "scores_per_experiment": [ { "accuracy": 0.726295, "f1": 0.724368, "f1_weighted": 0.72364 }, { "accuracy": 0.734701, "f1": 0.737144, "f1_weighted": 0.733128 }, { "accuracy": 0.711163, "f1": 0.711532, "f1_weighted": 0.709362 }, { "accuracy": 0.705783, "f1": 0.699515, "f1_weighted": 0.709819 }, { "accuracy": 0.737727, "f1": 0.733102, "f1_weighted": 0.739757 }, { "accuracy": 0.740081, "f1": 0.729691, "f1_weighted": 0.74056 }, { "accuracy": 0.72226, "f1": 0.724384, "f1_weighted": 0.726585 }, { "accuracy": 0.722596, "f1": 0.725063, "f1_weighted": 0.721339 }, { "accuracy": 0.736382, "f1": 0.733771, "f1_weighted": 0.736615 }, { "accuracy": 0.724277, "f1": 0.722599, "f1_weighted": 0.722527 } ], "main_score": 0.726126, "hf_subset": "en", "languages": [ "eng-Latn" ] } ] }, "evaluation_time": 4.415860176086426, "kg_co2_emissions": null }