{ "dataset_revision": "3d86128a09e091d6018b6d26cad27f2739fc2db7", "task_name": "ImdbClassification", "mteb_version": "1.25.1", "scores": { "test": [ { "accuracy": 0.61454, "f1": 0.611751, "f1_weighted": 0.611751, "ap": 0.572238, "ap_weighted": 0.572238, "scores_per_experiment": [ { "accuracy": 0.64184, "f1": 0.641046, "f1_weighted": 0.641046, "ap": 0.593128, "ap_weighted": 0.593128 }, { "accuracy": 0.64316, "f1": 0.643142, "f1_weighted": 0.643142, "ap": 0.591785, "ap_weighted": 0.591785 }, { "accuracy": 0.5602, "f1": 0.556495, "f1_weighted": 0.556495, "ap": 0.534535, "ap_weighted": 0.534535 }, { "accuracy": 0.63604, "f1": 0.635737, "f1_weighted": 0.635737, "ap": 0.58766, "ap_weighted": 0.58766 }, { "accuracy": 0.60992, "f1": 0.604073, "f1_weighted": 0.604073, "ap": 0.56468, "ap_weighted": 0.56468 }, { "accuracy": 0.60764, "f1": 0.606284, "f1_weighted": 0.606284, "ap": 0.566947, "ap_weighted": 0.566947 }, { "accuracy": 0.60716, "f1": 0.592887, "f1_weighted": 0.592887, "ap": 0.571938, "ap_weighted": 0.571938 }, { "accuracy": 0.60104, "f1": 0.599904, "f1_weighted": 0.599904, "ap": 0.561947, "ap_weighted": 0.561947 }, { "accuracy": 0.5806, "f1": 0.580167, "f1_weighted": 0.580167, "ap": 0.546404, "ap_weighted": 0.546404 }, { "accuracy": 0.6578, "f1": 0.657772, "f1_weighted": 0.657772, "ap": 0.603361, "ap_weighted": 0.603361 } ], "main_score": 0.61454, "hf_subset": "default", "languages": [ "eng-Latn" ] } ] }, "evaluation_time": 65.28035974502563, "kg_co2_emissions": null }