{
  "dataset_revision": null,
  "mteb_dataset_name": "Ocnli",
  "mteb_version": "1.1.1",
  "validation": {
    "cos_sim": {
      "accuracy": 0.8110449377368706,
      "accuracy_threshold": 0.5930440425872803,
      "ap": 0.8516262845422288,
      "f1": 0.8297771455666192,
      "f1_threshold": 0.5647917985916138,
      "precision": 0.7530120481927711,
      "recall": 0.9239704329461457
    },
    "dot": {
      "accuracy": 0.8110449377368706,
      "accuracy_threshold": 0.5982565879821777,
      "ap": 0.8511448381714821,
      "f1": 0.8299190090519295,
      "f1_threshold": 0.5676579475402832,
      "precision": 0.7560763888888888,
      "recall": 0.9197465681098205
    },
    "euclidean": {
      "accuracy": 0.8105035192203573,
      "accuracy_threshold": 0.8990259170532227,
      "ap": 0.8516821622996872,
      "f1": 0.8300094966761632,
      "f1_threshold": 0.9317486882209778,
      "precision": 0.7540983606557377,
      "recall": 0.9229144667370645
    },
    "evaluation_time": 1.5,
    "manhattan": {
      "accuracy": 0.8115863562533838,
      "accuracy_threshold": 30.2585506439209,
      "ap": 0.8514921863839019,
      "f1": 0.8291048348492102,
      "f1_threshold": 31.238189697265625,
      "precision": 0.7583187390542907,
      "recall": 0.914466737064414
    },
    "max": {
      "accuracy": 0.8115863562533838,
      "ap": 0.8516821622996872,
      "f1": 0.8300094966761632
    }
  }
}