OmniEval/eval-results/demo-leaderboard/qwen2-72b_e5-mistral-7b/results_2023-12-08 15:46:20.425378.json
{
  "results": {
    "retrieval": {
      "mrr": 0.303246013667426,
      "map": 0.2960516324981017
    },
    "generation": {
      "em": 0.002277904328018223,
      "f1": 0.3705164550873997,
      "rouge1": 0.3270311806826159,
      "rouge2": 0.17476659877087528,
      "rougeL": 0.22225645997479143,
      "accuracy": 0.385250569476082,
      "completeness": 0.5877535101404057,
      "hallucination": 1.2922719349215572,
      "utilization": 0.4793244030285381,
      "numerical_accuracy": 0.28622540250447226
    }
  },
  "config": {
    "eval_name": "e5-mistral-7b_e5-mistral-7b",
    "generation_model": "intfloat/e5-mistral-7b-instruct",
    "generation_model_args": {
      "name": "intfloat/e5-mistral-7b-instruct",
      "num_params": 7,
      "open_source": true
    },
    "retrieval_model": "intfloat/e5-mistral-7b-instruct",
    "retrieval_model_args": {
      "name": "intfloat/e5-mistral-7b-instruct",
      "num_params": 7,
      "open_source": true
    }
  }
}
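
The file above holds two top-level objects: "results" (retrieval and generation metrics) and "config" (the model pair that produced them). Below is a minimal Python sketch, not part of the repository, showing how a results file with this schema could be loaded and summarized; the file path passed at the bottom is a hypothetical local copy of this file.

import json

def summarize(path: str) -> None:
    # Load one results file with the schema shown above.
    with open(path, encoding="utf-8") as f:
        data = json.load(f)

    # Identify the evaluated model pair from the "config" block.
    cfg = data["config"]
    print(f"eval: {cfg['eval_name']}")
    print(f"  retrieval model:  {cfg['retrieval_model']}")
    print(f"  generation model: {cfg['generation_model']}")

    # Print every metric group ("retrieval", "generation") and its scores.
    for group, metrics in data["results"].items():
        print(f"{group}:")
        for name, value in metrics.items():
            print(f"  {name:20s} {value:.4f}")

if __name__ == "__main__":
    # Hypothetical path to a local copy of the JSON above.
    summarize("results_2023-12-08 15:46:20.425378.json")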