Behnamm committed on
Commit 78a5746
1 Parent(s): 13b9202

Upload 10 files

matina_leaderboard/CohereForAI/aya-23-35B.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.float16", "model_name": "CohereForAI/aya-23-35B"}, "results": {"QoK (Accuracy)": {"acc": 0.52987}, "khayyam_challenge (Accuracy)": {"acc": 0.37202}, "matina_MC (Accuracy)": {"acc": 0.4768}, "matina_shortanswer (Basic_containment)": {"acc": 0.133}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.10837}, "matina_shortanswer (Rouge)": {"acc": 0.14446}, "parsinlu_mc (Accuracy)": {"acc": 0.42476}, "parsinlu_nli (Accuracy)": {"acc": 0.67364}, "parsinlu_qqp (Accuracy)": {"acc": 0.87578}, "persian_ARC (Accuracy)": {"acc": 0.83399}, "persian_winogrande (Accuracy)": {"acc": 0.57602}}}
matina_leaderboard/CohereForAI/aya-23-8B.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.float16", "model_name": "CohereForAI/aya-23-8B"}, "results": {"QoK (Accuracy)": {"acc": 0.41123}, "khayyam_challenge (Accuracy)": {"acc": 0.31171}, "matina_MC (Accuracy)": {"acc": 0.39585}, "matina_shortanswer (Basic_containment)": {"acc": 0.16256}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.12808}, "matina_shortanswer (Rouge)": {"acc": 0.17871}, "parsinlu_mc (Accuracy)": {"acc": 0.36857}, "parsinlu_nli (Accuracy)": {"acc": 0.57143}, "parsinlu_qqp (Accuracy)": {"acc": 0.60491}, "persian_ARC (Accuracy)": {"acc": 0.69717}, "persian_winogrande (Accuracy)": {"acc": 0.52963}}}
matina_leaderboard/CohereForAI/c4ai-command-r-v01.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.float16", "model_name": "CohereForAI/c4ai-command-r-v01"}, "results": {"QoK (Accuracy)": {"acc": 0.52987}, "khayyam_challenge (Accuracy)": {"acc": 0.36316}, "matina_MC (Accuracy)": {"acc": 0.46199}, "matina_shortanswer (Basic_containment)": {"acc": 0.19704}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.13793}, "matina_shortanswer (Rouge)": {"acc": 0.21424}, "parsinlu_mc (Accuracy)": {"acc": 0.40476}, "parsinlu_nli (Accuracy)": {"acc": 0.59773}, "parsinlu_qqp (Accuracy)": {"acc": 0.84499}, "persian_ARC (Accuracy)": {"acc": 0.81275}, "persian_winogrande (Accuracy)": {"acc": 0.57061}}}
matina_leaderboard/PartAI/Dorna-Llama3-8B-Instruct.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.bfloat16", "model_name": "PartAI/Dorna-Llama3-8B-Instruct"}, "results": {"QoK (Accuracy)": {"acc": 0.37094}, "khayyam_challenge (Accuracy)": {"acc": 0.32499}, "matina_MC (Accuracy)": {"acc": 0.42547}, "matina_shortanswer (Basic_containment)": {"acc": 0.13793}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.10837}, "matina_shortanswer (Rouge)": {"acc": 0.14277}, "parsinlu_mc (Accuracy)": {"acc": 0.38857}, "parsinlu_nli (Accuracy)": {"acc": 0.5798}, "parsinlu_qqp (Accuracy)": {"acc": 0.73225}, "persian_ARC (Accuracy)": {"acc": 0.67025}, "persian_winogrande (Accuracy)": {"acc": 0.54228}}}
matina_leaderboard/Qwen/Qwen2-72B-Instruct.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.bfloat16", "model_name": "Qwen/Qwen2-72B-Instruct"}, "results": {"QoK (Accuracy)": {"acc": 0.5257}, "khayyam_challenge (Accuracy)": {"acc": 0.47911}, "matina_MC (Accuracy)": {"acc": 0.55281}, "matina_shortanswer (Basic_containment)": {"acc": 0.24138}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.17241}, "matina_shortanswer (Rouge)": {"acc": 0.23065}, "parsinlu_mc (Accuracy)": {"acc": 0.53048}, "parsinlu_nli (Accuracy)": {"acc": 0.77764}, "parsinlu_qqp (Accuracy)": {"acc": 0.82463}, "persian_ARC (Accuracy)": {"acc": 0.91785}, "persian_winogrande (Accuracy)": {"acc": 0.73443}}}
matina_leaderboard/Qwen/Qwen2-7B-Instruct.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.bfloat16", "model_name": "Qwen/Qwen2-7B-Instruct"}, "results": {"QoK (Accuracy)": {"acc": 0.37316}, "khayyam_challenge (Accuracy)": {"acc": 0.37009}, "matina_MC (Accuracy)": {"acc": 0.44916}, "matina_shortanswer (Basic_containment)": {"acc": 0.15764}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.12808}, "matina_shortanswer (Rouge)": {"acc": 0.16651}, "parsinlu_mc (Accuracy)": {"acc": 0.41714}, "parsinlu_nli (Accuracy)": {"acc": 0.69277}, "parsinlu_qqp (Accuracy)": {"acc": 0.82672}, "persian_ARC (Accuracy)": {"acc": 0.71983}, "persian_winogrande (Accuracy)": {"acc": 0.54477}}}
matina_leaderboard/meta-llama/Meta-Llama-3-70B-Instruct.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.bfloat16", "model_name": "meta-llama/Meta-Llama-3-70B-Instruct"}, "results": {"QoK (Accuracy)": {"acc": 0.57905}, "khayyam_challenge (Accuracy)": {"acc": 0.45119}, "matina_MC (Accuracy)": {"acc": 0.53011}, "matina_shortanswer (Basic_containment)": {"acc": 0.23645}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.19704}, "matina_shortanswer (Rouge)": {"acc": 0.24377}, "parsinlu_mc (Accuracy)": {"acc": 0.53905}, "parsinlu_nli (Accuracy)": {"acc": 0.78661}, "parsinlu_qqp (Accuracy)": {"acc": 0.88152}, "persian_ARC (Accuracy)": {"acc": 0.91416}, "persian_winogrande (Accuracy)": {"acc": 0.6503}}}
matina_leaderboard/meta-llama/Meta-Llama-3-8B-Instruct.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.bfloat16", "model_name": "meta-llama/Meta-Llama-3-8B-Instruct"}, "results": {"QoK (Accuracy)": {"acc": 0.41595}, "khayyam_challenge (Accuracy)": {"acc": 0.34925}, "matina_MC (Accuracy)": {"acc": 0.43435}, "matina_shortanswer (Basic_containment)": {"acc": 0.133}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.06897}, "matina_shortanswer (Rouge)": {"acc": 0.08993}, "parsinlu_mc (Accuracy)": {"acc": 0.40095}, "parsinlu_nli (Accuracy)": {"acc": 0.6061}, "parsinlu_qqp (Accuracy)": {"acc": 0.80689}, "persian_ARC (Accuracy)": {"acc": 0.71076}, "persian_winogrande (Accuracy)": {"acc": 0.54509}}}
matina_leaderboard/meta-llama/Meta-Llama-3.1-70B-Instruct.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.bfloat16", "model_name": "meta-llama/Meta-Llama-3.1-70B-Instruct"}, "results": {"QoK (Accuracy)": {"acc": 0.62378}, "khayyam_challenge (Accuracy)": {"acc": 0.47049}, "matina_MC (Accuracy)": {"acc": 0.56663}, "matina_shortanswer (Basic_containment)": {"acc": 0.24631}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.22167}, "matina_shortanswer (Rouge)": {"acc": 0.26916}, "parsinlu_mc (Accuracy)": {"acc": 0.58571}, "parsinlu_nli (Accuracy)": {"acc": 0.78542}, "parsinlu_qqp (Accuracy)": {"acc": 0.88466}, "persian_ARC (Accuracy)": {"acc": 0.93059}, "persian_winogrande (Accuracy)": {"acc": 0.72913}}}
matina_leaderboard/meta-llama/Meta-Llama-3.1-8B-Instruct.json ADDED
@@ -0,0 +1 @@
+ {"config": {"model_dtype": "torch.bfloat16", "model_name": "meta-llama/Meta-Llama-3.1-8B-Instruct"}, "results": {"QoK (Accuracy)": {"acc": 0.44012}, "khayyam_challenge (Accuracy)": {"acc": 0.35695}, "matina_MC (Accuracy)": {"acc": 0.4304}, "matina_shortanswer (Basic_containment)": {"acc": 0.19212}, "matina_shortanswer (Basic_exact_match)": {"acc": 0.13793}, "matina_shortanswer (Rouge)": {"acc": 0.1933}, "parsinlu_mc (Accuracy)": {"acc": 0.42667}, "parsinlu_nli (Accuracy)": {"acc": 0.59414}, "parsinlu_qqp (Accuracy)": {"acc": 0.80585}, "persian_ARC (Accuracy)": {"acc": 0.73768}, "persian_winogrande (Accuracy)": {"acc": 0.55374}}}