{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.35665529010238906,
            "acc_stderr": 0.013998056902620192,
            "acc_norm": 0.42918088737201365,
            "acc_norm_stderr": 0.014464085894870655
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.39892451702848036,
            "acc_stderr": 0.004886764243204055,
            "acc_norm": 0.5349531965743876,
            "acc_norm_stderr": 0.004977574188421318
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5087719298245614,
            "acc_stderr": 0.038342347441649924,
            "acc_norm": 0.5087719298245614,
            "acc_norm_stderr": 0.038342347441649924
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5922330097087378,
            "acc_stderr": 0.04865777570410769,
            "acc_norm": 0.5922330097087378,
            "acc_norm_stderr": 0.04865777570410769
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.558109833971903,
            "acc_stderr": 0.017758800534214414,
            "acc_norm": 0.558109833971903,
            "acc_norm_stderr": 0.017758800534214414
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45185185185185184,
            "acc_stderr": 0.04299268905480863,
            "acc_norm": 0.45185185185185184,
            "acc_norm_stderr": 0.04299268905480863
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4127659574468085,
            "acc_stderr": 0.03218471141400351,
            "acc_norm": 0.4127659574468085,
            "acc_norm_stderr": 0.03218471141400351
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.03828401115079021,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.03828401115079021
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5273311897106109,
            "acc_stderr": 0.028355633568328188,
            "acc_norm": 0.5273311897106109,
            "acc_norm_stderr": 0.028355633568328188
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4663677130044843,
            "acc_stderr": 0.033481800170603065,
            "acc_norm": 0.4663677130044843,
            "acc_norm_stderr": 0.033481800170603065
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5343511450381679,
            "acc_stderr": 0.04374928560599738,
            "acc_norm": 0.5343511450381679,
            "acc_norm_stderr": 0.04374928560599738
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.034273086529999365,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.034273086529999365
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.503448275862069,
            "acc_stderr": 0.04166567577101579,
            "acc_norm": 0.503448275862069,
            "acc_norm_stderr": 0.04166567577101579
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.04617034827006717,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.04617034827006717
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5336134453781513,
            "acc_stderr": 0.03240501447690071,
            "acc_norm": 0.5336134453781513,
            "acc_norm_stderr": 0.03240501447690071
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5025641025641026,
            "acc_stderr": 0.025350672979412188,
            "acc_norm": 0.5025641025641026,
            "acc_norm_stderr": 0.025350672979412188
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43349753694581283,
            "acc_stderr": 0.034867317274198714,
            "acc_norm": 0.43349753694581283,
            "acc_norm_stderr": 0.034867317274198714
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5225806451612903,
            "acc_stderr": 0.02841498501970786,
            "acc_norm": 0.5225806451612903,
            "acc_norm_stderr": 0.02841498501970786
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7435897435897436,
            "acc_stderr": 0.028605953702004253,
            "acc_norm": 0.7435897435897436,
            "acc_norm_stderr": 0.028605953702004253
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4867924528301887,
            "acc_stderr": 0.030762134874500476,
            "acc_norm": 0.4867924528301887,
            "acc_norm_stderr": 0.030762134874500476
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.509090909090909,
            "acc_stderr": 0.04788339768702861,
            "acc_norm": 0.509090909090909,
            "acc_norm_stderr": 0.04788339768702861
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.02831753349606648,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.02831753349606648
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.37748344370860926,
            "acc_stderr": 0.039580272311215706,
            "acc_norm": 0.37748344370860926,
            "acc_norm_stderr": 0.039580272311215706
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6318407960199005,
            "acc_stderr": 0.03410410565495301,
            "acc_norm": 0.6318407960199005,
            "acc_norm_stderr": 0.03410410565495301
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4393063583815029,
            "acc_stderr": 0.037842719328874674,
            "acc_norm": 0.4393063583815029,
            "acc_norm_stderr": 0.037842719328874674
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3386243386243386,
            "acc_stderr": 0.024373197867983056,
            "acc_norm": 0.3386243386243386,
            "acc_norm_stderr": 0.024373197867983056
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.04174752578923185,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.04174752578923185
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49421965317919075,
            "acc_stderr": 0.026917296179149116,
            "acc_norm": 0.49421965317919075,
            "acc_norm_stderr": 0.026917296179149116
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4723926380368098,
            "acc_stderr": 0.039223782906109894,
            "acc_norm": 0.4723926380368098,
            "acc_norm_stderr": 0.039223782906109894
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.027777777777777797,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.027777777777777797
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5906735751295337,
            "acc_stderr": 0.03548608168860807,
            "acc_norm": 0.5906735751295337,
            "acc_norm_stderr": 0.03548608168860807
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3157894736842105,
            "acc_stderr": 0.04372748290278008,
            "acc_norm": 0.3157894736842105,
            "acc_norm_stderr": 0.04372748290278008
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6146788990825688,
            "acc_stderr": 0.020865850852794104,
            "acc_norm": 0.6146788990825688,
            "acc_norm_stderr": 0.020865850852794104
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.039325376803928704,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.039325376803928704
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5130718954248366,
            "acc_stderr": 0.028620130800700246,
            "acc_norm": 0.5130718954248366,
            "acc_norm_stderr": 0.028620130800700246
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6528925619834711,
            "acc_stderr": 0.04345724570292534,
            "acc_norm": 0.6528925619834711,
            "acc_norm_stderr": 0.04345724570292534
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5197368421052632,
            "acc_stderr": 0.040657710025626036,
            "acc_norm": 0.5197368421052632,
            "acc_norm_stderr": 0.040657710025626036
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4068627450980392,
            "acc_stderr": 0.019873802005061173,
            "acc_norm": 0.4068627450980392,
            "acc_norm_stderr": 0.019873802005061173
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.35106382978723405,
            "acc_stderr": 0.028473501272963768,
            "acc_norm": 0.35106382978723405,
            "acc_norm_stderr": 0.028473501272963768
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.043270409325787317,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.043270409325787317
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.33796296296296297,
            "acc_stderr": 0.03225941352631295,
            "acc_norm": 0.33796296296296297,
            "acc_norm_stderr": 0.03225941352631295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24134078212290502,
            "acc_stderr": 0.014310999547961443,
            "acc_norm": 0.24134078212290502,
            "acc_norm_stderr": 0.014310999547961443
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.41544117647058826,
            "acc_stderr": 0.029935342707877746,
            "acc_norm": 0.41544117647058826,
            "acc_norm_stderr": 0.029935342707877746
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.46122448979591835,
            "acc_stderr": 0.03191282052669277,
            "acc_norm": 0.46122448979591835,
            "acc_norm_stderr": 0.03191282052669277
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6497890295358649,
            "acc_stderr": 0.031052391937584346,
            "acc_norm": 0.6497890295358649,
            "acc_norm_stderr": 0.031052391937584346
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.33572359843546284,
            "acc_stderr": 0.012061304157664618,
            "acc_norm": 0.33572359843546284,
            "acc_norm_stderr": 0.012061304157664618
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5343137254901961,
            "acc_stderr": 0.03501038327635897,
            "acc_norm": 0.5343137254901961,
            "acc_norm_stderr": 0.03501038327635897
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5757575757575758,
            "acc_stderr": 0.038592681420702636,
            "acc_norm": 0.5757575757575758,
            "acc_norm_stderr": 0.038592681420702636
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26805385556915545,
            "mc1_stderr": 0.015506204722834555,
            "mc2": 0.417505165464333,
            "mc2_stderr": 0.014838211675706857
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5655253837072018,
            "acc_stderr": 0.017042098620824928,
            "acc_norm": 0.6056670602125147,
            "acc_norm_stderr": 0.01680209067489322
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "GAI-LLM/Yi-Ko-6B-mixed-v13",
        "model_sha": "226493d5e98b136d519ed5d3052e8aff77ceef1e",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}