{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.40955631399317405,
            "acc_stderr": 0.014370358632472432,
            "acc_norm": 0.4522184300341297,
            "acc_norm_stderr": 0.014544519880633825
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3794064927305318,
            "acc_stderr": 0.004842476363739972,
            "acc_norm": 0.49790878311093406,
            "acc_norm_stderr": 0.004989737768749952
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5730994152046783,
            "acc_stderr": 0.03793620616529916,
            "acc_norm": 0.5730994152046783,
            "acc_norm_stderr": 0.03793620616529916
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6213592233009708,
            "acc_stderr": 0.048026946982589726,
            "acc_norm": 0.6213592233009708,
            "acc_norm_stderr": 0.048026946982589726
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5057471264367817,
            "acc_stderr": 0.01787878232612923,
            "acc_norm": 0.5057471264367817,
            "acc_norm_stderr": 0.01787878232612923
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45925925925925926,
            "acc_stderr": 0.04304979692464243,
            "acc_norm": 0.45925925925925926,
            "acc_norm_stderr": 0.04304979692464243
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4595744680851064,
            "acc_stderr": 0.032579014820998356,
            "acc_norm": 0.4595744680851064,
            "acc_norm_stderr": 0.032579014820998356
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.03828401115079022,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.03828401115079022
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4983922829581994,
            "acc_stderr": 0.02839794490780661,
            "acc_norm": 0.4983922829581994,
            "acc_norm_stderr": 0.02839794490780661
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5022421524663677,
            "acc_stderr": 0.033557465352232634,
            "acc_norm": 0.5022421524663677,
            "acc_norm_stderr": 0.033557465352232634
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5190839694656488,
            "acc_stderr": 0.04382094705550988,
            "acc_norm": 0.5190839694656488,
            "acc_norm_stderr": 0.04382094705550988
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.48,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5050505050505051,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.5050505050505051,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4827586206896552,
            "acc_stderr": 0.04164188720169377,
            "acc_norm": 0.4827586206896552,
            "acc_norm_stderr": 0.04164188720169377
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.043364327079931785,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.043364327079931785
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5210084033613446,
            "acc_stderr": 0.032449808499900284,
            "acc_norm": 0.5210084033613446,
            "acc_norm_stderr": 0.032449808499900284
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.47692307692307695,
            "acc_stderr": 0.025323990861736128,
            "acc_norm": 0.47692307692307695,
            "acc_norm_stderr": 0.025323990861736128
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.04803752235190192,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.04803752235190192
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4039408866995074,
            "acc_stderr": 0.0345245390382204,
            "acc_norm": 0.4039408866995074,
            "acc_norm_stderr": 0.0345245390382204
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5129032258064516,
            "acc_stderr": 0.02843453315268187,
            "acc_norm": 0.5129032258064516,
            "acc_norm_stderr": 0.02843453315268187
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7735042735042735,
            "acc_stderr": 0.027421007295392923,
            "acc_norm": 0.7735042735042735,
            "acc_norm_stderr": 0.027421007295392923
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.49056603773584906,
            "acc_stderr": 0.030767394707808093,
            "acc_norm": 0.49056603773584906,
            "acc_norm_stderr": 0.030767394707808093
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.04769300568972744,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.04769300568972744
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34444444444444444,
            "acc_stderr": 0.028972648884844267,
            "acc_norm": 0.34444444444444444,
            "acc_norm_stderr": 0.028972648884844267
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.03710185726119995,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.03710185726119995
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.681592039800995,
            "acc_stderr": 0.03294118479054095,
            "acc_norm": 0.681592039800995,
            "acc_norm_stderr": 0.03294118479054095
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4393063583815029,
            "acc_stderr": 0.037842719328874674,
            "acc_norm": 0.4393063583815029,
            "acc_norm_stderr": 0.037842719328874674
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.36772486772486773,
            "acc_stderr": 0.02483383982556242,
            "acc_norm": 0.36772486772486773,
            "acc_norm_stderr": 0.02483383982556242
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4097222222222222,
            "acc_stderr": 0.04112490974670787,
            "acc_norm": 0.4097222222222222,
            "acc_norm_stderr": 0.04112490974670787
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.7,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5722543352601156,
            "acc_stderr": 0.02663653974111608,
            "acc_norm": 0.5722543352601156,
            "acc_norm_stderr": 0.02663653974111608
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4539877300613497,
            "acc_stderr": 0.0391170190467718,
            "acc_norm": 0.4539877300613497,
            "acc_norm_stderr": 0.0391170190467718
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.027744313443376536,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.027744313443376536
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5544041450777202,
            "acc_stderr": 0.03587014986075659,
            "acc_norm": 0.5544041450777202,
            "acc_norm_stderr": 0.03587014986075659
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.34210526315789475,
            "acc_stderr": 0.044629175353369376,
            "acc_norm": 0.34210526315789475,
            "acc_norm_stderr": 0.044629175353369376
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5761467889908257,
            "acc_stderr": 0.021187263209087523,
            "acc_norm": 0.5761467889908257,
            "acc_norm_stderr": 0.021187263209087523
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.38095238095238093,
            "acc_stderr": 0.043435254289490965,
            "acc_norm": 0.38095238095238093,
            "acc_norm_stderr": 0.043435254289490965
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5163398692810458,
            "acc_stderr": 0.028614624752805434,
            "acc_norm": 0.5163398692810458,
            "acc_norm_stderr": 0.028614624752805434
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.57,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6611570247933884,
            "acc_stderr": 0.04320767807536669,
            "acc_norm": 0.6611570247933884,
            "acc_norm_stderr": 0.04320767807536669
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5131578947368421,
            "acc_stderr": 0.04067533136309172,
            "acc_norm": 0.5131578947368421,
            "acc_norm_stderr": 0.04067533136309172
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.44281045751633985,
            "acc_stderr": 0.020095083154577347,
            "acc_norm": 0.44281045751633985,
            "acc_norm_stderr": 0.020095083154577347
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3404255319148936,
            "acc_stderr": 0.02826765748265015,
            "acc_norm": 0.3404255319148936,
            "acc_norm_stderr": 0.02826765748265015
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4107142857142857,
            "acc_stderr": 0.04669510663875193,
            "acc_norm": 0.4107142857142857,
            "acc_norm_stderr": 0.04669510663875193
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.36574074074074076,
            "acc_stderr": 0.03284738857647206,
            "acc_norm": 0.36574074074074076,
            "acc_norm_stderr": 0.03284738857647206
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2837988826815642,
            "acc_stderr": 0.015078358970751757,
            "acc_norm": 0.2837988826815642,
            "acc_norm_stderr": 0.015078358970751757
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.34191176470588236,
            "acc_stderr": 0.02881472242225418,
            "acc_norm": 0.34191176470588236,
            "acc_norm_stderr": 0.02881472242225418
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5714285714285714,
            "acc_stderr": 0.03168091161233882,
            "acc_norm": 0.5714285714285714,
            "acc_norm_stderr": 0.03168091161233882
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6582278481012658,
            "acc_stderr": 0.030874537537553617,
            "acc_norm": 0.6582278481012658,
            "acc_norm_stderr": 0.030874537537553617
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.36766623207301175,
            "acc_stderr": 0.012314845910071705,
            "acc_norm": 0.36766623207301175,
            "acc_norm_stderr": 0.012314845910071705
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5196078431372549,
            "acc_stderr": 0.03506612560524866,
            "acc_norm": 0.5196078431372549,
            "acc_norm_stderr": 0.03506612560524866
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6060606060606061,
            "acc_stderr": 0.038154943086889305,
            "acc_norm": 0.6060606060606061,
            "acc_norm_stderr": 0.038154943086889305
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29865361077111385,
            "mc1_stderr": 0.016021570613768545,
            "mc2": 0.46980069864120316,
            "mc2_stderr": 0.01587190224210581
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4911452184179457,
            "acc_stderr": 0.01718765819933674,
            "acc_norm": 0.512396694214876,
            "acc_norm_stderr": 0.017185069732676538
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Easy-Systems/easy-ko-Llama3-8b-Instruct-v1",
        "model_sha": "b557406b69518b2ffa38a9eed3963b57ae0294bf",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}