{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4462457337883959,
            "acc_stderr": 0.014526705548539983,
            "acc_norm": 0.507679180887372,
            "acc_norm_stderr": 0.014609667440892574
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4570802628958375,
            "acc_stderr": 0.004971364031062585,
            "acc_norm": 0.6213901613224457,
            "acc_norm_stderr": 0.004840493603166207
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6374269005847953,
            "acc_stderr": 0.0368713061556206,
            "acc_norm": 0.6374269005847953,
            "acc_norm_stderr": 0.0368713061556206
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6407766990291263,
            "acc_stderr": 0.04750458399041697,
            "acc_norm": 0.6407766990291263,
            "acc_norm_stderr": 0.04750458399041697
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6641123882503193,
            "acc_stderr": 0.01688940723517168,
            "acc_norm": 0.6641123882503193,
            "acc_norm_stderr": 0.01688940723517168
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4962962962962963,
            "acc_stderr": 0.043192236258113303,
            "acc_norm": 0.4962962962962963,
            "acc_norm_stderr": 0.043192236258113303
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768077,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768077
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.5106382978723404,
            "acc_stderr": 0.03267862331014063,
            "acc_norm": 0.5106382978723404,
            "acc_norm_stderr": 0.03267862331014063
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4939759036144578,
            "acc_stderr": 0.03892212195333047,
            "acc_norm": 0.4939759036144578,
            "acc_norm_stderr": 0.03892212195333047
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6270096463022508,
            "acc_stderr": 0.027466610213140105,
            "acc_norm": 0.6270096463022508,
            "acc_norm_stderr": 0.027466610213140105
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.6098654708520179,
            "acc_stderr": 0.03273766725459157,
            "acc_norm": 0.6098654708520179,
            "acc_norm_stderr": 0.03273766725459157
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6793893129770993,
            "acc_stderr": 0.04093329229834278,
            "acc_norm": 0.6793893129770993,
            "acc_norm_stderr": 0.04093329229834278
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7272727272727273,
            "acc_stderr": 0.03173071239071724,
            "acc_norm": 0.7272727272727273,
            "acc_norm_stderr": 0.03173071239071724
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5241379310344828,
            "acc_stderr": 0.041618085035015295,
            "acc_norm": 0.5241379310344828,
            "acc_norm_stderr": 0.041618085035015295
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.04389869956808778,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.04389869956808778
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6302521008403361,
            "acc_stderr": 0.03135709599613591,
            "acc_norm": 0.6302521008403361,
            "acc_norm_stderr": 0.03135709599613591
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5487179487179488,
            "acc_stderr": 0.02523038123893484,
            "acc_norm": 0.5487179487179488,
            "acc_norm_stderr": 0.02523038123893484
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.6,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6666666666666666,
            "acc_stderr": 0.04557239513497751,
            "acc_norm": 0.6666666666666666,
            "acc_norm_stderr": 0.04557239513497751
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4433497536945813,
            "acc_stderr": 0.03495334582162933,
            "acc_norm": 0.4433497536945813,
            "acc_norm_stderr": 0.03495334582162933
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.6,
            "acc_stderr": 0.02786932057166462,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.02786932057166462
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.8376068376068376,
            "acc_stderr": 0.024161618127987745,
            "acc_norm": 0.8376068376068376,
            "acc_norm_stderr": 0.024161618127987745
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5584905660377358,
            "acc_stderr": 0.030561590426731837,
            "acc_norm": 0.5584905660377358,
            "acc_norm_stderr": 0.030561590426731837
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5727272727272728,
            "acc_stderr": 0.047381987035454834,
            "acc_norm": 0.5727272727272728,
            "acc_norm_stderr": 0.047381987035454834
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.32592592592592595,
            "acc_stderr": 0.028578348365473072,
            "acc_norm": 0.32592592592592595,
            "acc_norm_stderr": 0.028578348365473072
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3443708609271523,
            "acc_stderr": 0.038796870240733264,
            "acc_norm": 0.3443708609271523,
            "acc_norm_stderr": 0.038796870240733264
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.7611940298507462,
            "acc_stderr": 0.03014777593540922,
            "acc_norm": 0.7611940298507462,
            "acc_norm_stderr": 0.03014777593540922
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.47398843930635837,
            "acc_stderr": 0.03807301726504513,
            "acc_norm": 0.47398843930635837,
            "acc_norm_stderr": 0.03807301726504513
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.02510742548113728,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.02510742548113728
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5416666666666666,
            "acc_stderr": 0.04166666666666665,
            "acc_norm": 0.5416666666666666,
            "acc_norm_stderr": 0.04166666666666665
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.74,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.74,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5953757225433526,
            "acc_stderr": 0.026424816594009852,
            "acc_norm": 0.5953757225433526,
            "acc_norm_stderr": 0.026424816594009852
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5030674846625767,
            "acc_stderr": 0.03928297078179663,
            "acc_norm": 0.5030674846625767,
            "acc_norm_stderr": 0.03928297078179663
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5925925925925926,
            "acc_stderr": 0.027339546640662734,
            "acc_norm": 0.5925925925925926,
            "acc_norm_stderr": 0.027339546640662734
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7357512953367875,
            "acc_stderr": 0.03182155050916646,
            "acc_norm": 0.7357512953367875,
            "acc_norm_stderr": 0.03182155050916646
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.4298245614035088,
            "acc_stderr": 0.04657047260594964,
            "acc_norm": 0.4298245614035088,
            "acc_norm_stderr": 0.04657047260594964
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.689908256880734,
            "acc_stderr": 0.019830849684439752,
            "acc_norm": 0.689908256880734,
            "acc_norm_stderr": 0.019830849684439752
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.04426266681379909,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.04426266681379909
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5915032679738562,
            "acc_stderr": 0.028146405993096358,
            "acc_norm": 0.5915032679738562,
            "acc_norm_stderr": 0.028146405993096358
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7355371900826446,
            "acc_stderr": 0.04026187527591206,
            "acc_norm": 0.7355371900826446,
            "acc_norm_stderr": 0.04026187527591206
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5394736842105263,
            "acc_stderr": 0.04056242252249034,
            "acc_norm": 0.5394736842105263,
            "acc_norm_stderr": 0.04056242252249034
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5196078431372549,
            "acc_stderr": 0.020212274976302964,
            "acc_norm": 0.5196078431372549,
            "acc_norm_stderr": 0.020212274976302964
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3900709219858156,
            "acc_stderr": 0.02909767559946393,
            "acc_norm": 0.3900709219858156,
            "acc_norm_stderr": 0.02909767559946393
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.41964285714285715,
            "acc_stderr": 0.04684099321077106,
            "acc_norm": 0.41964285714285715,
            "acc_norm_stderr": 0.04684099321077106
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5231481481481481,
            "acc_stderr": 0.034063153607115065,
            "acc_norm": 0.5231481481481481,
            "acc_norm_stderr": 0.034063153607115065
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2022346368715084,
            "acc_stderr": 0.013433729483320994,
            "acc_norm": 0.2022346368715084,
            "acc_norm_stderr": 0.013433729483320994
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.7,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5551470588235294,
            "acc_stderr": 0.03018753206032939,
            "acc_norm": 0.5551470588235294,
            "acc_norm_stderr": 0.03018753206032939
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6571428571428571,
            "acc_stderr": 0.03038726291954772,
            "acc_norm": 0.6571428571428571,
            "acc_norm_stderr": 0.03038726291954772
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7257383966244726,
            "acc_stderr": 0.029041333510598025,
            "acc_norm": 0.7257383966244726,
            "acc_norm_stderr": 0.029041333510598025
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.39308996088657105,
            "acc_stderr": 0.012474899613873955,
            "acc_norm": 0.39308996088657105,
            "acc_norm_stderr": 0.012474899613873955
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6519607843137255,
            "acc_stderr": 0.03343311240488418,
            "acc_norm": 0.6519607843137255,
            "acc_norm_stderr": 0.03343311240488418
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6121212121212121,
            "acc_stderr": 0.03804913653971009,
            "acc_norm": 0.6121212121212121,
            "acc_norm_stderr": 0.03804913653971009
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3047735618115055,
            "mc1_stderr": 0.016114124156882466,
            "mc2": 0.46094875069234287,
            "mc2_stderr": 0.015327523723455975
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5619834710743802,
            "acc_stderr": 0.01705775370216029,
            "acc_norm": 0.577331759149941,
            "acc_norm_stderr": 0.016983506079577607
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Changgil/K2S3-SOLAR-11b-v4.0",
        "model_sha": "f1a90b4594dfe14349be1db44ee887856f73a82c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}