{
"results": {
"mmlu": {
"acc,none": 0.25672981056829514,
"acc_stderr,none": 0.0036750819936103075,
"alias": "mmlu"
},
"mmlu_humanities": {
"acc,none": 0.24803400637619555,
"acc_stderr,none": 0.0062956919178925675,
"alias": " - humanities"
},
"mmlu_formal_logic": {
"alias": " - formal_logic",
"acc,none": 0.16666666666666666,
"acc_stderr,none": 0.03333333333333338
},
"mmlu_high_school_european_history": {
"alias": " - high_school_european_history",
"acc,none": 0.23636363636363636,
"acc_stderr,none": 0.03317505930009179
},
"mmlu_high_school_us_history": {
"alias": " - high_school_us_history",
"acc,none": 0.23529411764705882,
"acc_stderr,none": 0.02977177522814563
},
"mmlu_high_school_world_history": {
"alias": " - high_school_world_history",
"acc,none": 0.26582278481012656,
"acc_stderr,none": 0.028756799629658342
},
"mmlu_international_law": {
"alias": " - international_law",
"acc,none": 0.2727272727272727,
"acc_stderr,none": 0.04065578140908705
},
"mmlu_jurisprudence": {
"alias": " - jurisprudence",
"acc,none": 0.23148148148148148,
"acc_stderr,none": 0.04077494709252627
},
"mmlu_logical_fallacies": {
"alias": " - logical_fallacies",
"acc,none": 0.3128834355828221,
"acc_stderr,none": 0.03642914578292406
},
"mmlu_moral_disputes": {
"alias": " - moral_disputes",
"acc,none": 0.2398843930635838,
"acc_stderr,none": 0.022989592543123567
},
"mmlu_moral_scenarios": {
"alias": " - moral_scenarios",
"acc,none": 0.24134078212290502,
"acc_stderr,none": 0.014310999547961464
},
"mmlu_philosophy": {
"alias": " - philosophy",
"acc,none": 0.28938906752411575,
"acc_stderr,none": 0.025755865922632938
},
"mmlu_prehistory": {
"alias": " - prehistory",
"acc,none": 0.2654320987654321,
"acc_stderr,none": 0.024569223600460845
},
"mmlu_professional_law": {
"alias": " - professional_law",
"acc,none": 0.24315514993481094,
"acc_stderr,none": 0.010956556654417339
},
"mmlu_world_religions": {
"alias": " - world_religions",
"acc,none": 0.22807017543859648,
"acc_stderr,none": 0.032180937956023566
},
"mmlu_other": {
"acc,none": 0.27679433537174125,
"acc_stderr,none": 0.007995132205242535,
"alias": " - other"
},
"mmlu_business_ethics": {
"alias": " - business_ethics",
"acc,none": 0.18,
"acc_stderr,none": 0.038612291966536955
},
"mmlu_clinical_knowledge": {
"alias": " - clinical_knowledge",
"acc,none": 0.2679245283018868,
"acc_stderr,none": 0.027257260322494845
},
"mmlu_college_medicine": {
"alias": " - college_medicine",
"acc,none": 0.20809248554913296,
"acc_stderr,none": 0.030952890217749867
},
"mmlu_global_facts": {
"alias": " - global_facts",
"acc,none": 0.31,
"acc_stderr,none": 0.04648231987117316
},
"mmlu_human_aging": {
"alias": " - human_aging",
"acc,none": 0.3452914798206278,
"acc_stderr,none": 0.03191100192835795
},
"mmlu_management": {
"alias": " - management",
"acc,none": 0.1941747572815534,
"acc_stderr,none": 0.03916667762822584
},
"mmlu_marketing": {
"alias": " - marketing",
"acc,none": 0.2564102564102564,
"acc_stderr,none": 0.028605953702004243
},
"mmlu_medical_genetics": {
"alias": " - medical_genetics",
"acc,none": 0.25,
"acc_stderr,none": 0.04351941398892446
},
"mmlu_miscellaneous": {
"alias": " - miscellaneous",
"acc,none": 0.28735632183908044,
"acc_stderr,none": 0.0161824107306827
},
"mmlu_nutrition": {
"alias": " - nutrition",
"acc,none": 0.23529411764705882,
"acc_stderr,none": 0.024288619466046105
},
"mmlu_professional_accounting": {
"alias": " - professional_accounting",
"acc,none": 0.2553191489361702,
"acc_stderr,none": 0.026011992930902002
},
"mmlu_professional_medicine": {
"alias": " - professional_medicine",
"acc,none": 0.3786764705882353,
"acc_stderr,none": 0.02946513363977613
},
"mmlu_virology": {
"alias": " - virology",
"acc,none": 0.30120481927710846,
"acc_stderr,none": 0.0357160923005348
},
"mmlu_social_sciences": {
"acc,none": 0.24114397140071497,
"acc_stderr,none": 0.0077194336804775,
"alias": " - social sciences"
},
"mmlu_econometrics": {
"alias": " - econometrics",
"acc,none": 0.23684210526315788,
"acc_stderr,none": 0.03999423879281336
},
"mmlu_high_school_geography": {
"alias": " - high_school_geography",
"acc,none": 0.24242424242424243,
"acc_stderr,none": 0.030532892233932026
},
"mmlu_high_school_government_and_politics": {
"alias": " - high_school_government_and_politics",
"acc,none": 0.24870466321243523,
"acc_stderr,none": 0.031195840877700304
},
"mmlu_high_school_macroeconomics": {
"alias": " - high_school_macroeconomics",
"acc,none": 0.22564102564102564,
"acc_stderr,none": 0.02119363252514852
},
"mmlu_high_school_microeconomics": {
"alias": " - high_school_microeconomics",
"acc,none": 0.20588235294117646,
"acc_stderr,none": 0.026265024608275886
},
"mmlu_high_school_psychology": {
"alias": " - high_school_psychology",
"acc,none": 0.24036697247706423,
"acc_stderr,none": 0.01832060732096407
},
"mmlu_human_sexuality": {
"alias": " - human_sexuality",
"acc,none": 0.22137404580152673,
"acc_stderr,none": 0.0364129708131373
},
"mmlu_professional_psychology": {
"alias": " - professional_psychology",
"acc,none": 0.2565359477124183,
"acc_stderr,none": 0.017667841612379
},
"mmlu_public_relations": {
"alias": " - public_relations",
"acc,none": 0.23636363636363636,
"acc_stderr,none": 0.040693063197213775
},
"mmlu_security_studies": {
"alias": " - security_studies",
"acc,none": 0.24489795918367346,
"acc_stderr,none": 0.027529637440174917
},
"mmlu_sociology": {
"alias": " - sociology",
"acc,none": 0.23880597014925373,
"acc_stderr,none": 0.030147775935409217
},
"mmlu_us_foreign_policy": {
"alias": " - us_foreign_policy",
"acc,none": 0.31,
"acc_stderr,none": 0.04648231987117316
},
"mmlu_stem": {
"acc,none": 0.2651443070091976,
"acc_stderr,none": 0.007797770272060098,
"alias": " - stem"
},
"mmlu_abstract_algebra": {
"alias": " - abstract_algebra",
"acc,none": 0.26,
"acc_stderr,none": 0.04408440022768079
},
"mmlu_anatomy": {
"alias": " - anatomy",
"acc,none": 0.2740740740740741,
"acc_stderr,none": 0.03853254836552003
},
"mmlu_astronomy": {
"alias": " - astronomy",
"acc,none": 0.18421052631578946,
"acc_stderr,none": 0.031546980450822305
},
"mmlu_college_biology": {
"alias": " - college_biology",
"acc,none": 0.2152777777777778,
"acc_stderr,none": 0.034370793441061344
},
"mmlu_college_chemistry": {
"alias": " - college_chemistry",
"acc,none": 0.2,
"acc_stderr,none": 0.040201512610368445
},
"mmlu_college_computer_science": {
"alias": " - college_computer_science",
"acc,none": 0.25,
"acc_stderr,none": 0.04351941398892446
},
"mmlu_college_mathematics": {
"alias": " - college_mathematics",
"acc,none": 0.22,
"acc_stderr,none": 0.04163331998932269
},
"mmlu_college_physics": {
"alias": " - college_physics",
"acc,none": 0.19607843137254902,
"acc_stderr,none": 0.03950581861179963
},
"mmlu_computer_security": {
"alias": " - computer_security",
"acc,none": 0.24,
"acc_stderr,none": 0.04292346959909284
},
"mmlu_conceptual_physics": {
"alias": " - conceptual_physics",
"acc,none": 0.3021276595744681,
"acc_stderr,none": 0.030017554471880557
},
"mmlu_electrical_engineering": {
"alias": " - electrical_engineering",
"acc,none": 0.19310344827586207,
"acc_stderr,none": 0.03289445522127401
},
"mmlu_elementary_mathematics": {
"alias": " - elementary_mathematics",
"acc,none": 0.25925925925925924,
"acc_stderr,none": 0.02256989707491841
},
"mmlu_high_school_biology": {
"alias": " - high_school_biology",
"acc,none": 0.2903225806451613,
"acc_stderr,none": 0.025822106119415895
},
"mmlu_high_school_chemistry": {
"alias": " - high_school_chemistry",
"acc,none": 0.270935960591133,
"acc_stderr,none": 0.031270907132976984
},
"mmlu_high_school_computer_science": {
"alias": " - high_school_computer_science",
"acc,none": 0.25,
"acc_stderr,none": 0.04351941398892446
},
"mmlu_high_school_mathematics": {
"alias": " - high_school_mathematics",
"acc,none": 0.3,
"acc_stderr,none": 0.027940457136228412
},
"mmlu_high_school_physics": {
"alias": " - high_school_physics",
"acc,none": 0.2052980132450331,
"acc_stderr,none": 0.03297986648473835
},
"mmlu_high_school_statistics": {
"alias": " - high_school_statistics",
"acc,none": 0.46296296296296297,
"acc_stderr,none": 0.03400603625538271
},
"mmlu_machine_learning": {
"alias": " - machine_learning",
"acc,none": 0.21428571428571427,
"acc_stderr,none": 0.038946411200447915
}
},
"groups": {
"mmlu": {
"acc,none": 0.25672981056829514,
"acc_stderr,none": 0.0036750819936103075,
"alias": "mmlu"
},
"mmlu_humanities": {
"acc,none": 0.24803400637619555,
"acc_stderr,none": 0.0062956919178925675,
"alias": " - humanities"
},
"mmlu_other": {
"acc,none": 0.27679433537174125,
"acc_stderr,none": 0.007995132205242535,
"alias": " - other"
},
"mmlu_social_sciences": {
"acc,none": 0.24114397140071497,
"acc_stderr,none": 0.0077194336804775,
"alias": " - social sciences"
},
"mmlu_stem": {
"acc,none": 0.2651443070091976,
"acc_stderr,none": 0.007797770272060098,
"alias": " - stem"
}
},
"group_subtasks": {
"mmlu_humanities": [
"mmlu_international_law",
"mmlu_professional_law",
"mmlu_high_school_us_history",
"mmlu_logical_fallacies",
"mmlu_jurisprudence",
"mmlu_moral_disputes",
"mmlu_high_school_world_history",
"mmlu_prehistory",
"mmlu_moral_scenarios",
"mmlu_formal_logic",
"mmlu_high_school_european_history",
"mmlu_world_religions",
"mmlu_philosophy"
],
"mmlu_social_sciences": [
"mmlu_high_school_psychology",
"mmlu_high_school_government_and_politics",
"mmlu_human_sexuality",
"mmlu_public_relations",
"mmlu_security_studies",
"mmlu_professional_psychology",
"mmlu_high_school_geography",
"mmlu_high_school_macroeconomics",
"mmlu_sociology",
"mmlu_high_school_microeconomics",
"mmlu_us_foreign_policy",
"mmlu_econometrics"
],
"mmlu_other": [
"mmlu_business_ethics",
"mmlu_virology",
"mmlu_marketing",
"mmlu_global_facts",
"mmlu_college_medicine",
"mmlu_professional_accounting",
"mmlu_management",
"mmlu_clinical_knowledge",
"mmlu_medical_genetics",
"mmlu_miscellaneous",
"mmlu_human_aging",
"mmlu_professional_medicine",
"mmlu_nutrition"
],
"mmlu_stem": [
"mmlu_high_school_physics",
"mmlu_high_school_biology",
"mmlu_computer_security",
"mmlu_college_biology",
"mmlu_electrical_engineering",
"mmlu_high_school_computer_science",
"mmlu_conceptual_physics",
"mmlu_high_school_mathematics",
"mmlu_college_mathematics",
"mmlu_college_computer_science",
"mmlu_anatomy",
"mmlu_abstract_algebra",
"mmlu_elementary_mathematics",
"mmlu_college_physics",
"mmlu_astronomy",
"mmlu_college_chemistry",
"mmlu_machine_learning",
"mmlu_high_school_statistics",
"mmlu_high_school_chemistry"
],
"mmlu": [
"mmlu_stem",
"mmlu_other",
"mmlu_social_sciences",
"mmlu_humanities"
]
},
"configs": {
"mmlu_abstract_algebra": {
"task": "mmlu_abstract_algebra",
"task_alias": "abstract_algebra",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "abstract_algebra",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_anatomy": {
"task": "mmlu_anatomy",
"task_alias": "anatomy",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "anatomy",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about anatomy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_astronomy": {
"task": "mmlu_astronomy",
"task_alias": "astronomy",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "astronomy",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_business_ethics": {
"task": "mmlu_business_ethics",
"task_alias": "business_ethics",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "business_ethics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about business ethics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_clinical_knowledge": {
"task": "mmlu_clinical_knowledge",
"task_alias": "clinical_knowledge",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "clinical_knowledge",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_biology": {
"task": "mmlu_college_biology",
"task_alias": "college_biology",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_biology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college biology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_chemistry": {
"task": "mmlu_college_chemistry",
"task_alias": "college_chemistry",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_chemistry",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college chemistry.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_computer_science": {
"task": "mmlu_college_computer_science",
"task_alias": "college_computer_science",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_computer_science",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college computer science.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_mathematics": {
"task": "mmlu_college_mathematics",
"task_alias": "college_mathematics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_mathematics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college mathematics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_medicine": {
"task": "mmlu_college_medicine",
"task_alias": "college_medicine",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_medicine",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college medicine.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_physics": {
"task": "mmlu_college_physics",
"task_alias": "college_physics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_physics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college physics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_computer_security": {
"task": "mmlu_computer_security",
"task_alias": "computer_security",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "computer_security",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about computer security.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_conceptual_physics": {
"task": "mmlu_conceptual_physics",
"task_alias": "conceptual_physics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "conceptual_physics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_econometrics": {
"task": "mmlu_econometrics",
"task_alias": "econometrics",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "econometrics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about econometrics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_electrical_engineering": {
"task": "mmlu_electrical_engineering",
"task_alias": "electrical_engineering",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "electrical_engineering",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_elementary_mathematics": {
"task": "mmlu_elementary_mathematics",
"task_alias": "elementary_mathematics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "elementary_mathematics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_formal_logic": {
"task": "mmlu_formal_logic",
"task_alias": "formal_logic",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "formal_logic",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about formal logic.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_global_facts": {
"task": "mmlu_global_facts",
"task_alias": "global_facts",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "global_facts",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about global facts.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_biology": {
"task": "mmlu_high_school_biology",
"task_alias": "high_school_biology",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_biology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school biology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_chemistry": {
"task": "mmlu_high_school_chemistry",
"task_alias": "high_school_chemistry",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_chemistry",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_computer_science": {
"task": "mmlu_high_school_computer_science",
"task_alias": "high_school_computer_science",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_computer_science",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school computer science.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_european_history": {
"task": "mmlu_high_school_european_history",
"task_alias": "high_school_european_history",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_european_history",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school european history.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_geography": {
"task": "mmlu_high_school_geography",
"task_alias": "high_school_geography",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_geography",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school geography.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_government_and_politics": {
"task": "mmlu_high_school_government_and_politics",
"task_alias": "high_school_government_and_politics",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_government_and_politics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_macroeconomics": {
"task": "mmlu_high_school_macroeconomics",
"task_alias": "high_school_macroeconomics",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_macroeconomics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_mathematics": {
"task": "mmlu_high_school_mathematics",
"task_alias": "high_school_mathematics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_mathematics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_microeconomics": {
"task": "mmlu_high_school_microeconomics",
"task_alias": "high_school_microeconomics",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_microeconomics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_physics": {
"task": "mmlu_high_school_physics",
"task_alias": "high_school_physics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_physics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school physics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_psychology": {
"task": "mmlu_high_school_psychology",
"task_alias": "high_school_psychology",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_psychology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school psychology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_statistics": {
"task": "mmlu_high_school_statistics",
"task_alias": "high_school_statistics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_statistics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school statistics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_us_history": {
"task": "mmlu_high_school_us_history",
"task_alias": "high_school_us_history",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_us_history",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school us history.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_world_history": {
"task": "mmlu_high_school_world_history",
"task_alias": "high_school_world_history",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_world_history",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school world history.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_human_aging": {
"task": "mmlu_human_aging",
"task_alias": "human_aging",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "human_aging",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about human aging.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_human_sexuality": {
"task": "mmlu_human_sexuality",
"task_alias": "human_sexuality",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "human_sexuality",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about human sexuality.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_international_law": {
"task": "mmlu_international_law",
"task_alias": "international_law",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "international_law",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about international law.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_jurisprudence": {
"task": "mmlu_jurisprudence",
"task_alias": "jurisprudence",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "jurisprudence",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_logical_fallacies": {
"task": "mmlu_logical_fallacies",
"task_alias": "logical_fallacies",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "logical_fallacies",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_machine_learning": {
"task": "mmlu_machine_learning",
"task_alias": "machine_learning",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "machine_learning",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about machine learning.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_management": {
"task": "mmlu_management",
"task_alias": "management",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "management",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about management.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_marketing": {
"task": "mmlu_marketing",
"task_alias": "marketing",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "marketing",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about marketing.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_medical_genetics": {
"task": "mmlu_medical_genetics",
"task_alias": "medical_genetics",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "medical_genetics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about medical genetics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_miscellaneous": {
"task": "mmlu_miscellaneous",
"task_alias": "miscellaneous",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "miscellaneous",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_moral_disputes": {
"task": "mmlu_moral_disputes",
"task_alias": "moral_disputes",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "moral_disputes",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about moral disputes.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_moral_scenarios": {
"task": "mmlu_moral_scenarios",
"task_alias": "moral_scenarios",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "moral_scenarios",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_nutrition": {
"task": "mmlu_nutrition",
"task_alias": "nutrition",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "nutrition",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about nutrition.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_philosophy": {
"task": "mmlu_philosophy",
"task_alias": "philosophy",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "philosophy",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about philosophy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_prehistory": {
"task": "mmlu_prehistory",
"task_alias": "prehistory",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "prehistory",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about prehistory.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_professional_accounting": {
"task": "mmlu_professional_accounting",
"task_alias": "professional_accounting",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "professional_accounting",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about professional accounting.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_professional_law": {
"task": "mmlu_professional_law",
"task_alias": "professional_law",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "professional_law",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about professional law.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_professional_medicine": {
"task": "mmlu_professional_medicine",
"task_alias": "professional_medicine",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "professional_medicine",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about professional medicine.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_professional_psychology": {
"task": "mmlu_professional_psychology",
"task_alias": "professional_psychology",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "professional_psychology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about professional psychology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_public_relations": {
"task": "mmlu_public_relations",
"task_alias": "public_relations",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "public_relations",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about public relations.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_security_studies": {
"task": "mmlu_security_studies",
"task_alias": "security_studies",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "security_studies",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about security studies.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_sociology": {
"task": "mmlu_sociology",
"task_alias": "sociology",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "sociology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about sociology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_us_foreign_policy": {
"task": "mmlu_us_foreign_policy",
"task_alias": "us_foreign_policy",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "us_foreign_policy",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_virology": {
"task": "mmlu_virology",
"task_alias": "virology",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "virology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about virology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_world_religions": {
"task": "mmlu_world_religions",
"task_alias": "world_religions",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "world_religions",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about world religions.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 5,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
}
},
"versions": {
"mmlu": 2,
"mmlu_abstract_algebra": 1.0,
"mmlu_anatomy": 1.0,
"mmlu_astronomy": 1.0,
"mmlu_business_ethics": 1.0,
"mmlu_clinical_knowledge": 1.0,
"mmlu_college_biology": 1.0,
"mmlu_college_chemistry": 1.0,
"mmlu_college_computer_science": 1.0,
"mmlu_college_mathematics": 1.0,
"mmlu_college_medicine": 1.0,
"mmlu_college_physics": 1.0,
"mmlu_computer_security": 1.0,
"mmlu_conceptual_physics": 1.0,
"mmlu_econometrics": 1.0,
"mmlu_electrical_engineering": 1.0,
"mmlu_elementary_mathematics": 1.0,
"mmlu_formal_logic": 1.0,
"mmlu_global_facts": 1.0,
"mmlu_high_school_biology": 1.0,
"mmlu_high_school_chemistry": 1.0,
"mmlu_high_school_computer_science": 1.0,
"mmlu_high_school_european_history": 1.0,
"mmlu_high_school_geography": 1.0,
"mmlu_high_school_government_and_politics": 1.0,
"mmlu_high_school_macroeconomics": 1.0,
"mmlu_high_school_mathematics": 1.0,
"mmlu_high_school_microeconomics": 1.0,
"mmlu_high_school_physics": 1.0,
"mmlu_high_school_psychology": 1.0,
"mmlu_high_school_statistics": 1.0,
"mmlu_high_school_us_history": 1.0,
"mmlu_high_school_world_history": 1.0,
"mmlu_human_aging": 1.0,
"mmlu_human_sexuality": 1.0,
"mmlu_humanities": 2,
"mmlu_international_law": 1.0,
"mmlu_jurisprudence": 1.0,
"mmlu_logical_fallacies": 1.0,
"mmlu_machine_learning": 1.0,
"mmlu_management": 1.0,
"mmlu_marketing": 1.0,
"mmlu_medical_genetics": 1.0,
"mmlu_miscellaneous": 1.0,
"mmlu_moral_disputes": 1.0,
"mmlu_moral_scenarios": 1.0,
"mmlu_nutrition": 1.0,
"mmlu_other": 2,
"mmlu_philosophy": 1.0,
"mmlu_prehistory": 1.0,
"mmlu_professional_accounting": 1.0,
"mmlu_professional_law": 1.0,
"mmlu_professional_medicine": 1.0,
"mmlu_professional_psychology": 1.0,
"mmlu_public_relations": 1.0,
"mmlu_security_studies": 1.0,
"mmlu_social_sciences": 2,
"mmlu_sociology": 1.0,
"mmlu_stem": 2,
"mmlu_us_foreign_policy": 1.0,
"mmlu_virology": 1.0,
"mmlu_world_religions": 1.0
},
"n-shot": {
"mmlu_abstract_algebra": 5,
"mmlu_anatomy": 5,
"mmlu_astronomy": 5,
"mmlu_business_ethics": 5,
"mmlu_clinical_knowledge": 5,
"mmlu_college_biology": 5,
"mmlu_college_chemistry": 5,
"mmlu_college_computer_science": 5,
"mmlu_college_mathematics": 5,
"mmlu_college_medicine": 5,
"mmlu_college_physics": 5,
"mmlu_computer_security": 5,
"mmlu_conceptual_physics": 5,
"mmlu_econometrics": 5,
"mmlu_electrical_engineering": 5,
"mmlu_elementary_mathematics": 5,
"mmlu_formal_logic": 5,
"mmlu_global_facts": 5,
"mmlu_high_school_biology": 5,
"mmlu_high_school_chemistry": 5,
"mmlu_high_school_computer_science": 5,
"mmlu_high_school_european_history": 5,
"mmlu_high_school_geography": 5,
"mmlu_high_school_government_and_politics": 5,
"mmlu_high_school_macroeconomics": 5,
"mmlu_high_school_mathematics": 5,
"mmlu_high_school_microeconomics": 5,
"mmlu_high_school_physics": 5,
"mmlu_high_school_psychology": 5,
"mmlu_high_school_statistics": 5,
"mmlu_high_school_us_history": 5,
"mmlu_high_school_world_history": 5,
"mmlu_human_aging": 5,
"mmlu_human_sexuality": 5,
"mmlu_international_law": 5,
"mmlu_jurisprudence": 5,
"mmlu_logical_fallacies": 5,
"mmlu_machine_learning": 5,
"mmlu_management": 5,
"mmlu_marketing": 5,
"mmlu_medical_genetics": 5,
"mmlu_miscellaneous": 5,
"mmlu_moral_disputes": 5,
"mmlu_moral_scenarios": 5,
"mmlu_nutrition": 5,
"mmlu_philosophy": 5,
"mmlu_prehistory": 5,
"mmlu_professional_accounting": 5,
"mmlu_professional_law": 5,
"mmlu_professional_medicine": 5,
"mmlu_professional_psychology": 5,
"mmlu_public_relations": 5,
"mmlu_security_studies": 5,
"mmlu_sociology": 5,
"mmlu_us_foreign_policy": 5,
"mmlu_virology": 5,
"mmlu_world_religions": 5
},
"higher_is_better": {
"mmlu": {
"acc": true
},
"mmlu_abstract_algebra": {
"acc": true
},
"mmlu_anatomy": {
"acc": true
},
"mmlu_astronomy": {
"acc": true
},
"mmlu_business_ethics": {
"acc": true
},
"mmlu_clinical_knowledge": {
"acc": true
},
"mmlu_college_biology": {
"acc": true
},
"mmlu_college_chemistry": {
"acc": true
},
"mmlu_college_computer_science": {
"acc": true
},
"mmlu_college_mathematics": {
"acc": true
},
"mmlu_college_medicine": {
"acc": true
},
"mmlu_college_physics": {
"acc": true
},
"mmlu_computer_security": {
"acc": true
},
"mmlu_conceptual_physics": {
"acc": true
},
"mmlu_econometrics": {
"acc": true
},
"mmlu_electrical_engineering": {
"acc": true
},
"mmlu_elementary_mathematics": {
"acc": true
},
"mmlu_formal_logic": {
"acc": true
},
"mmlu_global_facts": {
"acc": true
},
"mmlu_high_school_biology": {
"acc": true
},
"mmlu_high_school_chemistry": {
"acc": true
},
"mmlu_high_school_computer_science": {
"acc": true
},
"mmlu_high_school_european_history": {
"acc": true
},
"mmlu_high_school_geography": {
"acc": true
},
"mmlu_high_school_government_and_politics": {
"acc": true
},
"mmlu_high_school_macroeconomics": {
"acc": true
},
"mmlu_high_school_mathematics": {
"acc": true
},
"mmlu_high_school_microeconomics": {
"acc": true
},
"mmlu_high_school_physics": {
"acc": true
},
"mmlu_high_school_psychology": {
"acc": true
},
"mmlu_high_school_statistics": {
"acc": true
},
"mmlu_high_school_us_history": {
"acc": true
},
"mmlu_high_school_world_history": {
"acc": true
},
"mmlu_human_aging": {
"acc": true
},
"mmlu_human_sexuality": {
"acc": true
},
"mmlu_humanities": {
"acc": true
},
"mmlu_international_law": {
"acc": true
},
"mmlu_jurisprudence": {
"acc": true
},
"mmlu_logical_fallacies": {
"acc": true
},
"mmlu_machine_learning": {
"acc": true
},
"mmlu_management": {
"acc": true
},
"mmlu_marketing": {
"acc": true
},
"mmlu_medical_genetics": {
"acc": true
},
"mmlu_miscellaneous": {
"acc": true
},
"mmlu_moral_disputes": {
"acc": true
},
"mmlu_moral_scenarios": {
"acc": true
},
"mmlu_nutrition": {
"acc": true
},
"mmlu_other": {
"acc": true
},
"mmlu_philosophy": {
"acc": true
},
"mmlu_prehistory": {
"acc": true
},
"mmlu_professional_accounting": {
"acc": true
},
"mmlu_professional_law": {
"acc": true
},
"mmlu_professional_medicine": {
"acc": true
},
"mmlu_professional_psychology": {
"acc": true
},
"mmlu_public_relations": {
"acc": true
},
"mmlu_security_studies": {
"acc": true
},
"mmlu_social_sciences": {
"acc": true
},
"mmlu_sociology": {
"acc": true
},
"mmlu_stem": {
"acc": true
},
"mmlu_us_foreign_policy": {
"acc": true
},
"mmlu_virology": {
"acc": true
},
"mmlu_world_religions": {
"acc": true
}
},
"n-samples": {
"mmlu_high_school_physics": {
"original": 151,
"effective": 151
},
"mmlu_high_school_biology": {
"original": 310,
"effective": 310
},
"mmlu_computer_security": {
"original": 100,
"effective": 100
},
"mmlu_college_biology": {
"original": 144,
"effective": 144
},
"mmlu_electrical_engineering": {
"original": 145,
"effective": 145
},
"mmlu_high_school_computer_science": {
"original": 100,
"effective": 100
},
"mmlu_conceptual_physics": {
"original": 235,
"effective": 235
},
"mmlu_high_school_mathematics": {
"original": 270,
"effective": 270
},
"mmlu_college_mathematics": {
"original": 100,
"effective": 100
},
"mmlu_college_computer_science": {
"original": 100,
"effective": 100
},
"mmlu_anatomy": {
"original": 135,
"effective": 135
},
"mmlu_abstract_algebra": {
"original": 100,
"effective": 100
},
"mmlu_elementary_mathematics": {
"original": 378,
"effective": 378
},
"mmlu_college_physics": {
"original": 102,
"effective": 102
},
"mmlu_astronomy": {
"original": 152,
"effective": 152
},
"mmlu_college_chemistry": {
"original": 100,
"effective": 100
},
"mmlu_machine_learning": {
"original": 112,
"effective": 112
},
"mmlu_high_school_statistics": {
"original": 216,
"effective": 216
},
"mmlu_high_school_chemistry": {
"original": 203,
"effective": 203
},
"mmlu_business_ethics": {
"original": 100,
"effective": 100
},
"mmlu_virology": {
"original": 166,
"effective": 166
},
"mmlu_marketing": {
"original": 234,
"effective": 234
},
"mmlu_global_facts": {
"original": 100,
"effective": 100
},
"mmlu_college_medicine": {
"original": 173,
"effective": 173
},
"mmlu_professional_accounting": {
"original": 282,
"effective": 282
},
"mmlu_management": {
"original": 103,
"effective": 103
},
"mmlu_clinical_knowledge": {
"original": 265,
"effective": 265
},
"mmlu_medical_genetics": {
"original": 100,
"effective": 100
},
"mmlu_miscellaneous": {
"original": 783,
"effective": 783
},
"mmlu_human_aging": {
"original": 223,
"effective": 223
},
"mmlu_professional_medicine": {
"original": 272,
"effective": 272
},
"mmlu_nutrition": {
"original": 306,
"effective": 306
},
"mmlu_high_school_psychology": {
"original": 545,
"effective": 545
},
"mmlu_high_school_government_and_politics": {
"original": 193,
"effective": 193
},
"mmlu_human_sexuality": {
"original": 131,
"effective": 131
},
"mmlu_public_relations": {
"original": 110,
"effective": 110
},
"mmlu_security_studies": {
"original": 245,
"effective": 245
},
"mmlu_professional_psychology": {
"original": 612,
"effective": 612
},
"mmlu_high_school_geography": {
"original": 198,
"effective": 198
},
"mmlu_high_school_macroeconomics": {
"original": 390,
"effective": 390
},
"mmlu_sociology": {
"original": 201,
"effective": 201
},
"mmlu_high_school_microeconomics": {
"original": 238,
"effective": 238
},
"mmlu_us_foreign_policy": {
"original": 100,
"effective": 100
},
"mmlu_econometrics": {
"original": 114,
"effective": 114
},
"mmlu_international_law": {
"original": 121,
"effective": 121
},
"mmlu_professional_law": {
"original": 1534,
"effective": 1534
},
"mmlu_high_school_us_history": {
"original": 204,
"effective": 204
},
"mmlu_logical_fallacies": {
"original": 163,
"effective": 163
},
"mmlu_jurisprudence": {
"original": 108,
"effective": 108
},
"mmlu_moral_disputes": {
"original": 346,
"effective": 346
},
"mmlu_high_school_world_history": {
"original": 237,
"effective": 237
},
"mmlu_prehistory": {
"original": 324,
"effective": 324
},
"mmlu_moral_scenarios": {
"original": 895,
"effective": 895
},
"mmlu_formal_logic": {
"original": 126,
"effective": 126
},
"mmlu_high_school_european_history": {
"original": 165,
"effective": 165
},
"mmlu_world_religions": {
"original": 171,
"effective": 171
},
"mmlu_philosophy": {
"original": 311,
"effective": 311
}
},
"config": {
"model": "sparseml",
"model_args": "pretrained=/nm/drive0/shashata/quantized_models/SmolLM-360M-Instruct-quantized.w4a16,dtype=bfloat16,max_legth=2048,add_bos_token=True,parallelize=True",
"model_num_parameters": 371651520,
"model_dtype": "torch.bfloat16",
"model_revision": "main",
"model_sha": "",
"batch_size": "32",
"batch_sizes": [],
"device": null,
"use_cache": null,
"limit": null,
"bootstrap_iters": 100000,
"gen_kwargs": null,
"random_seed": 0,
"numpy_seed": 1234,
"torch_seed": 1234,
"fewshot_seed": 1234
},
"git_hash": "4e55a1dd",
"date": 1724299538.4517457,
"pretty_env_info": "PyTorch version: 2.4.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.3 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: version 3.29.3\nLibc version: glibc-2.35\n\nPython version: 3.11.9 | packaged by conda-forge | (main, Apr 19 2024, 18:36:13) [GCC 12.3.0] (64-bit runtime)\nPython platform: Linux-5.15.0-91-generic-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.3.103\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100-SXM4-80GB\nGPU 1: NVIDIA A100-SXM4-80GB\nGPU 2: NVIDIA A100-SXM4-80GB\nGPU 3: NVIDIA A100-SXM4-80GB\nGPU 4: NVIDIA A100-SXM4-80GB\nGPU 5: NVIDIA A100-SXM4-80GB\nGPU 6: NVIDIA A100-SXM4-80GB\nGPU 7: NVIDIA A100-SXM4-80GB\n\nNvidia driver version: 545.23.08\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 256\nOn-line CPU(s) list: 0-255\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7763 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 2\nCore(s) per socket: 64\nSocket(s): 2\nStepping: 1\nFrequency boost: enabled\nCPU max MHz: 3529.0520\nCPU min MHz: 1500.0000\nBogoMIPS: 4900.20\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 invpcid_single hw_pstate ssbd mba ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid cqm rdt_a rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold v_vmsave_vmload vgif v_spec_ctrl umip pku ospke vaes vpclmulqdq rdpid overflow_recov succor smca fsrm\nVirtualization: AMD-V\nL1d cache: 4 MiB (128 instances)\nL1i cache: 4 MiB (128 instances)\nL2 cache: 64 MiB (128 instances)\nL3 cache: 512 MiB (16 instances)\nNUMA node(s): 2\nNUMA node0 CPU(s): 0-63,128-191\nNUMA node1 CPU(s): 64-127,192-255\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec rstack overflow: Mitigation; safe RET\nVulnerability Spec store bypass: Mitigation; Speculative Store Bypass disabled via prctl and seccomp\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines, IBPB conditional, IBRS_FW, STIBP always-on, RSB filling, PBRSB-eIBRS Not affected\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] onnx==1.14.1\n[pip3] onnxruntime==1.18.1\n[pip3] 
torch==2.4.0\n[pip3] triton==3.0.0\n[conda] Could not collect",
"transformers_version": "4.43.4",
"upper_git_hash": null,
"tokenizer_pad_token": [
"<|im_end|>",
"2"
],
"tokenizer_eos_token": [
"<|im_end|>",
"2"
],
"tokenizer_bos_token": [
"<|im_start|>",
"1"
],
"eot_token_id": 2,
"max_length": 2048,
"task_hashes": {},
"model_source": "sparseml",
"model_name": "/nm/drive0/shashata/quantized_models/SmolLM-360M-Instruct-quantized.w4a16",
"model_name_sanitized": "__nm__drive0__shashata__quantized_models__SmolLM-360M-Instruct-quantized.w4a16",
"system_instruction": null,
"system_instruction_sha": null,
"fewshot_as_multiturn": false,
"chat_template": null,
"chat_template_sha": null,
"start_time": 1870050.307212856,
"end_time": 1870733.131912883,
"total_evaluation_time_seconds": "682.8247000270057"
}