diff --git "a/evaluation/en/mmlu_0_shot.json" "b/evaluation/en/mmlu_0_shot.json" deleted file mode 100644--- "a/evaluation/en/mmlu_0_shot.json" +++ /dev/null @@ -1,3289 +0,0 @@ -{ - "results": { - "mmlu": { - "acc,none": 0.5959977211223473, - "acc_stderr,none": 0.0038660270268163492, - "alias": "mmlu" - }, - "mmlu_humanities": { - "acc,none": 0.5243358129649309, - "acc_stderr,none": 0.006614545142497863, - "alias": " - humanities" - }, - "mmlu_formal_logic": { - "alias": " - formal_logic", - "acc,none": 0.4444444444444444, - "acc_stderr,none": 0.04444444444444449 - }, - "mmlu_high_school_european_history": { - "alias": " - high_school_european_history", - "acc,none": 0.7878787878787878, - "acc_stderr,none": 0.031922715695482995 - }, - "mmlu_high_school_us_history": { - "alias": " - high_school_us_history", - "acc,none": 0.7990196078431373, - "acc_stderr,none": 0.028125972265654362 - }, - "mmlu_high_school_world_history": { - "alias": " - high_school_world_history", - "acc,none": 0.8354430379746836, - "acc_stderr,none": 0.02413573624056692 - }, - "mmlu_international_law": { - "alias": " - international_law", - "acc,none": 0.8016528925619835, - "acc_stderr,none": 0.03640118271990947 - }, - "mmlu_jurisprudence": { - "alias": " - jurisprudence", - "acc,none": 0.6666666666666666, - "acc_stderr,none": 0.04557239513497752 - }, - "mmlu_logical_fallacies": { - "alias": " - logical_fallacies", - "acc,none": 0.7852760736196319, - "acc_stderr,none": 0.032262193772867744 - }, - "mmlu_moral_disputes": { - "alias": " - moral_disputes", - "acc,none": 0.6358381502890174, - "acc_stderr,none": 0.025906632631016124 - }, - "mmlu_moral_scenarios": { - "alias": " - moral_scenarios", - "acc,none": 0.2011173184357542, - "acc_stderr,none": 0.013405946402609054 - }, - "mmlu_philosophy": { - "alias": " - philosophy", - "acc,none": 0.6109324758842444, - "acc_stderr,none": 0.027690337536485376 - }, - "mmlu_prehistory": { - "alias": " - prehistory", - "acc,none": 0.6666666666666666, - "acc_stderr,none": 0.026229649178821163 - }, - "mmlu_professional_law": { - "alias": " - professional_law", - "acc,none": 0.439374185136897, - "acc_stderr,none": 0.012676014778580219 - }, - "mmlu_world_religions": { - "alias": " - world_religions", - "acc,none": 0.8362573099415205, - "acc_stderr,none": 0.028380919596145866 - }, - "mmlu_other": { - "acc,none": 0.6829739298358545, - "acc_stderr,none": 0.008015460837332886, - "alias": " - other" - }, - "mmlu_business_ethics": { - "alias": " - business_ethics", - "acc,none": 0.67, - "acc_stderr,none": 0.04725815626252607 - }, - "mmlu_clinical_knowledge": { - "alias": " - clinical_knowledge", - "acc,none": 0.6528301886792452, - "acc_stderr,none": 0.029300101705549645 - }, - "mmlu_college_medicine": { - "alias": " - college_medicine", - "acc,none": 0.5780346820809249, - "acc_stderr,none": 0.0376574669386515 - }, - "mmlu_global_facts": { - "alias": " - global_facts", - "acc,none": 0.43, - "acc_stderr,none": 0.049756985195624284 - }, - "mmlu_human_aging": { - "alias": " - human_aging", - "acc,none": 0.6636771300448431, - "acc_stderr,none": 0.031708824268455 - }, - "mmlu_management": { - "alias": " - management", - "acc,none": 0.8058252427184466, - "acc_stderr,none": 0.03916667762822583 - }, - "mmlu_marketing": { - "alias": " - marketing", - "acc,none": 0.8632478632478633, - "acc_stderr,none": 0.022509033937077805 - }, - "mmlu_medical_genetics": { - "alias": " - medical_genetics", - "acc,none": 0.75, - "acc_stderr,none": 0.04351941398892446 - }, - "mmlu_miscellaneous": { - "alias": " - 
miscellaneous", - "acc,none": 0.8212005108556832, - "acc_stderr,none": 0.013702643715368976 - }, - "mmlu_nutrition": { - "alias": " - nutrition", - "acc,none": 0.6764705882352942, - "acc_stderr,none": 0.026787453111906494 - }, - "mmlu_professional_accounting": { - "alias": " - professional_accounting", - "acc,none": 0.450354609929078, - "acc_stderr,none": 0.029680105565029036 - }, - "mmlu_professional_medicine": { - "alias": " - professional_medicine", - "acc,none": 0.6323529411764706, - "acc_stderr,none": 0.029289413409403196 - }, - "mmlu_virology": { - "alias": " - virology", - "acc,none": 0.4939759036144578, - "acc_stderr,none": 0.03892212195333047 - }, - "mmlu_social_sciences": { - "acc,none": 0.6932076698082548, - "acc_stderr,none": 0.008165633016061928, - "alias": " - social sciences" - }, - "mmlu_econometrics": { - "alias": " - econometrics", - "acc,none": 0.45614035087719296, - "acc_stderr,none": 0.046854730419077895 - }, - "mmlu_high_school_geography": { - "alias": " - high_school_geography", - "acc,none": 0.7777777777777778, - "acc_stderr,none": 0.029620227874790458 - }, - "mmlu_high_school_government_and_politics": { - "alias": " - high_school_government_and_politics", - "acc,none": 0.8290155440414507, - "acc_stderr,none": 0.027171213683164542 - }, - "mmlu_high_school_macroeconomics": { - "alias": " - high_school_macroeconomics", - "acc,none": 0.6230769230769231, - "acc_stderr,none": 0.024570975364225995 - }, - "mmlu_high_school_microeconomics": { - "alias": " - high_school_microeconomics", - "acc,none": 0.6428571428571429, - "acc_stderr,none": 0.031124619309328177 - }, - "mmlu_high_school_psychology": { - "alias": " - high_school_psychology", - "acc,none": 0.8, - "acc_stderr,none": 0.017149858514250934 - }, - "mmlu_human_sexuality": { - "alias": " - human_sexuality", - "acc,none": 0.6870229007633588, - "acc_stderr,none": 0.04066962905677697 - }, - "mmlu_professional_psychology": { - "alias": " - professional_psychology", - "acc,none": 0.6143790849673203, - "acc_stderr,none": 0.019691459052354025 - }, - "mmlu_public_relations": { - "alias": " - public_relations", - "acc,none": 0.6636363636363637, - "acc_stderr,none": 0.04525393596302505 - }, - "mmlu_security_studies": { - "alias": " - security_studies", - "acc,none": 0.6857142857142857, - "acc_stderr,none": 0.029719329422417468 - }, - "mmlu_sociology": { - "alias": " - sociology", - "acc,none": 0.736318407960199, - "acc_stderr,none": 0.031157150869355558 - }, - "mmlu_us_foreign_policy": { - "alias": " - us_foreign_policy", - "acc,none": 0.8, - "acc_stderr,none": 0.040201512610368445 - }, - "mmlu_stem": { - "acc,none": 0.5223596574690771, - "acc_stderr,none": 0.00855240247531941, - "alias": " - stem" - }, - "mmlu_abstract_algebra": { - "alias": " - abstract_algebra", - "acc,none": 0.27, - "acc_stderr,none": 0.044619604333847394 - }, - "mmlu_anatomy": { - "alias": " - anatomy", - "acc,none": 0.5259259259259259, - "acc_stderr,none": 0.04313531696750575 - }, - "mmlu_astronomy": { - "alias": " - astronomy", - "acc,none": 0.7039473684210527, - "acc_stderr,none": 0.037150621549989056 - }, - "mmlu_college_biology": { - "alias": " - college_biology", - "acc,none": 0.7361111111111112, - "acc_stderr,none": 0.03685651095897532 - }, - "mmlu_college_chemistry": { - "alias": " - college_chemistry", - "acc,none": 0.45, - "acc_stderr,none": 0.049999999999999996 - }, - "mmlu_college_computer_science": { - "alias": " - college_computer_science", - "acc,none": 0.5, - "acc_stderr,none": 0.050251890762960605 - }, - "mmlu_college_mathematics": { - 
"alias": " - college_mathematics", - "acc,none": 0.33, - "acc_stderr,none": 0.047258156262526045 - }, - "mmlu_college_physics": { - "alias": " - college_physics", - "acc,none": 0.35294117647058826, - "acc_stderr,none": 0.04755129616062948 - }, - "mmlu_computer_security": { - "alias": " - computer_security", - "acc,none": 0.78, - "acc_stderr,none": 0.041633319989322605 - }, - "mmlu_conceptual_physics": { - "alias": " - conceptual_physics", - "acc,none": 0.5829787234042553, - "acc_stderr,none": 0.03223276266711712 - }, - "mmlu_electrical_engineering": { - "alias": " - electrical_engineering", - "acc,none": 0.5379310344827586, - "acc_stderr,none": 0.041546596717075474 - }, - "mmlu_elementary_mathematics": { - "alias": " - elementary_mathematics", - "acc,none": 0.5396825396825397, - "acc_stderr,none": 0.02567008063690932 - }, - "mmlu_high_school_biology": { - "alias": " - high_school_biology", - "acc,none": 0.7193548387096774, - "acc_stderr,none": 0.02556060472102288 - }, - "mmlu_high_school_chemistry": { - "alias": " - high_school_chemistry", - "acc,none": 0.4876847290640394, - "acc_stderr,none": 0.035169204442208966 - }, - "mmlu_high_school_computer_science": { - "alias": " - high_school_computer_science", - "acc,none": 0.59, - "acc_stderr,none": 0.049431107042371025 - }, - "mmlu_high_school_mathematics": { - "alias": " - high_school_mathematics", - "acc,none": 0.32592592592592595, - "acc_stderr,none": 0.02857834836547308 - }, - "mmlu_high_school_physics": { - "alias": " - high_school_physics", - "acc,none": 0.31788079470198677, - "acc_stderr,none": 0.03802039760107903 - }, - "mmlu_high_school_statistics": { - "alias": " - high_school_statistics", - "acc,none": 0.5231481481481481, - "acc_stderr,none": 0.03406315360711507 - }, - "mmlu_machine_learning": { - "alias": " - machine_learning", - "acc,none": 0.4017857142857143, - "acc_stderr,none": 0.04653333146973647 - } - }, - "groups": { - "mmlu": { - "acc,none": 0.5959977211223473, - "acc_stderr,none": 0.0038660270268163492, - "alias": "mmlu" - }, - "mmlu_humanities": { - "acc,none": 0.5243358129649309, - "acc_stderr,none": 0.006614545142497863, - "alias": " - humanities" - }, - "mmlu_other": { - "acc,none": 0.6829739298358545, - "acc_stderr,none": 0.008015460837332886, - "alias": " - other" - }, - "mmlu_social_sciences": { - "acc,none": 0.6932076698082548, - "acc_stderr,none": 0.008165633016061928, - "alias": " - social sciences" - }, - "mmlu_stem": { - "acc,none": 0.5223596574690771, - "acc_stderr,none": 0.00855240247531941, - "alias": " - stem" - } - }, - "group_subtasks": { - "mmlu_humanities": [ - "mmlu_moral_disputes", - "mmlu_international_law", - "mmlu_professional_law", - "mmlu_high_school_european_history", - "mmlu_world_religions", - "mmlu_logical_fallacies", - "mmlu_formal_logic", - "mmlu_high_school_world_history", - "mmlu_philosophy", - "mmlu_jurisprudence", - "mmlu_moral_scenarios", - "mmlu_high_school_us_history", - "mmlu_prehistory" - ], - "mmlu_social_sciences": [ - "mmlu_us_foreign_policy", - "mmlu_high_school_geography", - "mmlu_public_relations", - "mmlu_high_school_microeconomics", - "mmlu_high_school_psychology", - "mmlu_high_school_government_and_politics", - "mmlu_high_school_macroeconomics", - "mmlu_human_sexuality", - "mmlu_professional_psychology", - "mmlu_econometrics", - "mmlu_security_studies", - "mmlu_sociology" - ], - "mmlu_other": [ - "mmlu_virology", - "mmlu_medical_genetics", - "mmlu_professional_medicine", - "mmlu_professional_accounting", - "mmlu_global_facts", - "mmlu_nutrition", - "mmlu_business_ethics", 
- "mmlu_miscellaneous", - "mmlu_marketing", - "mmlu_human_aging", - "mmlu_college_medicine", - "mmlu_management", - "mmlu_clinical_knowledge" - ], - "mmlu_stem": [ - "mmlu_high_school_mathematics", - "mmlu_college_physics", - "mmlu_college_mathematics", - "mmlu_college_computer_science", - "mmlu_high_school_biology", - "mmlu_astronomy", - "mmlu_anatomy", - "mmlu_elementary_mathematics", - "mmlu_high_school_computer_science", - "mmlu_college_chemistry", - "mmlu_abstract_algebra", - "mmlu_conceptual_physics", - "mmlu_high_school_physics", - "mmlu_college_biology", - "mmlu_machine_learning", - "mmlu_electrical_engineering", - "mmlu_computer_security", - "mmlu_high_school_statistics", - "mmlu_high_school_chemistry" - ], - "mmlu": [ - "mmlu_stem", - "mmlu_other", - "mmlu_social_sciences", - "mmlu_humanities" - ] - }, - "configs": { - "mmlu_abstract_algebra": { - "task": "mmlu_abstract_algebra", - "task_alias": "abstract_algebra", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "abstract_algebra", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_anatomy": { - "task": "mmlu_anatomy", - "task_alias": "anatomy", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "anatomy", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about anatomy.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_astronomy": { - "task": "mmlu_astronomy", - "task_alias": "astronomy", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "astronomy", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about astronomy.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_business_ethics": { - "task": "mmlu_business_ethics", - "task_alias": "business_ethics", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "business_ethics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about business ethics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_clinical_knowledge": { - "task": "mmlu_clinical_knowledge", - "task_alias": "clinical_knowledge", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "clinical_knowledge", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_college_biology": { - "task": "mmlu_college_biology", - "task_alias": "college_biology", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "college_biology", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about college biology.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_college_chemistry": { - "task": "mmlu_college_chemistry", - "task_alias": "college_chemistry", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "college_chemistry", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about college chemistry.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_college_computer_science": { - "task": "mmlu_college_computer_science", - "task_alias": "college_computer_science", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "college_computer_science", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about college computer science.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_college_mathematics": { - "task": "mmlu_college_mathematics", - "task_alias": "college_mathematics", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "college_mathematics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about college mathematics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_college_medicine": { - "task": "mmlu_college_medicine", - "task_alias": "college_medicine", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "college_medicine", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about college medicine.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_college_physics": { - "task": "mmlu_college_physics", - "task_alias": "college_physics", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "college_physics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about college physics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_computer_security": { - "task": "mmlu_computer_security", - "task_alias": "computer_security", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "computer_security", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about computer security.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_conceptual_physics": { - "task": "mmlu_conceptual_physics", - "task_alias": "conceptual_physics", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "conceptual_physics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_econometrics": { - "task": "mmlu_econometrics", - "task_alias": "econometrics", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "econometrics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about econometrics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_electrical_engineering": { - "task": "mmlu_electrical_engineering", - "task_alias": "electrical_engineering", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "electrical_engineering", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_elementary_mathematics": { - "task": "mmlu_elementary_mathematics", - "task_alias": "elementary_mathematics", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "elementary_mathematics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_formal_logic": { - "task": "mmlu_formal_logic", - "task_alias": "formal_logic", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "formal_logic", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about formal logic.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_global_facts": { - "task": "mmlu_global_facts", - "task_alias": "global_facts", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "global_facts", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about global facts.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_biology": { - "task": "mmlu_high_school_biology", - "task_alias": "high_school_biology", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_biology", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school biology.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_chemistry": { - "task": "mmlu_high_school_chemistry", - "task_alias": "high_school_chemistry", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_chemistry", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_computer_science": { - "task": "mmlu_high_school_computer_science", - "task_alias": "high_school_computer_science", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_computer_science", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school computer science.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_european_history": { - "task": "mmlu_high_school_european_history", - "task_alias": "high_school_european_history", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_european_history", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school european history.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_geography": { - "task": "mmlu_high_school_geography", - "task_alias": "high_school_geography", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_geography", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school geography.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_government_and_politics": { - "task": "mmlu_high_school_government_and_politics", - "task_alias": "high_school_government_and_politics", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_government_and_politics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_macroeconomics": { - "task": "mmlu_high_school_macroeconomics", - "task_alias": "high_school_macroeconomics", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_macroeconomics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_mathematics": { - "task": "mmlu_high_school_mathematics", - "task_alias": "high_school_mathematics", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_mathematics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_microeconomics": { - "task": "mmlu_high_school_microeconomics", - "task_alias": "high_school_microeconomics", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_microeconomics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_physics": { - "task": "mmlu_high_school_physics", - "task_alias": "high_school_physics", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_physics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school physics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_psychology": { - "task": "mmlu_high_school_psychology", - "task_alias": "high_school_psychology", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_psychology", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school psychology.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_statistics": { - "task": "mmlu_high_school_statistics", - "task_alias": "high_school_statistics", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_statistics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school statistics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_us_history": { - "task": "mmlu_high_school_us_history", - "task_alias": "high_school_us_history", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_us_history", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school us history.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_high_school_world_history": { - "task": "mmlu_high_school_world_history", - "task_alias": "high_school_world_history", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "high_school_world_history", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about high school world history.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_human_aging": { - "task": "mmlu_human_aging", - "task_alias": "human_aging", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "human_aging", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about human aging.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_human_sexuality": { - "task": "mmlu_human_sexuality", - "task_alias": "human_sexuality", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "human_sexuality", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about human sexuality.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_international_law": { - "task": "mmlu_international_law", - "task_alias": "international_law", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "international_law", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about international law.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_jurisprudence": { - "task": "mmlu_jurisprudence", - "task_alias": "jurisprudence", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "jurisprudence", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_logical_fallacies": { - "task": "mmlu_logical_fallacies", - "task_alias": "logical_fallacies", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "logical_fallacies", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_machine_learning": { - "task": "mmlu_machine_learning", - "task_alias": "machine_learning", - "tag": "mmlu_stem_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "machine_learning", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about machine learning.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_management": { - "task": "mmlu_management", - "task_alias": "management", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "management", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about management.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_marketing": { - "task": "mmlu_marketing", - "task_alias": "marketing", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "marketing", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about marketing.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_medical_genetics": { - "task": "mmlu_medical_genetics", - "task_alias": "medical_genetics", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "medical_genetics", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about medical genetics.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_miscellaneous": { - "task": "mmlu_miscellaneous", - "task_alias": "miscellaneous", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "miscellaneous", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_moral_disputes": { - "task": "mmlu_moral_disputes", - "task_alias": "moral_disputes", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "moral_disputes", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about moral disputes.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_moral_scenarios": { - "task": "mmlu_moral_scenarios", - "task_alias": "moral_scenarios", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "moral_scenarios", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_nutrition": { - "task": "mmlu_nutrition", - "task_alias": "nutrition", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "nutrition", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about nutrition.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_philosophy": { - "task": "mmlu_philosophy", - "task_alias": "philosophy", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "philosophy", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about philosophy.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_prehistory": { - "task": "mmlu_prehistory", - "task_alias": "prehistory", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "prehistory", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about prehistory.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_professional_accounting": { - "task": "mmlu_professional_accounting", - "task_alias": "professional_accounting", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "professional_accounting", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about professional accounting.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_professional_law": { - "task": "mmlu_professional_law", - "task_alias": "professional_law", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "professional_law", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about professional law.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_professional_medicine": { - "task": "mmlu_professional_medicine", - "task_alias": "professional_medicine", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "professional_medicine", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about professional medicine.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_professional_psychology": { - "task": "mmlu_professional_psychology", - "task_alias": "professional_psychology", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "professional_psychology", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about professional psychology.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_public_relations": { - "task": "mmlu_public_relations", - "task_alias": "public_relations", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "public_relations", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about public relations.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_security_studies": { - "task": "mmlu_security_studies", - "task_alias": "security_studies", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "security_studies", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about security studies.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_sociology": { - "task": "mmlu_sociology", - "task_alias": "sociology", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "sociology", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about sociology.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_us_foreign_policy": { - "task": "mmlu_us_foreign_policy", - "task_alias": "us_foreign_policy", - "tag": "mmlu_social_sciences_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "us_foreign_policy", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_virology": { - "task": "mmlu_virology", - "task_alias": "virology", - "tag": "mmlu_other_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "virology", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about virology.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - }, - "mmlu_world_religions": { - "task": "mmlu_world_religions", - "task_alias": "world_religions", - "tag": "mmlu_humanities_tasks", - "dataset_path": "hails/mmlu_no_train", - "dataset_name": "world_religions", - "dataset_kwargs": { - "trust_remote_code": true - }, - "test_split": "test", - "fewshot_split": "dev", - "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", - "doc_to_target": "answer", - "doc_to_choice": [ - "A", - "B", - "C", - "D" - ], - "description": "The following are multiple choice questions (with answers) about world religions.\n\n", - "target_delimiter": " ", - "fewshot_delimiter": "\n\n", - "fewshot_config": { - "sampler": "first_n" - }, - "num_fewshot": 0, - "metric_list": [ - { - "metric": "acc", - "aggregation": "mean", - "higher_is_better": true - } - ], - "output_type": "multiple_choice", - "repeats": 1, - "should_decontaminate": false, - "metadata": { - "version": 1.0 - } - } - }, - "versions": { - "mmlu": 2, - "mmlu_abstract_algebra": 1.0, - "mmlu_anatomy": 1.0, - "mmlu_astronomy": 1.0, - "mmlu_business_ethics": 1.0, - "mmlu_clinical_knowledge": 1.0, - "mmlu_college_biology": 1.0, - "mmlu_college_chemistry": 1.0, - "mmlu_college_computer_science": 1.0, - "mmlu_college_mathematics": 1.0, - "mmlu_college_medicine": 1.0, - "mmlu_college_physics": 1.0, - "mmlu_computer_security": 1.0, - "mmlu_conceptual_physics": 1.0, - "mmlu_econometrics": 1.0, - "mmlu_electrical_engineering": 1.0, - "mmlu_elementary_mathematics": 1.0, - "mmlu_formal_logic": 1.0, - "mmlu_global_facts": 1.0, - "mmlu_high_school_biology": 1.0, - "mmlu_high_school_chemistry": 1.0, - "mmlu_high_school_computer_science": 1.0, - "mmlu_high_school_european_history": 1.0, - "mmlu_high_school_geography": 1.0, - "mmlu_high_school_government_and_politics": 1.0, - "mmlu_high_school_macroeconomics": 1.0, - "mmlu_high_school_mathematics": 1.0, - "mmlu_high_school_microeconomics": 1.0, - "mmlu_high_school_physics": 1.0, - "mmlu_high_school_psychology": 1.0, - "mmlu_high_school_statistics": 1.0, - "mmlu_high_school_us_history": 1.0, - "mmlu_high_school_world_history": 1.0, - "mmlu_human_aging": 1.0, - "mmlu_human_sexuality": 1.0, - "mmlu_humanities": 2, - "mmlu_international_law": 1.0, - "mmlu_jurisprudence": 1.0, - "mmlu_logical_fallacies": 1.0, - "mmlu_machine_learning": 1.0, - "mmlu_management": 1.0, - "mmlu_marketing": 1.0, - "mmlu_medical_genetics": 1.0, - "mmlu_miscellaneous": 1.0, - "mmlu_moral_disputes": 1.0, - "mmlu_moral_scenarios": 1.0, - "mmlu_nutrition": 1.0, - "mmlu_other": 2, - "mmlu_philosophy": 1.0, - "mmlu_prehistory": 1.0, - "mmlu_professional_accounting": 1.0, - "mmlu_professional_law": 1.0, - "mmlu_professional_medicine": 1.0, - "mmlu_professional_psychology": 1.0, - "mmlu_public_relations": 1.0, - "mmlu_security_studies": 1.0, - "mmlu_social_sciences": 2, - "mmlu_sociology": 1.0, - "mmlu_stem": 2, - "mmlu_us_foreign_policy": 1.0, - "mmlu_virology": 1.0, - "mmlu_world_religions": 1.0 - }, - "n-shot": { - "mmlu_abstract_algebra": 0, - "mmlu_anatomy": 0, - "mmlu_astronomy": 0, - "mmlu_business_ethics": 0, - "mmlu_clinical_knowledge": 0, - "mmlu_college_biology": 0, - "mmlu_college_chemistry": 0, - "mmlu_college_computer_science": 0, - "mmlu_college_mathematics": 0, - "mmlu_college_medicine": 0, - "mmlu_college_physics": 0, - "mmlu_computer_security": 0, - "mmlu_conceptual_physics": 0, - "mmlu_econometrics": 0, - "mmlu_electrical_engineering": 0, - "mmlu_elementary_mathematics": 0, - "mmlu_formal_logic": 0, - "mmlu_global_facts": 0, - "mmlu_high_school_biology": 0, - "mmlu_high_school_chemistry": 0, - "mmlu_high_school_computer_science": 0, - "mmlu_high_school_european_history": 0, - "mmlu_high_school_geography": 0, - "mmlu_high_school_government_and_politics": 0, - "mmlu_high_school_macroeconomics": 0, - "mmlu_high_school_mathematics": 0, - "mmlu_high_school_microeconomics": 0, - "mmlu_high_school_physics": 0, - 
"mmlu_high_school_psychology": 0, - "mmlu_high_school_statistics": 0, - "mmlu_high_school_us_history": 0, - "mmlu_high_school_world_history": 0, - "mmlu_human_aging": 0, - "mmlu_human_sexuality": 0, - "mmlu_international_law": 0, - "mmlu_jurisprudence": 0, - "mmlu_logical_fallacies": 0, - "mmlu_machine_learning": 0, - "mmlu_management": 0, - "mmlu_marketing": 0, - "mmlu_medical_genetics": 0, - "mmlu_miscellaneous": 0, - "mmlu_moral_disputes": 0, - "mmlu_moral_scenarios": 0, - "mmlu_nutrition": 0, - "mmlu_philosophy": 0, - "mmlu_prehistory": 0, - "mmlu_professional_accounting": 0, - "mmlu_professional_law": 0, - "mmlu_professional_medicine": 0, - "mmlu_professional_psychology": 0, - "mmlu_public_relations": 0, - "mmlu_security_studies": 0, - "mmlu_sociology": 0, - "mmlu_us_foreign_policy": 0, - "mmlu_virology": 0, - "mmlu_world_religions": 0 - }, - "higher_is_better": { - "mmlu": { - "acc": true - }, - "mmlu_abstract_algebra": { - "acc": true - }, - "mmlu_anatomy": { - "acc": true - }, - "mmlu_astronomy": { - "acc": true - }, - "mmlu_business_ethics": { - "acc": true - }, - "mmlu_clinical_knowledge": { - "acc": true - }, - "mmlu_college_biology": { - "acc": true - }, - "mmlu_college_chemistry": { - "acc": true - }, - "mmlu_college_computer_science": { - "acc": true - }, - "mmlu_college_mathematics": { - "acc": true - }, - "mmlu_college_medicine": { - "acc": true - }, - "mmlu_college_physics": { - "acc": true - }, - "mmlu_computer_security": { - "acc": true - }, - "mmlu_conceptual_physics": { - "acc": true - }, - "mmlu_econometrics": { - "acc": true - }, - "mmlu_electrical_engineering": { - "acc": true - }, - "mmlu_elementary_mathematics": { - "acc": true - }, - "mmlu_formal_logic": { - "acc": true - }, - "mmlu_global_facts": { - "acc": true - }, - "mmlu_high_school_biology": { - "acc": true - }, - "mmlu_high_school_chemistry": { - "acc": true - }, - "mmlu_high_school_computer_science": { - "acc": true - }, - "mmlu_high_school_european_history": { - "acc": true - }, - "mmlu_high_school_geography": { - "acc": true - }, - "mmlu_high_school_government_and_politics": { - "acc": true - }, - "mmlu_high_school_macroeconomics": { - "acc": true - }, - "mmlu_high_school_mathematics": { - "acc": true - }, - "mmlu_high_school_microeconomics": { - "acc": true - }, - "mmlu_high_school_physics": { - "acc": true - }, - "mmlu_high_school_psychology": { - "acc": true - }, - "mmlu_high_school_statistics": { - "acc": true - }, - "mmlu_high_school_us_history": { - "acc": true - }, - "mmlu_high_school_world_history": { - "acc": true - }, - "mmlu_human_aging": { - "acc": true - }, - "mmlu_human_sexuality": { - "acc": true - }, - "mmlu_humanities": { - "acc": true - }, - "mmlu_international_law": { - "acc": true - }, - "mmlu_jurisprudence": { - "acc": true - }, - "mmlu_logical_fallacies": { - "acc": true - }, - "mmlu_machine_learning": { - "acc": true - }, - "mmlu_management": { - "acc": true - }, - "mmlu_marketing": { - "acc": true - }, - "mmlu_medical_genetics": { - "acc": true - }, - "mmlu_miscellaneous": { - "acc": true - }, - "mmlu_moral_disputes": { - "acc": true - }, - "mmlu_moral_scenarios": { - "acc": true - }, - "mmlu_nutrition": { - "acc": true - }, - "mmlu_other": { - "acc": true - }, - "mmlu_philosophy": { - "acc": true - }, - "mmlu_prehistory": { - "acc": true - }, - "mmlu_professional_accounting": { - "acc": true - }, - "mmlu_professional_law": { - "acc": true - }, - "mmlu_professional_medicine": { - "acc": true - }, - "mmlu_professional_psychology": { - "acc": true - }, - "mmlu_public_relations": { 
- "acc": true - }, - "mmlu_security_studies": { - "acc": true - }, - "mmlu_social_sciences": { - "acc": true - }, - "mmlu_sociology": { - "acc": true - }, - "mmlu_stem": { - "acc": true - }, - "mmlu_us_foreign_policy": { - "acc": true - }, - "mmlu_virology": { - "acc": true - }, - "mmlu_world_religions": { - "acc": true - } - }, - "n-samples": { - "mmlu_high_school_mathematics": { - "original": 270, - "effective": 270 - }, - "mmlu_college_physics": { - "original": 102, - "effective": 102 - }, - "mmlu_college_mathematics": { - "original": 100, - "effective": 100 - }, - "mmlu_college_computer_science": { - "original": 100, - "effective": 100 - }, - "mmlu_high_school_biology": { - "original": 310, - "effective": 310 - }, - "mmlu_astronomy": { - "original": 152, - "effective": 152 - }, - "mmlu_anatomy": { - "original": 135, - "effective": 135 - }, - "mmlu_elementary_mathematics": { - "original": 378, - "effective": 378 - }, - "mmlu_high_school_computer_science": { - "original": 100, - "effective": 100 - }, - "mmlu_college_chemistry": { - "original": 100, - "effective": 100 - }, - "mmlu_abstract_algebra": { - "original": 100, - "effective": 100 - }, - "mmlu_conceptual_physics": { - "original": 235, - "effective": 235 - }, - "mmlu_high_school_physics": { - "original": 151, - "effective": 151 - }, - "mmlu_college_biology": { - "original": 144, - "effective": 144 - }, - "mmlu_machine_learning": { - "original": 112, - "effective": 112 - }, - "mmlu_electrical_engineering": { - "original": 145, - "effective": 145 - }, - "mmlu_computer_security": { - "original": 100, - "effective": 100 - }, - "mmlu_high_school_statistics": { - "original": 216, - "effective": 216 - }, - "mmlu_high_school_chemistry": { - "original": 203, - "effective": 203 - }, - "mmlu_virology": { - "original": 166, - "effective": 166 - }, - "mmlu_medical_genetics": { - "original": 100, - "effective": 100 - }, - "mmlu_professional_medicine": { - "original": 272, - "effective": 272 - }, - "mmlu_professional_accounting": { - "original": 282, - "effective": 282 - }, - "mmlu_global_facts": { - "original": 100, - "effective": 100 - }, - "mmlu_nutrition": { - "original": 306, - "effective": 306 - }, - "mmlu_business_ethics": { - "original": 100, - "effective": 100 - }, - "mmlu_miscellaneous": { - "original": 783, - "effective": 783 - }, - "mmlu_marketing": { - "original": 234, - "effective": 234 - }, - "mmlu_human_aging": { - "original": 223, - "effective": 223 - }, - "mmlu_college_medicine": { - "original": 173, - "effective": 173 - }, - "mmlu_management": { - "original": 103, - "effective": 103 - }, - "mmlu_clinical_knowledge": { - "original": 265, - "effective": 265 - }, - "mmlu_us_foreign_policy": { - "original": 100, - "effective": 100 - }, - "mmlu_high_school_geography": { - "original": 198, - "effective": 198 - }, - "mmlu_public_relations": { - "original": 110, - "effective": 110 - }, - "mmlu_high_school_microeconomics": { - "original": 238, - "effective": 238 - }, - "mmlu_high_school_psychology": { - "original": 545, - "effective": 545 - }, - "mmlu_high_school_government_and_politics": { - "original": 193, - "effective": 193 - }, - "mmlu_high_school_macroeconomics": { - "original": 390, - "effective": 390 - }, - "mmlu_human_sexuality": { - "original": 131, - "effective": 131 - }, - "mmlu_professional_psychology": { - "original": 612, - "effective": 612 - }, - "mmlu_econometrics": { - "original": 114, - "effective": 114 - }, - "mmlu_security_studies": { - "original": 245, - "effective": 245 - }, - "mmlu_sociology": { - "original": 
201, - "effective": 201 - }, - "mmlu_moral_disputes": { - "original": 346, - "effective": 346 - }, - "mmlu_international_law": { - "original": 121, - "effective": 121 - }, - "mmlu_professional_law": { - "original": 1534, - "effective": 1534 - }, - "mmlu_high_school_european_history": { - "original": 165, - "effective": 165 - }, - "mmlu_world_religions": { - "original": 171, - "effective": 171 - }, - "mmlu_logical_fallacies": { - "original": 163, - "effective": 163 - }, - "mmlu_formal_logic": { - "original": 126, - "effective": 126 - }, - "mmlu_high_school_world_history": { - "original": 237, - "effective": 237 - }, - "mmlu_philosophy": { - "original": 311, - "effective": 311 - }, - "mmlu_jurisprudence": { - "original": 108, - "effective": 108 - }, - "mmlu_moral_scenarios": { - "original": 895, - "effective": 895 - }, - "mmlu_high_school_us_history": { - "original": 204, - "effective": 204 - }, - "mmlu_prehistory": { - "original": 324, - "effective": 324 - } - }, - "config": { - "model": "hf", - "model_args": "pretrained=/ALLaM-7B-Instruct,trust_remote_code=True,cache_dir=/tmp,parallelize=True", - "model_num_parameters": 7000559616, - "model_dtype": "torch.bfloat16", - "model_revision": "main", - "model_sha": "", - "batch_size": "auto", - "batch_sizes": [ - 64 - ], - "device": null, - "use_cache": null, - "limit": null, - "bootstrap_iters": 100000, - "gen_kwargs": null, - "random_seed": 0, - "numpy_seed": 1234, - "torch_seed": 1234, - "fewshot_seed": 1234 - }, - "git_hash": "8e1bd48d", - "date": 1735691184.506562, - "pretty_env_info": "PyTorch version: 2.4.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.3 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: version 3.27.1\nLibc version: glibc-2.35\n\nPython version: 3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0] (64-bit runtime)\nPython platform: Linux-5.15.0-1064-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.2.128\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100 80GB PCIe\nGPU 1: NVIDIA A100 80GB PCIe\n\nNvidia driver version: 535.161.08\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.4\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 48\nOn-line CPU(s) list: 0-47\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V13 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 1\nStepping: 1\nBogoMIPS: 4890.88\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core invpcid_single vmmcall 
fsgsbase bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr rdpru arat umip vaes vpclmulqdq rdpid fsrm\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 1.5 MiB (48 instances)\nL1i cache: 1.5 MiB (48 instances)\nL2 cache: 24 MiB (48 instances)\nL3 cache: 192 MiB (6 instances)\nNUMA node(s): 2\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec rstack overflow: Mitigation; safe RET, no microcode\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines; STIBP disabled; RSB filling; PBRSB-eIBRS Not affected; BHI Not affected\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] onnx==1.14.0\n[pip3] pytorch-lightning==2.0.7\n[pip3] pytorch-quantization==2.1.2\n[pip3] torch==2.4.0\n[pip3] torch-tensorrt==2.0.0.dev0\n[pip3] torchaudio==2.1.0\n[pip3] torchdata==0.7.0a0\n[pip3] torchmetrics==1.2.0\n[pip3] torchvision==0.19.0\n[pip3] triton==3.0.0\n[conda] Could not collect", - "transformers_version": "4.47.1", - "upper_git_hash": null, - "tokenizer_pad_token": [ - "<unk>", - "0" - ], - "tokenizer_eos_token": [ - "</s>", - "2" - ], - "tokenizer_bos_token": [ - "<s>", - "1" - ], - "eot_token_id": 2, - "max_length": 4096, - "task_hashes": {}, - "model_source": "hf", - "model_name": "/ALLaM-7B-Instruct", - "model_name_sanitized": "/ALLaM-7B-Instruct", - "system_instruction": null, - "system_instruction_sha": null, - "fewshot_as_multiturn": false, - "chat_template": null, - "chat_template_sha": null, - "start_time": 37362.382822608, - "end_time": 37647.531273873, - "total_evaluation_time_seconds": "285.1484512649986" -} \ No newline at end of file
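
Reviewer note (not part of the diff): the deleted file is a standard lm-evaluation-harness report, the kind produced by an invocation along the lines of `lm_eval --model hf --tasks mmlu --num_fewshot 0 --batch_size auto`. Below is a minimal Python sketch of how such a file can be sanity-checked before deletion. Assumptions are flagged inline: the path comes from the diff header and presumes a pre-deletion checkout, the top-level "results"/"configs" keys are the harness's usual layout, and the sample document is purely hypothetical; the harness itself scores each choice by likelihood rather than printing prompts, so this only illustrates the template mechanics.

    # sanity_check_report.py - inspect an lm-eval results file like the one deleted above.
    import json

    from jinja2 import Template  # lm-evaluation-harness renders doc_to_text with Jinja2

    # Assumption: run from a checkout that still contains the file (path per the diff header).
    with open("evaluation/en/mmlu_0_shot.json") as fh:
        report = json.load(fh)

    # "acc,none" is the harness's metric key: metric name plus the default ("none") filter.
    print(f"MMLU 0-shot: {report['results']['mmlu']['acc,none']:.4f}")

    # Five weakest subtasks, worst first (aggregates like mmlu_stem are included too).
    accs = {t: r["acc,none"] for t, r in report["results"].items() if "acc,none" in r}
    for task, acc in sorted(accs.items(), key=lambda kv: kv[1])[:5]:
        print(f"  {task}: {acc:.4f}")

    # Reconstruct the zero-shot prompt a task would build from its config
    # (description prefix + rendered doc_to_text, target joined by target_delimiter).
    cfg = report["configs"]["mmlu_sociology"]  # assumption: configs live under this key
    doc = {  # hypothetical document in the MMLU schema
        "question": "Which concept describes learned, shared behavior? ",
        "choices": ["Instinct", "Culture", "Drive", "Reflex"],
        "answer": 1,
    }
    prompt = cfg["description"] + Template(cfg["doc_to_text"]).render(**doc)
    target = cfg["doc_to_choice"][doc["answer"]]
    print(prompt + cfg["target_delimiter"] + target)

With num_fewshot set to 0, as in every task config above, the fewshot_split and first_n sampler are configured but unused, so the rendered prompt is just the subject description followed by a single question block ending in "Answer:".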