{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-05-14-20-19-29",
        "total_evaluation_time_secondes": "",
        "model_name": "cstr/Spaetzle-v60-7b-int4-inc",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 4.16,
        "model_params": 7.04,
        "quant_type": "AutoRound",
        "precision": "4bit"
    },
    "results": {
        "harness|hellaswag|0": {
            "acc,none": 0.6643098984266083,
            "acc_stderr,none": 0.004712660409846858,
            "acc_norm,none": 0.8451503684524995,
            "acc_norm_stderr,none": 0.0036102194130613477,
            "alias": "hellaswag"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 3.439016746061463,
            "perplexity_stderr,none": 0.07651117989296828,
            "acc,none": 0.7058024451775665,
            "acc_stderr,none": 0.00634853007817727,
            "alias": "lambada_openai"
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.6556633657896446,
            "acc_stderr,none": 0.015137766174633594,
            "alias": "truthfulqa_mc2"
        },
        "harness|boolq|0": {
            "acc,none": 0.8733944954128441,
            "acc_stderr,none": 0.005815995464335388,
            "alias": "boolq"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.5018359853121175,
            "acc_stderr,none": 0.01750338304687704,
            "alias": "truthfulqa_mc1"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.8526936026936027,
            "acc_stderr,none": 0.007272362176697239,
            "acc_norm,none": 0.8198653198653199,
            "acc_norm_stderr,none": 0.007885661261794777,
            "alias": "arc_easy"
        },
        "harness|winogrande|0": {
            "acc,none": 0.7750591949486977,
            "acc_stderr,none": 0.011735043564126735,
            "alias": "winogrande"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.37,
            "acc_stderr,none": 0.02161328916516578,
            "acc_norm,none": 0.48,
            "acc_norm_stderr,none": 0.02236516042423134,
            "alias": "openbookqa"
        },
        "harness|piqa|0": {
            "acc,none": 0.8226332970620239,
            "acc_stderr,none": 0.00891219356474512,
            "acc_norm,none": 0.8302502720348205,
            "acc_norm_stderr,none": 0.008758999037429159,
            "alias": "piqa"
        },
        "harness|mmlu|0": {
            "acc,none": 0.6139438826378009,
            "acc_stderr,none": 0.003851602203891664,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.5602550478214665,
            "acc_stderr,none": 0.006683438465825116
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.4444444444444444,
            "acc_stderr,none": 0.04444444444444449
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.7636363636363637,
            "acc_stderr,none": 0.033175059300091805
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.8284313725490197,
            "acc_stderr,none": 0.02646056956124065
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.8227848101265823,
            "acc_stderr,none": 0.024856364184503234
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.7851239669421488,
            "acc_stderr,none": 0.037494924487096966
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.7685185185185185,
            "acc_stderr,none": 0.04077494709252627
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.7361963190184049,
            "acc_stderr,none": 0.034624199316156234
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.6994219653179191,
            "acc_stderr,none": 0.024685316867257796
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.2871508379888268,
            "acc_stderr,none": 0.015131608849963766
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.7041800643086816,
            "acc_stderr,none": 0.02592237178881879
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.7376543209876543,
            "acc_stderr,none": 0.024477222856135114
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.45436766623207303,
            "acc_stderr,none": 0.012716941720734808
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.8070175438596491,
            "acc_stderr,none": 0.030267457554898465
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.6942388155777277,
            "acc_stderr,none": 0.007979014705714821
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.61,
            "acc_stderr,none": 0.04902071300001974
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.6943396226415094,
            "acc_stderr,none": 0.028353298073322663
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.6242774566473989,
            "acc_stderr,none": 0.036928207672648664
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.4,
            "acc_stderr,none": 0.04923659639173309
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.7040358744394619,
            "acc_stderr,none": 0.030636591348699796
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.8058252427184466,
            "acc_stderr,none": 0.03916667762822584
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.8547008547008547,
            "acc_stderr,none": 0.02308663508684141
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.72,
            "acc_stderr,none": 0.045126085985421276
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.8160919540229885,
            "acc_stderr,none": 0.01385372417092253
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.6862745098039216,
            "acc_stderr,none": 0.026568921015457155
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.4929078014184397,
            "acc_stderr,none": 0.02982449855912901
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.6691176470588235,
            "acc_stderr,none": 0.028582709753898435
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.4939759036144578,
            "acc_stderr,none": 0.03892212195333045
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.7179070523236919,
            "acc_stderr,none": 0.00793343808995908
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.49122807017543857,
            "acc_stderr,none": 0.04702880432049615
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.7525252525252525,
            "acc_stderr,none": 0.0307463007421245
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.8756476683937824,
            "acc_stderr,none": 0.023814477086593577
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.6205128205128205,
            "acc_stderr,none": 0.024603626924097417
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.6386554621848739,
            "acc_stderr,none": 0.031204691225150023
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.8091743119266055,
            "acc_stderr,none": 0.016847676400091112
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.7786259541984732,
            "acc_stderr,none": 0.03641297081313729
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.6454248366013072,
            "acc_stderr,none": 0.01935336054755369
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.6818181818181818,
            "acc_stderr,none": 0.04461272175910508
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.7061224489795919,
            "acc_stderr,none": 0.02916273841024977
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.8407960199004975,
            "acc_stderr,none": 0.025870646766169136
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.86,
            "acc_stderr,none": 0.03487350880197771
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.5134792261338408,
            "acc_stderr,none": 0.008544368645395404
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.29,
            "acc_stderr,none": 0.045604802157206824
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.562962962962963,
            "acc_stderr,none": 0.042849586397534
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.6776315789473685,
            "acc_stderr,none": 0.038035102483515854
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.7291666666666666,
            "acc_stderr,none": 0.03716177437566017
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.43,
            "acc_stderr,none": 0.049756985195624284
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.56,
            "acc_stderr,none": 0.04988876515698589
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.29,
            "acc_stderr,none": 0.04560480215720684
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.4215686274509804,
            "acc_stderr,none": 0.049135952012744975
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.73,
            "acc_stderr,none": 0.044619604333847394
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.5531914893617021,
            "acc_stderr,none": 0.0325005368436584
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.5241379310344828,
            "acc_stderr,none": 0.0416180850350153
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.4126984126984127,
            "acc_stderr,none": 0.025355741263055263
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.7548387096774194,
            "acc_stderr,none": 0.02447224384089554
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.46798029556650245,
            "acc_stderr,none": 0.03510766597959214
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.68,
            "acc_stderr,none": 0.04688261722621505
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.34444444444444444,
            "acc_stderr,none": 0.02897264888484427
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.33112582781456956,
            "acc_stderr,none": 0.038425817186598696
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.4675925925925926,
            "acc_stderr,none": 0.03402801581358966
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.5267857142857143,
            "acc_stderr,none": 0.047389751192741546
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.621160409556314,
            "acc_stderr,none": 0.014175915490000328,
            "acc_norm,none": 0.6390784982935154,
            "acc_norm_stderr,none": 0.01403476138617546,
            "alias": "arc_challenge"
        }
    },
    "task_info": {
        "model": "cstr/Spaetzle-v60-7b-int4-inc",
        "revision": "main",
        "private": false,
        "params": 4.16,
        "architectures": "MistralForCausalLM",
        "quant_type": "AutoRound",
        "precision": "4bit",
        "model_params": 7.04,
        "model_size": 4.16,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Waiting",
        "submitted_time": "2024-05-11T11:55:16Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "autoround_version": "0.11",
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "enable_minmax_tuning": true,
        "group_size": 128,
        "is_marlin_format": false,
        "iters": 200,
        "lr": 0.005,
        "minmax_lr": 0.005,
        "model_file_base_name": "model",
        "model_name_or_path": null,
        "quant_method": "gptq",
        "scale_dtype": "torch.float32",
        "static_groups": false,
        "sym": false,
        "true_sequential": false,
        "use_quant_input": true
    },
    "versions": {
        "harness|hellaswag|0": 1.0,
        "harness|lambada:openai|0": 1.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|boolq|0": 2.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|arc:easy|0": 1.0,
        "harness|winogrande|0": 1.0,
        "harness|openbookqa|0": 1.0,
        "harness|piqa|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|arc:challenge|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1715684444.5114553,
    "config": {
        "model": "hf",
        "model_args": "pretrained=cstr/Spaetzle-v60-7b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 2,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}