{
  "config_general": {
    "lighteval_sha": "no",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-05-22-01-00-12",
    "total_evaluation_time_secondes": "",
    "model_name": "noxinc/phi-3-portuguese-tom-cat-4k-instruct-Q4_0-GGUF-PTBR",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 2.175438336,
    "model_params": 3.821079552,
    "quant_type": "llama.cpp",
    "precision": "4bit"
  },
  "results": {
    "harness|arc:challenge|0": {
      "acc,none": 0.53839590443686,
      "acc_stderr,none": 0.014568245550296361,
      "acc_norm,none": 0.560580204778157,
      "acc_norm_stderr,none": 0.014503747823580125,
      "alias": "arc_challenge"
    },
    "harness|mmlu|0": {
      "acc,none": 0.6323173337131462,
      "acc_stderr,none": 0.0038812703429438895,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5861849096705632,
      "acc_stderr,none": 0.006909898897532406
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.4523809523809524,
      "acc_stderr,none": 0.044518079590553275
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7454545454545455,
      "acc_stderr,none": 0.03401506715249039
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.7254901960784313,
      "acc_stderr,none": 0.031321798030832904
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.7088607594936709,
      "acc_stderr,none": 0.029571601065753378
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.7933884297520661,
      "acc_stderr,none": 0.036959801280988254
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.7314814814814815,
      "acc_stderr,none": 0.042844679680521934
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.7607361963190185,
      "acc_stderr,none": 0.033519538795212696
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.6878612716763006,
      "acc_stderr,none": 0.024946792225272314
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.4581005586592179,
      "acc_stderr,none": 0.016663683295020527
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.684887459807074,
      "acc_stderr,none": 0.026385273703464492
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.7561728395061729,
      "acc_stderr,none": 0.023891879541959603
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.4680573663624511,
      "acc_stderr,none": 0.012744149704869647
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.8128654970760234,
      "acc_stderr,none": 0.029913127232368022
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.6923076923076923,
      "acc_stderr,none": 0.00802239670550724
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.72,
      "acc_stderr,none": 0.04512608598542127
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.7320754716981132,
      "acc_stderr,none": 0.027257260322494845
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.6184971098265896,
      "acc_stderr,none": 0.03703851193099521
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.38,
      "acc_stderr,none": 0.048783173121456316
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.6502242152466368,
      "acc_stderr,none": 0.03200736719484503
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.8446601941747572,
      "acc_stderr,none": 0.03586594738573974
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.8760683760683761,
      "acc_stderr,none": 0.021586494001281403
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.66,
      "acc_stderr,none": 0.04760952285695238
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.7905491698595147,
      "acc_stderr,none": 0.014551310568143704
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.6601307189542484,
      "acc_stderr,none": 0.02712195607138886
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.5390070921985816,
      "acc_stderr,none": 0.029736592526424438
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.6691176470588235,
      "acc_stderr,none": 0.028582709753898438
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.4939759036144578,
      "acc_stderr,none": 0.03892212195333047
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.7312317192070198,
      "acc_stderr,none": 0.007822932557118559
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.49122807017543857,
      "acc_stderr,none": 0.04702880432049615
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.7929292929292929,
      "acc_stderr,none": 0.028869778460267063
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.8497409326424871,
      "acc_stderr,none": 0.025787723180723872
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.7051282051282052,
      "acc_stderr,none": 0.02311936275823229
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.7563025210084033,
      "acc_stderr,none": 0.027886828078380558
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8366972477064221,
      "acc_stderr,none": 0.01584825580650153
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.6641221374045801,
      "acc_stderr,none": 0.04142313771996665
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.6666666666666666,
      "acc_stderr,none": 0.019070985589687492
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.6727272727272727,
      "acc_stderr,none": 0.044942908662520896
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.5918367346938775,
      "acc_stderr,none": 0.03146465712827423
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.8059701492537313,
      "acc_stderr,none": 0.027962677604768893
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.86,
      "acc_stderr,none": 0.0348735088019777
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.545512210593086,
      "acc_stderr,none": 0.008466724298098851
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.38,
      "acc_stderr,none": 0.048783173121456316
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.6666666666666666,
      "acc_stderr,none": 0.04072314811876837
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.6776315789473685,
      "acc_stderr,none": 0.03803510248351585
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.7638888888888888,
      "acc_stderr,none": 0.03551446610810826
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.44,
      "acc_stderr,none": 0.04988876515698589
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.37,
      "acc_stderr,none": 0.048523658709391
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.32,
      "acc_stderr,none": 0.04688261722621504
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.35294117647058826,
      "acc_stderr,none": 0.047551296160629475
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.8,
      "acc_stderr,none": 0.04020151261036845
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.6212765957446809,
      "acc_stderr,none": 0.03170995606040655
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.5586206896551724,
      "acc_stderr,none": 0.04137931034482757
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.46825396825396826,
      "acc_stderr,none": 0.025699352832131792
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.8129032258064516,
      "acc_stderr,none": 0.02218571009225225
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.5221674876847291,
      "acc_stderr,none": 0.03514528562175008
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.6,
      "acc_stderr,none": 0.049236596391733084
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.34444444444444444,
      "acc_stderr,none": 0.028972648884844267
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.423841059602649,
      "acc_stderr,none": 0.04034846678603397
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.5138888888888888,
      "acc_stderr,none": 0.03408655867977749
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.5357142857142857,
      "acc_stderr,none": 0.04733667890053756
    },
    "harness|winogrande|0": {
      "acc,none": 0.7024467245461721,
      "acc_stderr,none": 0.012849085254614647,
      "alias": "winogrande"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.379436964504284,
      "acc_stderr,none": 0.016987039266142975,
      "alias": "truthfulqa_mc1"
    },
    "harness|boolq|0": {
      "acc,none": 0.8492354740061162,
      "acc_stderr,none": 0.00625829724426619,
      "alias": "boolq"
    },
    "harness|piqa|0": {
      "acc,none": 0.794885745375408,
      "acc_stderr,none": 0.009420971671017913,
      "acc_norm,none": 0.794341675734494,
      "acc_norm_stderr,none": 0.009430229076102503,
      "alias": "piqa"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.8063973063973064,
      "acc_stderr,none": 0.008107714081954553,
      "acc_norm,none": 0.7929292929292929,
      "acc_norm_stderr,none": 0.008314665023956568,
      "alias": "arc_easy"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.5544066746600581,
      "acc_stderr,none": 0.015175889172914337,
      "alias": "truthfulqa_mc2"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 5.88455992880931,
      "perplexity_stderr,none": 0.15247629498325097,
      "acc,none": 0.3564913642538327,
      "acc_stderr,none": 0.006672886984196191,
      "alias": "lambada_openai"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.5780720971917944,
      "acc_stderr,none": 0.004928578106026359,
      "acc_norm,none": 0.7599083847839075,
      "acc_norm_stderr,none": 0.004262659388824527,
      "alias": "hellaswag"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.342,
      "acc_stderr,none": 0.021236147199899254,
      "acc_norm,none": 0.436,
      "acc_norm_stderr,none": 0.0221989546414768,
      "alias": "openbookqa"
    }
  },
  "task_info": {
    "model": "noxinc/phi-3-portuguese-tom-cat-4k-instruct-Q4_0-GGUF-PTBR",
    "revision": "main",
    "private": false,
    "params": null,
    "architectures": "?",
    "quant_type": "llama.cpp",
    "precision": "4bit",
    "model_params": null,
    "model_size": null,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "cpu",
    "status": "Pending",
    "submitted_time": "2024-05-20T04:01:34Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "llama_cpp"
  },
  "quantization_config": {
    "quant_method": "llama.cpp",
    "ftype": "*Q4_0.gguf"
  },
  "versions": {
    "harness|arc:challenge|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|winogrande|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|boolq|0": 2.0,
    "harness|piqa|0": 1.0,
    "harness|arc:easy|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|lambada:openai|0": 1.0,
    "harness|hellaswag|0": 1.0,
    "harness|openbookqa|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1716293460.9694755,
  "config": {
    "model": "WrapperGGUFLM",
    "model_args": "gguf_model=noxinc/phi-3-portuguese-tom-cat-4k-instruct-Q4_0-GGUF-PTBR,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main",
    "batch_size": 1,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}