{
  "best_metric": 0.7639025449752808,
  "best_model_checkpoint": "./output_v2/7b_cluster022_Nous-Hermes-llama-2-7b_partitioned_v3_standardized_022/checkpoint-2800",
  "epoch": 1.199400299850075,
  "global_step": 3400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0002, |
|
"loss": 0.8325, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0.0002, |
|
"loss": 0.8202, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7466, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7549, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7569, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7691, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.0002, |
|
"loss": 0.744, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7708, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.0002, |
|
"loss": 0.8071, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7303, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6861, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7592, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7361, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0002, |
|
"loss": 0.76, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7617, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7073, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7581, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7636, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7712, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7547, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"eval_loss": 0.8007758259773254, |
|
"eval_runtime": 185.4331, |
|
"eval_samples_per_second": 5.393, |
|
"eval_steps_per_second": 2.696, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"mmlu_eval_accuracy": 0.4659766842502547, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365, |
|
"mmlu_eval_accuracy_anatomy": 0.6428571428571429, |
|
"mmlu_eval_accuracy_astronomy": 0.375, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.4375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.3125, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293, |
|
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.34375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7166666666666667, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, |
|
"mmlu_eval_accuracy_international_law": 0.7692307692307693, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727, |
|
"mmlu_eval_accuracy_management": 0.7272727272727273, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907, |
|
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5757575757575758, |
|
"mmlu_eval_accuracy_philosophy": 0.5, |
|
"mmlu_eval_accuracy_prehistory": 0.5142857142857142, |
|
"mmlu_eval_accuracy_professional_accounting": 0.25806451612903225, |
|
"mmlu_eval_accuracy_professional_law": 0.3411764705882353, |
|
"mmlu_eval_accuracy_professional_medicine": 0.45161290322580644, |
|
"mmlu_eval_accuracy_professional_psychology": 0.391304347826087, |
|
"mmlu_eval_accuracy_public_relations": 0.5833333333333334, |
|
"mmlu_eval_accuracy_security_studies": 0.5185185185185185, |
|
"mmlu_eval_accuracy_sociology": 0.6818181818181818, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.3888888888888889, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.0009409290575484, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7814, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7313, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7217, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7299, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7229, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7271, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7253, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7371, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7434, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6741, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7386, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7441, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7243, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7534, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7187, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7508, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7597, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7398, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6924, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7035, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"eval_loss": 0.7900036573410034, |
|
"eval_runtime": 188.9686, |
|
"eval_samples_per_second": 5.292, |
|
"eval_steps_per_second": 2.646, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"mmlu_eval_accuracy": 0.45327850420813054, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5714285714285714, |
|
"mmlu_eval_accuracy_astronomy": 0.5625, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.4375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.0, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.2727272727272727, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.3125, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.4, |
|
"mmlu_eval_accuracy_high_school_biology": 0.34375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_geography": 0.6818181818181818, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7166666666666667, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.2727272727272727, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.45454545454545453, |
|
"mmlu_eval_accuracy_marketing": 0.76, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907, |
|
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5454545454545454, |
|
"mmlu_eval_accuracy_philosophy": 0.5, |
|
"mmlu_eval_accuracy_prehistory": 0.5142857142857142, |
|
"mmlu_eval_accuracy_professional_accounting": 0.25806451612903225, |
|
"mmlu_eval_accuracy_professional_law": 0.35294117647058826, |
|
"mmlu_eval_accuracy_professional_medicine": 0.3870967741935484, |
|
"mmlu_eval_accuracy_professional_psychology": 0.4057971014492754, |
|
"mmlu_eval_accuracy_public_relations": 0.6666666666666666, |
|
"mmlu_eval_accuracy_security_studies": 0.5185185185185185, |
|
"mmlu_eval_accuracy_sociology": 0.6818181818181818, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364, |
|
"mmlu_eval_accuracy_virology": 0.4444444444444444, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.0370562722026213, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7245, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0002, |
|
"loss": 0.8027, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7174, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7471, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7263, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7001, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7767, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7406, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7371, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7152, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0002, |
|
"loss": 0.746, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7178, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7056, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6961, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0002, |
|
"loss": 0.673, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7508, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7508, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7122, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7154, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7587, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"eval_loss": 0.785829484462738, |
|
"eval_runtime": 189.436, |
|
"eval_samples_per_second": 5.279, |
|
"eval_steps_per_second": 2.639, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"mmlu_eval_accuracy": 0.4748863496985387, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365, |
|
"mmlu_eval_accuracy_anatomy": 0.5714285714285714, |
|
"mmlu_eval_accuracy_astronomy": 0.5625, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586, |
|
"mmlu_eval_accuracy_college_biology": 0.375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.25, |
|
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453, |
|
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_medicine": 0.3181818181818182, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.3125, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7166666666666667, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5, |
|
"mmlu_eval_accuracy_human_aging": 0.7391304347826086, |
|
"mmlu_eval_accuracy_human_sexuality": 0.5, |
|
"mmlu_eval_accuracy_international_law": 0.7692307692307693, |
|
"mmlu_eval_accuracy_jurisprudence": 0.45454545454545453, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.76, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907, |
|
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.23, |
|
"mmlu_eval_accuracy_nutrition": 0.5454545454545454, |
|
"mmlu_eval_accuracy_philosophy": 0.47058823529411764, |
|
"mmlu_eval_accuracy_prehistory": 0.45714285714285713, |
|
"mmlu_eval_accuracy_professional_accounting": 0.25806451612903225, |
|
"mmlu_eval_accuracy_professional_law": 0.3411764705882353, |
|
"mmlu_eval_accuracy_professional_medicine": 0.3225806451612903, |
|
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375, |
|
"mmlu_eval_accuracy_public_relations": 0.5833333333333334, |
|
"mmlu_eval_accuracy_security_studies": 0.5925925925925926, |
|
"mmlu_eval_accuracy_sociology": 0.6363636363636364, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.4444444444444444, |
|
"mmlu_eval_accuracy_world_religions": 0.7894736842105263, |
|
"mmlu_loss": 0.9638749470559798, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0002, |
|
"loss": 0.8107, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7193, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7275, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7553, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7385, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7071, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7395, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7512, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7063, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0002, |
|
"loss": 0.723, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7068, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7211, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7123, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6394, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0002, |
|
"loss": 0.679, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7402, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7634, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7253, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7497, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7008, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"eval_loss": 0.7810184359550476, |
|
"eval_runtime": 187.2684, |
|
"eval_samples_per_second": 5.34, |
|
"eval_steps_per_second": 2.67, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"mmlu_eval_accuracy": 0.46145585660156935, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5714285714285714, |
|
"mmlu_eval_accuracy_astronomy": 0.4375, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.4375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.3181818181818182, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.2727272727272727, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.3125, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333, |
|
"mmlu_eval_accuracy_international_law": 0.7692307692307693, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727, |
|
"mmlu_eval_accuracy_management": 0.5454545454545454, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745, |
|
"mmlu_eval_accuracy_moral_disputes": 0.5, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.23, |
|
"mmlu_eval_accuracy_nutrition": 0.5454545454545454, |
|
"mmlu_eval_accuracy_philosophy": 0.47058823529411764, |
|
"mmlu_eval_accuracy_prehistory": 0.5142857142857142, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3548387096774194, |
|
"mmlu_eval_accuracy_professional_law": 0.3352941176470588, |
|
"mmlu_eval_accuracy_professional_medicine": 0.3870967741935484, |
|
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375, |
|
"mmlu_eval_accuracy_public_relations": 0.5833333333333334, |
|
"mmlu_eval_accuracy_security_studies": 0.48148148148148145, |
|
"mmlu_eval_accuracy_sociology": 0.6818181818181818, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.4444444444444444, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.074835775843177, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7028, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7465, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7717, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7256, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0002, |
|
"loss": 0.776, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7521, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7118, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6725, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6865, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7387, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7117, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002, |
|
"loss": 0.686, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7106, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7004, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7376, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7226, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7396, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7194, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7068, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"eval_loss": 0.7785887718200684, |
|
"eval_runtime": 187.1264, |
|
"eval_samples_per_second": 5.344, |
|
"eval_steps_per_second": 2.672, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"mmlu_eval_accuracy": 0.4578914859636893, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365, |
|
"mmlu_eval_accuracy_anatomy": 0.5714285714285714, |
|
"mmlu_eval_accuracy_astronomy": 0.375, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.4375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_medicine": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.3125, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.3902439024390244, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.4, |
|
"mmlu_eval_accuracy_high_school_biology": 0.375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.2727272727272727, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.75, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745, |
|
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5454545454545454, |
|
"mmlu_eval_accuracy_philosophy": 0.5, |
|
"mmlu_eval_accuracy_prehistory": 0.5142857142857142, |
|
"mmlu_eval_accuracy_professional_accounting": 0.2903225806451613, |
|
"mmlu_eval_accuracy_professional_law": 0.3411764705882353, |
|
"mmlu_eval_accuracy_professional_medicine": 0.3870967741935484, |
|
"mmlu_eval_accuracy_professional_psychology": 0.391304347826087, |
|
"mmlu_eval_accuracy_public_relations": 0.6666666666666666, |
|
"mmlu_eval_accuracy_security_studies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_sociology": 0.7272727272727273, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.3888888888888889, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 0.9906338841578979, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7622, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7039, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7078, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7504, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7543, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7081, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7193, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7138, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7277, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7183, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6955, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6558, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7213, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7377, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7591, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7336, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002, |
|
"loss": 0.717, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6958, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6692, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6991, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"eval_loss": 0.7757567167282104, |
|
"eval_runtime": 186.5318, |
|
"eval_samples_per_second": 5.361, |
|
"eval_steps_per_second": 2.681, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"mmlu_eval_accuracy": 0.4543617194438492, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5625, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.4375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.0, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.3181818181818182, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536, |
|
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.34615384615384615, |
|
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.75, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.68, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745, |
|
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.6060606060606061, |
|
"mmlu_eval_accuracy_philosophy": 0.47058823529411764, |
|
"mmlu_eval_accuracy_prehistory": 0.45714285714285713, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3548387096774194, |
|
"mmlu_eval_accuracy_professional_law": 0.3352941176470588, |
|
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744, |
|
"mmlu_eval_accuracy_professional_psychology": 0.4057971014492754, |
|
"mmlu_eval_accuracy_public_relations": 0.5, |
|
"mmlu_eval_accuracy_security_studies": 0.5185185185185185, |
|
"mmlu_eval_accuracy_sociology": 0.5909090909090909, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.4444444444444444, |
|
"mmlu_eval_accuracy_world_religions": 0.6842105263157895, |
|
"mmlu_loss": 1.0648847781608062, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7593, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6822, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002, |
|
"loss": 0.699, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7015, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6901, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7406, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6947, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002, |
|
"loss": 0.734, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7335, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7067, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7627, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002, |
|
"loss": 0.676, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7385, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6752, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7385, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7122, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6792, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6761, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7069, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7067, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"eval_loss": 0.7749121189117432, |
|
"eval_runtime": 186.6278, |
|
"eval_samples_per_second": 5.358, |
|
"eval_steps_per_second": 2.679, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"mmlu_eval_accuracy": 0.4656455181567258, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.4375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453, |
|
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.3902439024390244, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.7777777777777778, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6627906976744186, |
|
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.23, |
|
"mmlu_eval_accuracy_nutrition": 0.5454545454545454, |
|
"mmlu_eval_accuracy_philosophy": 0.5, |
|
"mmlu_eval_accuracy_prehistory": 0.4, |
|
"mmlu_eval_accuracy_professional_accounting": 0.2903225806451613, |
|
"mmlu_eval_accuracy_professional_law": 0.3235294117647059, |
|
"mmlu_eval_accuracy_professional_medicine": 0.3870967741935484, |
|
"mmlu_eval_accuracy_professional_psychology": 0.391304347826087, |
|
"mmlu_eval_accuracy_public_relations": 0.5833333333333334, |
|
"mmlu_eval_accuracy_security_studies": 0.48148148148148145, |
|
"mmlu_eval_accuracy_sociology": 0.5909090909090909, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.3888888888888889, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.0056204651883627, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002, |
|
"loss": 0.672, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6638, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7447, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002, |
|
"loss": 0.703, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6834, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7059, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7046, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7405, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6708, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7534, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7192, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7252, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6876, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6603, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6957, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7745, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7035, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7381, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6919, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6665, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"eval_loss": 0.7732749581336975, |
|
"eval_runtime": 189.0558, |
|
"eval_samples_per_second": 5.289, |
|
"eval_steps_per_second": 2.645, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"mmlu_eval_accuracy": 0.4643913679545894, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.3125, |
|
"mmlu_eval_accuracy_college_chemistry": 0.375, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.4090909090909091, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.25, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.4375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637, |
|
"mmlu_eval_accuracy_formal_logic": 0.14285714285714285, |
|
"mmlu_eval_accuracy_global_facts": 0.6, |
|
"mmlu_eval_accuracy_high_school_biology": 0.375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.6833333333333333, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6521739130434783, |
|
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.68, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6976744186046512, |
|
"mmlu_eval_accuracy_moral_disputes": 0.42105263157894735, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5757575757575758, |
|
"mmlu_eval_accuracy_philosophy": 0.47058823529411764, |
|
"mmlu_eval_accuracy_prehistory": 0.45714285714285713, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3870967741935484, |
|
"mmlu_eval_accuracy_professional_law": 0.32941176470588235, |
|
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744, |
|
"mmlu_eval_accuracy_professional_psychology": 0.391304347826087, |
|
"mmlu_eval_accuracy_public_relations": 0.5, |
|
"mmlu_eval_accuracy_security_studies": 0.5185185185185185, |
|
"mmlu_eval_accuracy_sociology": 0.6818181818181818, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.3888888888888889, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.211870277013542, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6885, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6468, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6762, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7359, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7327, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6205, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7413, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7164, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6865, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6713, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6729, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6604, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6889, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0002, |
|
"loss": 0.726, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7183, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6994, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6921, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6768, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7288, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6793, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"eval_loss": 0.769940197467804, |
|
"eval_runtime": 188.6588, |
|
"eval_samples_per_second": 5.301, |
|
"eval_steps_per_second": 2.65, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"mmlu_eval_accuracy": 0.4624357023236349, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.3125, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.45454545454545453, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.4375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.6, |
|
"mmlu_eval_accuracy_high_school_biology": 0.375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.5, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6744186046511628, |
|
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5454545454545454, |
|
"mmlu_eval_accuracy_philosophy": 0.4411764705882353, |
|
"mmlu_eval_accuracy_prehistory": 0.4857142857142857, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3548387096774194, |
|
"mmlu_eval_accuracy_professional_law": 0.3176470588235294, |
|
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744, |
|
"mmlu_eval_accuracy_professional_psychology": 0.4057971014492754, |
|
"mmlu_eval_accuracy_public_relations": 0.4166666666666667, |
|
"mmlu_eval_accuracy_security_studies": 0.5185185185185185, |
|
"mmlu_eval_accuracy_sociology": 0.7272727272727273, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.4444444444444444, |
|
"mmlu_eval_accuracy_world_religions": 0.6842105263157895, |
|
"mmlu_loss": 1.171973392095018, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7021, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6696, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7099, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7604, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6885, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6649, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7325, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6309, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7024, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6719, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002, |
|
"loss": 0.683, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6862, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7632, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7087, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6527, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7284, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6579, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7191, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6849, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7375, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"eval_loss": 0.769704282283783, |
|
"eval_runtime": 188.6001, |
|
"eval_samples_per_second": 5.302, |
|
"eval_steps_per_second": 2.651, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"mmlu_eval_accuracy": 0.4586122911114159, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5625, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.3125, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.4, |
|
"mmlu_eval_accuracy_high_school_biology": 0.4375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.6833333333333333, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5454545454545454, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333, |
|
"mmlu_eval_accuracy_international_law": 0.7692307692307693, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727, |
|
"mmlu_eval_accuracy_management": 0.7272727272727273, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6744186046511628, |
|
"mmlu_eval_accuracy_moral_disputes": 0.5, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.23, |
|
"mmlu_eval_accuracy_nutrition": 0.5757575757575758, |
|
"mmlu_eval_accuracy_philosophy": 0.47058823529411764, |
|
"mmlu_eval_accuracy_prehistory": 0.42857142857142855, |
|
"mmlu_eval_accuracy_professional_accounting": 0.2903225806451613, |
|
"mmlu_eval_accuracy_professional_law": 0.3235294117647059, |
|
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744, |
|
"mmlu_eval_accuracy_professional_psychology": 0.391304347826087, |
|
"mmlu_eval_accuracy_public_relations": 0.5, |
|
"mmlu_eval_accuracy_security_studies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_sociology": 0.6818181818181818, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.4444444444444444, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.1570235493911465, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6536, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6591, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7033, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7312, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7322, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7249, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7576, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7499, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6414, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7201, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6904, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0002, |
|
"loss": 0.716, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6923, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7509, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7288, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6475, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6637, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7629, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6744, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7216, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"eval_loss": 0.7676219344139099, |
|
"eval_runtime": 186.1144, |
|
"eval_samples_per_second": 5.373, |
|
"eval_steps_per_second": 2.687, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"mmlu_eval_accuracy": 0.4688920828630079, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5, |
|
"mmlu_eval_accuracy_business_ethics": 0.6363636363636364, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586, |
|
"mmlu_eval_accuracy_college_biology": 0.3125, |
|
"mmlu_eval_accuracy_college_chemistry": 0.375, |
|
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453, |
|
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_medicine": 0.3181818181818182, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.45454545454545453, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.65, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.7391304347826086, |
|
"mmlu_eval_accuracy_human_sexuality": 0.5, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907, |
|
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.6060606060606061, |
|
"mmlu_eval_accuracy_philosophy": 0.4411764705882353, |
|
"mmlu_eval_accuracy_prehistory": 0.45714285714285713, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903, |
|
"mmlu_eval_accuracy_professional_law": 0.3058823529411765, |
|
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744, |
|
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375, |
|
"mmlu_eval_accuracy_public_relations": 0.3333333333333333, |
|
"mmlu_eval_accuracy_security_studies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_sociology": 0.7272727272727273, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.5555555555555556, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.2998772347564798, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6914, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6982, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7011, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7204, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7029, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6997, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7481, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6893, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7546, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6735, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.0002, |
|
"loss": 0.695, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7171, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6942, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6779, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7155, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6583, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6599, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.0002, |
|
"loss": 0.8043, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6276, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6935, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"eval_loss": 0.7674869298934937, |
|
"eval_runtime": 186.2898, |
|
"eval_samples_per_second": 5.368, |
|
"eval_steps_per_second": 2.684, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"mmlu_eval_accuracy": 0.4624483299799015, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.4375, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586, |
|
"mmlu_eval_accuracy_college_biology": 0.375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.25, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.45454545454545453, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.4375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683, |
|
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.40625, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.5, |
|
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.6833333333333333, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.5833333333333334, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.76, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6744186046511628, |
|
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.6060606060606061, |
|
"mmlu_eval_accuracy_philosophy": 0.5294117647058824, |
|
"mmlu_eval_accuracy_prehistory": 0.4857142857142857, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3548387096774194, |
|
"mmlu_eval_accuracy_professional_law": 0.3176470588235294, |
|
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744, |
|
"mmlu_eval_accuracy_professional_psychology": 0.4057971014492754, |
|
"mmlu_eval_accuracy_public_relations": 0.3333333333333333, |
|
"mmlu_eval_accuracy_security_studies": 0.5925925925925926, |
|
"mmlu_eval_accuracy_sociology": 0.7272727272727273, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.3888888888888889, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.1521158476722457, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6884, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6951, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6889, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6504, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7216, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6864, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6508, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6698, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7087, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6714, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7352, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7212, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6869, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6961, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7009, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7227, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6833, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7468, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7568, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7271, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"eval_loss": 0.7648921012878418, |
|
"eval_runtime": 185.3553, |
|
"eval_samples_per_second": 5.395, |
|
"eval_steps_per_second": 2.698, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"mmlu_eval_accuracy": 0.4649620294296765, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.45454545454545453, |
|
"mmlu_eval_accuracy_anatomy": 0.5714285714285714, |
|
"mmlu_eval_accuracy_astronomy": 0.5, |
|
"mmlu_eval_accuracy_business_ethics": 0.6363636363636364, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586, |
|
"mmlu_eval_accuracy_college_biology": 0.3125, |
|
"mmlu_eval_accuracy_college_chemistry": 0.25, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.08333333333333333, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.3125, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.4375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.5, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769, |
|
"mmlu_eval_accuracy_human_aging": 0.6521739130434783, |
|
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907, |
|
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5757575757575758, |
|
"mmlu_eval_accuracy_philosophy": 0.5, |
|
"mmlu_eval_accuracy_prehistory": 0.45714285714285713, |
|
"mmlu_eval_accuracy_professional_accounting": 0.2903225806451613, |
|
"mmlu_eval_accuracy_professional_law": 0.3235294117647059, |
|
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744, |
|
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375, |
|
"mmlu_eval_accuracy_public_relations": 0.5, |
|
"mmlu_eval_accuracy_security_studies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_sociology": 0.6818181818181818, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364, |
|
"mmlu_eval_accuracy_virology": 0.5, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.3137481814109315, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7144, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7213, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.0002, |
|
"loss": 0.685, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6937, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7367, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7221, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.0002, |
|
"loss": 0.717, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6915, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6913, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6552, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7508, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6657, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7466, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7433, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7041, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7001, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6845, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7031, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7454, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7136, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"eval_loss": 0.7639025449752808, |
|
"eval_runtime": 184.2838, |
|
"eval_samples_per_second": 5.426, |
|
"eval_steps_per_second": 2.713, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"mmlu_eval_accuracy": 0.4617285223167345, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.45454545454545453, |
|
"mmlu_eval_accuracy_anatomy": 0.5714285714285714, |
|
"mmlu_eval_accuracy_astronomy": 0.5, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586, |
|
"mmlu_eval_accuracy_college_biology": 0.4375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_medicine": 0.3181818181818182, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.4375, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.10344827586206896, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.5, |
|
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6956521739130435, |
|
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.7272727272727273, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907, |
|
"mmlu_eval_accuracy_moral_disputes": 0.5, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5454545454545454, |
|
"mmlu_eval_accuracy_philosophy": 0.47058823529411764, |
|
"mmlu_eval_accuracy_prehistory": 0.4857142857142857, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903, |
|
"mmlu_eval_accuracy_professional_law": 0.32941176470588235, |
|
"mmlu_eval_accuracy_professional_medicine": 0.45161290322580644, |
|
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375, |
|
"mmlu_eval_accuracy_public_relations": 0.4166666666666667, |
|
"mmlu_eval_accuracy_security_studies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_sociology": 0.6363636363636364, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364, |
|
"mmlu_eval_accuracy_virology": 0.3333333333333333, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.2059359818152287, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7341, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7175, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6706, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6273, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6064, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6719, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6426, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6111, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6084, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6414, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6305, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6568, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6456, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6124, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6381, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6184, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.0002, |
|
"loss": 0.588, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6697, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6403, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6339, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"eval_loss": 0.7678287625312805, |
|
"eval_runtime": 184.2482, |
|
"eval_samples_per_second": 5.427, |
|
"eval_steps_per_second": 2.714, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"mmlu_eval_accuracy": 0.46810410139947695, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586, |
|
"mmlu_eval_accuracy_college_biology": 0.375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.25, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.3181818181818182, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.2727272727272727, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_econometrics": 0.08333333333333333, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.40625, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.6833333333333333, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769, |
|
"mmlu_eval_accuracy_human_aging": 0.6521739130434783, |
|
"mmlu_eval_accuracy_human_sexuality": 0.5833333333333334, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.7272727272727273, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907, |
|
"mmlu_eval_accuracy_moral_disputes": 0.5, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5757575757575758, |
|
"mmlu_eval_accuracy_philosophy": 0.4411764705882353, |
|
"mmlu_eval_accuracy_prehistory": 0.45714285714285713, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3548387096774194, |
|
"mmlu_eval_accuracy_professional_law": 0.34705882352941175, |
|
"mmlu_eval_accuracy_professional_medicine": 0.45161290322580644, |
|
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375, |
|
"mmlu_eval_accuracy_public_relations": 0.4166666666666667, |
|
"mmlu_eval_accuracy_security_studies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_sociology": 0.6818181818181818, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364, |
|
"mmlu_eval_accuracy_virology": 0.5555555555555556, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.0877072031830994, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6124, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.0002, |
|
"loss": 0.5688, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.0002, |
|
"loss": 0.5921, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6182, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6351, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.0002, |
|
"loss": 0.672, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6648, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6858, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6367, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.0002, |
|
"loss": 0.5801, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6313, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.0002, |
|
"loss": 0.614, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6164, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6137, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6205, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6495, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6411, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6338, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6278, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6291, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"eval_loss": 0.769086480140686, |
|
"eval_runtime": 184.5863, |
|
"eval_samples_per_second": 5.418, |
|
"eval_steps_per_second": 2.709, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"mmlu_eval_accuracy": 0.46426909704043023, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5625, |
|
"mmlu_eval_accuracy_business_ethics": 0.6363636363636364, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586, |
|
"mmlu_eval_accuracy_college_biology": 0.4375, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.2727272727272727, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.3125, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536, |
|
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857, |
|
"mmlu_eval_accuracy_global_facts": 0.4, |
|
"mmlu_eval_accuracy_high_school_biology": 0.40625, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.5, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6086956521739131, |
|
"mmlu_eval_accuracy_human_sexuality": 0.5, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.7272727272727273, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6744186046511628, |
|
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5454545454545454, |
|
"mmlu_eval_accuracy_philosophy": 0.5, |
|
"mmlu_eval_accuracy_prehistory": 0.5142857142857142, |
|
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903, |
|
"mmlu_eval_accuracy_professional_law": 0.3235294117647059, |
|
"mmlu_eval_accuracy_professional_medicine": 0.3870967741935484, |
|
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375, |
|
"mmlu_eval_accuracy_public_relations": 0.4166666666666667, |
|
"mmlu_eval_accuracy_security_studies": 0.5925925925925926, |
|
"mmlu_eval_accuracy_sociology": 0.6363636363636364, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364, |
|
"mmlu_eval_accuracy_virology": 0.5, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.0945545882764747, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.0002, |
|
"loss": 0.5948, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6622, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6265, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6154, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.0002, |
|
"loss": 0.5703, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6418, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6197, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6295, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6537, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.0002, |
|
"loss": 0.5913, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6146, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6304, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6601, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.0002, |
|
"loss": 0.5797, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6143, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.0002, |
|
"loss": 0.674, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6489, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6867, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6091, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.0002, |
|
"loss": 0.6734, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"eval_loss": 0.7711524367332458, |
|
"eval_runtime": 185.1721, |
|
"eval_samples_per_second": 5.4, |
|
"eval_steps_per_second": 2.7, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"mmlu_eval_accuracy": 0.462289893215491, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727, |
|
"mmlu_eval_accuracy_anatomy": 0.5, |
|
"mmlu_eval_accuracy_astronomy": 0.5, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, |
|
"mmlu_eval_accuracy_college_biology": 0.3125, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182, |
|
"mmlu_eval_accuracy_college_medicine": 0.3181818181818182, |
|
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, |
|
"mmlu_eval_accuracy_computer_security": 0.36363636363636365, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_econometrics": 0.08333333333333333, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.4375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683, |
|
"mmlu_eval_accuracy_formal_logic": 0.35714285714285715, |
|
"mmlu_eval_accuracy_global_facts": 0.5, |
|
"mmlu_eval_accuracy_high_school_biology": 0.40625, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112, |
|
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.27906976744186046, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.5, |
|
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.7, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6521739130434783, |
|
"mmlu_eval_accuracy_human_sexuality": 0.5833333333333334, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5, |
|
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182, |
|
"mmlu_eval_accuracy_management": 0.6363636363636364, |
|
"mmlu_eval_accuracy_marketing": 0.72, |
|
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6744186046511628, |
|
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.5757575757575758, |
|
"mmlu_eval_accuracy_philosophy": 0.5294117647058824, |
|
"mmlu_eval_accuracy_prehistory": 0.45714285714285713, |
|
"mmlu_eval_accuracy_professional_accounting": 0.25806451612903225, |
|
"mmlu_eval_accuracy_professional_law": 0.3352941176470588, |
|
"mmlu_eval_accuracy_professional_medicine": 0.3870967741935484, |
|
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375, |
|
"mmlu_eval_accuracy_public_relations": 0.5, |
|
"mmlu_eval_accuracy_security_studies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_sociology": 0.7272727272727273, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364, |
|
"mmlu_eval_accuracy_virology": 0.5, |
|
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, |
|
"mmlu_loss": 1.2021908294944477, |
|
"step": 3400 |
|
} |
|
], |
|
"max_steps": 5000, |
|
"num_train_epochs": 2, |
|
"total_flos": 7.858095077549261e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|