{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.017956843217142,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.5e-08,
      "loss": 1.8279,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.5e-08,
      "loss": 1.7023,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.4500000000000001e-07,
      "loss": 1.6779,
      "step": 30
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.95e-07,
      "loss": 1.6776,
      "step": 40
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.4500000000000004e-07,
      "loss": 1.6379,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.9500000000000003e-07,
      "loss": 1.5523,
      "step": 60
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.4500000000000003e-07,
      "loss": 1.3693,
      "step": 70
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.9500000000000003e-07,
      "loss": 1.3299,
      "step": 80
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.4500000000000003e-07,
      "loss": 1.2452,
      "step": 90
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.95e-07,
      "loss": 1.25,
      "step": 100
    },
    {
      "epoch": 0.07,
      "learning_rate": 5.450000000000001e-07,
      "loss": 1.1376,
      "step": 110
    },
    {
      "epoch": 0.07,
      "learning_rate": 5.95e-07,
      "loss": 1.1166,
      "step": 120
    },
    {
      "epoch": 0.08,
      "learning_rate": 6.450000000000001e-07,
      "loss": 1.1069,
      "step": 130
    },
    {
      "epoch": 0.08,
      "learning_rate": 6.950000000000001e-07,
      "loss": 1.0921,
      "step": 140
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.450000000000001e-07,
      "loss": 1.0674,
      "step": 150
    },
    {
      "epoch": 0.1,
      "learning_rate": 7.950000000000001e-07,
      "loss": 1.0283,
      "step": 160
    },
    {
      "epoch": 0.1,
      "learning_rate": 8.450000000000002e-07,
      "loss": 1.0252,
      "step": 170
    },
    {
      "epoch": 0.11,
      "learning_rate": 8.95e-07,
      "loss": 0.993,
      "step": 180
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.450000000000001e-07,
      "loss": 0.9808,
      "step": 190
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.950000000000002e-07,
      "loss": 0.9879,
      "step": 200
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.045e-06,
      "loss": 0.9724,
      "step": 210
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.095e-06,
      "loss": 0.9544,
      "step": 220
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.145e-06,
      "loss": 0.9349,
      "step": 230
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.195e-06,
      "loss": 0.9247,
      "step": 240
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.2450000000000002e-06,
      "loss": 0.9251,
      "step": 250
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.295e-06,
      "loss": 0.938,
      "step": 260
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.3450000000000003e-06,
      "loss": 0.8967,
      "step": 270
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.3950000000000002e-06,
      "loss": 0.9132,
      "step": 280
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.445e-06,
      "loss": 0.9081,
      "step": 290
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.495e-06,
      "loss": 0.8873,
      "step": 300
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.545e-06,
      "loss": 0.8978,
      "step": 310
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.5950000000000002e-06,
      "loss": 0.8628,
      "step": 320
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.6450000000000001e-06,
      "loss": 0.8651,
      "step": 330
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.6950000000000003e-06,
      "loss": 0.8499,
      "step": 340
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.745e-06,
      "loss": 0.8514,
      "step": 350
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.7950000000000002e-06,
      "loss": 0.8645,
      "step": 360
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.8450000000000001e-06,
      "loss": 0.8632,
      "step": 370
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.895e-06,
      "loss": 0.8382,
      "step": 380
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.945e-06,
      "loss": 0.8531,
      "step": 390
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9950000000000004e-06,
      "loss": 0.8413,
      "step": 400
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.045e-06,
      "loss": 0.8272,
      "step": 410
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.0950000000000003e-06,
      "loss": 0.831,
      "step": 420
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.1450000000000002e-06,
      "loss": 0.8077,
      "step": 430
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.195e-06,
      "loss": 0.8209,
      "step": 440
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.245e-06,
      "loss": 0.8249,
      "step": 450
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.2950000000000005e-06,
      "loss": 0.8037,
      "step": 460
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.345e-06,
      "loss": 0.7976,
      "step": 470
    },
    {
      "epoch": 0.29,
      "learning_rate": 2.395e-06,
      "loss": 0.8046,
      "step": 480
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.4450000000000003e-06,
      "loss": 0.7959,
      "step": 490
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.4950000000000003e-06,
      "loss": 0.8176,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_oasst_export_accuracy": 0.7042471551142071,
      "eval_oasst_export_loss": 1.2229081392288208,
      "eval_oasst_export_runtime": 68.987,
      "eval_oasst_export_samples_per_second": 30.354,
      "eval_oasst_export_steps_per_second": 1.276,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_code_alpaca_accuracy": 0.8437448762092147,
      "eval_code_alpaca_loss": 0.5453789234161377,
      "eval_code_alpaca_runtime": 4.9365,
      "eval_code_alpaca_samples_per_second": 50.643,
      "eval_code_alpaca_steps_per_second": 2.228,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_evol_v2_accuracy": 0.7902228855879827,
      "eval_evol_v2_loss": 0.7061598300933838,
      "eval_evol_v2_runtime": 275.1097,
      "eval_evol_v2_samples_per_second": 25.99,
      "eval_evol_v2_steps_per_second": 1.083,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_nlu_instruct_accuracy": 0.7843999034325105,
      "eval_nlu_instruct_loss": 0.757986843585968,
      "eval_nlu_instruct_runtime": 3477.7847,
      "eval_nlu_instruct_samples_per_second": 22.435,
      "eval_nlu_instruct_steps_per_second": 0.935,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_grade_school_math_instructions_accuracy": 0.7889496290511518,
      "eval_grade_school_math_instructions_loss": 0.7259232997894287,
      "eval_grade_school_math_instructions_runtime": 10.7804,
      "eval_grade_school_math_instructions_samples_per_second": 40.815,
      "eval_grade_school_math_instructions_steps_per_second": 1.762,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_poem_instructions_accuracy": 0.49874821391497626,
      "eval_poem_instructions_loss": 2.590395450592041,
      "eval_poem_instructions_runtime": 21.1843,
      "eval_poem_instructions_samples_per_second": 16.38,
      "eval_poem_instructions_steps_per_second": 0.708,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_gpt4all_accuracy": 0.7821919414875455,
      "eval_gpt4all_loss": 0.7756699323654175,
      "eval_gpt4all_runtime": 3565.0056,
      "eval_gpt4all_samples_per_second": 21.813,
      "eval_gpt4all_steps_per_second": 0.909,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_joke_accuracy": 0.5321712147485625,
      "eval_joke_loss": 1.970703125,
      "eval_joke_runtime": 5.8841,
      "eval_joke_samples_per_second": 12.916,
      "eval_joke_steps_per_second": 0.68,
      "step": 500
    },
    {
      "epoch": 0.3,
      "eval_gsm8k_accuracy": 0.8398790847611236,
      "eval_gsm8k_loss": 0.5526099801063538,
      "eval_gsm8k_runtime": 22.7523,
      "eval_gsm8k_samples_per_second": 57.972,
      "eval_gsm8k_steps_per_second": 2.417,
      "step": 500
    },
    {
      "epoch": 0.31,
      "learning_rate": 2.545e-06,
      "loss": 0.8266,
      "step": 510
    },
    {
      "epoch": 0.31,
      "learning_rate": 2.595e-06,
      "loss": 0.8252,
      "step": 520
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.6450000000000005e-06,
      "loss": 0.806,
      "step": 530
    },
    {
      "epoch": 0.33,
      "learning_rate": 2.6950000000000005e-06,
      "loss": 0.7994,
      "step": 540
    },
    {
      "epoch": 0.33,
      "learning_rate": 2.7450000000000004e-06,
      "loss": 0.8062,
      "step": 550
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.7950000000000003e-06,
      "loss": 0.8086,
      "step": 560
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.845e-06,
      "loss": 0.8055,
      "step": 570
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.8950000000000002e-06,
      "loss": 0.7825,
      "step": 580
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.945e-06,
      "loss": 0.796,
      "step": 590
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.995e-06,
      "loss": 0.7974,
      "step": 600
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.045e-06,
      "loss": 0.8071,
      "step": 610
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.0950000000000004e-06,
      "loss": 0.7967,
      "step": 620
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.1450000000000004e-06,
      "loss": 0.778,
      "step": 630
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.1950000000000003e-06,
      "loss": 0.7869,
      "step": 640
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.2450000000000003e-06,
      "loss": 0.7828,
      "step": 650
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.2950000000000002e-06,
      "loss": 0.7927,
      "step": 660
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.3450000000000006e-06,
      "loss": 0.7709,
      "step": 670
    },
    {
      "epoch": 0.41,
      "learning_rate": 3.3950000000000005e-06,
      "loss": 0.7624,
      "step": 680
    },
    {
      "epoch": 0.42,
      "learning_rate": 3.445e-06,
      "loss": 0.7739,
      "step": 690
    },
    {
      "epoch": 0.42,
      "learning_rate": 3.495e-06,
      "loss": 0.77,
      "step": 700
    },
    {
      "epoch": 0.43,
      "learning_rate": 3.545e-06,
      "loss": 0.7814,
      "step": 710
    },
    {
      "epoch": 0.43,
      "learning_rate": 3.5950000000000003e-06,
      "loss": 0.7822,
      "step": 720
    },
    {
      "epoch": 0.44,
      "learning_rate": 3.6450000000000003e-06,
      "loss": 0.7816,
      "step": 730
    },
    {
      "epoch": 0.45,
      "learning_rate": 3.695e-06,
      "loss": 0.7935,
      "step": 740
    },
    {
      "epoch": 0.45,
      "learning_rate": 3.745e-06,
      "loss": 0.7738,
      "step": 750
    },
    {
      "epoch": 0.46,
      "learning_rate": 3.7950000000000005e-06,
      "loss": 0.7585,
      "step": 760
    },
    {
      "epoch": 0.46,
      "learning_rate": 3.8450000000000005e-06,
      "loss": 0.7776,
      "step": 770
    },
    {
      "epoch": 0.47,
      "learning_rate": 3.895000000000001e-06,
      "loss": 0.7681,
      "step": 780
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.945e-06,
      "loss": 0.7836,
      "step": 790
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.995000000000001e-06,
      "loss": 0.7687,
      "step": 800
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.045e-06,
      "loss": 0.7852,
      "step": 810
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.095e-06,
      "loss": 0.753,
      "step": 820
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.145e-06,
      "loss": 0.7676,
      "step": 830
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1950000000000005e-06,
      "loss": 0.7626,
      "step": 840
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.245e-06,
      "loss": 0.7578,
      "step": 850
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.295e-06,
      "loss": 0.7743,
      "step": 860
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.345000000000001e-06,
      "loss": 0.7662,
      "step": 870
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.395e-06,
      "loss": 0.7709,
      "step": 880
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.445000000000001e-06,
      "loss": 0.7638,
      "step": 890
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.495e-06,
      "loss": 0.752,
      "step": 900
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.5450000000000005e-06,
      "loss": 0.7744,
      "step": 910
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.595000000000001e-06,
      "loss": 0.7607,
      "step": 920
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.645e-06,
      "loss": 0.765,
      "step": 930
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.695e-06,
      "loss": 0.7534,
      "step": 940
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.745e-06,
      "loss": 0.7569,
      "step": 950
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.795e-06,
      "loss": 0.7522,
      "step": 960
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.845e-06,
      "loss": 0.7876,
      "step": 970
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.8950000000000006e-06,
      "loss": 0.7586,
      "step": 980
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.945e-06,
      "loss": 0.7658,
      "step": 990
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.9950000000000005e-06,
      "loss": 0.7562,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_oasst_export_accuracy": 0.7149679693032294,
      "eval_oasst_export_loss": 1.1956202983856201,
      "eval_oasst_export_runtime": 67.0497,
      "eval_oasst_export_samples_per_second": 31.231,
      "eval_oasst_export_steps_per_second": 1.312,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_code_alpaca_accuracy": 0.8463136033229491,
      "eval_code_alpaca_loss": 0.5398945212364197,
      "eval_code_alpaca_runtime": 6.22,
      "eval_code_alpaca_samples_per_second": 40.193,
      "eval_code_alpaca_steps_per_second": 1.769,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_evol_v2_accuracy": 0.7982416515833893,
      "eval_evol_v2_loss": 0.6806697845458984,
      "eval_evol_v2_runtime": 274.9092,
      "eval_evol_v2_samples_per_second": 26.009,
      "eval_evol_v2_steps_per_second": 1.084,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_nlu_instruct_accuracy": 0.8084633890177162,
      "eval_nlu_instruct_loss": 0.6906304955482483,
      "eval_nlu_instruct_runtime": 3476.832,
      "eval_nlu_instruct_samples_per_second": 22.441,
      "eval_nlu_instruct_steps_per_second": 0.935,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_grade_school_math_instructions_accuracy": 0.8071065989847716,
      "eval_grade_school_math_instructions_loss": 0.6586337089538574,
      "eval_grade_school_math_instructions_runtime": 9.647,
      "eval_grade_school_math_instructions_samples_per_second": 45.61,
      "eval_grade_school_math_instructions_steps_per_second": 1.97,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_poem_instructions_accuracy": 0.506542535644907,
      "eval_poem_instructions_loss": 2.5567362308502197,
      "eval_poem_instructions_runtime": 21.1754,
      "eval_poem_instructions_samples_per_second": 16.387,
      "eval_poem_instructions_steps_per_second": 0.708,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_gpt4all_accuracy": 0.7917010462623065,
      "eval_gpt4all_loss": 0.7404432892799377,
      "eval_gpt4all_runtime": 3565.553,
      "eval_gpt4all_samples_per_second": 21.809,
      "eval_gpt4all_steps_per_second": 0.909,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_joke_accuracy": 0.5601898329834809,
      "eval_joke_loss": 1.8026316165924072,
      "eval_joke_runtime": 4.4938,
      "eval_joke_samples_per_second": 16.912,
      "eval_joke_steps_per_second": 0.89,
      "step": 1000
    },
    {
      "epoch": 0.6,
      "eval_gsm8k_accuracy": 0.8582343392395189,
      "eval_gsm8k_loss": 0.4936060607433319,
      "eval_gsm8k_runtime": 22.7991,
      "eval_gsm8k_samples_per_second": 57.853,
      "eval_gsm8k_steps_per_second": 2.412,
      "step": 1000
    },
    {
      "epoch": 0.61,
      "learning_rate": 5.045e-06,
      "loss": 0.7579,
      "step": 1010
    },
    {
      "epoch": 0.62,
      "learning_rate": 5.095e-06,
      "loss": 0.7684,
      "step": 1020
    },
    {
      "epoch": 0.62,
      "learning_rate": 5.145e-06,
      "loss": 0.7464,
      "step": 1030
    },
    {
      "epoch": 0.63,
      "learning_rate": 5.195e-06,
      "loss": 0.7691,
      "step": 1040
    },
    {
      "epoch": 0.63,
      "learning_rate": 5.245e-06,
      "loss": 0.7385,
      "step": 1050
    },
    {
      "epoch": 0.64,
      "learning_rate": 5.295e-06,
      "loss": 0.7704,
      "step": 1060
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.3450000000000005e-06,
      "loss": 0.7735,
      "step": 1070
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.395e-06,
      "loss": 0.7448,
      "step": 1080
    },
    {
      "epoch": 0.66,
      "learning_rate": 5.445e-06,
      "loss": 0.7358,
      "step": 1090
    },
    {
      "epoch": 0.66,
      "learning_rate": 5.495000000000001e-06,
      "loss": 0.762,
      "step": 1100
    },
    {
      "epoch": 0.67,
      "learning_rate": 5.545e-06,
      "loss": 0.7636,
      "step": 1110
    },
    {
      "epoch": 0.68,
      "learning_rate": 5.595000000000001e-06,
      "loss": 0.7662,
      "step": 1120
    },
    {
      "epoch": 0.68,
      "learning_rate": 5.645e-06,
      "loss": 0.7565,
      "step": 1130
    },
    {
      "epoch": 0.69,
      "learning_rate": 5.6950000000000005e-06,
      "loss": 0.7564,
      "step": 1140
    },
    {
      "epoch": 0.69,
      "learning_rate": 5.745000000000001e-06,
      "loss": 0.7576,
      "step": 1150
    },
    {
      "epoch": 0.7,
      "learning_rate": 5.795e-06,
      "loss": 0.7588,
      "step": 1160
    },
    {
      "epoch": 0.71,
      "learning_rate": 5.845000000000001e-06,
      "loss": 0.7503,
      "step": 1170
    },
    {
      "epoch": 0.71,
      "learning_rate": 5.895e-06,
      "loss": 0.7579,
      "step": 1180
    },
    {
      "epoch": 0.72,
      "learning_rate": 5.945000000000001e-06,
      "loss": 0.772,
      "step": 1190
    },
    {
      "epoch": 0.72,
      "learning_rate": 5.995000000000001e-06,
      "loss": 0.7507,
      "step": 1200
    },
    {
      "epoch": 0.73,
      "learning_rate": 6.0450000000000006e-06,
      "loss": 0.7448,
      "step": 1210
    },
    {
      "epoch": 0.74,
      "learning_rate": 6.095000000000001e-06,
      "loss": 0.7479,
      "step": 1220
    },
    {
      "epoch": 0.74,
      "learning_rate": 6.145000000000001e-06,
      "loss": 0.7502,
      "step": 1230
    },
    {
      "epoch": 0.75,
      "learning_rate": 6.195000000000001e-06,
      "loss": 0.7449,
      "step": 1240
    },
    {
      "epoch": 0.75,
      "learning_rate": 6.245000000000001e-06,
      "loss": 0.7491,
      "step": 1250
    },
    {
      "epoch": 0.76,
      "learning_rate": 6.295e-06,
      "loss": 0.7561,
      "step": 1260
    },
    {
      "epoch": 0.77,
      "learning_rate": 6.345e-06,
      "loss": 0.7581,
      "step": 1270
    },
    {
      "epoch": 0.77,
      "learning_rate": 6.395e-06,
      "loss": 0.7544,
      "step": 1280
    },
    {
      "epoch": 0.78,
      "learning_rate": 6.445e-06,
      "loss": 0.7429,
      "step": 1290
    },
    {
      "epoch": 0.78,
      "learning_rate": 6.4950000000000005e-06,
      "loss": 0.7678,
      "step": 1300
    },
    {
      "epoch": 0.79,
      "learning_rate": 6.545e-06,
      "loss": 0.7655,
      "step": 1310
    },
    {
      "epoch": 0.8,
      "learning_rate": 6.595e-06,
      "loss": 0.7695,
      "step": 1320
    },
    {
      "epoch": 0.8,
      "learning_rate": 6.645000000000001e-06,
      "loss": 0.7645,
      "step": 1330
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.695e-06,
      "loss": 0.7628,
      "step": 1340
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.745000000000001e-06,
      "loss": 0.7597,
      "step": 1350
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.795e-06,
      "loss": 0.7687,
      "step": 1360
    },
    {
      "epoch": 0.83,
      "learning_rate": 6.8450000000000005e-06,
      "loss": 0.7488,
      "step": 1370
    },
    {
      "epoch": 0.83,
      "learning_rate": 6.895000000000001e-06,
      "loss": 0.7454,
      "step": 1380
    },
    {
      "epoch": 0.84,
      "learning_rate": 6.945e-06,
      "loss": 0.7459,
      "step": 1390
    },
    {
      "epoch": 0.85,
      "learning_rate": 6.995000000000001e-06,
      "loss": 0.7595,
      "step": 1400
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.045e-06,
      "loss": 0.762,
      "step": 1410
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.095000000000001e-06,
      "loss": 0.7618,
      "step": 1420
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.145000000000001e-06,
      "loss": 0.737,
      "step": 1430
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.1950000000000006e-06,
      "loss": 0.7492,
      "step": 1440
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.245000000000001e-06,
      "loss": 0.7617,
      "step": 1450
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.295000000000001e-06,
      "loss": 0.7564,
      "step": 1460
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.345000000000001e-06,
      "loss": 0.7612,
      "step": 1470
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.395000000000001e-06,
      "loss": 0.7506,
      "step": 1480
    },
    {
      "epoch": 0.9,
      "learning_rate": 7.445000000000001e-06,
      "loss": 0.7555,
      "step": 1490
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.495000000000001e-06,
      "loss": 0.7528,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_oasst_export_accuracy": 0.7145207936670809,
      "eval_oasst_export_loss": 1.20199453830719,
      "eval_oasst_export_runtime": 68.1714,
      "eval_oasst_export_samples_per_second": 30.717,
      "eval_oasst_export_steps_per_second": 1.291,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_code_alpaca_accuracy": 0.8416680330108761,
      "eval_code_alpaca_loss": 0.5439140796661377,
      "eval_code_alpaca_runtime": 5.1754,
      "eval_code_alpaca_samples_per_second": 48.306,
      "eval_code_alpaca_steps_per_second": 2.125,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_evol_v2_accuracy": 0.7973383052991362,
      "eval_evol_v2_loss": 0.6886976957321167,
      "eval_evol_v2_runtime": 275.0257,
      "eval_evol_v2_samples_per_second": 25.998,
      "eval_evol_v2_steps_per_second": 1.084,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_nlu_instruct_accuracy": 0.8144291416562309,
      "eval_nlu_instruct_loss": 0.6783603429794312,
      "eval_nlu_instruct_runtime": 3480.2092,
      "eval_nlu_instruct_samples_per_second": 22.419,
      "eval_nlu_instruct_steps_per_second": 0.934,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_grade_school_math_instructions_accuracy": 0.8132565404139008,
      "eval_grade_school_math_instructions_loss": 0.6372869610786438,
      "eval_grade_school_math_instructions_runtime": 8.0235,
      "eval_grade_school_math_instructions_samples_per_second": 54.839,
      "eval_grade_school_math_instructions_steps_per_second": 2.368,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_poem_instructions_accuracy": 0.5055101467336193,
      "eval_poem_instructions_loss": 2.5727665424346924,
      "eval_poem_instructions_runtime": 21.4941,
      "eval_poem_instructions_samples_per_second": 16.144,
      "eval_poem_instructions_steps_per_second": 0.698,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_gpt4all_accuracy": 0.791444802483569,
      "eval_gpt4all_loss": 0.7454394698143005,
      "eval_gpt4all_runtime": 3568.858,
      "eval_gpt4all_samples_per_second": 21.789,
      "eval_gpt4all_steps_per_second": 0.908,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_joke_accuracy": 0.5673085698640139,
      "eval_joke_loss": 1.7545230388641357,
      "eval_joke_runtime": 2.3532,
      "eval_joke_samples_per_second": 32.296,
      "eval_joke_steps_per_second": 1.7,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "eval_gsm8k_accuracy": 0.8628002087910197,
      "eval_gsm8k_loss": 0.47572216391563416,
      "eval_gsm8k_runtime": 22.6914,
      "eval_gsm8k_samples_per_second": 58.128,
      "eval_gsm8k_steps_per_second": 2.424,
      "step": 1500
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.545e-06,
      "loss": 0.7567,
      "step": 1510
    },
    {
      "epoch": 0.92,
      "learning_rate": 7.595e-06,
      "loss": 0.7925,
      "step": 1520
    },
    {
      "epoch": 0.92,
      "learning_rate": 7.645e-06,
      "loss": 0.7563,
      "step": 1530
    },
    {
      "epoch": 0.93,
      "learning_rate": 7.695e-06,
      "loss": 0.7566,
      "step": 1540
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.745e-06,
      "loss": 0.7523,
      "step": 1550
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.795e-06,
      "loss": 0.749,
      "step": 1560
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.845e-06,
      "loss": 0.7668,
      "step": 1570
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.895e-06,
      "loss": 0.7706,
      "step": 1580
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.945000000000001e-06,
      "loss": 0.7329,
      "step": 1590
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.995e-06,
      "loss": 0.7679,
      "step": 1600
    },
    {
      "epoch": 0.97,
      "learning_rate": 8.045e-06,
      "loss": 0.752,
      "step": 1610
    },
    {
      "epoch": 0.98,
      "learning_rate": 8.095000000000001e-06,
      "loss": 0.7768,
      "step": 1620
    },
    {
      "epoch": 0.98,
      "learning_rate": 8.145e-06,
      "loss": 0.7673,
      "step": 1630
    },
    {
      "epoch": 0.99,
      "learning_rate": 8.195e-06,
      "loss": 0.7474,
      "step": 1640
    },
    {
      "epoch": 1.0,
      "learning_rate": 8.245000000000002e-06,
      "loss": 0.7743,
      "step": 1650
    },
    {
      "epoch": 1.0,
      "learning_rate": 8.295000000000001e-06,
      "loss": 0.7491,
      "step": 1660
    },
    {
      "epoch": 1.01,
      "learning_rate": 8.345e-06,
      "loss": 0.7182,
      "step": 1670
    },
    {
      "epoch": 1.01,
      "learning_rate": 8.395e-06,
      "loss": 0.7146,
      "step": 1680
    },
    {
      "epoch": 1.02,
      "learning_rate": 8.445000000000001e-06,
      "loss": 0.702,
      "step": 1690
    },
    {
      "epoch": 1.03,
      "learning_rate": 8.495e-06,
      "loss": 0.716,
      "step": 1700
    },
    {
      "epoch": 1.03,
      "learning_rate": 8.545e-06,
      "loss": 0.6966,
      "step": 1710
    },
    {
      "epoch": 1.04,
      "learning_rate": 8.595000000000002e-06,
      "loss": 0.7042,
      "step": 1720
    },
    {
      "epoch": 1.04,
      "learning_rate": 8.645000000000001e-06,
      "loss": 0.7243,
      "step": 1730
    },
    {
      "epoch": 1.05,
      "learning_rate": 8.695e-06,
      "loss": 0.7169,
      "step": 1740
    },
    {
      "epoch": 1.06,
      "learning_rate": 8.745000000000002e-06,
      "loss": 0.7089,
      "step": 1750
    },
    {
      "epoch": 1.06,
      "learning_rate": 8.795e-06,
      "loss": 0.706,
      "step": 1760
    },
    {
      "epoch": 1.07,
      "learning_rate": 8.845000000000001e-06,
      "loss": 0.7201,
      "step": 1770
    },
    {
      "epoch": 1.07,
      "learning_rate": 8.895e-06,
      "loss": 0.7038,
      "step": 1780
    },
    {
      "epoch": 1.08,
      "learning_rate": 8.945e-06,
      "loss": 0.7261,
      "step": 1790
    },
    {
      "epoch": 1.09,
      "learning_rate": 8.995000000000001e-06,
      "loss": 0.7032,
      "step": 1800
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.045e-06,
      "loss": 0.725,
      "step": 1810
    },
    {
      "epoch": 1.1,
      "learning_rate": 9.095e-06,
      "loss": 0.7157,
      "step": 1820
    },
    {
      "epoch": 1.1,
      "learning_rate": 9.145000000000001e-06,
      "loss": 0.7259,
      "step": 1830
    },
    {
      "epoch": 1.11,
      "learning_rate": 9.195000000000001e-06,
      "loss": 0.7287,
      "step": 1840
    },
    {
      "epoch": 1.12,
      "learning_rate": 9.245e-06,
      "loss": 0.7039,
      "step": 1850
    },
    {
      "epoch": 1.12,
      "learning_rate": 9.295e-06,
      "loss": 0.7186,
      "step": 1860
    },
    {
      "epoch": 1.13,
      "learning_rate": 9.345000000000001e-06,
      "loss": 0.7132,
      "step": 1870
    },
    {
      "epoch": 1.13,
      "learning_rate": 9.395e-06,
      "loss": 0.7276,
      "step": 1880
    },
    {
      "epoch": 1.14,
      "learning_rate": 9.445e-06,
      "loss": 0.7048,
      "step": 1890
    },
    {
      "epoch": 1.15,
      "learning_rate": 9.495000000000001e-06,
      "loss": 0.7223,
      "step": 1900
    },
    {
      "epoch": 1.15,
      "learning_rate": 9.545000000000001e-06,
      "loss": 0.721,
      "step": 1910
    },
    {
      "epoch": 1.16,
      "learning_rate": 9.595e-06,
      "loss": 0.717,
      "step": 1920
    },
    {
      "epoch": 1.16,
      "learning_rate": 9.645000000000002e-06,
      "loss": 0.7276,
      "step": 1930
    },
    {
      "epoch": 1.17,
      "learning_rate": 9.695000000000001e-06,
      "loss": 0.7248,
      "step": 1940
    },
    {
      "epoch": 1.18,
      "learning_rate": 9.745e-06,
      "loss": 0.7404,
      "step": 1950
    },
    {
      "epoch": 1.18,
      "learning_rate": 9.795000000000002e-06,
      "loss": 0.7055,
      "step": 1960
    },
    {
      "epoch": 1.19,
      "learning_rate": 9.845000000000001e-06,
      "loss": 0.7278,
      "step": 1970
    },
    {
      "epoch": 1.2,
      "learning_rate": 9.895000000000001e-06,
      "loss": 0.7386,
      "step": 1980
    },
    {
      "epoch": 1.2,
      "learning_rate": 9.945e-06,
      "loss": 0.7316,
      "step": 1990
    },
    {
      "epoch": 1.21,
      "learning_rate": 9.995000000000002e-06,
      "loss": 0.7317,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_oasst_export_accuracy": 0.7127776954366549,
      "eval_oasst_export_loss": 1.2178658246994019,
      "eval_oasst_export_runtime": 68.1598,
      "eval_oasst_export_samples_per_second": 30.722,
      "eval_oasst_export_steps_per_second": 1.291,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_code_alpaca_accuracy": 0.8417226867792534,
      "eval_code_alpaca_loss": 0.5532773733139038,
      "eval_code_alpaca_runtime": 5.2964,
      "eval_code_alpaca_samples_per_second": 47.202,
      "eval_code_alpaca_steps_per_second": 2.077,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_evol_v2_accuracy": 0.7965186099357738,
      "eval_evol_v2_loss": 0.693313717842102,
      "eval_evol_v2_runtime": 274.7294,
      "eval_evol_v2_samples_per_second": 26.026,
      "eval_evol_v2_steps_per_second": 1.085,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_nlu_instruct_accuracy": 0.8156692420335936,
      "eval_nlu_instruct_loss": 0.6785455942153931,
      "eval_nlu_instruct_runtime": 3477.3559,
      "eval_nlu_instruct_samples_per_second": 22.437,
      "eval_nlu_instruct_steps_per_second": 0.935,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_grade_school_math_instructions_accuracy": 0.8186011323701678,
      "eval_grade_school_math_instructions_loss": 0.6188520789146423,
      "eval_grade_school_math_instructions_runtime": 10.016,
      "eval_grade_school_math_instructions_samples_per_second": 43.93,
      "eval_grade_school_math_instructions_steps_per_second": 1.897,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_poem_instructions_accuracy": 0.5050254631608515,
      "eval_poem_instructions_loss": 2.5899674892425537,
      "eval_poem_instructions_runtime": 21.1807,
      "eval_poem_instructions_samples_per_second": 16.383,
      "eval_poem_instructions_steps_per_second": 0.708,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_gpt4all_accuracy": 0.7903227041985094,
      "eval_gpt4all_loss": 0.7531591057777405,
      "eval_gpt4all_runtime": 3566.1745,
      "eval_gpt4all_samples_per_second": 21.805,
      "eval_gpt4all_steps_per_second": 0.909,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_joke_accuracy": 0.572693255453135,
      "eval_joke_loss": 1.7435238361358643,
      "eval_joke_runtime": 4.873,
      "eval_joke_samples_per_second": 15.596,
      "eval_joke_steps_per_second": 0.821,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "eval_gsm8k_accuracy": 0.8679970402152154,
      "eval_gsm8k_loss": 0.4584772288799286,
      "eval_gsm8k_runtime": 22.6037,
      "eval_gsm8k_samples_per_second": 58.353,
      "eval_gsm8k_steps_per_second": 2.433,
      "step": 2000
    },
    {
      "epoch": 1.21,
      "learning_rate": 9.985668789808918e-06,
      "loss": 0.7147,
      "step": 2010
    },
    {
      "epoch": 1.22,
      "learning_rate": 9.969745222929938e-06,
      "loss": 0.7078,
      "step": 2020
    },
    {
      "epoch": 1.23,
      "learning_rate": 9.953821656050957e-06,
      "loss": 0.7232,
      "step": 2030
    },
    {
      "epoch": 1.23,
      "learning_rate": 9.937898089171976e-06,
      "loss": 0.7238,
      "step": 2040
    },
    {
      "epoch": 1.24,
      "learning_rate": 9.921974522292994e-06,
      "loss": 0.7266,
      "step": 2050
    },
    {
      "epoch": 1.24,
      "learning_rate": 9.906050955414014e-06,
      "loss": 0.7208,
      "step": 2060
    },
    {
      "epoch": 1.25,
      "learning_rate": 9.890127388535032e-06,
      "loss": 0.7311,
      "step": 2070
    },
    {
      "epoch": 1.26,
      "learning_rate": 9.874203821656053e-06,
      "loss": 0.7219,
      "step": 2080
    },
    {
      "epoch": 1.26,
      "learning_rate": 9.85828025477707e-06,
      "loss": 0.7223,
      "step": 2090
    },
    {
      "epoch": 1.27,
      "learning_rate": 9.84235668789809e-06,
      "loss": 0.7251,
      "step": 2100
    },
    {
      "epoch": 1.27,
      "learning_rate": 9.826433121019109e-06,
      "loss": 0.7338,
      "step": 2110
    },
    {
      "epoch": 1.28,
      "learning_rate": 9.810509554140128e-06,
      "loss": 0.7347,
      "step": 2120
    },
    {
      "epoch": 1.29,
      "learning_rate": 9.794585987261147e-06,
      "loss": 0.7079,
      "step": 2130
    },
    {
      "epoch": 1.29,
      "learning_rate": 9.778662420382167e-06,
      "loss": 0.7394,
      "step": 2140
    },
    {
      "epoch": 1.3,
      "learning_rate": 9.762738853503186e-06,
      "loss": 0.7344,
      "step": 2150
    },
    {
      "epoch": 1.3,
      "learning_rate": 9.746815286624205e-06,
      "loss": 0.7289,
      "step": 2160
    },
    {
      "epoch": 1.31,
      "learning_rate": 9.730891719745224e-06,
      "loss": 0.7137,
      "step": 2170
    },
    {
      "epoch": 1.32,
      "learning_rate": 9.714968152866243e-06,
      "loss": 0.7208,
      "step": 2180
    },
    {
      "epoch": 1.32,
      "learning_rate": 9.699044585987261e-06,
      "loss": 0.7449,
      "step": 2190
    },
    {
      "epoch": 1.33,
      "learning_rate": 9.683121019108282e-06,
      "loss": 0.7258,
      "step": 2200
    },
    {
      "epoch": 1.33,
      "learning_rate": 9.6671974522293e-06,
      "loss": 0.7285,
      "step": 2210
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.65127388535032e-06,
      "loss": 0.7364,
      "step": 2220
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.635350318471338e-06,
      "loss": 0.726,
      "step": 2230
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.619426751592357e-06,
      "loss": 0.7426,
      "step": 2240
    },
    {
      "epoch": 1.36,
      "learning_rate": 9.603503184713376e-06,
      "loss": 0.7385,
      "step": 2250
    },
    {
      "epoch": 1.36,
      "learning_rate": 9.587579617834396e-06,
      "loss": 0.7227,
      "step": 2260
    },
    {
      "epoch": 1.37,
      "learning_rate": 9.571656050955415e-06,
      "loss": 0.7336,
      "step": 2270
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.555732484076434e-06,
      "loss": 0.7056,
      "step": 2280
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.539808917197453e-06,
      "loss": 0.7305,
      "step": 2290
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.523885350318473e-06,
      "loss": 0.7506,
      "step": 2300
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.507961783439492e-06,
      "loss": 0.7413,
      "step": 2310
    },
    {
      "epoch": 1.4,
      "learning_rate": 9.492038216560511e-06,
      "loss": 0.7101,
      "step": 2320
    },
    {
      "epoch": 1.41,
      "learning_rate": 9.476114649681529e-06,
      "loss": 0.7361,
      "step": 2330
    },
    {
      "epoch": 1.41,
      "learning_rate": 9.46019108280255e-06,
      "loss": 0.7393,
      "step": 2340
    },
    {
      "epoch": 1.42,
      "learning_rate": 9.444267515923567e-06,
      "loss": 0.7135,
      "step": 2350
    },
    {
      "epoch": 1.42,
      "learning_rate": 9.428343949044588e-06,
      "loss": 0.7115,
      "step": 2360
    },
    {
      "epoch": 1.43,
      "learning_rate": 9.412420382165605e-06,
      "loss": 0.7262,
      "step": 2370
    },
    {
      "epoch": 1.44,
      "learning_rate": 9.396496815286625e-06,
      "loss": 0.7393,
      "step": 2380
    },
    {
      "epoch": 1.44,
      "learning_rate": 9.380573248407644e-06,
      "loss": 0.7193,
      "step": 2390
    },
    {
      "epoch": 1.45,
      "learning_rate": 9.364649681528663e-06,
      "loss": 0.7258,
      "step": 2400
    },
    {
      "epoch": 1.45,
      "learning_rate": 9.348726114649682e-06,
      "loss": 0.7296,
      "step": 2410
    },
    {
      "epoch": 1.46,
      "learning_rate": 9.332802547770702e-06,
      "loss": 0.7259,
      "step": 2420
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.31687898089172e-06,
      "loss": 0.7297,
      "step": 2430
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.30095541401274e-06,
      "loss": 0.7239,
      "step": 2440
    },
    {
      "epoch": 1.48,
      "learning_rate": 9.28503184713376e-06,
      "loss": 0.718,
      "step": 2450
    },
    {
      "epoch": 1.48,
      "learning_rate": 9.269108280254778e-06,
      "loss": 0.7239,
      "step": 2460
    },
    {
      "epoch": 1.49,
      "learning_rate": 9.253184713375796e-06,
      "loss": 0.7307,
      "step": 2470
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.237261146496817e-06,
      "loss": 0.7294,
      "step": 2480
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.221337579617834e-06,
      "loss": 0.7326,
      "step": 2490
    },
    {
      "epoch": 1.51,
      "learning_rate": 9.205414012738855e-06,
      "loss": 0.7295,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_oasst_export_accuracy": 0.7117135947727322,
      "eval_oasst_export_loss": 1.234184741973877,
      "eval_oasst_export_runtime": 68.1505,
      "eval_oasst_export_samples_per_second": 30.726,
      "eval_oasst_export_steps_per_second": 1.291,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_code_alpaca_accuracy": 0.8403563425698202,
      "eval_code_alpaca_loss": 0.557296872138977,
      "eval_code_alpaca_runtime": 4.9636,
      "eval_code_alpaca_samples_per_second": 50.367,
      "eval_code_alpaca_steps_per_second": 2.216,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_evol_v2_accuracy": 0.7950730371609088,
      "eval_evol_v2_loss": 0.70171058177948,
      "eval_evol_v2_runtime": 275.0985,
      "eval_evol_v2_samples_per_second": 25.991,
      "eval_evol_v2_steps_per_second": 1.083,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_nlu_instruct_accuracy": 0.8178751700867107,
      "eval_nlu_instruct_loss": 0.677690327167511,
      "eval_nlu_instruct_runtime": 3484.6437,
      "eval_nlu_instruct_samples_per_second": 22.391,
      "eval_nlu_instruct_steps_per_second": 0.933,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_grade_school_math_instructions_accuracy": 0.8254099960952753,
      "eval_grade_school_math_instructions_loss": 0.5937632918357849,
      "eval_grade_school_math_instructions_runtime": 8.3622,
      "eval_grade_school_math_instructions_samples_per_second": 52.618,
      "eval_grade_school_math_instructions_steps_per_second": 2.272,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_poem_instructions_accuracy": 0.5039080150947083,
      "eval_poem_instructions_loss": 2.603971481323242,
      "eval_poem_instructions_runtime": 21.2045,
      "eval_poem_instructions_samples_per_second": 16.364,
      "eval_poem_instructions_steps_per_second": 0.707,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_gpt4all_accuracy": 0.7901557992616548,
      "eval_gpt4all_loss": 0.7601524591445923,
      "eval_gpt4all_runtime": 3565.0345,
      "eval_gpt4all_samples_per_second": 21.812,
      "eval_gpt4all_steps_per_second": 0.909,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_joke_accuracy": 0.5776216117550425,
      "eval_joke_loss": 1.7364308834075928,
      "eval_joke_runtime": 9.9885,
      "eval_joke_samples_per_second": 7.609,
      "eval_joke_steps_per_second": 0.4,
      "step": 2500
    },
    {
      "epoch": 1.51,
      "eval_gsm8k_accuracy": 0.8719721000131928,
      "eval_gsm8k_loss": 0.4444270133972168,
      "eval_gsm8k_runtime": 22.8112,
      "eval_gsm8k_samples_per_second": 57.822,
      "eval_gsm8k_steps_per_second": 2.411,
      "step": 2500
    },
    {
      "epoch": 1.52,
      "learning_rate": 9.189490445859873e-06,
      "loss": 0.7294,
      "step": 2510
    },
    {
      "epoch": 1.52,
      "learning_rate": 9.173566878980892e-06,
      "loss": 0.7326,
      "step": 2520
    },
    {
      "epoch": 1.53,
      "learning_rate": 9.157643312101911e-06,
      "loss": 0.7375,
      "step": 2530
    },
    {
      "epoch": 1.53,
      "learning_rate": 9.14171974522293e-06,
      "loss": 0.7289,
      "step": 2540
    },
    {
      "epoch": 1.54,
      "learning_rate": 9.12579617834395e-06,
      "loss": 0.7277,
      "step": 2550
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.109872611464969e-06,
      "loss": 0.7215,
      "step": 2560
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.093949044585988e-06,
      "loss": 0.7397,
      "step": 2570
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.078025477707008e-06,
      "loss": 0.7378,
      "step": 2580
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.062101910828027e-06,
      "loss": 0.7287,
      "step": 2590
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.046178343949046e-06,
      "loss": 0.7254,
      "step": 2600
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.030254777070064e-06,
      "loss": 0.7291,
      "step": 2610
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.014331210191084e-06,
      "loss": 0.7239,
      "step": 2620
    },
    {
      "epoch": 1.59,
      "learning_rate": 8.998407643312102e-06,
      "loss": 0.7285,
      "step": 2630
    },
    {
      "epoch": 1.59,
      "learning_rate": 8.982484076433123e-06,
      "loss": 0.7217,
      "step": 2640
    },
    {
      "epoch": 1.6,
      "learning_rate": 8.96656050955414e-06,
      "loss": 0.7123,
      "step": 2650
    },
    {
      "epoch": 1.61,
      "learning_rate": 8.95063694267516e-06,
      "loss": 0.7262,
      "step": 2660
    },
    {
      "epoch": 1.61,
      "learning_rate": 8.934713375796179e-06,
      "loss": 0.7311,
      "step": 2670
    },
    {
      "epoch": 1.62,
      "learning_rate": 8.918789808917198e-06,
      "loss": 0.7249,
      "step": 2680
    },
    {
      "epoch": 1.62,
      "learning_rate": 8.902866242038217e-06,
      "loss": 0.7244,
      "step": 2690
    },
    {
      "epoch": 1.63,
      "learning_rate": 8.886942675159237e-06,
      "loss": 0.7417,
      "step": 2700
    },
    {
      "epoch": 1.64,
      "learning_rate": 8.871019108280256e-06,
      "loss": 0.7314,
      "step": 2710
    },
    {
      "epoch": 1.64,
      "learning_rate": 8.855095541401275e-06,
      "loss": 0.7499,
      "step": 2720
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.839171974522294e-06,
      "loss": 0.7364,
      "step": 2730
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.823248407643313e-06,
      "loss": 0.7433,
      "step": 2740
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.807324840764333e-06,
      "loss": 0.7288,
      "step": 2750
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.791401273885352e-06,
      "loss": 0.7286,
      "step": 2760
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.77547770700637e-06,
      "loss": 0.7229,
      "step": 2770
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.75955414012739e-06,
      "loss": 0.7434,
      "step": 2780
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.743630573248408e-06,
      "loss": 0.7283,
      "step": 2790
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.727707006369427e-06,
      "loss": 0.7304,
      "step": 2800
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.711783439490446e-06,
      "loss": 0.7212,
      "step": 2810
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.695859872611466e-06,
      "loss": 0.749,
      "step": 2820
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.679936305732485e-06,
      "loss": 0.7436,
      "step": 2830
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.664012738853504e-06,
      "loss": 0.7186,
      "step": 2840
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.648089171974523e-06,
      "loss": 0.7285,
      "step": 2850
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.63216560509554e-06,
      "loss": 0.7354,
      "step": 2860
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.616242038216562e-06,
      "loss": 0.7232,
      "step": 2870
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.60031847133758e-06,
      "loss": 0.7225,
      "step": 2880
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.5843949044586e-06,
      "loss": 0.7235,
      "step": 2890
    },
    {
      "epoch": 1.75,
      "learning_rate": 8.568471337579618e-06,
      "loss": 0.746,
      "step": 2900
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.552547770700637e-06,
      "loss": 0.74,
      "step": 2910
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.536624203821656e-06,
      "loss": 0.7431,
      "step": 2920
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.520700636942675e-06,
      "loss": 0.7353,
      "step": 2930
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.504777070063695e-06,
      "loss": 0.7127,
      "step": 2940
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.488853503184714e-06,
      "loss": 0.7359,
      "step": 2950
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.472929936305733e-06,
      "loss": 0.7182,
      "step": 2960
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.457006369426752e-06,
      "loss": 0.7232,
      "step": 2970
    },
    {
      "epoch": 1.8,
      "learning_rate": 8.441082802547772e-06,
      "loss": 0.7422,
      "step": 2980
    },
    {
      "epoch": 1.8,
      "learning_rate": 8.42515923566879e-06,
      "loss": 0.7237,
      "step": 2990
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.409235668789808e-06,
      "loss": 0.7376,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_oasst_export_accuracy": 0.7091762214038781,
      "eval_oasst_export_loss": 1.2443393468856812,
      "eval_oasst_export_runtime": 67.1263,
      "eval_oasst_export_samples_per_second": 31.195,
      "eval_oasst_export_steps_per_second": 1.311,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_code_alpaca_accuracy": 0.8380608842979723,
      "eval_code_alpaca_loss": 0.5666581988334656,
      "eval_code_alpaca_runtime": 6.0038,
      "eval_code_alpaca_samples_per_second": 41.641,
      "eval_code_alpaca_steps_per_second": 1.832,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_evol_v2_accuracy": 0.7950646930701502,
      "eval_evol_v2_loss": 0.7063774466514587,
      "eval_evol_v2_runtime": 274.2256,
      "eval_evol_v2_samples_per_second": 26.073,
      "eval_evol_v2_steps_per_second": 1.087,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_nlu_instruct_accuracy": 0.8185144938579312,
      "eval_nlu_instruct_loss": 0.6780356764793396,
      "eval_nlu_instruct_runtime": 3479.7549,
      "eval_nlu_instruct_samples_per_second": 22.422,
      "eval_nlu_instruct_steps_per_second": 0.934,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_grade_school_math_instructions_accuracy": 0.8272403358063256,
      "eval_grade_school_math_instructions_loss": 0.5888405442237854,
      "eval_grade_school_math_instructions_runtime": 12.6644,
      "eval_grade_school_math_instructions_samples_per_second": 34.743,
      "eval_grade_school_math_instructions_steps_per_second": 1.5,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_poem_instructions_accuracy": 0.5025932057398731,
      "eval_poem_instructions_loss": 2.6240994930267334,
      "eval_poem_instructions_runtime": 21.1971,
      "eval_poem_instructions_samples_per_second": 16.37,
      "eval_poem_instructions_steps_per_second": 0.708,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_gpt4all_accuracy": 0.7898066834593447,
      "eval_gpt4all_loss": 0.7626320719718933,
      "eval_gpt4all_runtime": 3567.7466,
      "eval_gpt4all_samples_per_second": 21.796,
      "eval_gpt4all_steps_per_second": 0.908,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_joke_accuracy": 0.5721456603084786,
      "eval_joke_loss": 1.7529810667037964,
      "eval_joke_runtime": 5.6117,
      "eval_joke_samples_per_second": 13.543,
      "eval_joke_steps_per_second": 0.713,
      "step": 3000
    },
    {
      "epoch": 1.81,
      "eval_gsm8k_accuracy": 0.8744156432656293,
      "eval_gsm8k_loss": 0.43758735060691833,
      "eval_gsm8k_runtime": 22.8872,
      "eval_gsm8k_samples_per_second": 57.63,
      "eval_gsm8k_steps_per_second": 2.403,
      "step": 3000
    },
    {
      "epoch": 1.82,
      "learning_rate": 8.39331210191083e-06,
      "loss": 0.7289,
      "step": 3010
    },
    {
      "epoch": 1.82,
      "learning_rate": 8.377388535031847e-06,
      "loss": 0.7262,
      "step": 3020
    },
    {
      "epoch": 1.83,
      "learning_rate": 8.361464968152868e-06,
      "loss": 0.759,
      "step": 3030
    },
    {
      "epoch": 1.83,
      "learning_rate": 8.345541401273885e-06,
      "loss": 0.7493,
      "step": 3040
    },
    {
      "epoch": 1.84,
      "learning_rate": 8.329617834394904e-06,
      "loss": 0.7508,
      "step": 3050
    },
    {
      "epoch": 1.85,
      "learning_rate": 8.313694267515924e-06,
      "loss": 0.7269,
      "step": 3060
    },
    {
      "epoch": 1.85,
      "learning_rate": 8.297770700636943e-06,
      "loss": 0.7263,
      "step": 3070
    },
    {
      "epoch": 1.86,
      "learning_rate": 8.281847133757962e-06,
      "loss": 0.7285,
      "step": 3080
    },
    {
      "epoch": 1.87,
      "learning_rate": 8.265923566878981e-06,
      "loss": 0.7372,
      "step": 3090
    },
    {
      "epoch": 1.87,
      "learning_rate": 8.25e-06,
      "loss": 0.7289,
      "step": 3100
    },
    {
      "epoch": 1.88,
      "learning_rate": 8.23407643312102e-06,
      "loss": 0.7393,
      "step": 3110
    },
    {
      "epoch": 1.88,
      "learning_rate": 8.218152866242039e-06,
      "loss": 0.744,
      "step": 3120
    },
    {
      "epoch": 1.89,
      "learning_rate": 8.202229299363058e-06,
      "loss": 0.7276,
      "step": 3130
    },
    {
      "epoch": 1.9,
      "learning_rate": 8.186305732484076e-06,
      "loss": 0.7264,
      "step": 3140
    },
    {
      "epoch": 1.9,
      "learning_rate": 8.170382165605097e-06,
      "loss": 0.7404,
      "step": 3150
    },
    {
      "epoch": 1.91,
      "learning_rate": 8.154458598726114e-06,
      "loss": 0.7281,
      "step": 3160
    },
    {
      "epoch": 1.91,
      "learning_rate": 8.138535031847135e-06,
      "loss": 0.7194,
      "step": 3170
    },
    {
      "epoch": 1.92,
      "learning_rate": 8.122611464968153e-06,
      "loss": 0.7375,
      "step": 3180
    },
    {
      "epoch": 1.93,
      "learning_rate": 8.106687898089172e-06,
      "loss": 0.7382,
      "step": 3190
    },
    {
      "epoch": 1.93,
      "learning_rate": 8.090764331210191e-06,
      "loss": 0.743,
      "step": 3200
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.07484076433121e-06,
      "loss": 0.735,
      "step": 3210
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.05891719745223e-06,
      "loss": 0.7384,
      "step": 3220
    },
    {
      "epoch": 1.95,
      "learning_rate": 8.042993630573249e-06,
      "loss": 0.7263,
      "step": 3230
    },
    {
      "epoch": 1.96,
      "learning_rate": 8.027070063694268e-06,
      "loss": 0.7501,
      "step": 3240
    },
    {
      "epoch": 1.96,
      "learning_rate": 8.011146496815287e-06,
      "loss": 0.7335,
      "step": 3250
    },
    {
      "epoch": 1.97,
      "learning_rate": 7.995222929936307e-06,
      "loss": 0.7409,
      "step": 3260
    },
    {
      "epoch": 1.97,
      "learning_rate": 7.979299363057326e-06,
      "loss": 0.7332,
      "step": 3270
    },
    {
      "epoch": 1.98,
      "learning_rate": 7.963375796178345e-06,
      "loss": 0.7342,
      "step": 3280
    },
    {
      "epoch": 1.99,
      "learning_rate": 7.947452229299364e-06,
      "loss": 0.7325,
      "step": 3290
    },
    {
      "epoch": 1.99,
      "learning_rate": 7.931528662420382e-06,
      "loss": 0.7248,
      "step": 3300
    },
    {
      "epoch": 2.0,
      "learning_rate": 7.915605095541403e-06,
      "loss": 0.7299,
      "step": 3310
    },
    {
      "epoch": 2.0,
      "learning_rate": 7.89968152866242e-06,
      "loss": 0.6739,
      "step": 3320
    },
    {
      "epoch": 2.01,
      "learning_rate": 7.88375796178344e-06,
      "loss": 0.6283,
      "step": 3330
    },
    {
      "epoch": 2.02,
      "learning_rate": 7.867834394904459e-06,
      "loss": 0.6366,
      "step": 3340
    },
    {
      "epoch": 2.02,
      "learning_rate": 7.851910828025478e-06,
      "loss": 0.6461,
      "step": 3350
    },
    {
      "epoch": 2.03,
      "learning_rate": 7.835987261146497e-06,
      "loss": 0.6455,
      "step": 3360
    },
    {
      "epoch": 2.03,
      "learning_rate": 7.820063694267516e-06,
      "loss": 0.6126,
      "step": 3370
    },
    {
      "epoch": 2.04,
      "learning_rate": 7.804140127388536e-06,
      "loss": 0.6255,
      "step": 3380
    },
    {
      "epoch": 2.05,
      "learning_rate": 7.788216560509555e-06,
      "loss": 0.6493,
      "step": 3390
    },
    {
      "epoch": 2.05,
      "learning_rate": 7.772292993630574e-06,
      "loss": 0.6175,
      "step": 3400
    },
    {
      "epoch": 2.06,
      "learning_rate": 7.756369426751593e-06,
      "loss": 0.6309,
      "step": 3410
    },
    {
      "epoch": 2.06,
      "learning_rate": 7.740445859872613e-06,
      "loss": 0.6424,
      "step": 3420
    },
    {
      "epoch": 2.07,
      "learning_rate": 7.724522292993632e-06,
      "loss": 0.6225,
      "step": 3430
    },
    {
      "epoch": 2.08,
      "learning_rate": 7.70859872611465e-06,
      "loss": 0.63,
      "step": 3440
    },
    {
      "epoch": 2.08,
      "learning_rate": 7.69267515923567e-06,
      "loss": 0.6237,
      "step": 3450
    },
    {
      "epoch": 2.09,
      "learning_rate": 7.676751592356688e-06,
      "loss": 0.6341,
      "step": 3460
    },
    {
      "epoch": 2.09,
      "learning_rate": 7.660828025477709e-06,
      "loss": 0.6243,
      "step": 3470
    },
    {
      "epoch": 2.1,
      "learning_rate": 7.644904458598726e-06,
      "loss": 0.635,
      "step": 3480
    },
    {
      "epoch": 2.11,
      "learning_rate": 7.628980891719746e-06,
      "loss": 0.6387,
      "step": 3490
    },
    {
      "epoch": 2.11,
      "learning_rate": 7.613057324840765e-06,
      "loss": 0.635,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_oasst_export_accuracy": 0.7073963863648168,
      "eval_oasst_export_loss": 1.2838494777679443,
      "eval_oasst_export_runtime": 68.1304,
      "eval_oasst_export_samples_per_second": 30.735,
      "eval_oasst_export_steps_per_second": 1.292,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_code_alpaca_accuracy": 0.8387713832868776,
      "eval_code_alpaca_loss": 0.5747187733650208,
      "eval_code_alpaca_runtime": 4.9938,
      "eval_code_alpaca_samples_per_second": 50.062,
      "eval_code_alpaca_steps_per_second": 2.203,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_evol_v2_accuracy": 0.7953503870352282,
      "eval_evol_v2_loss": 0.7144909501075745,
      "eval_evol_v2_runtime": 274.878,
      "eval_evol_v2_samples_per_second": 26.012,
      "eval_evol_v2_steps_per_second": 1.084,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_nlu_instruct_accuracy": 0.82024077537588,
      "eval_nlu_instruct_loss": 0.6822787523269653,
      "eval_nlu_instruct_runtime": 3475.2318,
      "eval_nlu_instruct_samples_per_second": 22.451,
      "eval_nlu_instruct_steps_per_second": 0.935,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_grade_school_math_instructions_accuracy": 0.8345128855915658,
      "eval_grade_school_math_instructions_loss": 0.5768687725067139,
      "eval_grade_school_math_instructions_runtime": 11.7518,
      "eval_grade_school_math_instructions_samples_per_second": 37.441,
      "eval_grade_school_math_instructions_steps_per_second": 1.617,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_poem_instructions_accuracy": 0.5003298676864057,
      "eval_poem_instructions_loss": 2.657353162765503,
      "eval_poem_instructions_runtime": 21.1728,
      "eval_poem_instructions_samples_per_second": 16.389,
      "eval_poem_instructions_steps_per_second": 0.708,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_gpt4all_accuracy": 0.7895927579325352,
      "eval_gpt4all_loss": 0.7741940021514893,
      "eval_gpt4all_runtime": 3567.353,
      "eval_gpt4all_samples_per_second": 21.798,
      "eval_gpt4all_steps_per_second": 0.909,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_joke_accuracy": 0.569955279729853,
      "eval_joke_loss": 1.8059210777282715,
      "eval_joke_runtime": 3.9667,
      "eval_joke_samples_per_second": 19.159,
      "eval_joke_steps_per_second": 1.008,
      "step": 3500
    },
    {
      "epoch": 2.11,
      "eval_gsm8k_accuracy": 0.8798132352856823,
      "eval_gsm8k_loss": 0.4235023558139801,
      "eval_gsm8k_runtime": 22.6764,
      "eval_gsm8k_samples_per_second": 58.166,
      "eval_gsm8k_steps_per_second": 2.425,
      "step": 3500
    },
{ |
|
"epoch": 2.12, |
|
"learning_rate": 7.597133757961784e-06, |
|
"loss": 0.6345, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 7.581210191082803e-06, |
|
"loss": 0.6488, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 7.565286624203822e-06, |
|
"loss": 0.6461, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 7.549363057324841e-06, |
|
"loss": 0.6517, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 7.533439490445861e-06, |
|
"loss": 0.63, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 7.517515923566879e-06, |
|
"loss": 0.6214, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 7.501592356687899e-06, |
|
"loss": 0.6433, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 7.485668789808918e-06, |
|
"loss": 0.6228, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 7.469745222929937e-06, |
|
"loss": 0.631, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 7.453821656050956e-06, |
|
"loss": 0.6441, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 7.437898089171975e-06, |
|
"loss": 0.6388, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 7.421974522292994e-06, |
|
"loss": 0.6343, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 7.406050955414014e-06, |
|
"loss": 0.615, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 7.390127388535032e-06, |
|
"loss": 0.6496, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 7.374203821656052e-06, |
|
"loss": 0.6316, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 7.358280254777071e-06, |
|
"loss": 0.635, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.34235668789809e-06, |
|
"loss": 0.6481, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.326433121019108e-06, |
|
"loss": 0.6395, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.310509554140128e-06, |
|
"loss": 0.6349, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.294585987261147e-06, |
|
"loss": 0.6231, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 7.278662420382167e-06, |
|
"loss": 0.6319, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.262738853503185e-06, |
|
"loss": 0.6468, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.246815286624204e-06, |
|
"loss": 0.6395, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.230891719745224e-06, |
|
"loss": 0.6552, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.214968152866243e-06, |
|
"loss": 0.6353, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 7.199044585987261e-06, |
|
"loss": 0.6392, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 7.183121019108281e-06, |
|
"loss": 0.6576, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 7.1671974522293e-06, |
|
"loss": 0.6411, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 7.15127388535032e-06, |
|
"loss": 0.6297, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 7.135350318471338e-06, |
|
"loss": 0.6314, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 7.119426751592357e-06, |
|
"loss": 0.6342, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 7.103503184713377e-06, |
|
"loss": 0.6364, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 7.087579617834396e-06, |
|
"loss": 0.646, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 7.071656050955414e-06, |
|
"loss": 0.6409, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 7.055732484076434e-06, |
|
"loss": 0.6314, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 7.039808917197453e-06, |
|
"loss": 0.6308, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 7.023885350318472e-06, |
|
"loss": 0.6364, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 7.007961783439491e-06, |
|
"loss": 0.6325, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 6.99203821656051e-06, |
|
"loss": 0.6358, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 6.976114649681529e-06, |
|
"loss": 0.6369, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 6.960191082802549e-06, |
|
"loss": 0.6475, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 6.944267515923567e-06, |
|
"loss": 0.6191, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 6.928343949044587e-06, |
|
"loss": 0.6504, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 6.912420382165606e-06, |
|
"loss": 0.6312, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 6.896496815286625e-06, |
|
"loss": 0.6413, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 6.880573248407644e-06, |
|
"loss": 0.6402, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 6.864649681528663e-06, |
|
"loss": 0.6471, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 6.848726114649682e-06, |
|
"loss": 0.6465, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 6.832802547770702e-06, |
|
"loss": 0.639, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 6.81687898089172e-06, |
|
"loss": 0.6439, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_oasst_export_accuracy": 0.7082109300873196, |
|
"eval_oasst_export_loss": 1.277816653251648, |
|
"eval_oasst_export_runtime": 68.1401, |
|
"eval_oasst_export_samples_per_second": 30.731, |
|
"eval_oasst_export_steps_per_second": 1.291, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_code_alpaca_accuracy": 0.8390993058971417, |
|
"eval_code_alpaca_loss": 0.5614765882492065, |
|
"eval_code_alpaca_runtime": 4.9704, |
|
"eval_code_alpaca_samples_per_second": 50.298, |
|
"eval_code_alpaca_steps_per_second": 2.213, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_evol_v2_accuracy": 0.7963871231321064, |
|
"eval_evol_v2_loss": 0.7076693177223206, |
|
"eval_evol_v2_runtime": 274.9975, |
|
"eval_evol_v2_samples_per_second": 26.0, |
|
"eval_evol_v2_steps_per_second": 1.084, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_nlu_instruct_accuracy": 0.8217512104859825, |
|
"eval_nlu_instruct_loss": 0.6734899282455444, |
|
"eval_nlu_instruct_runtime": 3476.0607, |
|
"eval_nlu_instruct_samples_per_second": 22.446, |
|
"eval_nlu_instruct_steps_per_second": 0.935, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_grade_school_math_instructions_accuracy": 0.8384420148379539, |
|
"eval_grade_school_math_instructions_loss": 0.5439364314079285, |
|
"eval_grade_school_math_instructions_runtime": 10.6494, |
|
"eval_grade_school_math_instructions_samples_per_second": 41.317, |
|
"eval_grade_school_math_instructions_steps_per_second": 1.784, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_poem_instructions_accuracy": 0.5008827284556223, |
|
"eval_poem_instructions_loss": 2.6464786529541016, |
|
"eval_poem_instructions_runtime": 21.1785, |
|
"eval_poem_instructions_samples_per_second": 16.385, |
|
"eval_poem_instructions_steps_per_second": 0.708, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_gpt4all_accuracy": 0.7913096259297718, |
|
"eval_gpt4all_loss": 0.7641447186470032, |
|
"eval_gpt4all_runtime": 3565.6669, |
|
"eval_gpt4all_samples_per_second": 21.809, |
|
"eval_gpt4all_steps_per_second": 0.909, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_joke_accuracy": 0.5768002190380579, |
|
"eval_joke_loss": 1.7684005498886108, |
|
"eval_joke_runtime": 3.0277, |
|
"eval_joke_samples_per_second": 25.102, |
|
"eval_joke_steps_per_second": 1.321, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"eval_gsm8k_accuracy": 0.8816889128526934, |
|
"eval_gsm8k_loss": 0.4015596807003021, |
|
"eval_gsm8k_runtime": 23.0636, |
|
"eval_gsm8k_samples_per_second": 57.19, |
|
"eval_gsm8k_steps_per_second": 2.385, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 6.80095541401274e-06, |
|
"loss": 0.6573, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 6.785031847133759e-06, |
|
"loss": 0.6252, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 6.769108280254778e-06, |
|
"loss": 0.6471, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 6.753184713375796e-06, |
|
"loss": 0.6518, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 6.737261146496816e-06, |
|
"loss": 0.6403, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 6.721337579617835e-06, |
|
"loss": 0.638, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 6.705414012738855e-06, |
|
"loss": 0.6546, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 6.689490445859873e-06, |
|
"loss": 0.652, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 6.673566878980892e-06, |
|
"loss": 0.642, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 6.657643312101912e-06, |
|
"loss": 0.6387, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 6.641719745222931e-06, |
|
"loss": 0.6469, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 6.625796178343949e-06, |
|
"loss": 0.651, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 6.609872611464969e-06, |
|
"loss": 0.6435, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 6.593949044585988e-06, |
|
"loss": 0.6494, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 6.578025477707008e-06, |
|
"loss": 0.6331, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 6.562101910828026e-06, |
|
"loss": 0.6252, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 6.546178343949045e-06, |
|
"loss": 0.6469, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 6.530254777070064e-06, |
|
"loss": 0.6598, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 6.514331210191084e-06, |
|
"loss": 0.6513, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 6.498407643312102e-06, |
|
"loss": 0.6505, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 6.482484076433122e-06, |
|
"loss": 0.6572, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 6.466560509554141e-06, |
|
"loss": 0.6346, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 6.45063694267516e-06, |
|
"loss": 0.6518, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 6.434713375796179e-06, |
|
"loss": 0.651, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 6.418789808917198e-06, |
|
"loss": 0.6546, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 6.402866242038217e-06, |
|
"loss": 0.6551, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.386942675159237e-06, |
|
"loss": 0.6494, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.371019108280255e-06, |
|
"loss": 0.6589, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.355095541401275e-06, |
|
"loss": 0.6399, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.339171974522294e-06, |
|
"loss": 0.6354, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.323248407643313e-06, |
|
"loss": 0.6432, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.307324840764332e-06, |
|
"loss": 0.6461, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.291401273885351e-06, |
|
"loss": 0.644, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.27547770700637e-06, |
|
"loss": 0.6675, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.25955414012739e-06, |
|
"loss": 0.6447, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.243630573248408e-06, |
|
"loss": 0.6524, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.2277070063694265e-06, |
|
"loss": 0.6414, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.211783439490447e-06, |
|
"loss": 0.641, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.195859872611465e-06, |
|
"loss": 0.6451, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.179936305732484e-06, |
|
"loss": 0.6351, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.1640127388535035e-06, |
|
"loss": 0.6409, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.148089171974523e-06, |
|
"loss": 0.6294, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.132165605095541e-06, |
|
"loss": 0.6466, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.116242038216561e-06, |
|
"loss": 0.6653, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.1003184713375795e-06, |
|
"loss": 0.6371, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.0843949044586e-06, |
|
"loss": 0.6416, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.068471337579618e-06, |
|
"loss": 0.6463, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.052547770700637e-06, |
|
"loss": 0.6352, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.0366242038216564e-06, |
|
"loss": 0.6582, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.020700636942676e-06, |
|
"loss": 0.6398, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_oasst_export_accuracy": 0.7068643360328554, |
|
"eval_oasst_export_loss": 1.2810877561569214, |
|
"eval_oasst_export_runtime": 68.1205, |
|
"eval_oasst_export_samples_per_second": 30.74, |
|
"eval_oasst_export_steps_per_second": 1.292, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_code_alpaca_accuracy": 0.8397004973492922, |
|
"eval_code_alpaca_loss": 0.5658242106437683, |
|
"eval_code_alpaca_runtime": 4.4867, |
|
"eval_code_alpaca_samples_per_second": 55.72, |
|
"eval_code_alpaca_steps_per_second": 2.452, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_evol_v2_accuracy": 0.7957420569509923, |
|
"eval_evol_v2_loss": 0.7099133133888245, |
|
"eval_evol_v2_runtime": 275.359, |
|
"eval_evol_v2_samples_per_second": 25.966, |
|
"eval_evol_v2_steps_per_second": 1.082, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_nlu_instruct_accuracy": 0.8218655440157056, |
|
"eval_nlu_instruct_loss": 0.6718029975891113, |
|
"eval_nlu_instruct_runtime": 3476.8418, |
|
"eval_nlu_instruct_samples_per_second": 22.441, |
|
"eval_nlu_instruct_steps_per_second": 0.935, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_grade_school_math_instructions_accuracy": 0.8435181569699336, |
|
"eval_grade_school_math_instructions_loss": 0.5345037579536438, |
|
"eval_grade_school_math_instructions_runtime": 9.633, |
|
"eval_grade_school_math_instructions_samples_per_second": 45.676, |
|
"eval_grade_school_math_instructions_steps_per_second": 1.972, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_poem_instructions_accuracy": 0.5007816030189417, |
|
"eval_poem_instructions_loss": 2.6497209072113037, |
|
"eval_poem_instructions_runtime": 21.1714, |
|
"eval_poem_instructions_samples_per_second": 16.39, |
|
"eval_poem_instructions_steps_per_second": 0.709, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_gpt4all_accuracy": 0.790508486230526, |
|
"eval_gpt4all_loss": 0.7647537589073181, |
|
"eval_gpt4all_runtime": 3565.0547, |
|
"eval_gpt4all_samples_per_second": 21.812, |
|
"eval_gpt4all_steps_per_second": 0.909, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_joke_accuracy": 0.5753399653189741, |
|
"eval_joke_loss": 1.7703536748886108, |
|
"eval_joke_runtime": 3.6512, |
|
"eval_joke_samples_per_second": 20.815, |
|
"eval_joke_steps_per_second": 1.096, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"eval_gsm8k_accuracy": 0.8853599637483724, |
|
"eval_gsm8k_loss": 0.391888827085495, |
|
"eval_gsm8k_runtime": 22.7306, |
|
"eval_gsm8k_samples_per_second": 58.027, |
|
"eval_gsm8k_steps_per_second": 2.42, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.004777070063694e-06, |
|
"loss": 0.6392, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.988853503184714e-06, |
|
"loss": 0.6533, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.9729299363057325e-06, |
|
"loss": 0.6432, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 5.957006369426752e-06, |
|
"loss": 0.6447, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 5.941082802547771e-06, |
|
"loss": 0.6438, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 5.92515923566879e-06, |
|
"loss": 0.6402, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 5.9092356687898086e-06, |
|
"loss": 0.6436, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 5.893312101910829e-06, |
|
"loss": 0.6565, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 5.877388535031847e-06, |
|
"loss": 0.6562, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 5.861464968152867e-06, |
|
"loss": 0.6341, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 5.8455414012738855e-06, |
|
"loss": 0.6506, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 5.829617834394905e-06, |
|
"loss": 0.6633, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 5.813694267515924e-06, |
|
"loss": 0.6586, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.797770700636943e-06, |
|
"loss": 0.6479, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.7818471337579615e-06, |
|
"loss": 0.6546, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.765923566878982e-06, |
|
"loss": 0.6422, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.75e-06, |
|
"loss": 0.6523, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.73407643312102e-06, |
|
"loss": 0.657, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.7181528662420385e-06, |
|
"loss": 0.6716, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.702229299363058e-06, |
|
"loss": 0.6673, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.686305732484076e-06, |
|
"loss": 0.6443, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.670382165605096e-06, |
|
"loss": 0.6539, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.6544585987261145e-06, |
|
"loss": 0.6551, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 5.638535031847135e-06, |
|
"loss": 0.6531, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 5.622611464968153e-06, |
|
"loss": 0.6332, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 5.606687898089172e-06, |
|
"loss": 0.6554, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 5.5907643312101914e-06, |
|
"loss": 0.6503, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 5.574840764331211e-06, |
|
"loss": 0.6384, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 5.558917197452229e-06, |
|
"loss": 0.6433, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.542993630573249e-06, |
|
"loss": 0.6516, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.5270700636942675e-06, |
|
"loss": 0.6567, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.511146496815288e-06, |
|
"loss": 0.6488, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.495222929936306e-06, |
|
"loss": 0.6434, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.479299363057325e-06, |
|
"loss": 0.6404, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 5.463375796178344e-06, |
|
"loss": 0.6467, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 5.447452229299364e-06, |
|
"loss": 0.6449, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 5.431528662420382e-06, |
|
"loss": 0.6329, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 5.415605095541402e-06, |
|
"loss": 0.6637, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 5.3996815286624205e-06, |
|
"loss": 0.6431, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.38375796178344e-06, |
|
"loss": 0.6545, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.367834394904459e-06, |
|
"loss": 0.6679, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.351910828025478e-06, |
|
"loss": 0.6596, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.3359872611464966e-06, |
|
"loss": 0.6546, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.320063694267517e-06, |
|
"loss": 0.6412, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.304140127388535e-06, |
|
"loss": 0.6624, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.288216560509555e-06, |
|
"loss": 0.6609, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.2722929936305735e-06, |
|
"loss": 0.6637, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 5.256369426751593e-06, |
|
"loss": 0.5777, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 5.240445859872612e-06, |
|
"loss": 0.5636, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 5.224522292993631e-06, |
|
"loss": 0.5546, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_oasst_export_accuracy": 0.7039114566904696, |
|
"eval_oasst_export_loss": 1.3318158388137817, |
|
"eval_oasst_export_runtime": 68.1379, |
|
"eval_oasst_export_samples_per_second": 30.732, |
|
"eval_oasst_export_steps_per_second": 1.291, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_code_alpaca_accuracy": 0.8384434606766137, |
|
"eval_code_alpaca_loss": 0.5847578048706055, |
|
"eval_code_alpaca_runtime": 5.0277, |
|
"eval_code_alpaca_samples_per_second": 49.724, |
|
"eval_code_alpaca_steps_per_second": 2.188, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_evol_v2_accuracy": 0.7943008349293793, |
|
"eval_evol_v2_loss": 0.7375423312187195, |
|
"eval_evol_v2_runtime": 274.7434, |
|
"eval_evol_v2_samples_per_second": 26.024, |
|
"eval_evol_v2_steps_per_second": 1.085, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_nlu_instruct_accuracy": 0.8220131298210854, |
|
"eval_nlu_instruct_loss": 0.6885544657707214, |
|
"eval_nlu_instruct_runtime": 3476.7259, |
|
"eval_nlu_instruct_samples_per_second": 22.442, |
|
"eval_nlu_instruct_steps_per_second": 0.935, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_grade_school_math_instructions_accuracy": 0.8461050370948848, |
|
"eval_grade_school_math_instructions_loss": 0.5257013440132141, |
|
"eval_grade_school_math_instructions_runtime": 12.7566, |
|
"eval_grade_school_math_instructions_samples_per_second": 34.492, |
|
"eval_grade_school_math_instructions_steps_per_second": 1.489, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_poem_instructions_accuracy": 0.49797271716962005, |
|
"eval_poem_instructions_loss": 2.6931512355804443, |
|
"eval_poem_instructions_runtime": 21.1948, |
|
"eval_poem_instructions_samples_per_second": 16.372, |
|
"eval_poem_instructions_steps_per_second": 0.708, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_gpt4all_accuracy": 0.7893674285927822, |
|
"eval_gpt4all_loss": 0.7917724251747131, |
|
"eval_gpt4all_runtime": 3566.3071, |
|
"eval_gpt4all_samples_per_second": 21.805, |
|
"eval_gpt4all_steps_per_second": 0.909, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_joke_accuracy": 0.5706854065893949, |
|
"eval_joke_loss": 1.8482730388641357, |
|
"eval_joke_runtime": 2.7911, |
|
"eval_joke_samples_per_second": 27.229, |
|
"eval_joke_steps_per_second": 1.433, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"eval_gsm8k_accuracy": 0.8884000527713566, |
|
"eval_gsm8k_loss": 0.38383719325065613, |
|
"eval_gsm8k_runtime": 22.6991, |
|
"eval_gsm8k_samples_per_second": 58.108, |
|
"eval_gsm8k_steps_per_second": 2.423, |
|
"step": 5000 |
|
} |
|
], |
|
"max_steps": 8280, |
|
"num_train_epochs": 5, |
|
"total_flos": 8911630701166592.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |