{
  "best_metric": 0.7984717269485482,
  "best_model_checkpoint": "/home/user/emrecan/models/dbmdz_bert-base-turkish-cased_allnli_tr/checkpoint-80000",
  "epoch": 3.0,
  "global_step": 88320,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 1.9773550724637682e-05,
      "loss": 0.8559,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "eval_accuracy": 0.679775853285787,
      "eval_loss": 0.7577011585235596,
      "eval_runtime": 35.4894,
      "eval_samples_per_second": 276.562,
      "eval_steps_per_second": 8.65,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9547101449275363e-05,
      "loss": 0.6612,
      "step": 2000
    },
    {
      "epoch": 0.07,
      "eval_accuracy": 0.6957717778909832,
      "eval_loss": 0.7263472676277161,
      "eval_runtime": 37.7507,
      "eval_samples_per_second": 259.995,
      "eval_steps_per_second": 8.132,
      "step": 2000
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9320652173913047e-05,
      "loss": 0.6115,
      "step": 3000
    },
    {
      "epoch": 0.1,
      "eval_accuracy": 0.7364238410596027,
      "eval_loss": 0.6430743932723999,
      "eval_runtime": 37.122,
      "eval_samples_per_second": 264.399,
      "eval_steps_per_second": 8.27,
      "step": 3000
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9094202898550727e-05,
      "loss": 0.5916,
      "step": 4000
    },
    {
      "epoch": 0.14,
      "eval_accuracy": 0.7407030056036679,
      "eval_loss": 0.6347211599349976,
      "eval_runtime": 36.4648,
      "eval_samples_per_second": 269.164,
      "eval_steps_per_second": 8.419,
      "step": 4000
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.8867753623188408e-05,
      "loss": 0.5719,
      "step": 5000
    },
    {
      "epoch": 0.17,
      "eval_accuracy": 0.7483443708609272,
      "eval_loss": 0.6316782236099243,
      "eval_runtime": 36.2383,
      "eval_samples_per_second": 270.846,
      "eval_steps_per_second": 8.472,
      "step": 5000
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.864130434782609e-05,
      "loss": 0.5575,
      "step": 6000
    },
    {
      "epoch": 0.2,
      "eval_accuracy": 0.7543555781966378,
      "eval_loss": 0.6033961772918701,
      "eval_runtime": 36.9743,
      "eval_samples_per_second": 265.455,
      "eval_steps_per_second": 8.303,
      "step": 6000
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.841485507246377e-05,
      "loss": 0.5521,
      "step": 7000
    },
    {
      "epoch": 0.24,
      "eval_accuracy": 0.7568008150789608,
      "eval_loss": 0.6147794723510742,
      "eval_runtime": 35.6284,
      "eval_samples_per_second": 275.483,
      "eval_steps_per_second": 8.617,
      "step": 7000
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.818840579710145e-05,
      "loss": 0.5393,
      "step": 8000
    },
    {
      "epoch": 0.27,
      "eval_accuracy": 0.7609780947529292,
      "eval_loss": 0.593109667301178,
      "eval_runtime": 35.1284,
      "eval_samples_per_second": 279.404,
      "eval_steps_per_second": 8.739,
      "step": 8000
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.7961956521739134e-05,
      "loss": 0.5382,
      "step": 9000
    },
    {
      "epoch": 0.31,
      "eval_accuracy": 0.7664798777381558,
      "eval_loss": 0.5865770578384399,
      "eval_runtime": 36.7058,
      "eval_samples_per_second": 267.396,
      "eval_steps_per_second": 8.364,
      "step": 9000
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.7735507246376815e-05,
      "loss": 0.5306,
      "step": 10000
    },
    {
      "epoch": 0.34,
      "eval_accuracy": 0.7594498217014773,
      "eval_loss": 0.5881215929985046,
      "eval_runtime": 36.4456,
      "eval_samples_per_second": 269.305,
      "eval_steps_per_second": 8.424,
      "step": 10000
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.7509057971014495e-05,
      "loss": 0.5295,
      "step": 11000
    },
    {
      "epoch": 0.37,
      "eval_accuracy": 0.7632195618950586,
      "eval_loss": 0.6120319962501526,
      "eval_runtime": 36.0374,
      "eval_samples_per_second": 272.356,
      "eval_steps_per_second": 8.519,
      "step": 11000
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.7282608695652176e-05,
      "loss": 0.5225,
      "step": 12000
    },
    {
      "epoch": 0.41,
      "eval_accuracy": 0.7758532857870606,
      "eval_loss": 0.5619738698005676,
      "eval_runtime": 36.3737,
      "eval_samples_per_second": 269.838,
      "eval_steps_per_second": 8.44,
      "step": 12000
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7056159420289856e-05,
      "loss": 0.5112,
      "step": 13000
    },
    {
      "epoch": 0.44,
      "eval_accuracy": 0.7768721344880285,
      "eval_loss": 0.5641229152679443,
      "eval_runtime": 36.0192,
      "eval_samples_per_second": 272.494,
      "eval_steps_per_second": 8.523,
      "step": 13000
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.6829710144927537e-05,
      "loss": 0.5133,
      "step": 14000
    },
    {
      "epoch": 0.48,
      "eval_accuracy": 0.7798267957208355,
      "eval_loss": 0.557083010673523,
      "eval_runtime": 37.2598,
      "eval_samples_per_second": 263.42,
      "eval_steps_per_second": 8.239,
      "step": 14000
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6603260869565218e-05,
      "loss": 0.5023,
      "step": 15000
    },
    {
      "epoch": 0.51,
      "eval_accuracy": 0.7721854304635761,
      "eval_loss": 0.5719286203384399,
      "eval_runtime": 36.5731,
      "eval_samples_per_second": 268.366,
      "eval_steps_per_second": 8.394,
      "step": 15000
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.6376811594202898e-05,
      "loss": 0.5017,
      "step": 16000
    },
    {
      "epoch": 0.54,
      "eval_accuracy": 0.7844116148751911,
      "eval_loss": 0.5482256412506104,
      "eval_runtime": 35.2562,
      "eval_samples_per_second": 278.391,
      "eval_steps_per_second": 8.708,
      "step": 16000
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.615036231884058e-05,
      "loss": 0.5111,
      "step": 17000
    },
    {
      "epoch": 0.58,
      "eval_accuracy": 0.780030565461029,
      "eval_loss": 0.5503329634666443,
      "eval_runtime": 35.8945,
      "eval_samples_per_second": 273.44,
      "eval_steps_per_second": 8.553,
      "step": 17000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.5923913043478263e-05,
      "loss": 0.4929,
      "step": 18000
    },
    {
      "epoch": 0.61,
      "eval_accuracy": 0.7835965359144167,
      "eval_loss": 0.5501914620399475,
      "eval_runtime": 36.1699,
      "eval_samples_per_second": 271.358,
      "eval_steps_per_second": 8.488,
      "step": 18000
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.5697463768115943e-05,
      "loss": 0.4923,
      "step": 19000
    },
    {
      "epoch": 0.65,
      "eval_accuracy": 0.7843097300050943,
      "eval_loss": 0.5424306988716125,
      "eval_runtime": 35.606,
      "eval_samples_per_second": 275.656,
      "eval_steps_per_second": 8.622,
      "step": 19000
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.5471014492753624e-05,
      "loss": 0.4894,
      "step": 20000
    },
    {
      "epoch": 0.68,
      "eval_accuracy": 0.7851248089658686,
      "eval_loss": 0.541650116443634,
      "eval_runtime": 35.3356,
      "eval_samples_per_second": 277.765,
      "eval_steps_per_second": 8.688,
      "step": 20000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.5244565217391305e-05,
      "loss": 0.4877,
      "step": 21000
    },
    {
      "epoch": 0.71,
      "eval_accuracy": 0.7841059602649006,
      "eval_loss": 0.5514414310455322,
      "eval_runtime": 36.8151,
      "eval_samples_per_second": 266.602,
      "eval_steps_per_second": 8.339,
      "step": 21000
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.5018115942028985e-05,
      "loss": 0.4818,
      "step": 22000
    },
    {
      "epoch": 0.75,
      "eval_accuracy": 0.7848191543555781,
      "eval_loss": 0.5493878126144409,
      "eval_runtime": 37.2449,
      "eval_samples_per_second": 263.526,
      "eval_steps_per_second": 8.243,
      "step": 22000
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.479166666666667e-05,
      "loss": 0.4898,
      "step": 23000
    },
    {
      "epoch": 0.78,
      "eval_accuracy": 0.7859398879266429,
      "eval_loss": 0.5450355410575867,
      "eval_runtime": 37.2862,
      "eval_samples_per_second": 263.234,
      "eval_steps_per_second": 8.234,
      "step": 23000
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.456521739130435e-05,
      "loss": 0.4823,
      "step": 24000
    },
    {
      "epoch": 0.82,
      "eval_accuracy": 0.7877738155883851,
      "eval_loss": 0.5417405366897583,
      "eval_runtime": 37.3765,
      "eval_samples_per_second": 262.598,
      "eval_steps_per_second": 8.214,
      "step": 24000
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.433876811594203e-05,
      "loss": 0.4806,
      "step": 25000
    },
    {
      "epoch": 0.85,
      "eval_accuracy": 0.7874681609780948,
      "eval_loss": 0.5354494452476501,
      "eval_runtime": 37.6863,
      "eval_samples_per_second": 260.44,
      "eval_steps_per_second": 8.146,
      "step": 25000
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.4112318840579711e-05,
      "loss": 0.4779,
      "step": 26000
    },
    {
      "epoch": 0.88,
      "eval_accuracy": 0.7848191543555781,
      "eval_loss": 0.5337836146354675,
      "eval_runtime": 36.4797,
      "eval_samples_per_second": 269.054,
      "eval_steps_per_second": 8.416,
      "step": 26000
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.3885869565217392e-05,
      "loss": 0.4744,
      "step": 27000
    },
    {
      "epoch": 0.92,
      "eval_accuracy": 0.7933774834437086,
      "eval_loss": 0.5277203917503357,
      "eval_runtime": 36.8856,
      "eval_samples_per_second": 266.093,
      "eval_steps_per_second": 8.323,
      "step": 27000
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.3659420289855074e-05,
      "loss": 0.4678,
      "step": 28000
    },
    {
      "epoch": 0.95,
      "eval_accuracy": 0.7870606214977076,
      "eval_loss": 0.5506766438484192,
      "eval_runtime": 36.0153,
      "eval_samples_per_second": 272.523,
      "eval_steps_per_second": 8.524,
      "step": 28000
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.3432971014492755e-05,
      "loss": 0.4727,
      "step": 29000
    },
    {
      "epoch": 0.99,
      "eval_accuracy": 0.7789098318899643,
      "eval_loss": 0.5603240132331848,
      "eval_runtime": 35.8744,
      "eval_samples_per_second": 273.593,
      "eval_steps_per_second": 8.558,
      "step": 29000
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.3206521739130435e-05,
      "loss": 0.4243,
      "step": 30000
    },
    {
      "epoch": 1.02,
      "eval_accuracy": 0.7894039735099337,
      "eval_loss": 0.562574565410614,
      "eval_runtime": 35.9331,
      "eval_samples_per_second": 273.146,
      "eval_steps_per_second": 8.544,
      "step": 30000
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.2980072463768116e-05,
      "loss": 0.3955,
      "step": 31000
    },
    {
      "epoch": 1.05,
      "eval_accuracy": 0.7938869077941926,
      "eval_loss": 0.5324372053146362,
      "eval_runtime": 37.5987,
      "eval_samples_per_second": 261.046,
      "eval_steps_per_second": 8.165,
      "step": 31000
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.2753623188405797e-05,
      "loss": 0.4022,
      "step": 32000
    },
    {
      "epoch": 1.09,
      "eval_accuracy": 0.7924605196128375,
      "eval_loss": 0.5322304368019104,
      "eval_runtime": 37.3438,
      "eval_samples_per_second": 262.828,
      "eval_steps_per_second": 8.221,
      "step": 32000
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.252717391304348e-05,
      "loss": 0.3976,
      "step": 33000
    },
    {
      "epoch": 1.12,
      "eval_accuracy": 0.7919510952623535,
      "eval_loss": 0.545001745223999,
      "eval_runtime": 36.321,
      "eval_samples_per_second": 270.229,
      "eval_steps_per_second": 8.452,
      "step": 33000
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.2300724637681161e-05,
      "loss": 0.3913,
      "step": 34000
    },
    {
      "epoch": 1.15,
      "eval_accuracy": 0.7948038716250637,
      "eval_loss": 0.5464295744895935,
      "eval_runtime": 36.0101,
      "eval_samples_per_second": 272.562,
      "eval_steps_per_second": 8.525,
      "step": 34000
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.2074275362318842e-05,
      "loss": 0.406,
      "step": 35000
    },
    {
      "epoch": 1.19,
      "eval_accuracy": 0.7958227203260316,
      "eval_loss": 0.5405891537666321,
      "eval_runtime": 35.967,
      "eval_samples_per_second": 272.889,
      "eval_steps_per_second": 8.536,
      "step": 35000
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.1847826086956522e-05,
      "loss": 0.3875,
      "step": 36000
    },
    {
      "epoch": 1.22,
      "eval_accuracy": 0.7877738155883851,
      "eval_loss": 0.5489205718040466,
      "eval_runtime": 36.1107,
      "eval_samples_per_second": 271.803,
      "eval_steps_per_second": 8.502,
      "step": 36000
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.1621376811594205e-05,
      "loss": 0.4024,
      "step": 37000
    },
    {
      "epoch": 1.26,
      "eval_accuracy": 0.7924605196128375,
      "eval_loss": 0.542732834815979,
      "eval_runtime": 36.87,
      "eval_samples_per_second": 266.206,
      "eval_steps_per_second": 8.327,
      "step": 37000
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.1394927536231885e-05,
      "loss": 0.3988,
      "step": 38000
    },
    {
      "epoch": 1.29,
      "eval_accuracy": 0.7904228222109017,
      "eval_loss": 0.5334596037864685,
      "eval_runtime": 36.0057,
      "eval_samples_per_second": 272.596,
      "eval_steps_per_second": 8.526,
      "step": 38000
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.1168478260869566e-05,
      "loss": 0.393,
      "step": 39000
    },
    {
      "epoch": 1.32,
      "eval_accuracy": 0.7922567498726439,
      "eval_loss": 0.5415398478507996,
      "eval_runtime": 35.9966,
      "eval_samples_per_second": 272.665,
      "eval_steps_per_second": 8.529,
      "step": 39000
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.0942028985507247e-05,
      "loss": 0.3988,
      "step": 40000
    },
    {
      "epoch": 1.36,
      "eval_accuracy": 0.7962302598064187,
      "eval_loss": 0.5384690761566162,
      "eval_runtime": 36.005,
      "eval_samples_per_second": 272.601,
      "eval_steps_per_second": 8.527,
      "step": 40000
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.0715579710144927e-05,
      "loss": 0.3912,
      "step": 41000
    },
    {
      "epoch": 1.39,
      "eval_accuracy": 0.7950076413652573,
      "eval_loss": 0.5382993817329407,
      "eval_runtime": 37.5898,
      "eval_samples_per_second": 261.108,
      "eval_steps_per_second": 8.167,
      "step": 41000
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.0489130434782611e-05,
      "loss": 0.3949,
      "step": 42000
    },
    {
      "epoch": 1.43,
      "eval_accuracy": 0.7930718288334182,
      "eval_loss": 0.5415102243423462,
      "eval_runtime": 37.2008,
      "eval_samples_per_second": 263.838,
      "eval_steps_per_second": 8.253,
      "step": 42000
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.0262681159420292e-05,
      "loss": 0.3902,
      "step": 43000
    },
    {
      "epoch": 1.46,
      "eval_accuracy": 0.789302088639837,
      "eval_loss": 0.5438172817230225,
      "eval_runtime": 36.0003,
      "eval_samples_per_second": 272.636,
      "eval_steps_per_second": 8.528,
      "step": 43000
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0036231884057972e-05,
      "loss": 0.3948,
      "step": 44000
    },
    {
      "epoch": 1.49,
      "eval_accuracy": 0.7906265919510953,
      "eval_loss": 0.5348154902458191,
      "eval_runtime": 36.2261,
      "eval_samples_per_second": 270.937,
      "eval_steps_per_second": 8.475,
      "step": 44000
    },
    {
      "epoch": 1.53,
      "learning_rate": 9.809782608695653e-06,
      "loss": 0.3921,
      "step": 45000
    },
    {
      "epoch": 1.53,
      "eval_accuracy": 0.7889964340295467,
      "eval_loss": 0.5361019968986511,
      "eval_runtime": 36.0927,
      "eval_samples_per_second": 271.938,
      "eval_steps_per_second": 8.506,
      "step": 45000
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.583333333333335e-06,
      "loss": 0.3944,
      "step": 46000
    },
    {
      "epoch": 1.56,
      "eval_accuracy": 0.7953132959755477,
      "eval_loss": 0.5419210195541382,
      "eval_runtime": 36.1865,
      "eval_samples_per_second": 271.234,
      "eval_steps_per_second": 8.484,
      "step": 46000
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.356884057971016e-06,
      "loss": 0.3959,
      "step": 47000
    },
    {
      "epoch": 1.6,
      "eval_accuracy": 0.7967396841569027,
      "eval_loss": 0.5401512980461121,
      "eval_runtime": 36.1559,
      "eval_samples_per_second": 271.463,
      "eval_steps_per_second": 8.491,
      "step": 47000
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.130434782608697e-06,
      "loss": 0.3926,
      "step": 48000
    },
    {
      "epoch": 1.63,
      "eval_accuracy": 0.7924605196128375,
      "eval_loss": 0.5428866744041443,
      "eval_runtime": 36.1454,
      "eval_samples_per_second": 271.542,
      "eval_steps_per_second": 8.493,
      "step": 48000
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.903985507246377e-06,
      "loss": 0.3854,
      "step": 49000
    },
    {
      "epoch": 1.66,
      "eval_accuracy": 0.7959246051961284,
      "eval_loss": 0.534604012966156,
      "eval_runtime": 36.0997,
      "eval_samples_per_second": 271.886,
      "eval_steps_per_second": 8.504,
      "step": 49000
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.677536231884058e-06,
      "loss": 0.3864,
      "step": 50000
    },
    {
      "epoch": 1.7,
      "eval_accuracy": 0.7978604177279673,
      "eval_loss": 0.5241352915763855,
      "eval_runtime": 35.9203,
      "eval_samples_per_second": 273.244,
      "eval_steps_per_second": 8.547,
      "step": 50000
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.45108695652174e-06,
      "loss": 0.385,
      "step": 51000
    },
    {
      "epoch": 1.73,
      "eval_accuracy": 0.8002037697401936,
      "eval_loss": 0.514944851398468,
      "eval_runtime": 37.3533,
      "eval_samples_per_second": 262.762,
      "eval_steps_per_second": 8.219,
      "step": 51000
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.22463768115942e-06,
      "loss": 0.3871,
      "step": 52000
    },
    {
      "epoch": 1.77,
      "eval_accuracy": 0.8002037697401936,
      "eval_loss": 0.5325358510017395,
      "eval_runtime": 36.687,
      "eval_samples_per_second": 267.534,
      "eval_steps_per_second": 8.368,
      "step": 52000
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.998188405797103e-06,
      "loss": 0.3819,
      "step": 53000
    },
    {
      "epoch": 1.8,
      "eval_accuracy": 0.8022414671421294,
      "eval_loss": 0.5332015752792358,
      "eval_runtime": 35.9183,
      "eval_samples_per_second": 273.259,
      "eval_steps_per_second": 8.547,
      "step": 53000
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.771739130434784e-06,
      "loss": 0.384,
      "step": 54000
    },
    {
      "epoch": 1.83,
      "eval_accuracy": 0.7872643912379012,
      "eval_loss": 0.5419135093688965,
      "eval_runtime": 35.939,
      "eval_samples_per_second": 273.102,
      "eval_steps_per_second": 8.542,
      "step": 54000
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.545289855072464e-06,
      "loss": 0.3899,
      "step": 55000
    },
    {
      "epoch": 1.87,
      "eval_accuracy": 0.7973509933774835,
      "eval_loss": 0.522521436214447,
      "eval_runtime": 36.1317,
      "eval_samples_per_second": 271.645,
      "eval_steps_per_second": 8.497,
      "step": 55000
    },
    {
      "epoch": 1.9,
      "learning_rate": 7.318840579710146e-06,
      "loss": 0.3894,
      "step": 56000
    },
    {
      "epoch": 1.9,
      "eval_accuracy": 0.7976566479877738,
      "eval_loss": 0.5357836484909058,
      "eval_runtime": 36.1326,
      "eval_samples_per_second": 271.639,
      "eval_steps_per_second": 8.496,
      "step": 56000
    },
    {
      "epoch": 1.94,
      "learning_rate": 7.092391304347826e-06,
      "loss": 0.3838,
      "step": 57000
    },
    {
      "epoch": 1.94,
      "eval_accuracy": 0.7987773815588385,
      "eval_loss": 0.5263946652412415,
      "eval_runtime": 35.9423,
      "eval_samples_per_second": 273.077,
      "eval_steps_per_second": 8.541,
      "step": 57000
    },
    {
      "epoch": 1.97,
      "learning_rate": 6.865942028985509e-06,
      "loss": 0.3881,
      "step": 58000
    },
    {
      "epoch": 1.97,
      "eval_accuracy": 0.795618950585838,
      "eval_loss": 0.5279687643051147,
      "eval_runtime": 35.9441,
      "eval_samples_per_second": 273.063,
      "eval_steps_per_second": 8.541,
      "step": 58000
    },
    {
      "epoch": 2.0,
      "learning_rate": 6.639492753623189e-06,
      "loss": 0.3756,
      "step": 59000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7969434538970963,
      "eval_loss": 0.5600506663322449,
      "eval_runtime": 36.2008,
      "eval_samples_per_second": 271.126,
      "eval_steps_per_second": 8.48,
      "step": 59000
    },
    {
      "epoch": 2.04,
      "learning_rate": 6.41304347826087e-06,
      "loss": 0.3156,
      "step": 60000
    },
    {
      "epoch": 2.04,
      "eval_accuracy": 0.7924605196128375,
      "eval_loss": 0.5936241745948792,
      "eval_runtime": 36.6834,
      "eval_samples_per_second": 267.56,
      "eval_steps_per_second": 8.369,
      "step": 60000
    },
    {
      "epoch": 2.07,
      "learning_rate": 6.186594202898551e-06,
      "loss": 0.3125,
      "step": 61000
    },
    {
      "epoch": 2.07,
      "eval_accuracy": 0.7937850229240958,
      "eval_loss": 0.5897734761238098,
      "eval_runtime": 43.1106,
      "eval_samples_per_second": 227.67,
      "eval_steps_per_second": 7.121,
      "step": 61000
    },
    {
      "epoch": 2.11,
      "learning_rate": 5.960144927536232e-06,
      "loss": 0.3179,
      "step": 62000
    },
    {
      "epoch": 2.11,
      "eval_accuracy": 0.798064187468161,
      "eval_loss": 0.5591443777084351,
      "eval_runtime": 36.4374,
      "eval_samples_per_second": 269.366,
      "eval_steps_per_second": 8.425,
      "step": 62000
    },
    {
      "epoch": 2.14,
      "learning_rate": 5.733695652173914e-06,
      "loss": 0.315,
      "step": 63000
    },
    {
      "epoch": 2.14,
      "eval_accuracy": 0.7970453387671931,
      "eval_loss": 0.585314154624939,
      "eval_runtime": 35.8475,
      "eval_samples_per_second": 273.799,
      "eval_steps_per_second": 8.564,
      "step": 63000
    },
    {
      "epoch": 2.17,
      "learning_rate": 5.507246376811595e-06,
      "loss": 0.3122,
      "step": 64000
    },
    {
      "epoch": 2.17,
      "eval_accuracy": 0.7978604177279673,
      "eval_loss": 0.5801787376403809,
      "eval_runtime": 35.9545,
      "eval_samples_per_second": 272.984,
      "eval_steps_per_second": 8.539,
      "step": 64000
    },
    {
      "epoch": 2.21,
      "learning_rate": 5.2807971014492755e-06,
      "loss": 0.3105,
      "step": 65000
    },
    {
      "epoch": 2.21,
      "eval_accuracy": 0.7978604177279673,
      "eval_loss": 0.5757964849472046,
      "eval_runtime": 36.2052,
      "eval_samples_per_second": 271.094,
      "eval_steps_per_second": 8.479,
      "step": 65000
    },
    {
      "epoch": 2.24,
      "learning_rate": 5.054347826086957e-06,
      "loss": 0.3076,
      "step": 66000
    },
    {
      "epoch": 2.24,
      "eval_accuracy": 0.7979623025980642,
      "eval_loss": 0.5684827566146851,
      "eval_runtime": 37.6302,
      "eval_samples_per_second": 260.828,
      "eval_steps_per_second": 8.158,
      "step": 66000
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.8278985507246375e-06,
      "loss": 0.3117,
      "step": 67000
    },
    {
      "epoch": 2.28,
      "eval_accuracy": 0.7943963321446765,
      "eval_loss": 0.5799030661582947,
      "eval_runtime": 35.9518,
      "eval_samples_per_second": 273.004,
      "eval_steps_per_second": 8.539,
      "step": 67000
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.601449275362319e-06,
      "loss": 0.3108,
      "step": 68000
    },
    {
      "epoch": 2.31,
      "eval_accuracy": 0.7987773815588385,
      "eval_loss": 0.5741643905639648,
      "eval_runtime": 36.1228,
      "eval_samples_per_second": 271.712,
      "eval_steps_per_second": 8.499,
      "step": 68000
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.3047,
      "step": 69000
    },
    {
      "epoch": 2.34,
      "eval_accuracy": 0.7920529801324503,
      "eval_loss": 0.5906898975372314,
      "eval_runtime": 36.4484,
      "eval_samples_per_second": 269.284,
      "eval_steps_per_second": 8.423,
      "step": 69000
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.148550724637682e-06,
      "loss": 0.3114,
      "step": 70000
    },
    {
      "epoch": 2.38,
      "eval_accuracy": 0.7936831380539989,
      "eval_loss": 0.5723035335540771,
      "eval_runtime": 37.6048,
      "eval_samples_per_second": 261.004,
      "eval_steps_per_second": 8.164,
      "step": 70000
    },
    {
      "epoch": 2.41,
      "learning_rate": 3.9221014492753625e-06,
      "loss": 0.3035,
      "step": 71000
    },
    {
      "epoch": 2.41,
      "eval_accuracy": 0.7955170657157412,
      "eval_loss": 0.5944135189056396,
      "eval_runtime": 37.5234,
      "eval_samples_per_second": 261.57,
      "eval_steps_per_second": 8.182,
      "step": 71000
    },
    {
      "epoch": 2.45,
      "learning_rate": 3.6956521739130436e-06,
      "loss": 0.3129,
      "step": 72000
    },
    {
      "epoch": 2.45,
      "eval_accuracy": 0.7927661742231279,
      "eval_loss": 0.5837991833686829,
      "eval_runtime": 37.2958,
      "eval_samples_per_second": 263.167,
      "eval_steps_per_second": 8.231,
      "step": 72000
    },
    {
      "epoch": 2.48,
      "learning_rate": 3.4692028985507246e-06,
      "loss": 0.3071,
      "step": 73000
    },
    {
      "epoch": 2.48,
      "eval_accuracy": 0.7949057564951605,
      "eval_loss": 0.5928712487220764,
      "eval_runtime": 37.6873,
      "eval_samples_per_second": 260.433,
      "eval_steps_per_second": 8.146,
      "step": 73000
    },
    {
      "epoch": 2.51,
      "learning_rate": 3.242753623188406e-06,
      "loss": 0.3061,
      "step": 74000
    },
    {
      "epoch": 2.51,
      "eval_accuracy": 0.7967396841569027,
      "eval_loss": 0.579406201839447,
      "eval_runtime": 37.7053,
      "eval_samples_per_second": 260.308,
      "eval_steps_per_second": 8.142,
      "step": 74000
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.016304347826087e-06,
      "loss": 0.3068,
      "step": 75000
    },
    {
      "epoch": 2.55,
      "eval_accuracy": 0.7954151808456444,
      "eval_loss": 0.5892155766487122,
      "eval_runtime": 36.0876,
      "eval_samples_per_second": 271.977,
      "eval_steps_per_second": 8.507,
      "step": 75000
    },
    {
      "epoch": 2.58,
      "learning_rate": 2.7898550724637686e-06,
      "loss": 0.3053,
      "step": 76000
    },
    {
      "epoch": 2.58,
      "eval_accuracy": 0.7962302598064187,
      "eval_loss": 0.5795509815216064,
      "eval_runtime": 36.0517,
      "eval_samples_per_second": 272.248,
      "eval_steps_per_second": 8.516,
      "step": 76000
    },
    {
      "epoch": 2.62,
      "learning_rate": 2.563405797101449e-06,
      "loss": 0.3117,
      "step": 77000
    },
    {
      "epoch": 2.62,
      "eval_accuracy": 0.798064187468161,
      "eval_loss": 0.5763091444969177,
      "eval_runtime": 36.0281,
      "eval_samples_per_second": 272.426,
      "eval_steps_per_second": 8.521,
      "step": 77000
    },
    {
      "epoch": 2.65,
      "learning_rate": 2.3369565217391307e-06,
      "loss": 0.3062,
      "step": 78000
    },
    {
      "epoch": 2.65,
      "eval_accuracy": 0.7964340295466124,
      "eval_loss": 0.5851861238479614,
      "eval_runtime": 37.7429,
      "eval_samples_per_second": 260.049,
      "eval_steps_per_second": 8.134,
      "step": 78000
    },
    {
      "epoch": 2.68,
      "learning_rate": 2.1105072463768117e-06,
      "loss": 0.3004,
      "step": 79000
    },
    {
      "epoch": 2.68,
      "eval_accuracy": 0.7966377992868059,
      "eval_loss": 0.5793375968933105,
      "eval_runtime": 37.5317,
      "eval_samples_per_second": 261.512,
      "eval_steps_per_second": 8.18,
      "step": 79000
    },
    {
      "epoch": 2.72,
      "learning_rate": 1.884057971014493e-06,
      "loss": 0.3146,
      "step": 80000
    },
    {
      "epoch": 2.72,
      "eval_accuracy": 0.7984717269485482,
      "eval_loss": 0.569298505783081,
      "eval_runtime": 36.0904,
      "eval_samples_per_second": 271.956,
      "eval_steps_per_second": 8.506,
      "step": 80000
    },
    {
      "epoch": 2.75,
      "learning_rate": 1.657608695652174e-06,
      "loss": 0.3146,
      "step": 81000
    },
    {
      "epoch": 2.75,
      "eval_accuracy": 0.7981660723382578,
      "eval_loss": 0.5787694454193115,
      "eval_runtime": 36.6231,
      "eval_samples_per_second": 268.0,
      "eval_steps_per_second": 8.383,
      "step": 81000
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.4311594202898552e-06,
      "loss": 0.3079,
      "step": 82000
    },
    {
      "epoch": 2.79,
      "eval_accuracy": 0.7977585328578706,
      "eval_loss": 0.5726253986358643,
      "eval_runtime": 37.51,
      "eval_samples_per_second": 261.663,
      "eval_steps_per_second": 8.184,
      "step": 82000
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.2047101449275363e-06,
      "loss": 0.3058,
      "step": 83000
    },
    {
      "epoch": 2.82,
      "eval_accuracy": 0.7987773815588385,
      "eval_loss": 0.5677123665809631,
      "eval_runtime": 37.4144,
      "eval_samples_per_second": 262.332,
      "eval_steps_per_second": 8.205,
      "step": 83000
    },
    {
      "epoch": 2.85,
      "learning_rate": 9.782608695652175e-07,
      "loss": 0.3055,
      "step": 84000
    },
    {
      "epoch": 2.85,
      "eval_accuracy": 0.7981660723382578,
      "eval_loss": 0.5700839161872864,
      "eval_runtime": 36.0967,
      "eval_samples_per_second": 271.908,
      "eval_steps_per_second": 8.505,
      "step": 84000
    },
    {
      "epoch": 2.89,
      "learning_rate": 7.518115942028987e-07,
      "loss": 0.3049,
      "step": 85000
    },
    {
      "epoch": 2.89,
      "eval_accuracy": 0.7970453387671931,
      "eval_loss": 0.5808990001678467,
      "eval_runtime": 37.6784,
      "eval_samples_per_second": 260.494,
      "eval_steps_per_second": 8.148,
      "step": 85000
    },
    {
      "epoch": 2.92,
      "learning_rate": 5.253623188405797e-07,
      "loss": 0.3044,
      "step": 86000
    },
    {
      "epoch": 2.92,
      "eval_accuracy": 0.798573611818645,
      "eval_loss": 0.5740913152694702,
      "eval_runtime": 37.5561,
      "eval_samples_per_second": 261.342,
      "eval_steps_per_second": 8.174,
      "step": 86000
    },
    {
      "epoch": 2.96,
      "learning_rate": 2.989130434782609e-07,
      "loss": 0.3057,
      "step": 87000
    },
    {
      "epoch": 2.96,
      "eval_accuracy": 0.7979623025980642,
      "eval_loss": 0.5742570757865906,
      "eval_runtime": 36.0024,
      "eval_samples_per_second": 272.621,
      "eval_steps_per_second": 8.527,
      "step": 87000
    },
    {
      "epoch": 2.99,
      "learning_rate": 7.246376811594204e-08,
      "loss": 0.3081,
      "step": 88000
    },
    {
      "epoch": 2.99,
      "eval_accuracy": 0.7977585328578706,
      "eval_loss": 0.5770518183708191,
      "eval_runtime": 35.9949,
      "eval_samples_per_second": 272.677,
      "eval_steps_per_second": 8.529,
      "step": 88000
    },
    {
      "epoch": 3.0,
      "step": 88320,
      "total_flos": 1.0090192573609696e+17,
      "train_loss": 0.41023221776105356,
      "train_runtime": 36982.4162,
      "train_samples_per_second": 76.42,
      "train_steps_per_second": 2.388
    }
  ],
  "max_steps": 88320,
  "num_train_epochs": 3,
  "total_flos": 1.0090192573609696e+17,
  "trial_name": null,
  "trial_params": null
}