{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 29.891151091309005,
  "global_step": 530000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 5e-05,
      "loss": 7.9054,
      "step": 500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.928493793261255e-05,
      "loss": 6.3313,
      "step": 1000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.85698758652251e-05,
      "loss": 5.9425,
      "step": 1500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.785481379783766e-05,
      "loss": 5.7223,
      "step": 2000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7139751730450204e-05,
      "loss": 5.5629,
      "step": 2500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.642468966306275e-05,
      "loss": 5.442,
      "step": 3000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.570962759567531e-05,
      "loss": 5.334,
      "step": 3500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.499456552828786e-05,
      "loss": 5.2403,
      "step": 4000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.4279503460900405e-05,
      "loss": 5.1663,
      "step": 4500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.356444139351296e-05,
      "loss": 5.0992,
      "step": 5000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.284937932612551e-05,
      "loss": 5.0405,
      "step": 5500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.213431725873806e-05,
      "loss": 4.986,
      "step": 6000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.1419255191350606e-05,
      "loss": 4.9365,
      "step": 6500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.070419312396317e-05,
      "loss": 4.8932,
      "step": 7000
    },
    {
      "epoch": 0.42,
      "learning_rate": 3.9989131056575714e-05,
      "loss": 4.8477,
      "step": 7500
    },
    {
      "epoch": 0.45,
      "learning_rate": 3.927406898918826e-05,
      "loss": 4.8121,
      "step": 8000
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.8559006921800814e-05,
      "loss": 4.7778,
      "step": 8500
    },
    {
      "epoch": 0.51,
      "learning_rate": 3.784394485441337e-05,
      "loss": 4.7411,
      "step": 9000
    },
    {
      "epoch": 0.54,
      "learning_rate": 3.7128882787025915e-05,
      "loss": 4.7129,
      "step": 9500
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.641382071963846e-05,
      "loss": 4.6799,
      "step": 10000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.432817196982588e-05,
      "loss": 4.6588,
      "step": 10500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.404458056831717e-05,
      "loss": 4.6292,
      "step": 11000
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.376098916680846e-05,
      "loss": 4.6063,
      "step": 11500
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.347739776529976e-05,
      "loss": 4.5796,
      "step": 12000
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.319380636379105e-05,
      "loss": 4.5554,
      "step": 12500
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.2910214962282343e-05,
      "loss": 4.5335,
      "step": 13000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.2626623560773635e-05,
      "loss": 4.5173,
      "step": 13500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.2343032159264934e-05,
      "loss": 4.4928,
      "step": 14000
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.2059440757756225e-05,
      "loss": 4.4717,
      "step": 14500
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.1775849356247524e-05,
      "loss": 4.4479,
      "step": 15000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.1492257954738816e-05,
      "loss": 4.437,
      "step": 15500
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.120866655323011e-05,
      "loss": 4.4213,
      "step": 16000
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.0925075151721406e-05,
      "loss": 4.4059,
      "step": 16500
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.06414837502127e-05,
      "loss": 4.3865,
      "step": 17000
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.035789234870399e-05,
      "loss": 4.3713,
      "step": 17500
    },
    {
      "epoch": 1.0,
      "eval_loss": 4.288460731506348,
      "eval_runtime": 860.8156,
      "eval_samples_per_second": 264.664,
      "eval_steps_per_second": 33.084,
      "step": 17731
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.007430094719528e-05,
      "loss": 4.3433,
      "step": 18000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.979070954568658e-05,
      "loss": 4.3132,
      "step": 18500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.950711814417787e-05,
      "loss": 4.3063,
      "step": 19000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.922352674266916e-05,
      "loss": 4.2905,
      "step": 19500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.893993534116046e-05,
      "loss": 4.2872,
      "step": 20000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.8656343939651754e-05,
      "loss": 4.2758,
      "step": 20500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.8372752538143045e-05,
      "loss": 4.2675,
      "step": 21000
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.808916113663434e-05,
      "loss": 4.2582,
      "step": 21500
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.7805569735125636e-05,
      "loss": 4.2483,
      "step": 22000
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.752197833361693e-05,
      "loss": 4.2384,
      "step": 22500
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.723838693210822e-05,
      "loss": 4.2275,
      "step": 23000
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.695479553059951e-05,
      "loss": 4.2214,
      "step": 23500
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.667120412909081e-05,
      "loss": 4.2135,
      "step": 24000
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.63876127275821e-05,
      "loss": 4.2036,
      "step": 24500
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.610402132607339e-05,
      "loss": 4.1925,
      "step": 25000
    },
    {
      "epoch": 1.44,
      "learning_rate": 3.582042992456469e-05,
      "loss": 4.1852,
      "step": 25500
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.553683852305598e-05,
      "loss": 4.1817,
      "step": 26000
    },
    {
      "epoch": 1.49,
      "learning_rate": 3.5253247121547275e-05,
      "loss": 4.1713,
      "step": 26500
    },
    {
      "epoch": 1.52,
      "learning_rate": 3.4969655720038567e-05,
      "loss": 4.1722,
      "step": 27000
    },
    {
      "epoch": 1.55,
      "learning_rate": 3.4686064318529865e-05,
      "loss": 4.1611,
      "step": 27500
    },
    {
      "epoch": 1.58,
      "learning_rate": 3.440247291702116e-05,
      "loss": 4.1498,
      "step": 28000
    },
    {
      "epoch": 1.61,
      "learning_rate": 3.4118881515512455e-05,
      "loss": 4.1434,
      "step": 28500
    },
    {
      "epoch": 1.64,
      "learning_rate": 3.383529011400374e-05,
      "loss": 4.1377,
      "step": 29000
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.355169871249504e-05,
      "loss": 4.1337,
      "step": 29500
    },
    {
      "epoch": 1.69,
      "learning_rate": 3.326810731098634e-05,
      "loss": 4.1265,
      "step": 30000
    },
    {
      "epoch": 1.72,
      "learning_rate": 3.298451590947763e-05,
      "loss": 4.1284,
      "step": 30500
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.270092450796892e-05,
      "loss": 4.1102,
      "step": 31000
    },
    {
      "epoch": 1.78,
      "learning_rate": 3.241733310646021e-05,
      "loss": 4.1166,
      "step": 31500
    },
    {
      "epoch": 1.8,
      "learning_rate": 3.213374170495151e-05,
      "loss": 4.1047,
      "step": 32000
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.18501503034428e-05,
      "loss": 4.0991,
      "step": 32500
    },
    {
      "epoch": 1.86,
      "learning_rate": 3.1566558901934095e-05,
      "loss": 4.0919,
      "step": 33000
    },
    {
      "epoch": 1.89,
      "learning_rate": 3.1282967500425386e-05,
      "loss": 4.0908,
      "step": 33500
    },
    {
      "epoch": 1.92,
      "learning_rate": 3.0999376098916685e-05,
      "loss": 4.081,
      "step": 34000
    },
    {
      "epoch": 1.95,
      "learning_rate": 3.0715784697407977e-05,
      "loss": 4.0792,
      "step": 34500
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.043219329589927e-05,
      "loss": 4.0755,
      "step": 35000
    },
    {
      "epoch": 2.0,
      "eval_loss": 4.004979610443115,
      "eval_runtime": 860.9555,
      "eval_samples_per_second": 264.621,
      "eval_steps_per_second": 33.078,
      "step": 35462
    },
    {
      "epoch": 2.0,
      "learning_rate": 3.0148601894390567e-05,
      "loss": 4.0625,
      "step": 35500
    },
    {
      "epoch": 2.03,
      "learning_rate": 2.9865010492881855e-05,
      "loss": 4.0114,
      "step": 36000
    },
    {
      "epoch": 2.06,
      "learning_rate": 2.9581419091373154e-05,
      "loss": 4.0113,
      "step": 36500
    },
    {
      "epoch": 2.09,
      "learning_rate": 2.9297827689864442e-05,
      "loss": 4.0112,
      "step": 37000
    },
    {
      "epoch": 2.11,
      "learning_rate": 2.901423628835574e-05,
      "loss": 4.002,
      "step": 37500
    },
    {
      "epoch": 2.14,
      "learning_rate": 2.873064488684703e-05,
      "loss": 4.005,
      "step": 38000
    },
    {
      "epoch": 2.17,
      "learning_rate": 2.8447053485338327e-05,
      "loss": 4.0107,
      "step": 38500
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.8163462083829616e-05,
      "loss": 3.9993,
      "step": 39000
    },
    {
      "epoch": 2.23,
      "learning_rate": 2.7879870682320914e-05,
      "loss": 4.0016,
      "step": 39500
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.759627928081221e-05,
      "loss": 3.9919,
      "step": 40000
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.73126878793035e-05,
      "loss": 3.9919,
      "step": 40500
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.7029096477794796e-05,
      "loss": 3.9848,
      "step": 41000
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.6745505076286088e-05,
      "loss": 3.9896,
      "step": 41500
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.6461913674777383e-05,
      "loss": 3.9849,
      "step": 42000
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.6178322273268675e-05,
      "loss": 3.9784,
      "step": 42500
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.589473087175997e-05,
      "loss": 3.9746,
      "step": 43000
    },
    {
      "epoch": 2.45,
      "learning_rate": 2.5611139470251262e-05,
      "loss": 3.9651,
      "step": 43500
    },
    {
      "epoch": 2.48,
      "learning_rate": 2.5327548068742557e-05,
      "loss": 3.9689,
      "step": 44000
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.5043956667233852e-05,
      "loss": 3.9698,
      "step": 44500
    },
    {
      "epoch": 2.54,
      "learning_rate": 2.4760365265725144e-05,
      "loss": 3.9631,
      "step": 45000
    },
    {
      "epoch": 2.57,
      "learning_rate": 2.447677386421644e-05,
      "loss": 3.9595,
      "step": 45500
    },
    {
      "epoch": 2.59,
      "learning_rate": 2.419318246270773e-05,
      "loss": 3.9582,
      "step": 46000
    },
    {
      "epoch": 2.62,
      "learning_rate": 2.3909591061199026e-05,
      "loss": 3.9564,
      "step": 46500
    },
    {
      "epoch": 2.65,
      "learning_rate": 2.362599965969032e-05,
      "loss": 3.9534,
      "step": 47000
    },
    {
      "epoch": 2.68,
      "learning_rate": 2.3342408258181613e-05,
      "loss": 3.95,
      "step": 47500
    },
    {
      "epoch": 2.71,
      "learning_rate": 2.3058816856672908e-05,
      "loss": 3.9472,
      "step": 48000
    },
    {
      "epoch": 2.74,
      "learning_rate": 2.27752254551642e-05,
      "loss": 3.9486,
      "step": 48500
    },
    {
      "epoch": 2.76,
      "learning_rate": 2.2491634053655495e-05,
      "loss": 3.9404,
      "step": 49000
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.2208042652146787e-05,
      "loss": 3.9339,
      "step": 49500
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.192445125063808e-05,
      "loss": 3.9353,
      "step": 50000
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.1640859849129373e-05,
      "loss": 3.9354,
      "step": 50500
    },
    {
      "epoch": 2.88,
      "learning_rate": 2.135726844762067e-05,
      "loss": 3.9301,
      "step": 51000
    },
    {
      "epoch": 2.9,
      "learning_rate": 2.1073677046111964e-05,
      "loss": 3.9274,
      "step": 51500
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.079008564460326e-05,
      "loss": 3.9231,
      "step": 52000
    },
    {
      "epoch": 2.96,
      "learning_rate": 2.050649424309455e-05,
      "loss": 3.9214,
      "step": 52500
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.0222902841585846e-05,
      "loss": 3.9244,
      "step": 53000
    },
    {
      "epoch": 3.0,
      "eval_loss": 3.8807497024536133,
      "eval_runtime": 859.6671,
      "eval_samples_per_second": 265.018,
      "eval_steps_per_second": 33.128,
      "step": 53193
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.9939311440077137e-05,
      "loss": 3.8879,
      "step": 53500
    },
    {
      "epoch": 3.05,
      "learning_rate": 1.9655720038568433e-05,
      "loss": 3.8667,
      "step": 54000
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.9372128637059724e-05,
      "loss": 3.8687,
      "step": 54500
    },
    {
      "epoch": 3.1,
      "learning_rate": 1.908853723555102e-05,
      "loss": 3.8726,
      "step": 55000
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.880494583404231e-05,
      "loss": 3.8683,
      "step": 55500
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.8521354432533606e-05,
      "loss": 3.8649,
      "step": 56000
    },
    {
      "epoch": 3.19,
      "learning_rate": 1.82377630310249e-05,
      "loss": 3.8723,
      "step": 56500
    },
    {
      "epoch": 3.21,
      "learning_rate": 1.7954171629516197e-05,
      "loss": 3.867,
      "step": 57000
    },
    {
      "epoch": 3.24,
      "learning_rate": 1.767058022800749e-05,
      "loss": 3.8646,
      "step": 57500
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.7386988826498783e-05,
      "loss": 3.8663,
      "step": 58000
    },
    {
      "epoch": 3.3,
      "learning_rate": 1.7103397424990075e-05,
      "loss": 3.8549,
      "step": 58500
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.681980602348137e-05,
      "loss": 3.8581,
      "step": 59000
    },
    {
      "epoch": 3.36,
      "learning_rate": 1.6536214621972662e-05,
      "loss": 3.8602,
      "step": 59500
    },
    {
      "epoch": 3.38,
      "learning_rate": 1.6252623220463957e-05,
      "loss": 3.8606,
      "step": 60000
    },
    {
      "epoch": 3.41,
      "learning_rate": 1.596903181895525e-05,
      "loss": 3.8573,
      "step": 60500
    },
    {
      "epoch": 3.44,
      "learning_rate": 1.5685440417446544e-05,
      "loss": 3.8579,
      "step": 61000
    },
    {
      "epoch": 3.47,
      "learning_rate": 1.540184901593784e-05,
      "loss": 3.853,
      "step": 61500
    },
    {
      "epoch": 3.5,
      "learning_rate": 1.5118257614429133e-05,
      "loss": 3.8544,
      "step": 62000
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.4834666212920426e-05,
      "loss": 3.847,
      "step": 62500
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.455107481141172e-05,
      "loss": 3.8492,
      "step": 63000
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.4267483409903013e-05,
      "loss": 3.8466,
      "step": 63500
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.3983892008394306e-05,
      "loss": 3.8439,
      "step": 64000
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.37003006068856e-05,
      "loss": 3.8457,
      "step": 64500
    },
    {
      "epoch": 3.67,
      "learning_rate": 1.3416709205376893e-05,
      "loss": 3.8434,
      "step": 65000
    },
    {
      "epoch": 3.69,
      "learning_rate": 1.3133117803868187e-05,
      "loss": 3.8368,
      "step": 65500
    },
    {
      "epoch": 3.72,
      "learning_rate": 1.2849526402359482e-05,
      "loss": 3.841,
      "step": 66000
    },
    {
      "epoch": 3.75,
      "learning_rate": 1.2565935000850775e-05,
      "loss": 3.8426,
      "step": 66500
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.2282343599342069e-05,
      "loss": 3.8318,
      "step": 67000
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.1998752197833362e-05,
      "loss": 3.8418,
      "step": 67500
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.1715160796324656e-05,
      "loss": 3.834,
      "step": 68000
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.1431569394815949e-05,
      "loss": 3.8356,
      "step": 68500
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.1147977993307242e-05,
      "loss": 3.8393,
      "step": 69000
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.0864386591798536e-05,
      "loss": 3.8317,
      "step": 69500
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.0580795190289831e-05,
      "loss": 3.8317,
      "step": 70000
    },
    {
      "epoch": 3.98,
      "learning_rate": 1.0297203788781125e-05,
      "loss": 3.8276,
      "step": 70500
    },
    {
      "epoch": 4.0,
      "eval_loss": 3.812286853790283,
      "eval_runtime": 859.7712,
      "eval_samples_per_second": 264.986,
      "eval_steps_per_second": 33.124,
      "step": 70924
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.0013612387272418e-05,
      "loss": 3.8235,
      "step": 71000
    },
    {
      "epoch": 4.03,
      "learning_rate": 9.730020985763711e-06,
      "loss": 3.7872,
      "step": 71500
    },
    {
      "epoch": 4.06,
      "learning_rate": 9.446429584255005e-06,
      "loss": 3.7813,
      "step": 72000
    },
    {
      "epoch": 4.09,
      "learning_rate": 9.1628381827463e-06,
      "loss": 3.7885,
      "step": 72500
    },
    {
      "epoch": 4.12,
      "learning_rate": 8.879246781237593e-06,
      "loss": 3.7837,
      "step": 73000
    },
    {
      "epoch": 4.15,
      "learning_rate": 8.595655379728887e-06,
      "loss": 3.7849,
      "step": 73500
    },
    {
      "epoch": 4.17,
      "learning_rate": 8.31206397822018e-06,
      "loss": 3.7867,
      "step": 74000
    },
    {
      "epoch": 4.2,
      "learning_rate": 8.028472576711475e-06,
      "loss": 3.7838,
      "step": 74500
    },
    {
      "epoch": 4.23,
      "learning_rate": 7.744881175202769e-06,
      "loss": 3.7853,
      "step": 75000
    },
    {
      "epoch": 4.26,
      "learning_rate": 7.461289773694062e-06,
      "loss": 3.7855,
      "step": 75500
    },
    {
      "epoch": 4.29,
      "learning_rate": 7.177698372185356e-06,
      "loss": 3.7883,
      "step": 76000
    },
    {
      "epoch": 4.31,
      "learning_rate": 6.894106970676649e-06,
      "loss": 3.7792,
      "step": 76500
    },
    {
      "epoch": 4.34,
      "learning_rate": 6.6105155691679434e-06,
      "loss": 3.7845,
      "step": 77000
    },
    {
      "epoch": 4.37,
      "learning_rate": 6.326924167659237e-06,
      "loss": 3.7818,
      "step": 77500
    },
    {
      "epoch": 4.4,
      "learning_rate": 6.04333276615053e-06,
      "loss": 3.7807,
      "step": 78000
    },
    {
      "epoch": 4.43,
      "learning_rate": 5.759741364641824e-06,
      "loss": 3.7801,
      "step": 78500
    },
    {
      "epoch": 4.46,
      "learning_rate": 5.476149963133118e-06,
      "loss": 3.7759,
      "step": 79000
    },
    {
      "epoch": 4.48,
      "learning_rate": 5.1925585616244115e-06,
      "loss": 3.7825,
      "step": 79500
    },
    {
      "epoch": 4.51,
      "learning_rate": 4.908967160115706e-06,
      "loss": 3.7858,
      "step": 80000
    },
    {
      "epoch": 4.54,
      "learning_rate": 2.737684520106329e-05,
      "loss": 3.8083,
      "step": 80500
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.7235450483569934e-05,
      "loss": 3.8201,
      "step": 81000
    },
    {
      "epoch": 4.6,
      "learning_rate": 2.7094055766076583e-05,
      "loss": 3.8216,
      "step": 81500
    },
    {
      "epoch": 4.62,
      "learning_rate": 2.695266104858323e-05,
      "loss": 3.8234,
      "step": 82000
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.6811266331089874e-05,
      "loss": 3.8227,
      "step": 82500
    },
    {
      "epoch": 4.68,
      "learning_rate": 2.666987161359652e-05,
      "loss": 3.8237,
      "step": 83000
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.652847689610316e-05,
      "loss": 3.8267,
      "step": 83500
    },
    {
      "epoch": 4.74,
      "learning_rate": 2.6387082178609807e-05,
      "loss": 3.8183,
      "step": 84000
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.6245687461116452e-05,
      "loss": 3.8273,
      "step": 84500
    },
    {
      "epoch": 4.79,
      "learning_rate": 2.6104292743623097e-05,
      "loss": 3.827,
      "step": 85000
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.5962898026129746e-05,
      "loss": 3.8238,
      "step": 85500
    },
    {
      "epoch": 4.85,
      "learning_rate": 2.582150330863639e-05,
      "loss": 3.8235,
      "step": 86000
    },
    {
      "epoch": 4.88,
      "learning_rate": 2.5680108591143037e-05,
      "loss": 3.8168,
      "step": 86500
    },
    {
      "epoch": 4.91,
      "learning_rate": 2.5538713873649683e-05,
      "loss": 3.8192,
      "step": 87000
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.5397319156156328e-05,
      "loss": 3.821,
      "step": 87500
    },
    {
      "epoch": 4.96,
      "learning_rate": 2.525592443866297e-05,
      "loss": 3.8137,
      "step": 88000
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.5114529721169615e-05,
      "loss": 3.8183,
      "step": 88500
    },
    {
      "epoch": 5.0,
      "eval_loss": 3.806265354156494,
      "eval_runtime": 860.47,
      "eval_samples_per_second": 264.77,
      "eval_steps_per_second": 33.097,
      "step": 88655
    },
    {
      "epoch": 5.02,
      "learning_rate": 2.4973135003676264e-05,
      "loss": 3.787,
      "step": 89000
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.483174028618291e-05,
      "loss": 3.7753,
      "step": 89500
    },
    {
      "epoch": 5.08,
      "learning_rate": 2.4690345568689555e-05,
      "loss": 3.781,
      "step": 90000
    },
    {
      "epoch": 5.1,
      "learning_rate": 2.45489508511962e-05,
      "loss": 3.781,
      "step": 90500
    },
    {
      "epoch": 5.13,
      "learning_rate": 2.4407556133702846e-05,
      "loss": 3.7791,
      "step": 91000
    },
    {
      "epoch": 5.16,
      "learning_rate": 2.426616141620949e-05,
      "loss": 3.7782,
      "step": 91500
    },
    {
      "epoch": 5.19,
      "learning_rate": 2.4124766698716137e-05,
      "loss": 3.7754,
      "step": 92000
    },
    {
      "epoch": 5.22,
      "learning_rate": 2.3983371981222782e-05,
      "loss": 3.7808,
      "step": 92500
    },
    {
      "epoch": 5.25,
      "learning_rate": 2.3841977263729427e-05,
      "loss": 3.7761,
      "step": 93000
    },
    {
      "epoch": 5.27,
      "learning_rate": 2.3700582546236076e-05,
      "loss": 3.7795,
      "step": 93500
    },
    {
      "epoch": 5.3,
      "learning_rate": 2.3559187828742718e-05,
      "loss": 3.7861,
      "step": 94000
    },
    {
      "epoch": 5.33,
      "learning_rate": 2.3417793111249364e-05,
      "loss": 3.7788,
      "step": 94500
    },
    {
      "epoch": 5.36,
      "learning_rate": 2.327639839375601e-05,
      "loss": 3.7763,
      "step": 95000
    },
    {
      "epoch": 5.39,
      "learning_rate": 2.3135003676262658e-05,
      "loss": 3.7804,
      "step": 95500
    },
    {
      "epoch": 5.41,
      "learning_rate": 2.2993608958769303e-05,
      "loss": 3.7721,
      "step": 96000
    },
    {
      "epoch": 5.44,
      "learning_rate": 2.2852214241275945e-05,
      "loss": 3.7698,
      "step": 96500
    },
    {
      "epoch": 5.47,
      "learning_rate": 2.271081952378259e-05,
      "loss": 3.775,
      "step": 97000
    },
    {
      "epoch": 5.5,
      "learning_rate": 2.256942480628924e-05,
      "loss": 3.772,
      "step": 97500
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.2428030088795885e-05,
      "loss": 3.7743,
      "step": 98000
    },
    {
      "epoch": 5.56,
      "learning_rate": 2.228663537130253e-05,
      "loss": 3.7746,
      "step": 98500
    },
    {
      "epoch": 5.58,
      "learning_rate": 2.2145240653809172e-05,
      "loss": 3.7706,
      "step": 99000
    },
    {
      "epoch": 5.61,
      "learning_rate": 2.200384593631582e-05,
      "loss": 3.7669,
      "step": 99500
    },
    {
      "epoch": 5.64,
      "learning_rate": 2.1862451218822467e-05,
      "loss": 3.7704,
      "step": 100000
    },
    {
      "epoch": 5.67,
      "learning_rate": 2.1721056501329112e-05,
      "loss": 3.7677,
      "step": 100500
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.1579661783835754e-05,
      "loss": 3.7705,
      "step": 101000
    },
    {
      "epoch": 5.72,
      "learning_rate": 2.1438267066342403e-05,
      "loss": 3.7676,
      "step": 101500
    },
    {
      "epoch": 5.75,
      "learning_rate": 2.1296872348849048e-05,
      "loss": 3.763,
      "step": 102000
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.1155477631355694e-05,
      "loss": 3.7676,
      "step": 102500
    },
    {
      "epoch": 5.81,
      "learning_rate": 2.101408291386234e-05,
      "loss": 3.7668,
      "step": 103000
    },
    {
      "epoch": 5.84,
      "learning_rate": 2.0872688196368985e-05,
      "loss": 3.763,
      "step": 103500
    },
    {
      "epoch": 5.87,
      "learning_rate": 2.073129347887563e-05,
      "loss": 3.762,
      "step": 104000
    },
    {
      "epoch": 5.89,
      "learning_rate": 2.0589898761382275e-05,
      "loss": 3.7595,
      "step": 104500
    },
    {
      "epoch": 5.92,
      "learning_rate": 2.044850404388892e-05,
      "loss": 3.7625,
      "step": 105000
    },
    {
      "epoch": 5.95,
      "learning_rate": 2.030710932639557e-05,
      "loss": 3.7623,
      "step": 105500
    },
    {
      "epoch": 5.98,
      "learning_rate": 2.016571460890221e-05,
      "loss": 3.7612,
      "step": 106000
    },
    {
      "epoch": 6.0,
      "eval_loss": 3.7527620792388916,
      "eval_runtime": 859.2059,
      "eval_samples_per_second": 265.16,
      "eval_steps_per_second": 33.146,
      "step": 106386
    },
    {
      "epoch": 6.01,
      "learning_rate": 2.0024319891408857e-05,
      "loss": 3.744,
      "step": 106500
    },
    {
      "epoch": 6.03,
      "learning_rate": 1.9882925173915502e-05,
      "loss": 3.7041,
      "step": 107000
    },
    {
      "epoch": 6.06,
      "learning_rate": 1.974153045642215e-05,
      "loss": 3.708,
      "step": 107500
    },
    {
      "epoch": 6.09,
      "learning_rate": 1.9600135738928793e-05,
      "loss": 3.7088,
      "step": 108000
    },
    {
      "epoch": 6.12,
      "learning_rate": 1.945874102143544e-05,
      "loss": 3.709,
      "step": 108500
    },
    {
      "epoch": 6.15,
      "learning_rate": 1.9317346303942084e-05,
      "loss": 3.7106,
      "step": 109000
    },
    {
      "epoch": 6.18,
      "learning_rate": 1.9175951586448733e-05,
      "loss": 3.7105,
      "step": 109500
    },
    {
      "epoch": 6.2,
      "learning_rate": 1.9034556868955378e-05,
      "loss": 3.7095,
      "step": 110000
    },
    {
      "epoch": 6.23,
      "learning_rate": 1.889316215146202e-05,
      "loss": 3.7096,
      "step": 110500
    },
    {
      "epoch": 6.26,
      "learning_rate": 1.8751767433968666e-05,
      "loss": 3.7127,
      "step": 111000
    },
    {
      "epoch": 6.29,
      "learning_rate": 1.8610372716475315e-05,
      "loss": 3.7108,
      "step": 111500
    },
    {
      "epoch": 6.32,
      "learning_rate": 1.846897799898196e-05,
      "loss": 3.7165,
      "step": 112000
    },
    {
      "epoch": 6.34,
      "learning_rate": 1.8327583281488605e-05,
      "loss": 3.7106,
      "step": 112500
    },
    {
      "epoch": 6.37,
      "learning_rate": 1.8186188563995247e-05,
      "loss": 3.7119,
      "step": 113000
    },
    {
      "epoch": 6.4,
      "learning_rate": 1.8044793846501896e-05,
      "loss": 3.7073,
      "step": 113500
    },
    {
      "epoch": 6.43,
      "learning_rate": 1.790339912900854e-05,
      "loss": 3.7135,
      "step": 114000
    },
    {
      "epoch": 6.46,
      "learning_rate": 1.7762004411515187e-05,
      "loss": 3.712,
      "step": 114500
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.7620609694021832e-05,
      "loss": 3.7126,
      "step": 115000
    },
    {
      "epoch": 6.51,
      "learning_rate": 1.7479214976528478e-05,
      "loss": 3.7101,
      "step": 115500
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.7337820259035123e-05,
      "loss": 3.7074,
      "step": 116000
    },
    {
      "epoch": 6.57,
      "learning_rate": 1.719642554154177e-05,
      "loss": 3.7061,
      "step": 116500
    },
    {
      "epoch": 6.6,
      "learning_rate": 1.7055030824048414e-05,
      "loss": 3.7018,
      "step": 117000
    },
    {
      "epoch": 6.63,
      "learning_rate": 1.691363610655506e-05,
      "loss": 3.7063,
      "step": 117500
    },
    {
      "epoch": 6.66,
      "learning_rate": 1.6772241389061705e-05,
      "loss": 3.7086,
      "step": 118000
    },
    {
      "epoch": 6.68,
      "learning_rate": 1.663084667156835e-05,
      "loss": 3.7064,
      "step": 118500
    },
    {
      "epoch": 6.71,
      "learning_rate": 1.6489451954074996e-05,
      "loss": 3.7049,
      "step": 119000
    },
    {
      "epoch": 6.74,
      "learning_rate": 1.6348057236581645e-05,
      "loss": 3.7019,
      "step": 119500
    },
    {
      "epoch": 6.77,
      "learning_rate": 1.6206662519088287e-05,
      "loss": 3.7057,
      "step": 120000
    },
    {
      "epoch": 6.8,
      "learning_rate": 1.6065267801594932e-05,
      "loss": 3.7046,
      "step": 120500
    },
    {
      "epoch": 6.82,
      "learning_rate": 1.5923873084101577e-05,
      "loss": 3.7094,
      "step": 121000
    },
    {
      "epoch": 6.85,
      "learning_rate": 1.5782478366608226e-05,
      "loss": 3.7016,
      "step": 121500
    },
    {
      "epoch": 6.88,
      "learning_rate": 1.564108364911487e-05,
      "loss": 3.6999,
      "step": 122000
    },
    {
      "epoch": 6.91,
      "learning_rate": 1.5499688931621514e-05,
      "loss": 3.7064,
      "step": 122500
    },
    {
      "epoch": 6.94,
      "learning_rate": 1.535829421412816e-05,
      "loss": 3.7024,
      "step": 123000
    },
    {
      "epoch": 6.97,
      "learning_rate": 1.5216899496634806e-05,
      "loss": 3.702,
      "step": 123500
    },
    {
      "epoch": 6.99,
      "learning_rate": 1.5075504779141453e-05,
      "loss": 3.6976,
      "step": 124000
    },
    {
      "epoch": 7.0,
      "eval_loss": 3.7144248485565186,
      "eval_runtime": 859.1145,
      "eval_samples_per_second": 265.188,
      "eval_steps_per_second": 33.149,
      "step": 124117
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.4934110061648099e-05,
      "loss": 3.6596,
      "step": 124500
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.4792715344154742e-05,
      "loss": 3.657,
      "step": 125000
    },
    {
      "epoch": 7.08,
      "learning_rate": 1.4651320626661388e-05,
      "loss": 3.6579,
      "step": 125500
    },
    {
      "epoch": 7.11,
      "learning_rate": 1.4509925909168035e-05,
      "loss": 3.6603,
      "step": 126000
    },
    {
      "epoch": 7.13,
      "learning_rate": 1.436853119167468e-05,
      "loss": 3.6607,
      "step": 126500
    },
    {
      "epoch": 7.16,
      "learning_rate": 1.4227136474181324e-05,
      "loss": 3.6531,
      "step": 127000
    },
    {
      "epoch": 7.19,
      "learning_rate": 1.408574175668797e-05,
      "loss": 3.6601,
      "step": 127500
    },
    {
      "epoch": 7.22,
      "learning_rate": 1.3944347039194617e-05,
      "loss": 3.6541,
      "step": 128000
    },
    {
      "epoch": 7.25,
      "learning_rate": 1.3802952321701262e-05,
      "loss": 3.6614,
      "step": 128500
    },
    {
      "epoch": 7.28,
      "learning_rate": 1.3661557604207909e-05,
      "loss": 3.6619,
      "step": 129000
    },
    {
      "epoch": 7.3,
      "learning_rate": 1.3520162886714551e-05,
      "loss": 3.6576,
      "step": 129500
    },
    {
      "epoch": 7.33,
      "learning_rate": 1.3378768169221198e-05,
      "loss": 3.6563,
      "step": 130000
    },
    {
      "epoch": 7.36,
      "learning_rate": 1.3237373451727844e-05,
      "loss": 3.6527,
      "step": 130500
    },
    {
      "epoch": 7.39,
      "learning_rate": 1.309597873423449e-05,
      "loss": 3.6587,
      "step": 131000
    },
    {
      "epoch": 7.42,
      "learning_rate": 1.2954584016741136e-05,
      "loss": 3.6628,
      "step": 131500
    },
    {
      "epoch": 7.44,
      "learning_rate": 1.281318929924778e-05,
      "loss": 3.6589,
      "step": 132000
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.2671794581754425e-05,
      "loss": 3.6568,
      "step": 132500
    },
    {
      "epoch": 7.5,
      "learning_rate": 1.2530399864261072e-05,
      "loss": 3.6583,
      "step": 133000
    },
    {
      "epoch": 7.53,
      "learning_rate": 1.2389005146767716e-05,
      "loss": 3.6586,
      "step": 133500
    },
    {
      "epoch": 7.56,
      "learning_rate": 1.2247610429274363e-05,
      "loss": 3.6534,
      "step": 134000
    },
    {
      "epoch": 7.59,
      "learning_rate": 1.2106215711781009e-05,
      "loss": 3.6598,
      "step": 134500
    },
    {
      "epoch": 7.61,
      "learning_rate": 1.1964820994287654e-05,
      "loss": 3.6616,
      "step": 135000
    },
    {
      "epoch": 7.64,
      "learning_rate": 1.18234262767943e-05,
      "loss": 3.6594,
      "step": 135500
    },
    {
      "epoch": 7.67,
      "learning_rate": 1.1682031559300945e-05,
      "loss": 3.6565,
      "step": 136000
    },
    {
      "epoch": 7.7,
      "learning_rate": 1.154063684180759e-05,
      "loss": 3.6628,
      "step": 136500
    },
    {
      "epoch": 7.73,
      "learning_rate": 1.1399242124314236e-05,
      "loss": 3.6568,
      "step": 137000
    },
    {
      "epoch": 7.75,
      "learning_rate": 1.1257847406820881e-05,
      "loss": 3.6521,
      "step": 137500
    },
    {
      "epoch": 7.78,
      "learning_rate": 1.1116452689327528e-05,
      "loss": 3.6545,
      "step": 138000
    },
    {
      "epoch": 7.81,
      "learning_rate": 1.0975057971834172e-05,
      "loss": 3.6551,
      "step": 138500
    },
    {
      "epoch": 7.84,
      "learning_rate": 1.0833663254340819e-05,
      "loss": 3.6546,
      "step": 139000
    },
    {
      "epoch": 7.87,
      "learning_rate": 1.0692268536847463e-05,
      "loss": 3.6545,
      "step": 139500
    },
    {
      "epoch": 7.9,
      "learning_rate": 1.055087381935411e-05,
      "loss": 3.6546,
      "step": 140000
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.0409479101860754e-05,
      "loss": 3.6538,
      "step": 140500
    },
    {
      "epoch": 7.95,
      "learning_rate": 1.02680843843674e-05,
      "loss": 3.6578,
      "step": 141000
    },
    {
      "epoch": 7.98,
      "learning_rate": 1.0126689666874046e-05,
      "loss": 3.6553,
      "step": 141500
    },
    {
      "epoch": 8.0,
      "eval_loss": 3.688018560409546,
      "eval_runtime": 860.525,
      "eval_samples_per_second": 264.753,
      "eval_steps_per_second": 33.095,
      "step": 141848
    },
    {
      "epoch": 8.01,
      "learning_rate": 9.985294949380691e-06,
      "loss": 3.6382,
      "step": 142000
    },
    {
      "epoch": 8.04,
      "learning_rate": 9.843900231887337e-06,
      "loss": 3.6156,
      "step": 142500
    },
    {
      "epoch": 8.06,
      "learning_rate": 9.702505514393982e-06,
      "loss": 3.6166,
      "step": 143000
    },
    {
      "epoch": 8.09,
      "learning_rate": 9.561110796900628e-06,
      "loss": 3.6137,
      "step": 143500
    },
    {
      "epoch": 8.12,
      "learning_rate": 9.419716079407275e-06,
      "loss": 3.618,
      "step": 144000
    },
    {
      "epoch": 8.15,
      "learning_rate": 9.278321361913919e-06,
      "loss": 3.6158,
      "step": 144500
    },
    {
      "epoch": 8.18,
      "learning_rate": 9.136926644420566e-06,
      "loss": 3.6158,
      "step": 145000
    },
    {
      "epoch": 8.21,
      "learning_rate": 8.99553192692721e-06,
      "loss": 3.6178,
      "step": 145500
    },
    {
      "epoch": 8.23,
      "learning_rate": 8.854137209433856e-06,
      "loss": 3.6262,
      "step": 146000
    },
    {
      "epoch": 8.26,
      "learning_rate": 8.7127424919405e-06,
      "loss": 3.6197,
      "step": 146500
    },
    {
      "epoch": 8.29,
      "learning_rate": 8.571347774447147e-06,
      "loss": 3.6185,
      "step": 147000
    },
    {
      "epoch": 8.32,
      "learning_rate": 8.429953056953793e-06,
      "loss": 3.617,
      "step": 147500
    },
    {
      "epoch": 8.35,
      "learning_rate": 8.288558339460438e-06,
      "loss": 3.6157,
      "step": 148000
    },
    {
      "epoch": 8.38,
      "learning_rate": 8.147163621967084e-06,
      "loss": 3.6202,
      "step": 148500
    },
    {
      "epoch": 8.4,
      "learning_rate": 8.005768904473729e-06,
      "loss": 3.6187,
      "step": 149000
    },
    {
      "epoch": 8.43,
      "learning_rate": 7.864374186980374e-06,
      "loss": 3.6189,
      "step": 149500
    },
    {
      "epoch": 8.46,
      "learning_rate": 7.72297946948702e-06,
      "loss": 3.6212,
      "step": 150000
    },
    {
      "epoch": 8.49,
      "learning_rate": 7.581584751993666e-06,
      "loss": 3.6188,
      "step": 150500
    },
    {
      "epoch": 8.52,
      "learning_rate": 7.440190034500312e-06,
      "loss": 3.6146,
      "step": 151000
    },
    {
      "epoch": 8.54,
      "learning_rate": 7.298795317006957e-06,
      "loss": 3.6246,
      "step": 151500
    },
    {
      "epoch": 8.57,
      "learning_rate": 7.157400599513603e-06,
      "loss": 3.6147,
      "step": 152000
    },
    {
      "epoch": 8.6,
      "learning_rate": 7.016005882020248e-06,
      "loss": 3.6233,
      "step": 152500
    },
    {
      "epoch": 8.63,
      "learning_rate": 6.874611164526894e-06,
      "loss": 3.621,
      "step": 153000
    },
    {
      "epoch": 8.66,
      "learning_rate": 6.7332164470335385e-06,
      "loss": 3.6209,
      "step": 153500
    },
    {
      "epoch": 8.69,
      "learning_rate": 6.591821729540185e-06,
      "loss": 3.6155,
      "step": 154000
    },
    {
      "epoch": 8.71,
      "learning_rate": 6.450427012046831e-06,
      "loss": 3.6176,
      "step": 154500
    },
    {
      "epoch": 8.74,
      "learning_rate": 6.309032294553476e-06,
      "loss": 3.6138,
      "step": 155000
    },
    {
      "epoch": 8.77,
      "learning_rate": 6.167637577060122e-06,
      "loss": 3.6194,
      "step": 155500
    },
    {
      "epoch": 8.8,
      "learning_rate": 6.026242859566767e-06,
      "loss": 3.6161,
      "step": 156000
    },
    {
      "epoch": 8.83,
      "learning_rate": 5.884848142073413e-06,
      "loss": 3.6121,
      "step": 156500
    },
    {
      "epoch": 8.85,
      "learning_rate": 5.743453424580058e-06,
      "loss": 3.6169,
      "step": 157000
    },
    {
      "epoch": 8.88,
      "learning_rate": 5.6020587070867035e-06,
      "loss": 3.6173,
      "step": 157500
    },
    {
      "epoch": 8.91,
      "learning_rate": 5.460663989593349e-06,
      "loss": 3.6137,
      "step": 158000
    },
    {
      "epoch": 8.94,
      "learning_rate": 5.319269272099994e-06,
      "loss": 3.614,
      "step": 158500
    },
    {
      "epoch": 8.97,
      "learning_rate": 5.177874554606641e-06,
      "loss": 3.6115,
      "step": 159000
    },
    {
      "epoch": 9.0,
      "learning_rate": 5.036479837113286e-06,
      "loss": 3.6107,
      "step": 159500
    },
    {
      "epoch": 9.0,
      "eval_loss": 3.6684656143188477,
      "eval_runtime": 860.3576,
      "eval_samples_per_second": 264.805,
      "eval_steps_per_second": 33.101,
      "step": 159579
    },
    {
      "epoch": 9.02,
      "learning_rate": 4.895085119619931e-06,
      "loss": 3.5905,
      "step": 160000
    },
    {
      "epoch": 9.05,
      "learning_rate": 4.753690402126577e-06,
      "loss": 3.5857,
      "step": 160500
    },
    {
      "epoch": 9.08,
      "learning_rate": 4.612295684633222e-06,
      "loss": 3.584,
      "step": 161000
    },
    {
      "epoch": 9.11,
      "learning_rate": 4.470900967139868e-06,
      "loss": 3.5892,
      "step": 161500
    },
    {
      "epoch": 9.14,
      "learning_rate": 4.329506249646514e-06,
      "loss": 3.5847,
      "step": 162000
    },
    {
      "epoch": 9.16,
      "learning_rate": 4.188111532153159e-06,
      "loss": 3.5864,
      "step": 162500
    },
    {
      "epoch": 9.19,
      "learning_rate": 4.046716814659805e-06,
      "loss": 3.5882,
      "step": 163000
    },
    {
      "epoch": 9.22,
      "learning_rate": 3.90532209716645e-06,
      "loss": 3.5864,
      "step": 163500
    },
    {
      "epoch": 9.25,
      "learning_rate": 3.7639273796730956e-06,
      "loss": 3.5862,
      "step": 164000
    },
    {
      "epoch": 9.28,
      "learning_rate": 3.622532662179741e-06,
      "loss": 3.5871,
      "step": 164500
    },
    {
      "epoch": 9.31,
      "learning_rate": 3.4811379446863864e-06,
      "loss": 3.5843,
      "step": 165000
    },
    {
      "epoch": 9.33,
      "learning_rate": 3.3397432271930326e-06,
      "loss": 3.5877,
      "step": 165500
    },
    {
      "epoch": 9.36,
      "learning_rate": 3.198348509699678e-06,
      "loss": 3.5847,
      "step": 166000
    },
    {
      "epoch": 9.39,
      "learning_rate": 3.0569537922063235e-06,
      "loss": 3.5853,
      "step": 166500
    },
    {
      "epoch": 9.42,
      "learning_rate": 2.915559074712969e-06,
      "loss": 3.5876,
      "step": 167000
    },
    {
      "epoch": 9.45,
      "learning_rate": 2.7741643572196143e-06,
      "loss": 3.5896,
      "step": 167500
    },
    {
      "epoch": 9.47,
      "learning_rate": 2.63276963972626e-06,
      "loss": 3.5886,
      "step": 168000
    },
    {
      "epoch": 9.5,
      "learning_rate": 2.4913749222329055e-06,
      "loss": 3.5863,
      "step": 168500
    },
    {
      "epoch": 9.53,
      "learning_rate": 2.349980204739551e-06,
      "loss": 3.5921,
      "step": 169000
    },
    {
      "epoch": 9.56,
      "learning_rate": 2.208585487246197e-06,
      "loss": 3.5952,
      "step": 169500
    },
    {
      "epoch": 9.59,
      "learning_rate": 2.067190769752842e-06,
      "loss": 3.5875,
      "step": 170000
    },
    {
      "epoch": 9.62,
      "learning_rate": 2.5996837230317406e-05,
      "loss": 3.6184,
      "step": 170500
    },
    {
      "epoch": 9.64,
      "learning_rate": 2.592623969275952e-05,
      "loss": 3.6347,
      "step": 171000
    },
    {
      "epoch": 9.67,
      "learning_rate": 2.5855642155201625e-05,
      "loss": 3.6381,
      "step": 171500
    },
    {
      "epoch": 9.7,
      "learning_rate": 2.5785044617643738e-05,
      "loss": 3.6428,
      "step": 172000
    },
    {
      "epoch": 9.73,
      "learning_rate": 2.5714447080085845e-05,
      "loss": 3.6468,
      "step": 172500
    },
    {
      "epoch": 9.76,
      "learning_rate": 2.5643849542527958e-05,
      "loss": 3.6507,
      "step": 173000
    },
    {
      "epoch": 9.79,
      "learning_rate": 2.5573252004970067e-05,
      "loss": 3.6533,
      "step": 173500
    },
    {
      "epoch": 9.81,
      "learning_rate": 2.550265446741218e-05,
      "loss": 3.646,
      "step": 174000
    },
    {
      "epoch": 9.84,
      "learning_rate": 2.5432056929854287e-05,
      "loss": 3.6513,
      "step": 174500
    },
    {
      "epoch": 9.87,
      "learning_rate": 2.53614593922964e-05,
      "loss": 3.6528,
      "step": 175000
    },
    {
      "epoch": 9.9,
      "learning_rate": 2.5290861854738506e-05,
      "loss": 3.656,
      "step": 175500
    },
    {
      "epoch": 9.93,
      "learning_rate": 2.522026431718062e-05,
      "loss": 3.651,
      "step": 176000
    },
    {
      "epoch": 9.95,
      "learning_rate": 2.5149666779622726e-05,
      "loss": 3.6527,
      "step": 176500
    },
    {
      "epoch": 9.98,
      "learning_rate": 2.507906924206484e-05,
      "loss": 3.6576,
      "step": 177000
    },
    {
      "epoch": 10.0,
      "eval_loss": 3.6973445415496826,
      "eval_runtime": 860.6149,
      "eval_samples_per_second": 264.726,
      "eval_steps_per_second": 33.091,
      "step": 177310
    },
    {
      "epoch": 10.01,
      "learning_rate": 2.500847170450695e-05,
      "loss": 3.6368,
      "step": 177500
    },
    {
      "epoch": 10.04,
      "learning_rate": 2.4937874166949058e-05,
      "loss": 3.6203,
      "step": 178000
    },
    {
      "epoch": 10.07,
      "learning_rate": 2.4867276629391168e-05,
      "loss": 3.6227,
      "step": 178500
    },
    {
      "epoch": 10.1,
      "learning_rate": 2.4796679091833278e-05,
      "loss": 3.6261,
      "step": 179000
    },
    {
      "epoch": 10.12,
      "learning_rate": 2.4726081554275387e-05,
      "loss": 3.6253,
      "step": 179500
    },
    {
      "epoch": 10.15,
      "learning_rate": 2.4655484016717497e-05,
      "loss": 3.6296,
      "step": 180000
    },
    {
      "epoch": 10.18,
      "learning_rate": 2.4584886479159607e-05,
      "loss": 3.6292,
      "step": 180500
    },
    {
      "epoch": 10.21,
      "learning_rate": 2.4514288941601717e-05,
      "loss": 3.635,
      "step": 181000
    },
    {
      "epoch": 10.24,
      "learning_rate": 2.444369140404383e-05,
      "loss": 3.6267,
      "step": 181500
    },
    {
      "epoch": 10.26,
      "learning_rate": 2.437309386648594e-05,
      "loss": 3.637,
      "step": 182000
    },
    {
      "epoch": 10.29,
      "learning_rate": 2.430249632892805e-05,
      "loss": 3.6339,
      "step": 182500
    },
    {
      "epoch": 10.32,
      "learning_rate": 2.423189879137016e-05,
      "loss": 3.6369,
      "step": 183000
    },
    {
      "epoch": 10.35,
      "learning_rate": 2.416130125381227e-05,
      "loss": 3.6363,
      "step": 183500
    },
    {
      "epoch": 10.38,
      "learning_rate": 2.409070371625438e-05,
      "loss": 3.6373,
      "step": 184000
    },
    {
      "epoch": 10.41,
      "learning_rate": 2.4020106178696488e-05,
      "loss": 3.6365,
      "step": 184500
    },
    {
      "epoch": 10.43,
      "learning_rate": 2.3949508641138598e-05,
      "loss": 3.6401,
      "step": 185000
    },
    {
      "epoch": 10.46,
      "learning_rate": 2.3878911103580707e-05,
      "loss": 3.633,
      "step": 185500
    },
    {
      "epoch": 10.49,
      "learning_rate": 2.380831356602282e-05,
      "loss": 3.6379,
      "step": 186000
    },
    {
      "epoch": 10.52,
      "learning_rate": 2.373771602846493e-05,
      "loss": 3.6306,
      "step": 186500
    },
    {
      "epoch": 10.55,
      "learning_rate": 2.366711849090704e-05,
      "loss": 3.6375,
      "step": 187000
    },
    {
      "epoch": 10.57,
      "learning_rate": 2.359652095334915e-05,
      "loss": 3.6401,
      "step": 187500
    },
    {
      "epoch": 10.6,
      "learning_rate": 2.352592341579126e-05,
      "loss": 3.6425,
      "step": 188000
    },
    {
      "epoch": 10.63,
      "learning_rate": 2.345532587823337e-05,
      "loss": 3.6464,
      "step": 188500
    },
    {
      "epoch": 10.66,
      "learning_rate": 2.338472834067548e-05,
      "loss": 3.6418,
      "step": 189000
    },
    {
      "epoch": 10.69,
      "learning_rate": 2.331413080311759e-05,
      "loss": 3.6419,
      "step": 189500
    },
    {
      "epoch": 10.72,
      "learning_rate": 2.32435332655597e-05,
      "loss": 3.6386,
      "step": 190000
    },
    {
      "epoch": 10.74,
      "learning_rate": 2.3172935728001808e-05,
      "loss": 3.6403,
      "step": 190500
    },
    {
      "epoch": 10.77,
      "learning_rate": 2.3102338190443918e-05,
      "loss": 3.639,
      "step": 191000
    },
    {
      "epoch": 10.8,
      "learning_rate": 2.3031740652886028e-05,
      "loss": 3.6391,
      "step": 191500
    },
    {
      "epoch": 10.83,
      "learning_rate": 2.2961143115328137e-05,
      "loss": 3.6352,
      "step": 192000
    },
    {
      "epoch": 10.86,
      "learning_rate": 2.2890545577770247e-05,
      "loss": 3.6367,
      "step": 192500
    },
    {
      "epoch": 10.88,
      "learning_rate": 2.2819948040212357e-05,
      "loss": 3.641,
      "step": 193000
    },
    {
      "epoch": 10.91,
      "learning_rate": 2.2749350502654466e-05,
      "loss": 3.642,
      "step": 193500
    },
    {
      "epoch": 10.94,
      "learning_rate": 2.267875296509658e-05,
      "loss": 3.6384,
      "step": 194000
    },
    {
      "epoch": 10.97,
      "learning_rate": 2.260815542753869e-05,
      "loss": 3.6334,
      "step": 194500
    },
    {
      "epoch": 11.0,
      "learning_rate": 2.25375578899808e-05,
      "loss": 3.6441,
      "step": 195000
    },
    {
      "epoch": 11.0,
      "eval_loss": 3.6708991527557373,
      "eval_runtime": 860.0592,
      "eval_samples_per_second": 264.897,
      "eval_steps_per_second": 33.113,
      "step": 195041
    },
    {
      "epoch": 11.03,
      "learning_rate": 2.246696035242291e-05,
      "loss": 3.5829,
      "step": 195500
    },
    {
      "epoch": 11.05,
      "learning_rate": 2.239636281486502e-05,
      "loss": 3.5793,
      "step": 196000
    },
    {
      "epoch": 11.08,
      "learning_rate": 2.2325765277307128e-05,
      "loss": 3.5869,
      "step": 196500
    },
    {
      "epoch": 11.11,
      "learning_rate": 2.2255167739749238e-05,
      "loss": 3.5855,
      "step": 197000
    },
    {
      "epoch": 11.14,
      "learning_rate": 2.2184570202191348e-05,
      "loss": 3.587,
      "step": 197500
    },
    {
      "epoch": 11.17,
      "learning_rate": 2.2113972664633457e-05,
      "loss": 3.5912,
      "step": 198000
    },
    {
      "epoch": 11.2,
      "learning_rate": 2.204337512707557e-05,
      "loss": 3.5869,
      "step": 198500
    },
    {
      "epoch": 11.22,
      "learning_rate": 2.197277758951768e-05,
      "loss": 3.5893,
      "step": 199000
    },
    {
      "epoch": 11.25,
      "learning_rate": 2.190218005195979e-05,
      "loss": 3.598,
      "step": 199500
    },
    {
      "epoch": 11.28,
      "learning_rate": 2.18315825144019e-05,
      "loss": 3.5969,
      "step": 200000
    },
    {
      "epoch": 11.31,
      "learning_rate": 2.176098497684401e-05,
      "loss": 3.5992,
      "step": 200500
    },
    {
      "epoch": 11.34,
      "learning_rate": 2.169038743928612e-05,
      "loss": 3.5917,
      "step": 201000
    },
    {
      "epoch": 11.36,
      "learning_rate": 2.161978990172823e-05,
      "loss": 3.5978,
      "step": 201500
    },
    {
      "epoch": 11.39,
      "learning_rate": 2.154919236417034e-05,
      "loss": 3.5915,
      "step": 202000
    },
    {
      "epoch": 11.42,
      "learning_rate": 2.1478594826612448e-05,
      "loss": 3.5994,
      "step": 202500
    },
    {
      "epoch": 11.45,
      "learning_rate": 2.140799728905456e-05,
      "loss": 3.5927,
      "step": 203000
    },
    {
      "epoch": 11.48,
      "learning_rate": 2.133739975149667e-05,
      "loss": 3.5964,
      "step": 203500
    },
    {
      "epoch": 11.51,
      "learning_rate": 2.126680221393878e-05,
      "loss": 3.6008,
      "step": 204000
    },
    {
      "epoch": 11.53,
      "learning_rate": 2.1196204676380887e-05,
      "loss": 3.5938,
      "step": 204500
    },
    {
      "epoch": 11.56,
      "learning_rate": 2.1125607138822997e-05,
      "loss": 3.5922,
      "step": 205000
    },
    {
      "epoch": 11.59,
      "learning_rate": 2.1055009601265106e-05,
      "loss": 3.6034,
      "step": 205500
    },
    {
      "epoch": 11.62,
      "learning_rate": 2.0984412063707216e-05,
      "loss": 3.5964,
      "step": 206000
    },
    {
      "epoch": 11.65,
      "learning_rate": 2.091381452614933e-05,
      "loss": 3.5963,
      "step": 206500
    },
    {
      "epoch": 11.67,
      "learning_rate": 2.084321698859144e-05,
      "loss": 3.5982,
      "step": 207000
    },
    {
      "epoch": 11.7,
      "learning_rate": 2.077261945103355e-05,
      "loss": 3.6001,
      "step": 207500
    },
    {
      "epoch": 11.73,
      "learning_rate": 2.070202191347566e-05,
      "loss": 3.5998,
      "step": 208000
    },
    {
      "epoch": 11.76,
      "learning_rate": 2.0631424375917768e-05,
      "loss": 3.6043,
      "step": 208500
    },
    {
      "epoch": 11.79,
      "learning_rate": 2.0560826838359878e-05,
      "loss": 3.6034,
      "step": 209000
    },
    {
      "epoch": 11.82,
      "learning_rate": 2.0490229300801988e-05,
      "loss": 3.6005,
      "step": 209500
    },
    {
      "epoch": 11.84,
      "learning_rate": 2.0419631763244097e-05,
      "loss": 3.5998,
      "step": 210000
    },
    {
      "epoch": 11.87,
      "learning_rate": 2.0349034225686207e-05,
      "loss": 3.6008,
      "step": 210500
    },
    {
      "epoch": 11.9,
      "learning_rate": 2.027843668812832e-05,
      "loss": 3.5994,
      "step": 211000
    },
    {
      "epoch": 11.93,
      "learning_rate": 2.020783915057043e-05,
      "loss": 3.5982,
      "step": 211500
    },
    {
      "epoch": 11.96,
      "learning_rate": 2.013724161301254e-05,
      "loss": 3.5993,
      "step": 212000
    },
    {
      "epoch": 11.98,
      "learning_rate": 2.006664407545465e-05,
      "loss": 3.602,
      "step": 212500
    },
    {
      "epoch": 12.0,
      "eval_loss": 3.646425247192383,
      "eval_runtime": 859.6242,
      "eval_samples_per_second": 265.031,
      "eval_steps_per_second": 33.13,
      "step": 212772
    },
    {
      "epoch": 12.01,
      "learning_rate": 1.999604653789676e-05,
      "loss": 3.5684,
      "step": 213000
    },
    {
      "epoch": 12.04,
      "learning_rate": 1.992544900033887e-05,
      "loss": 3.54,
      "step": 213500
    },
    {
      "epoch": 12.07,
      "learning_rate": 1.985485146278098e-05,
      "loss": 3.5453,
      "step": 214000
    },
    {
      "epoch": 12.1,
      "learning_rate": 1.9784253925223088e-05,
      "loss": 3.5457,
      "step": 214500
    },
    {
      "epoch": 12.13,
      "learning_rate": 1.97136563876652e-05,
      "loss": 3.5534,
      "step": 215000
    },
    {
      "epoch": 12.15,
      "learning_rate": 1.964305885010731e-05,
      "loss": 3.552,
      "step": 215500
    },
    {
      "epoch": 12.18,
      "learning_rate": 1.957246131254942e-05,
      "loss": 3.5531,
      "step": 216000
    },
    {
      "epoch": 12.21,
      "learning_rate": 1.950186377499153e-05,
      "loss": 3.5527,
      "step": 216500
    },
    {
      "epoch": 12.24,
      "learning_rate": 1.943126623743364e-05,
      "loss": 3.5559,
      "step": 217000
    },
    {
      "epoch": 12.27,
      "learning_rate": 1.936066869987575e-05,
      "loss": 3.556,
      "step": 217500
    },
    {
      "epoch": 12.29,
      "learning_rate": 1.929007116231786e-05,
      "loss": 3.5563,
      "step": 218000
    },
    {
      "epoch": 12.32,
      "learning_rate": 1.921947362475997e-05,
      "loss": 3.5622,
      "step": 218500
    },
    {
      "epoch": 12.35,
      "learning_rate": 1.914887608720208e-05,
      "loss": 3.5628,
      "step": 219000
    },
    {
      "epoch": 12.38,
      "learning_rate": 1.9078278549644192e-05,
      "loss": 3.563,
      "step": 219500
    },
    {
      "epoch": 12.41,
      "learning_rate": 1.90076810120863e-05,
      "loss": 3.5585,
      "step": 220000
    },
    {
      "epoch": 12.44,
      "learning_rate": 1.8937083474528408e-05,
      "loss": 3.5627,
      "step": 220500
    },
    {
      "epoch": 12.46,
      "learning_rate": 1.8866485936970518e-05,
      "loss": 3.56,
      "step": 221000
    },
    {
      "epoch": 12.49,
      "learning_rate": 1.8795888399412628e-05,
      "loss": 3.5653,
      "step": 221500
    },
    {
      "epoch": 12.52,
      "learning_rate": 1.8725290861854737e-05,
      "loss": 3.5669,
      "step": 222000
    },
    {
      "epoch": 12.55,
      "learning_rate": 1.8654693324296847e-05,
      "loss": 3.5644,
      "step": 222500
    },
    {
      "epoch": 12.58,
      "learning_rate": 1.858409578673896e-05,
      "loss": 3.5678,
      "step": 223000
    },
    {
      "epoch": 12.61,
      "learning_rate": 1.851349824918107e-05,
      "loss": 3.562,
      "step": 223500
    },
    {
      "epoch": 12.63,
      "learning_rate": 1.844290071162318e-05,
      "loss": 3.5674,
      "step": 224000
    },
    {
      "epoch": 12.66,
      "learning_rate": 1.837230317406529e-05,
      "loss": 3.5661,
      "step": 224500
    },
    {
      "epoch": 12.69,
      "learning_rate": 1.83017056365074e-05,
      "loss": 3.5652,
      "step": 225000
    },
    {
      "epoch": 12.72,
      "learning_rate": 1.823110809894951e-05,
      "loss": 3.567,
      "step": 225500
    },
    {
      "epoch": 12.75,
      "learning_rate": 1.816051056139162e-05,
      "loss": 3.5663,
      "step": 226000
    },
    {
      "epoch": 12.77,
      "learning_rate": 1.808991302383373e-05,
      "loss": 3.5649,
      "step": 226500
    },
    {
      "epoch": 12.8,
      "learning_rate": 1.8019315486275838e-05,
      "loss": 3.5655,
      "step": 227000
    },
    {
      "epoch": 12.83,
      "learning_rate": 1.794871794871795e-05,
      "loss": 3.5664,
      "step": 227500
    },
    {
      "epoch": 12.86,
      "learning_rate": 1.787812041116006e-05,
      "loss": 3.5617,
      "step": 228000
    },
    {
      "epoch": 12.89,
      "learning_rate": 1.780752287360217e-05,
      "loss": 3.5643,
      "step": 228500
    },
    {
      "epoch": 12.92,
      "learning_rate": 1.773692533604428e-05,
      "loss": 3.5652,
      "step": 229000
    },
    {
      "epoch": 12.94,
      "learning_rate": 1.766632779848639e-05,
      "loss": 3.5637,
      "step": 229500
    },
    {
      "epoch": 12.97,
      "learning_rate": 1.75957302609285e-05,
      "loss": 3.5626,
      "step": 230000
    },
    {
      "epoch": 13.0,
      "learning_rate": 1.752513272337061e-05,
      "loss": 3.5671,
      "step": 230500
    },
    {
      "epoch": 13.0,
      "eval_loss": 3.6284356117248535,
      "eval_runtime": 859.8678,
      "eval_samples_per_second": 264.956,
      "eval_steps_per_second": 33.12,
      "step": 230503
    },
    {
      "epoch": 13.03,
      "learning_rate": 1.745453518581272e-05,
      "loss": 3.5115,
      "step": 231000
    },
    {
      "epoch": 13.06,
      "learning_rate": 1.738393764825483e-05,
      "loss": 3.5145,
      "step": 231500
    },
    {
      "epoch": 13.08,
      "learning_rate": 1.7313340110696942e-05,
      "loss": 3.5134,
      "step": 232000
    },
    {
      "epoch": 13.11,
      "learning_rate": 1.7242742573139052e-05,
      "loss": 3.5196,
      "step": 232500
    },
    {
      "epoch": 13.14,
      "learning_rate": 1.717214503558116e-05,
      "loss": 3.5224,
      "step": 233000
    },
    {
      "epoch": 13.17,
      "learning_rate": 1.710154749802327e-05,
      "loss": 3.5222,
      "step": 233500
    },
    {
      "epoch": 13.2,
      "learning_rate": 1.703094996046538e-05,
      "loss": 3.5284,
      "step": 234000
    },
    {
      "epoch": 13.23,
      "learning_rate": 1.696035242290749e-05,
      "loss": 3.5227,
      "step": 234500
    },
    {
      "epoch": 13.25,
      "learning_rate": 1.6889754885349597e-05,
      "loss": 3.5228,
      "step": 235000
    },
    {
      "epoch": 13.28,
      "learning_rate": 1.681915734779171e-05,
      "loss": 3.5313,
      "step": 235500
    },
    {
      "epoch": 13.31,
      "learning_rate": 1.674855981023382e-05,
      "loss": 3.5317,
      "step": 236000
    },
    {
      "epoch": 13.34,
      "learning_rate": 1.667796227267593e-05,
      "loss": 3.5257,
      "step": 236500
    },
    {
      "epoch": 13.37,
      "learning_rate": 1.660736473511804e-05,
      "loss": 3.5308,
      "step": 237000
    },
    {
      "epoch": 13.39,
      "learning_rate": 1.653676719756015e-05,
      "loss": 3.5269,
      "step": 237500
    },
    {
      "epoch": 13.42,
      "learning_rate": 1.646616966000226e-05,
      "loss": 3.5294,
      "step": 238000
    },
    {
      "epoch": 13.45,
      "learning_rate": 1.639557212244437e-05,
      "loss": 3.5298,
      "step": 238500
    },
    {
      "epoch": 13.48,
      "learning_rate": 1.6324974584886478e-05,
      "loss": 3.5276,
      "step": 239000
    },
    {
      "epoch": 13.51,
      "learning_rate": 1.6254377047328588e-05,
      "loss": 3.53,
      "step": 239500
    },
    {
      "epoch": 13.54,
      "learning_rate": 1.61837795097707e-05,
      "loss": 3.5307,
      "step": 240000
    },
    {
      "epoch": 13.56,
      "learning_rate": 1.611318197221281e-05,
      "loss": 3.5359,
      "step": 240500
    },
    {
      "epoch": 13.59,
      "learning_rate": 1.604258443465492e-05,
      "loss": 3.5333,
      "step": 241000
    },
    {
      "epoch": 13.62,
      "learning_rate": 1.597198689709703e-05,
      "loss": 3.5384,
      "step": 241500
    },
    {
      "epoch": 13.65,
      "learning_rate": 1.590138935953914e-05,
      "loss": 3.5369,
      "step": 242000
    },
    {
      "epoch": 13.68,
      "learning_rate": 1.583079182198125e-05,
      "loss": 3.5339,
      "step": 242500
    },
    {
      "epoch": 13.7,
      "learning_rate": 1.576019428442336e-05,
      "loss": 3.5339,
      "step": 243000
    },
    {
      "epoch": 13.73,
      "learning_rate": 1.568959674686547e-05,
      "loss": 3.5378,
      "step": 243500
    },
    {
      "epoch": 13.76,
      "learning_rate": 1.5618999209307582e-05,
      "loss": 3.5342,
      "step": 244000
    },
    {
      "epoch": 13.79,
      "learning_rate": 1.5548401671749692e-05,
      "loss": 3.5358,
      "step": 244500
    },
    {
      "epoch": 13.82,
      "learning_rate": 1.54778041341918e-05,
      "loss": 3.5333,
      "step": 245000
    },
    {
      "epoch": 13.85,
      "learning_rate": 1.540720659663391e-05,
      "loss": 3.5336,
      "step": 245500
    },
    {
      "epoch": 13.87,
      "learning_rate": 1.533660905907602e-05,
      "loss": 3.5374,
      "step": 246000
    },
    {
      "epoch": 13.9,
      "learning_rate": 1.526601152151813e-05,
      "loss": 3.5273,
      "step": 246500
    },
    {
      "epoch": 13.93,
      "learning_rate": 1.519541398396024e-05,
      "loss": 3.5332,
      "step": 247000
    },
    {
      "epoch": 13.96,
      "learning_rate": 1.5124816446402352e-05,
      "loss": 3.5335,
      "step": 247500
    },
    {
      "epoch": 13.99,
      "learning_rate": 1.5054218908844462e-05,
      "loss": 3.5329,
      "step": 248000
    },
    {
      "epoch": 14.0,
      "eval_loss": 3.613527297973633,
      "eval_runtime": 859.4422,
      "eval_samples_per_second": 265.087,
      "eval_steps_per_second": 33.137,
      "step": 248234
    },
    {
      "epoch": 14.02,
      "learning_rate": 1.4983621371286571e-05,
      "loss": 3.5101,
      "step": 248500
    },
    {
      "epoch": 14.04,
      "learning_rate": 1.4913023833728681e-05,
      "loss": 3.488,
      "step": 249000
    },
    {
      "epoch": 14.07,
      "learning_rate": 1.4842426296170792e-05,
      "loss": 3.4851,
      "step": 249500
    },
    {
      "epoch": 14.1,
      "learning_rate": 1.4771828758612899e-05,
      "loss": 3.4952,
      "step": 250000
|
}, |
|
{ |
|
"epoch": 14.13, |
|
"learning_rate": 1.4701231221055008e-05, |
|
"loss": 3.4913, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 14.16, |
|
"learning_rate": 1.463063368349712e-05, |
|
"loss": 3.491, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"learning_rate": 1.456003614593923e-05, |
|
"loss": 3.4961, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 14.21, |
|
"learning_rate": 1.448943860838134e-05, |
|
"loss": 3.4923, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 14.24, |
|
"learning_rate": 1.4418841070823449e-05, |
|
"loss": 3.5026, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"learning_rate": 1.4348243533265559e-05, |
|
"loss": 3.4991, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"learning_rate": 1.427764599570767e-05, |
|
"loss": 3.5018, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"learning_rate": 1.420704845814978e-05, |
|
"loss": 3.4944, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"learning_rate": 1.413645092059189e-05, |
|
"loss": 3.4994, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 14.38, |
|
"learning_rate": 1.4065853383034e-05, |
|
"loss": 3.5039, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"learning_rate": 1.399525584547611e-05, |
|
"loss": 3.5029, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"learning_rate": 1.392465830791822e-05, |
|
"loss": 3.5038, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"learning_rate": 1.385406077036033e-05, |
|
"loss": 3.5012, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 14.49, |
|
"learning_rate": 1.378346323280244e-05, |
|
"loss": 3.5093, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 14.52, |
|
"learning_rate": 1.3712865695244551e-05, |
|
"loss": 3.5047, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 14.55, |
|
"learning_rate": 1.3642268157686661e-05, |
|
"loss": 3.5092, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 14.58, |
|
"learning_rate": 1.357167062012877e-05, |
|
"loss": 3.5083, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"learning_rate": 1.350107308257088e-05, |
|
"loss": 3.5023, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 14.64, |
|
"learning_rate": 1.343047554501299e-05, |
|
"loss": 3.5037, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"learning_rate": 1.3359878007455102e-05, |
|
"loss": 3.507, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"learning_rate": 1.3289280469897211e-05, |
|
"loss": 3.5005, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 14.72, |
|
"learning_rate": 1.3218682932339321e-05, |
|
"loss": 3.5107, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 14.75, |
|
"learning_rate": 1.314808539478143e-05, |
|
"loss": 3.5042, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"learning_rate": 1.3077487857223542e-05, |
|
"loss": 3.5057, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"learning_rate": 1.3006890319665652e-05, |
|
"loss": 3.5072, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"learning_rate": 1.2936292782107762e-05, |
|
"loss": 3.5061, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"learning_rate": 1.2865695244549871e-05, |
|
"loss": 3.5102, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"learning_rate": 1.2795097706991983e-05, |
|
"loss": 3.5129, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 14.92, |
|
"learning_rate": 1.2724500169434093e-05, |
|
"loss": 3.5113, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"learning_rate": 1.2653902631876199e-05, |
|
"loss": 3.5039, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 14.97, |
|
"learning_rate": 1.258330509431831e-05, |
|
"loss": 3.5122, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_loss": 3.6014883518218994, |
|
"eval_runtime": 858.9229, |
|
"eval_samples_per_second": 265.247, |
|
"eval_steps_per_second": 33.157, |
|
"step": 265965 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 1.251270755676042e-05, |
|
"loss": 3.5016, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 15.03, |
|
"learning_rate": 1.2442110019202531e-05, |
|
"loss": 3.4598, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 15.06, |
|
"learning_rate": 1.2371512481644641e-05, |
|
"loss": 3.4628, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 15.09, |
|
"learning_rate": 1.230091494408675e-05, |
|
"loss": 3.4712, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"learning_rate": 1.2230317406528862e-05, |
|
"loss": 3.4672, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 15.14, |
|
"learning_rate": 1.2159719868970972e-05, |
|
"loss": 3.4665, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 15.17, |
|
"learning_rate": 1.208912233141308e-05, |
|
"loss": 3.4665, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"learning_rate": 1.201852479385519e-05, |
|
"loss": 3.4733, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 15.23, |
|
"learning_rate": 1.1947927256297301e-05, |
|
"loss": 3.4735, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"learning_rate": 1.187732971873941e-05, |
|
"loss": 3.4755, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 15.28, |
|
"learning_rate": 1.180673218118152e-05, |
|
"loss": 3.4743, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"learning_rate": 1.173613464362363e-05, |
|
"loss": 3.4799, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 15.34, |
|
"learning_rate": 1.1665537106065742e-05, |
|
"loss": 3.4805, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"learning_rate": 1.1594939568507851e-05, |
|
"loss": 3.4771, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 15.4, |
|
"learning_rate": 1.1524342030949961e-05, |
|
"loss": 3.482, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 15.42, |
|
"learning_rate": 1.1453744493392071e-05, |
|
"loss": 3.4788, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 15.45, |
|
"learning_rate": 1.138314695583418e-05, |
|
"loss": 3.4807, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 15.48, |
|
"learning_rate": 1.1312549418276292e-05, |
|
"loss": 3.477, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 15.51, |
|
"learning_rate": 1.1241951880718402e-05, |
|
"loss": 3.4829, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 15.54, |
|
"learning_rate": 1.1171354343160511e-05, |
|
"loss": 3.4778, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 15.57, |
|
"learning_rate": 1.1100756805602621e-05, |
|
"loss": 3.4838, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 15.59, |
|
"learning_rate": 1.1030159268044731e-05, |
|
"loss": 3.4746, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"learning_rate": 1.095956173048684e-05, |
|
"loss": 3.4845, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 15.65, |
|
"learning_rate": 1.088896419292895e-05, |
|
"loss": 3.4892, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"learning_rate": 1.081836665537106e-05, |
|
"loss": 3.4802, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"learning_rate": 1.0747769117813171e-05, |
|
"loss": 3.4791, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"learning_rate": 1.0677171580255281e-05, |
|
"loss": 3.4818, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 15.76, |
|
"learning_rate": 1.0606574042697391e-05, |
|
"loss": 3.478, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"learning_rate": 1.05359765051395e-05, |
|
"loss": 3.479, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"learning_rate": 1.0465378967581612e-05, |
|
"loss": 3.4834, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 15.85, |
|
"learning_rate": 1.0394781430023722e-05, |
|
"loss": 3.4799, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 15.88, |
|
"learning_rate": 1.0324183892465831e-05, |
|
"loss": 3.4798, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 15.9, |
|
"learning_rate": 1.0253586354907941e-05, |
|
"loss": 3.4855, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 15.93, |
|
"learning_rate": 1.0182988817350053e-05, |
|
"loss": 3.4856, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 15.96, |
|
"learning_rate": 1.0112391279792162e-05, |
|
"loss": 3.4834, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 15.99, |
|
"learning_rate": 1.0041793742234272e-05, |
|
"loss": 3.487, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_loss": 3.5896410942077637, |
|
"eval_runtime": 859.6502, |
|
"eval_samples_per_second": 265.023, |
|
"eval_steps_per_second": 33.129, |
|
"step": 283696 |
|
}, |
|
{ |
|
"epoch": 16.02, |
|
"learning_rate": 9.97119620467638e-06, |
|
"loss": 3.46, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 16.05, |
|
"learning_rate": 9.900598667118491e-06, |
|
"loss": 3.4381, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 16.07, |
|
"learning_rate": 9.830001129560601e-06, |
|
"loss": 3.4432, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 16.1, |
|
"learning_rate": 9.759403592002711e-06, |
|
"loss": 3.4443, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 16.13, |
|
"learning_rate": 9.68880605444482e-06, |
|
"loss": 3.4481, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 16.16, |
|
"learning_rate": 9.618208516886932e-06, |
|
"loss": 3.4494, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 16.19, |
|
"learning_rate": 9.547610979329042e-06, |
|
"loss": 3.4531, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"learning_rate": 9.477013441771152e-06, |
|
"loss": 3.4528, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 16.24, |
|
"learning_rate": 9.406415904213261e-06, |
|
"loss": 3.4534, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 16.27, |
|
"learning_rate": 9.335818366655371e-06, |
|
"loss": 3.4524, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 16.3, |
|
"learning_rate": 9.265220829097482e-06, |
|
"loss": 3.4561, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"learning_rate": 9.194623291539592e-06, |
|
"loss": 3.4514, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 16.36, |
|
"learning_rate": 9.124025753981702e-06, |
|
"loss": 3.4528, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 16.38, |
|
"learning_rate": 9.053428216423812e-06, |
|
"loss": 3.4565, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 16.41, |
|
"learning_rate": 8.982830678865923e-06, |
|
"loss": 3.4601, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 16.44, |
|
"learning_rate": 8.912233141308031e-06, |
|
"loss": 3.4571, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 16.47, |
|
"learning_rate": 8.84163560375014e-06, |
|
"loss": 3.4532, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"learning_rate": 8.77103806619225e-06, |
|
"loss": 3.4607, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 16.52, |
|
"learning_rate": 8.700440528634362e-06, |
|
"loss": 3.4616, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 16.55, |
|
"learning_rate": 8.629842991076472e-06, |
|
"loss": 3.4629, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"learning_rate": 8.559245453518581e-06, |
|
"loss": 3.4557, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"learning_rate": 8.488647915960691e-06, |
|
"loss": 3.4593, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 16.64, |
|
"learning_rate": 8.418050378402802e-06, |
|
"loss": 3.4591, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"learning_rate": 8.347452840844912e-06, |
|
"loss": 3.4619, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 16.69, |
|
"learning_rate": 8.276855303287022e-06, |
|
"loss": 3.4594, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 16.72, |
|
"learning_rate": 8.206257765729132e-06, |
|
"loss": 3.4588, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 16.75, |
|
"learning_rate": 8.135660228171241e-06, |
|
"loss": 3.4623, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 16.78, |
|
"learning_rate": 8.065062690613353e-06, |
|
"loss": 3.4601, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 16.81, |
|
"learning_rate": 7.994465153055462e-06, |
|
"loss": 3.4614, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 16.83, |
|
"learning_rate": 7.923867615497572e-06, |
|
"loss": 3.46, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 16.86, |
|
"learning_rate": 7.853270077939682e-06, |
|
"loss": 3.4612, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 16.89, |
|
"learning_rate": 7.782672540381792e-06, |
|
"loss": 3.4629, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 16.92, |
|
"learning_rate": 7.712075002823901e-06, |
|
"loss": 3.4621, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 16.95, |
|
"learning_rate": 7.641477465266011e-06, |
|
"loss": 3.4602, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 16.98, |
|
"learning_rate": 7.570879927708122e-06, |
|
"loss": 3.4656, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_loss": 3.5812108516693115, |
|
"eval_runtime": 859.9572, |
|
"eval_samples_per_second": 264.928, |
|
"eval_steps_per_second": 33.117, |
|
"step": 301427 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"learning_rate": 7.500282390150231e-06, |
|
"loss": 3.4562, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 17.03, |
|
"learning_rate": 7.429684852592342e-06, |
|
"loss": 3.4315, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 17.06, |
|
"learning_rate": 7.359087315034452e-06, |
|
"loss": 3.4326, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"learning_rate": 7.288489777476562e-06, |
|
"loss": 3.432, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 17.12, |
|
"learning_rate": 7.217892239918672e-06, |
|
"loss": 3.435, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 17.15, |
|
"learning_rate": 7.1472947023607825e-06, |
|
"loss": 3.4312, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 17.17, |
|
"learning_rate": 7.076697164802892e-06, |
|
"loss": 3.4332, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 17.2, |
|
"learning_rate": 7.006099627245003e-06, |
|
"loss": 3.433, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 17.23, |
|
"learning_rate": 6.9355020896871125e-06, |
|
"loss": 3.4289, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"learning_rate": 6.864904552129223e-06, |
|
"loss": 3.4376, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 17.29, |
|
"learning_rate": 6.794307014571331e-06, |
|
"loss": 3.4358, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 17.31, |
|
"learning_rate": 6.723709477013442e-06, |
|
"loss": 3.434, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"learning_rate": 6.653111939455551e-06, |
|
"loss": 3.4351, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 17.37, |
|
"learning_rate": 6.582514401897662e-06, |
|
"loss": 3.4357, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 17.4, |
|
"learning_rate": 6.511916864339772e-06, |
|
"loss": 3.4315, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 17.43, |
|
"learning_rate": 6.441319326781882e-06, |
|
"loss": 3.4405, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 17.46, |
|
"learning_rate": 6.370721789223992e-06, |
|
"loss": 3.4362, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 17.48, |
|
"learning_rate": 6.3001242516661025e-06, |
|
"loss": 3.4362, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 17.51, |
|
"learning_rate": 6.229526714108212e-06, |
|
"loss": 3.436, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 17.54, |
|
"learning_rate": 6.158929176550323e-06, |
|
"loss": 3.4355, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 17.57, |
|
"learning_rate": 6.0883316389924325e-06, |
|
"loss": 3.4379, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 6.017734101434542e-06, |
|
"loss": 3.437, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 17.62, |
|
"learning_rate": 5.947136563876652e-06, |
|
"loss": 3.4371, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 17.65, |
|
"learning_rate": 5.8765390263187625e-06, |
|
"loss": 3.4417, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 17.68, |
|
"learning_rate": 5.805941488760872e-06, |
|
"loss": 3.4445, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 17.71, |
|
"learning_rate": 5.735343951202982e-06, |
|
"loss": 3.4358, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"learning_rate": 5.6647464136450925e-06, |
|
"loss": 3.44, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 17.77, |
|
"learning_rate": 5.594148876087202e-06, |
|
"loss": 3.4435, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 17.79, |
|
"learning_rate": 5.523551338529313e-06, |
|
"loss": 3.4419, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 17.82, |
|
"learning_rate": 5.452953800971422e-06, |
|
"loss": 3.4375, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"learning_rate": 5.382356263413532e-06, |
|
"loss": 3.4409, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 17.88, |
|
"learning_rate": 5.311758725855642e-06, |
|
"loss": 3.4402, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 17.91, |
|
"learning_rate": 5.2411611882977525e-06, |
|
"loss": 3.4439, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"learning_rate": 5.170563650739862e-06, |
|
"loss": 3.4414, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 17.96, |
|
"learning_rate": 5.099966113181973e-06, |
|
"loss": 3.4368, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 17.99, |
|
"learning_rate": 5.0293685756240826e-06, |
|
"loss": 3.4438, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_loss": 3.5748109817504883, |
|
"eval_runtime": 860.2197, |
|
"eval_samples_per_second": 264.847, |
|
"eval_steps_per_second": 33.107, |
|
"step": 319158 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"learning_rate": 4.958771038066193e-06, |
|
"loss": 3.4245, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 18.05, |
|
"learning_rate": 4.888173500508302e-06, |
|
"loss": 3.4111, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 18.08, |
|
"learning_rate": 4.8175759629504126e-06, |
|
"loss": 3.4159, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 18.1, |
|
"learning_rate": 4.746978425392522e-06, |
|
"loss": 3.4101, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 18.13, |
|
"learning_rate": 4.676380887834633e-06, |
|
"loss": 3.416, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 18.16, |
|
"learning_rate": 4.6057833502767426e-06, |
|
"loss": 3.414, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 18.19, |
|
"learning_rate": 4.535185812718853e-06, |
|
"loss": 3.4111, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 18.22, |
|
"learning_rate": 4.464588275160963e-06, |
|
"loss": 3.4234, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 18.24, |
|
"learning_rate": 4.393990737603073e-06, |
|
"loss": 3.4169, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 18.27, |
|
"learning_rate": 4.323393200045182e-06, |
|
"loss": 3.4224, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 18.3, |
|
"learning_rate": 4.252795662487293e-06, |
|
"loss": 3.4242, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 18.33, |
|
"learning_rate": 4.182198124929403e-06, |
|
"loss": 3.4177, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 18.36, |
|
"learning_rate": 4.111600587371512e-06, |
|
"loss": 3.4214, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 18.39, |
|
"learning_rate": 4.041003049813623e-06, |
|
"loss": 3.4213, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 18.41, |
|
"learning_rate": 3.970405512255733e-06, |
|
"loss": 3.4261, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"learning_rate": 3.899807974697843e-06, |
|
"loss": 3.4181, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"learning_rate": 3.829210437139952e-06, |
|
"loss": 3.4202, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"learning_rate": 3.7586128995820626e-06, |
|
"loss": 3.4213, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 18.53, |
|
"learning_rate": 3.6880153620241728e-06, |
|
"loss": 3.4289, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"learning_rate": 3.617417824466283e-06, |
|
"loss": 3.4254, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 18.58, |
|
"learning_rate": 3.546820286908393e-06, |
|
"loss": 3.4221, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"learning_rate": 3.476222749350503e-06, |
|
"loss": 3.4201, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 18.64, |
|
"learning_rate": 3.405625211792613e-06, |
|
"loss": 3.4238, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 18.67, |
|
"learning_rate": 3.3350276742347226e-06, |
|
"loss": 3.4235, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 18.7, |
|
"learning_rate": 3.2644301366768328e-06, |
|
"loss": 3.4176, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 18.72, |
|
"learning_rate": 3.193832599118943e-06, |
|
"loss": 3.4208, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 18.75, |
|
"learning_rate": 3.1232350615610526e-06, |
|
"loss": 3.423, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 18.78, |
|
"learning_rate": 3.0526375240031628e-06, |
|
"loss": 3.4212, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 18.81, |
|
"learning_rate": 2.982039986445273e-06, |
|
"loss": 3.4249, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 18.84, |
|
"learning_rate": 2.9114424488873826e-06, |
|
"loss": 3.4181, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 18.87, |
|
"learning_rate": 2.840844911329493e-06, |
|
"loss": 3.4228, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"learning_rate": 2.770247373771603e-06, |
|
"loss": 3.4243, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 18.92, |
|
"learning_rate": 2.699649836213713e-06, |
|
"loss": 3.4253, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 18.95, |
|
"learning_rate": 2.629052298655823e-06, |
|
"loss": 3.4184, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 18.98, |
|
"learning_rate": 2.558454761097933e-06, |
|
"loss": 3.427, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_loss": 3.569897413253784, |
|
"eval_runtime": 860.0705, |
|
"eval_samples_per_second": 264.893, |
|
"eval_steps_per_second": 33.112, |
|
"step": 336889 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"learning_rate": 2.487857223540043e-06, |
|
"loss": 3.4203, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 19.03, |
|
"learning_rate": 2.4172596859821532e-06, |
|
"loss": 3.4046, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 19.06, |
|
"learning_rate": 2.346662148424263e-06, |
|
"loss": 3.4027, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 19.09, |
|
"learning_rate": 2.276064610866373e-06, |
|
"loss": 3.4071, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 19.12, |
|
"learning_rate": 2.2054670733084832e-06, |
|
"loss": 3.4082, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 19.15, |
|
"learning_rate": 2.134869535750593e-06, |
|
"loss": 3.4071, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 19.18, |
|
"learning_rate": 2.064271998192703e-06, |
|
"loss": 3.4052, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"learning_rate": 1.9936744606348133e-06, |
|
"loss": 3.4072, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 19.23, |
|
"learning_rate": 1.9230769230769234e-06, |
|
"loss": 3.4036, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 19.26, |
|
"learning_rate": 1.8524793855190331e-06, |
|
"loss": 3.4015, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 19.29, |
|
"learning_rate": 1.781881847961143e-06, |
|
"loss": 3.4058, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 19.32, |
|
"learning_rate": 1.7112843104032532e-06, |
|
"loss": 3.4041, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 19.34, |
|
"learning_rate": 1.6406867728453633e-06, |
|
"loss": 3.4061, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"learning_rate": 1.570089235287473e-06, |
|
"loss": 3.405, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 19.4, |
|
"learning_rate": 1.4994916977295832e-06, |
|
"loss": 3.4084, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 19.43, |
|
"learning_rate": 1.4288941601716933e-06, |
|
"loss": 3.4033, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 19.46, |
|
"learning_rate": 1.3582966226138033e-06, |
|
"loss": 3.4063, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 19.49, |
|
"learning_rate": 1.2876990850559134e-06, |
|
"loss": 3.4113, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 19.51, |
|
"learning_rate": 1.2171015474980234e-06, |
|
"loss": 3.408, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 19.54, |
|
"learning_rate": 1.1465040099401335e-06, |
|
"loss": 3.4107, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 19.57, |
|
"learning_rate": 1.0759064723822434e-06, |
|
"loss": 3.4072, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 19.6, |
|
"learning_rate": 1.0053089348243534e-06, |
|
"loss": 3.4045, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 19.63, |
|
"learning_rate": 9.347113972664634e-07, |
|
"loss": 3.4063, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 19.65, |
|
"learning_rate": 8.641138597085733e-07, |
|
"loss": 3.4104, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 19.68, |
|
"learning_rate": 7.935163221506835e-07, |
|
"loss": 3.4067, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 19.71, |
|
"learning_rate": 7.229187845927934e-07, |
|
"loss": 3.4046, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"learning_rate": 6.523212470349035e-07, |
|
"loss": 3.403, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 19.77, |
|
"learning_rate": 1.7069980994674747e-05, |
|
"loss": 3.4261, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 19.8, |
|
"learning_rate": 1.7022938110381427e-05, |
|
"loss": 3.4388, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 19.82, |
|
"learning_rate": 1.6975895226088104e-05, |
|
"loss": 3.4412, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 19.85, |
|
"learning_rate": 1.692885234179478e-05, |
|
"loss": 3.4422, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 19.88, |
|
"learning_rate": 1.688180945750146e-05, |
|
"loss": 3.4486, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"learning_rate": 1.6834766573208138e-05, |
|
"loss": 3.4484, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"learning_rate": 1.6787723688914815e-05, |
|
"loss": 3.453, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 19.97, |
|
"learning_rate": 1.6740680804621495e-05, |
|
"loss": 3.4555, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 19.99, |
|
"learning_rate": 1.6693637920328172e-05, |
|
"loss": 3.4618, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_loss": 3.5916237831115723, |
|
"eval_runtime": 859.4544, |
|
"eval_samples_per_second": 265.083, |
|
"eval_steps_per_second": 33.136, |
|
"step": 354620 |
|
}, |
|
{ |
|
"epoch": 20.02, |
|
"learning_rate": 1.6646595036034852e-05, |
|
"loss": 3.4385, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 20.05, |
|
"learning_rate": 1.659955215174153e-05, |
|
"loss": 3.4409, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 20.08, |
|
"learning_rate": 1.6552509267448206e-05, |
|
"loss": 3.4432, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 20.11, |
|
"learning_rate": 1.6505466383154886e-05, |
|
"loss": 3.4478, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 20.13, |
|
"learning_rate": 1.6458423498861563e-05, |
|
"loss": 3.4471, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 20.16, |
|
"learning_rate": 1.6411380614568243e-05, |
|
"loss": 3.4482, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 20.19, |
|
"learning_rate": 1.636433773027492e-05, |
|
"loss": 3.4443, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 20.22, |
|
"learning_rate": 1.6317294845981597e-05, |
|
"loss": 3.4487, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 20.25, |
|
"learning_rate": 1.6270251961688277e-05, |
|
"loss": 3.4514, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 20.28, |
|
"learning_rate": 1.6223209077394954e-05, |
|
"loss": 3.4481, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 20.3, |
|
"learning_rate": 1.6176166193101635e-05, |
|
"loss": 3.4513, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 20.33, |
|
"learning_rate": 1.612912330880831e-05, |
|
"loss": 3.4527, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 20.36, |
|
"learning_rate": 1.6082080424514988e-05, |
|
"loss": 3.4545, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 20.39, |
|
"learning_rate": 1.603503754022167e-05, |
|
"loss": 3.4538, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 20.42, |
|
"learning_rate": 1.5987994655928345e-05, |
|
"loss": 3.4566, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 20.44, |
|
"learning_rate": 1.5940951771635022e-05, |
|
"loss": 3.462, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 20.47, |
|
"learning_rate": 1.5893908887341703e-05, |
|
"loss": 3.454, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 20.5, |
|
"learning_rate": 1.584686600304838e-05, |
|
"loss": 3.4643, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 20.53, |
|
"learning_rate": 1.579982311875506e-05, |
|
"loss": 3.4671, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 20.56, |
|
"learning_rate": 1.5752780234461737e-05, |
|
"loss": 3.464, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 20.59, |
|
"learning_rate": 1.5705737350168413e-05, |
|
"loss": 3.4596, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 20.61, |
|
"learning_rate": 1.5658694465875094e-05, |
|
"loss": 3.464, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 20.64, |
|
"learning_rate": 1.561165158158177e-05, |
|
"loss": 3.4623, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 20.67, |
|
"learning_rate": 1.556460869728845e-05, |
|
"loss": 3.4662, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 20.7, |
|
"learning_rate": 1.5517565812995128e-05, |
|
"loss": 3.4654, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 20.73, |
|
"learning_rate": 1.5470522928701805e-05, |
|
"loss": 3.4657, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 20.75, |
|
"learning_rate": 1.5423480044408485e-05, |
|
"loss": 3.4667, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 20.78, |
|
"learning_rate": 1.5376437160115162e-05, |
|
"loss": 3.4694, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 20.81, |
|
"learning_rate": 1.532939427582184e-05, |
|
"loss": 3.466, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 20.84, |
|
"learning_rate": 1.528235139152852e-05, |
|
"loss": 3.4692, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 20.87, |
|
"learning_rate": 1.5235308507235196e-05, |
|
"loss": 3.4678, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 20.9, |
|
"learning_rate": 1.5188265622941874e-05, |
|
"loss": 3.471, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 20.92, |
|
"learning_rate": 1.5141222738648553e-05, |
|
"loss": 3.4701, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 20.95, |
|
"learning_rate": 1.5094179854355231e-05, |
|
"loss": 3.4677, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 20.98, |
|
"learning_rate": 1.5047136970061908e-05, |
|
"loss": 3.4688, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 21.0, |
|
"eval_loss": 3.582139015197754, |
|
"eval_runtime": 860.6638, |
|
"eval_samples_per_second": 264.711, |
|
"eval_steps_per_second": 33.09, |
|
"step": 372351 |
|
}, |
|
{ |
|
"epoch": 21.01, |
|
"learning_rate": 1.5000094085768587e-05, |
|
"loss": 3.4558, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 21.04, |
|
"learning_rate": 1.4953051201475265e-05, |
|
"loss": 3.4203, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 21.06, |
|
"learning_rate": 1.4906008317181944e-05, |
|
"loss": 3.4192, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 21.09, |
|
"learning_rate": 1.4858965432888623e-05, |
|
"loss": 3.4208, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 21.12, |
|
"learning_rate": 1.48119225485953e-05, |
|
"loss": 3.4236, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 21.15, |
|
"learning_rate": 1.4764879664301978e-05, |
|
"loss": 3.4278, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 21.18, |
|
"learning_rate": 1.4717836780008657e-05, |
|
"loss": 3.4287, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 21.21, |
|
"learning_rate": 1.4670793895715335e-05, |
|
"loss": 3.4333, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 21.23, |
|
"learning_rate": 1.4623751011422012e-05, |
|
"loss": 3.4288, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 21.26, |
|
"learning_rate": 1.457670812712869e-05, |
|
"loss": 3.4313, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 21.29, |
|
"learning_rate": 1.452966524283537e-05, |
|
"loss": 3.4357, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 21.32, |
|
"learning_rate": 1.4482622358542048e-05, |
|
"loss": 3.4356, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 21.35, |
|
"learning_rate": 1.4435579474248725e-05, |
|
"loss": 3.4386, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 21.37, |
|
"learning_rate": 1.4388536589955403e-05, |
|
"loss": 3.4449, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 21.4, |
|
"learning_rate": 1.4341493705662082e-05, |
|
"loss": 3.4411, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 21.43, |
|
"learning_rate": 1.429445082136876e-05, |
|
"loss": 3.441, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 21.46, |
|
"learning_rate": 1.4247407937075439e-05, |
|
"loss": 3.4424, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 21.49, |
|
"learning_rate": 1.4200365052782116e-05, |
|
"loss": 3.4444, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 21.52, |
|
"learning_rate": 1.4153322168488794e-05, |
|
"loss": 3.4467, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 21.54, |
|
"learning_rate": 1.4106279284195473e-05, |
|
"loss": 3.4413, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 21.57, |
|
"learning_rate": 1.4059236399902152e-05, |
|
"loss": 3.4435, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 21.6, |
|
"learning_rate": 1.4012193515608828e-05, |
|
"loss": 3.4398, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 21.63, |
|
"learning_rate": 1.3965150631315507e-05, |
|
"loss": 3.4444, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 21.66, |
|
"learning_rate": 1.3918107747022186e-05, |
|
"loss": 3.45, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 21.69, |
|
"learning_rate": 1.3871064862728864e-05, |
|
"loss": 3.4467, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 21.71, |
|
"learning_rate": 1.3824021978435541e-05, |
|
"loss": 3.4433, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 21.74, |
|
"learning_rate": 1.377697909414222e-05, |
|
"loss": 3.4472, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 21.77, |
|
"learning_rate": 1.3729936209848898e-05, |
|
"loss": 3.446, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 21.8, |
|
"learning_rate": 1.3682893325555577e-05, |
|
"loss": 3.4479, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 21.83, |
|
"learning_rate": 1.3635850441262255e-05, |
|
"loss": 3.4484, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 21.85, |
|
"learning_rate": 1.3588807556968932e-05, |
|
"loss": 3.4534, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 21.88, |
|
"learning_rate": 1.354176467267561e-05, |
|
"loss": 3.4485, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 21.91, |
|
"learning_rate": 1.349472178838229e-05, |
|
"loss": 3.4465, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 21.94, |
|
"learning_rate": 1.3447678904088968e-05, |
|
"loss": 3.4475, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 21.97, |
|
"learning_rate": 1.3400636019795645e-05, |
|
"loss": 3.4553, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"learning_rate": 1.3353593135502323e-05, |
|
"loss": 3.4502, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"eval_loss": 3.5741803646087646, |
|
"eval_runtime": 861.811, |
|
"eval_samples_per_second": 264.358, |
|
"eval_steps_per_second": 33.046, |
|
"step": 390082 |
|
}, |
|
{ |
|
"epoch": 22.02, |
|
"learning_rate": 1.3306550251209002e-05, |
|
"loss": 3.4106, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 22.05, |
|
"learning_rate": 1.325950736691568e-05, |
|
"loss": 3.4081, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 22.08, |
|
"learning_rate": 1.3212464482622359e-05, |
|
"loss": 3.4116, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 22.11, |
|
"learning_rate": 1.3165421598329036e-05, |
|
"loss": 3.4073, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 22.14, |
|
"learning_rate": 1.3118378714035714e-05, |
|
"loss": 3.4114, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 22.16, |
|
"learning_rate": 1.3071335829742393e-05, |
|
"loss": 3.412, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 22.19, |
|
"learning_rate": 1.3024292945449072e-05, |
|
"loss": 3.4129, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 22.22, |
|
"learning_rate": 1.2977250061155748e-05, |
|
"loss": 3.4169, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 22.25, |
|
"learning_rate": 1.2930207176862427e-05, |
|
"loss": 3.4106, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 22.28, |
|
"learning_rate": 1.2883164292569106e-05, |
|
"loss": 3.4163, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 22.31, |
|
"learning_rate": 1.2836121408275784e-05, |
|
"loss": 3.4197, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 22.33, |
|
"learning_rate": 1.2789078523982461e-05, |
|
"loss": 3.4195, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 22.36, |
|
"learning_rate": 1.274203563968914e-05, |
|
"loss": 3.4128, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 22.39, |
|
"learning_rate": 1.2694992755395818e-05, |
|
"loss": 3.4224, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 22.42, |
|
"learning_rate": 1.2647949871102497e-05, |
|
"loss": 3.422, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 22.45, |
|
"learning_rate": 1.2600906986809175e-05, |
|
"loss": 3.424, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 22.47, |
|
"learning_rate": 1.2553864102515856e-05, |
|
"loss": 3.424, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 22.5, |
|
"learning_rate": 1.2506821218222534e-05, |
|
"loss": 3.4251, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 22.53, |
|
"learning_rate": 1.2459778333929211e-05, |
|
"loss": 3.4241, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 22.56, |
|
"learning_rate": 1.2412735449635888e-05, |
|
"loss": 3.4282, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 22.59, |
|
"learning_rate": 1.2365692565342566e-05, |
|
"loss": 3.4269, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 22.62, |
|
"learning_rate": 1.2318649681049245e-05, |
|
"loss": 3.4258, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 22.64, |
|
"learning_rate": 1.2271606796755924e-05, |
|
"loss": 3.4238, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 22.67, |
|
"learning_rate": 1.22245639124626e-05, |
|
"loss": 3.4293, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"learning_rate": 1.2177521028169279e-05, |
|
"loss": 3.4269, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 22.73, |
|
"learning_rate": 1.2130478143875958e-05, |
|
"loss": 3.424, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 22.76, |
|
"learning_rate": 1.2083435259582636e-05, |
|
"loss": 3.432, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 22.78, |
|
"learning_rate": 1.2036392375289315e-05, |
|
"loss": 3.4304, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 22.81, |
|
"learning_rate": 1.1989349490995992e-05, |
|
"loss": 3.4302, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 22.84, |
|
"learning_rate": 1.194230660670267e-05, |
|
"loss": 3.4301, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 22.87, |
|
"learning_rate": 1.1895263722409349e-05, |
|
"loss": 3.4315, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 22.9, |
|
"learning_rate": 1.1848220838116027e-05, |
|
"loss": 3.4274, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 22.93, |
|
"learning_rate": 1.1801177953822704e-05, |
|
"loss": 3.4335, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 22.95, |
|
"learning_rate": 1.1754135069529383e-05, |
|
"loss": 3.4301, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 22.98, |
|
"learning_rate": 1.1707092185236061e-05, |
|
"loss": 3.4303, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"eval_loss": 3.566288709640503, |
|
"eval_runtime": 859.4108, |
|
"eval_samples_per_second": 265.097, |
|
"eval_steps_per_second": 33.138, |
|
"step": 407813 |
|
}, |
|
{ |
|
"epoch": 23.01, |
|
"learning_rate": 1.166004930094274e-05, |
|
"loss": 3.4179, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 23.04, |
|
"learning_rate": 1.1613006416649418e-05, |
|
"loss": 3.3839, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 23.07, |
|
"learning_rate": 1.1565963532356095e-05, |
|
"loss": 3.3893, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 23.1, |
|
"learning_rate": 1.1518920648062774e-05, |
|
"loss": 3.3931, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 23.12, |
|
"learning_rate": 1.1471877763769452e-05, |
|
"loss": 3.3943, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 23.15, |
|
"learning_rate": 1.1424834879476131e-05, |
|
"loss": 3.396, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 23.18, |
|
"learning_rate": 1.1377791995182808e-05, |
|
"loss": 3.3988, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 23.21, |
|
"learning_rate": 1.1330749110889487e-05, |
|
"loss": 3.3979, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 23.24, |
|
"learning_rate": 1.1283706226596167e-05, |
|
"loss": 3.4025, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 23.26, |
|
"learning_rate": 1.1236663342302844e-05, |
|
"loss": 3.4016, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 23.29, |
|
"learning_rate": 1.1189620458009522e-05, |
|
"loss": 3.4051, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 23.32, |
|
"learning_rate": 1.11425775737162e-05, |
|
"loss": 3.4066, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 23.35, |
|
"learning_rate": 1.109553468942288e-05, |
|
"loss": 3.4032, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 23.38, |
|
"learning_rate": 1.1048491805129558e-05, |
|
"loss": 3.401, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 23.41, |
|
"learning_rate": 1.1001448920836235e-05, |
|
"loss": 3.4062, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 23.43, |
|
"learning_rate": 1.0954406036542913e-05, |
|
"loss": 3.4046, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 23.46, |
|
"learning_rate": 1.0907363152249592e-05, |
|
"loss": 3.403, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 23.49, |
|
"learning_rate": 1.086032026795627e-05, |
|
"loss": 3.4082, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 23.52, |
|
"learning_rate": 1.0813277383662947e-05, |
|
"loss": 3.4062, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 23.55, |
|
"learning_rate": 1.0766234499369626e-05, |
|
"loss": 3.4059, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 23.57, |
|
"learning_rate": 1.0719191615076305e-05, |
|
"loss": 3.4051, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 23.6, |
|
"learning_rate": 1.0672148730782983e-05, |
|
"loss": 3.4084, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 23.63, |
|
"learning_rate": 1.062510584648966e-05, |
|
"loss": 3.4098, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 23.66, |
|
"learning_rate": 1.0578062962196339e-05, |
|
"loss": 3.4129, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 23.69, |
|
"learning_rate": 1.0531020077903017e-05, |
|
"loss": 3.4123, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 23.72, |
|
"learning_rate": 1.0483977193609696e-05, |
|
"loss": 3.4118, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 23.74, |
|
"learning_rate": 1.0436934309316374e-05, |
|
"loss": 3.4061, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 23.77, |
|
"learning_rate": 1.0389891425023051e-05, |
|
"loss": 3.4157, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 23.8, |
|
"learning_rate": 1.034284854072973e-05, |
|
"loss": 3.4088, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 23.83, |
|
"learning_rate": 1.0295805656436408e-05, |
|
"loss": 3.4107, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 23.86, |
|
"learning_rate": 1.0248762772143087e-05, |
|
"loss": 3.414, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 23.88, |
|
"learning_rate": 1.0201719887849764e-05, |
|
"loss": 3.4169, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 23.91, |
|
"learning_rate": 1.0154677003556442e-05, |
|
"loss": 3.4168, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 23.94, |
|
"learning_rate": 1.010763411926312e-05, |
|
"loss": 3.4131, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 23.97, |
|
"learning_rate": 1.00605912349698e-05, |
|
"loss": 3.4162, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"learning_rate": 1.0013548350676476e-05, |
|
"loss": 3.4126, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"eval_loss": 3.560105323791504, |
|
"eval_runtime": 859.9342, |
|
"eval_samples_per_second": 264.935, |
|
"eval_steps_per_second": 33.118, |
|
"step": 425544 |
|
}, |
|
{ |
|
"epoch": 24.03, |
|
"learning_rate": 9.966505466383155e-06, |
|
"loss": 3.3813, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 24.05, |
|
"learning_rate": 9.919462582089833e-06, |
|
"loss": 3.3754, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 24.08, |
|
"learning_rate": 9.872419697796512e-06, |
|
"loss": 3.3793, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 24.11, |
|
"learning_rate": 9.82537681350319e-06, |
|
"loss": 3.38, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 24.14, |
|
"learning_rate": 9.778333929209867e-06, |
|
"loss": 3.3797, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 24.17, |
|
"learning_rate": 9.731291044916546e-06, |
|
"loss": 3.3844, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 24.19, |
|
"learning_rate": 9.684248160623225e-06, |
|
"loss": 3.3853, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 24.22, |
|
"learning_rate": 9.637205276329903e-06, |
|
"loss": 3.3808, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 24.25, |
|
"learning_rate": 9.59016239203658e-06, |
|
"loss": 3.3847, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 24.28, |
|
"learning_rate": 9.543119507743259e-06, |
|
"loss": 3.3879, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 24.31, |
|
"learning_rate": 9.496076623449937e-06, |
|
"loss": 3.3847, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 24.34, |
|
"learning_rate": 9.449033739156616e-06, |
|
"loss": 3.3837, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 24.36, |
|
"learning_rate": 9.401990854863294e-06, |
|
"loss": 3.3863, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 24.39, |
|
"learning_rate": 9.354947970569971e-06, |
|
"loss": 3.3862, |
|
"step": 432500 |
|
}, |
|
{ |
|
"epoch": 24.42, |
|
"learning_rate": 9.30790508627665e-06, |
|
"loss": 3.3882, |
|
"step": 433000 |
|
}, |
|
{ |
|
"epoch": 24.45, |
|
"learning_rate": 9.260862201983328e-06, |
|
"loss": 3.3904, |
|
"step": 433500 |
|
}, |
|
{ |
|
"epoch": 24.48, |
|
"learning_rate": 9.213819317690007e-06, |
|
"loss": 3.3907, |
|
"step": 434000 |
|
}, |
|
{ |
|
"epoch": 24.51, |
|
"learning_rate": 9.166776433396684e-06, |
|
"loss": 3.3888, |
|
"step": 434500 |
|
}, |
|
{ |
|
"epoch": 24.53, |
|
"learning_rate": 9.119733549103362e-06, |
|
"loss": 3.3882, |
|
"step": 435000 |
|
}, |
|
{ |
|
"epoch": 24.56, |
|
"learning_rate": 9.072690664810041e-06, |
|
"loss": 3.3944, |
|
"step": 435500 |
|
}, |
|
{ |
|
"epoch": 24.59, |
|
"learning_rate": 9.02564778051672e-06, |
|
"loss": 3.4002, |
|
"step": 436000 |
|
}, |
|
{ |
|
"epoch": 24.62, |
|
"learning_rate": 8.978604896223396e-06, |
|
"loss": 3.3936, |
|
"step": 436500 |
|
}, |
|
{ |
|
"epoch": 24.65, |
|
"learning_rate": 8.931562011930075e-06, |
|
"loss": 3.3935, |
|
"step": 437000 |
|
}, |
|
{ |
|
"epoch": 24.67, |
|
"learning_rate": 8.884519127636753e-06, |
|
"loss": 3.3914, |
|
"step": 437500 |
|
}, |
|
{ |
|
"epoch": 24.7, |
|
"learning_rate": 8.837476243343432e-06, |
|
"loss": 3.3971, |
|
"step": 438000 |
|
}, |
|
{ |
|
"epoch": 24.73, |
|
"learning_rate": 8.79043335905011e-06, |
|
"loss": 3.3956, |
|
"step": 438500 |
|
}, |
|
{ |
|
"epoch": 24.76, |
|
"learning_rate": 8.74339047475679e-06, |
|
"loss": 3.3923, |
|
"step": 439000 |
|
}, |
|
{ |
|
"epoch": 24.79, |
|
"learning_rate": 8.696347590463468e-06, |
|
"loss": 3.393, |
|
"step": 439500 |
|
}, |
|
{ |
|
"epoch": 24.82, |
|
"learning_rate": 8.649304706170146e-06, |
|
"loss": 3.3975, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 24.84, |
|
"learning_rate": 8.602261821876823e-06, |
|
"loss": 3.3979, |
|
"step": 440500 |
|
}, |
|
{ |
|
"epoch": 24.87, |
|
"learning_rate": 8.555218937583502e-06, |
|
"loss": 3.3958, |
|
"step": 441000 |
|
}, |
|
{ |
|
"epoch": 24.9, |
|
"learning_rate": 8.50817605329018e-06, |
|
"loss": 3.4036, |
|
"step": 441500 |
|
}, |
|
{ |
|
"epoch": 24.93, |
|
"learning_rate": 8.461133168996859e-06, |
|
"loss": 3.4009, |
|
"step": 442000 |
|
}, |
|
{ |
|
"epoch": 24.96, |
|
"learning_rate": 8.414090284703536e-06, |
|
"loss": 3.3963, |
|
"step": 442500 |
|
}, |
|
{ |
|
"epoch": 24.98, |
|
"learning_rate": 8.367047400410214e-06, |
|
"loss": 3.3982, |
|
"step": 443000 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"eval_loss": 3.5556538105010986, |
|
"eval_runtime": 860.2845, |
|
"eval_samples_per_second": 264.828, |
|
"eval_steps_per_second": 33.104, |
|
"step": 443275 |
|
}, |
|
{ |
|
"epoch": 25.01, |
|
"learning_rate": 8.320004516116893e-06, |
|
"loss": 3.3834, |
|
"step": 443500 |
|
}, |
|
{ |
|
"epoch": 25.04, |
|
"learning_rate": 8.272961631823571e-06, |
|
"loss": 3.365, |
|
"step": 444000 |
|
}, |
|
{ |
|
"epoch": 25.07, |
|
"learning_rate": 8.22591874753025e-06, |
|
"loss": 3.3641, |
|
"step": 444500 |
|
}, |
|
{ |
|
"epoch": 25.1, |
|
"learning_rate": 8.178875863236927e-06, |
|
"loss": 3.3643, |
|
"step": 445000 |
|
}, |
|
{ |
|
"epoch": 25.13, |
|
"learning_rate": 8.131832978943605e-06, |
|
"loss": 3.3723, |
|
"step": 445500 |
|
}, |
|
{ |
|
"epoch": 25.15, |
|
"learning_rate": 8.084790094650284e-06, |
|
"loss": 3.3711, |
|
"step": 446000 |
|
}, |
|
{ |
|
"epoch": 25.18, |
|
"learning_rate": 8.037747210356963e-06, |
|
"loss": 3.3663, |
|
"step": 446500 |
|
}, |
|
{ |
|
"epoch": 25.21, |
|
"learning_rate": 7.99070432606364e-06, |
|
"loss": 3.368, |
|
"step": 447000 |
|
}, |
|
{ |
|
"epoch": 25.24, |
|
"learning_rate": 7.943661441770318e-06, |
|
"loss": 3.3691, |
|
"step": 447500 |
|
}, |
|
{ |
|
"epoch": 25.27, |
|
"learning_rate": 7.896618557476997e-06, |
|
"loss": 3.3733, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 25.29, |
|
"learning_rate": 7.849575673183675e-06, |
|
"loss": 3.3713, |
|
"step": 448500 |
|
}, |
|
{ |
|
"epoch": 25.32, |
|
"learning_rate": 7.802532788890352e-06, |
|
"loss": 3.3695, |
|
"step": 449000 |
|
}, |
|
{ |
|
"epoch": 25.35, |
|
"learning_rate": 7.75548990459703e-06, |
|
"loss": 3.3751, |
|
"step": 449500 |
|
}, |
|
{ |
|
"epoch": 25.38, |
|
"learning_rate": 7.70844702030371e-06, |
|
"loss": 3.3769, |
|
"step": 450000 |
|
}, |
|
{ |
|
"epoch": 25.41, |
|
"learning_rate": 7.661404136010388e-06, |
|
"loss": 3.3783, |
|
"step": 450500 |
|
}, |
|
{ |
|
"epoch": 25.44, |
|
"learning_rate": 7.6143612517170655e-06, |
|
"loss": 3.3743, |
|
"step": 451000 |
|
}, |
|
{ |
|
"epoch": 25.46, |
|
"learning_rate": 7.567318367423744e-06, |
|
"loss": 3.375, |
|
"step": 451500 |
|
}, |
|
{ |
|
"epoch": 25.49, |
|
"learning_rate": 7.520275483130422e-06, |
|
"loss": 3.3749, |
|
"step": 452000 |
|
}, |
|
{ |
|
"epoch": 25.52, |
|
"learning_rate": 7.4732325988371e-06, |
|
"loss": 3.377, |
|
"step": 452500 |
|
}, |
|
{ |
|
"epoch": 25.55, |
|
"learning_rate": 7.426189714543778e-06, |
|
"loss": 3.3709, |
|
"step": 453000 |
|
}, |
|
{ |
|
"epoch": 25.58, |
|
"learning_rate": 7.379146830250457e-06, |
|
"loss": 3.3807, |
|
"step": 453500 |
|
}, |
|
{ |
|
"epoch": 25.6, |
|
"learning_rate": 7.332103945957134e-06, |
|
"loss": 3.3785, |
|
"step": 454000 |
|
}, |
|
{ |
|
"epoch": 25.63, |
|
"learning_rate": 7.285061061663813e-06, |
|
"loss": 3.3809, |
|
"step": 454500 |
|
}, |
|
{ |
|
"epoch": 25.66, |
|
"learning_rate": 7.238018177370491e-06, |
|
"loss": 3.379, |
|
"step": 455000 |
|
}, |
|
{ |
|
"epoch": 25.69, |
|
"learning_rate": 7.190975293077169e-06, |
|
"loss": 3.3808, |
|
"step": 455500 |
|
}, |
|
{ |
|
"epoch": 25.72, |
|
"learning_rate": 7.143932408783848e-06, |
|
"loss": 3.3849, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 25.75, |
|
"learning_rate": 7.0968895244905256e-06, |
|
"loss": 3.3845, |
|
"step": 456500 |
|
}, |
|
{ |
|
"epoch": 25.77, |
|
"learning_rate": 7.049846640197204e-06, |
|
"loss": 3.3793, |
|
"step": 457000 |
|
}, |
|
{ |
|
"epoch": 25.8, |
|
"learning_rate": 7.002803755903882e-06, |
|
"loss": 3.3838, |
|
"step": 457500 |
|
}, |
|
{ |
|
"epoch": 25.83, |
|
"learning_rate": 6.95576087161056e-06, |
|
"loss": 3.3796, |
|
"step": 458000 |
|
}, |
|
{ |
|
"epoch": 25.86, |
|
"learning_rate": 6.908717987317238e-06, |
|
"loss": 3.3828, |
|
"step": 458500 |
|
}, |
|
{ |
|
"epoch": 25.89, |
|
"learning_rate": 6.861675103023917e-06, |
|
"loss": 3.3845, |
|
"step": 459000 |
|
}, |
|
{ |
|
"epoch": 25.92, |
|
"learning_rate": 6.8146322187305944e-06, |
|
"loss": 3.382, |
|
"step": 459500 |
|
}, |
|
{ |
|
"epoch": 25.94, |
|
"learning_rate": 6.767589334437273e-06, |
|
"loss": 3.3828, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 25.97, |
|
"learning_rate": 6.720546450143951e-06, |
|
"loss": 3.3774, |
|
"step": 460500 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"learning_rate": 6.673503565850629e-06, |
|
"loss": 3.3832, |
|
"step": 461000 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"eval_loss": 3.550107717514038, |
|
"eval_runtime": 859.3358, |
|
"eval_samples_per_second": 265.12, |
|
"eval_steps_per_second": 33.141, |
|
"step": 461006 |
|
}, |
|
    {
      "epoch": 26.03,
      "learning_rate": 6.626460681557307e-06,
      "loss": 3.3516,
      "step": 461500
    },
    {
      "epoch": 26.06,
      "learning_rate": 6.579417797263986e-06,
      "loss": 3.3521,
      "step": 462000
    },
    {
      "epoch": 26.08,
      "learning_rate": 6.532374912970664e-06,
      "loss": 3.3533,
      "step": 462500
    },
    {
      "epoch": 26.11,
      "learning_rate": 6.485332028677342e-06,
      "loss": 3.3493,
      "step": 463000
    },
    {
      "epoch": 26.14,
      "learning_rate": 6.4382891443840204e-06,
      "loss": 3.3576,
      "step": 463500
    },
    {
      "epoch": 26.17,
      "learning_rate": 6.391246260090698e-06,
      "loss": 3.3533,
      "step": 464000
    },
    {
      "epoch": 26.2,
      "learning_rate": 6.344203375797377e-06,
      "loss": 3.3592,
      "step": 464500
    },
    {
      "epoch": 26.23,
      "learning_rate": 6.297160491504056e-06,
      "loss": 3.3543,
      "step": 465000
    },
    {
      "epoch": 26.25,
      "learning_rate": 6.250117607210734e-06,
      "loss": 3.3571,
      "step": 465500
    },
    {
      "epoch": 26.28,
      "learning_rate": 6.203074722917412e-06,
      "loss": 3.361,
      "step": 466000
    },
    {
      "epoch": 26.31,
      "learning_rate": 6.15603183862409e-06,
      "loss": 3.358,
      "step": 466500
    },
    {
      "epoch": 26.34,
      "learning_rate": 6.108988954330768e-06,
      "loss": 3.3632,
      "step": 467000
    },
    {
      "epoch": 26.37,
      "learning_rate": 6.0619460700374465e-06,
      "loss": 3.3561,
      "step": 467500
    },
    {
      "epoch": 26.39,
      "learning_rate": 6.014903185744124e-06,
      "loss": 3.3647,
      "step": 468000
    },
    {
      "epoch": 26.42,
      "learning_rate": 5.967860301450803e-06,
      "loss": 3.3581,
      "step": 468500
    },
    {
      "epoch": 26.45,
      "learning_rate": 5.9208174171574805e-06,
      "loss": 3.3651,
      "step": 469000
    },
    {
      "epoch": 26.48,
      "learning_rate": 5.873774532864159e-06,
      "loss": 3.364,
      "step": 469500
    },
    {
      "epoch": 26.51,
      "learning_rate": 5.826731648570837e-06,
      "loss": 3.3621,
      "step": 470000
    },
    {
      "epoch": 26.54,
      "learning_rate": 5.779688764277515e-06,
      "loss": 3.3669,
      "step": 470500
    },
    {
      "epoch": 26.56,
      "learning_rate": 5.732645879984194e-06,
      "loss": 3.3607,
      "step": 471000
    },
    {
      "epoch": 26.59,
      "learning_rate": 5.6856029956908725e-06,
      "loss": 3.3613,
      "step": 471500
    },
    {
      "epoch": 26.62,
      "learning_rate": 5.63856011139755e-06,
      "loss": 3.3672,
      "step": 472000
    },
    {
      "epoch": 26.65,
      "learning_rate": 5.591517227104229e-06,
      "loss": 3.3666,
      "step": 472500
    },
    {
      "epoch": 26.68,
      "learning_rate": 5.544474342810907e-06,
      "loss": 3.3653,
      "step": 473000
    },
    {
      "epoch": 26.7,
      "learning_rate": 5.497431458517585e-06,
      "loss": 3.3717,
      "step": 473500
    },
    {
      "epoch": 26.73,
      "learning_rate": 5.450388574224264e-06,
      "loss": 3.3701,
      "step": 474000
    },
    {
      "epoch": 26.76,
      "learning_rate": 5.403345689930941e-06,
      "loss": 3.364,
      "step": 474500
    },
    {
      "epoch": 26.79,
      "learning_rate": 5.35630280563762e-06,
      "loss": 3.3687,
      "step": 475000
    },
    {
      "epoch": 26.82,
      "learning_rate": 5.309259921344298e-06,
      "loss": 3.365,
      "step": 475500
    },
    {
      "epoch": 26.85,
      "learning_rate": 5.262217037050976e-06,
      "loss": 3.3683,
      "step": 476000
    },
    {
      "epoch": 26.87,
      "learning_rate": 5.215174152757654e-06,
      "loss": 3.3723,
      "step": 476500
    },
    {
      "epoch": 26.9,
      "learning_rate": 5.1681312684643325e-06,
      "loss": 3.3718,
      "step": 477000
    },
    {
      "epoch": 26.93,
      "learning_rate": 5.12108838417101e-06,
      "loss": 3.3684,
      "step": 477500
    },
    {
      "epoch": 26.96,
      "learning_rate": 5.074045499877689e-06,
      "loss": 3.3694,
      "step": 478000
    },
    {
      "epoch": 26.99,
      "learning_rate": 5.0270026155843665e-06,
      "loss": 3.3695,
      "step": 478500
    },
    {
      "epoch": 27.0,
      "eval_loss": 3.5464906692504883,
      "eval_runtime": 861.0727,
      "eval_samples_per_second": 264.585,
      "eval_steps_per_second": 33.074,
      "step": 478737
    },
    {
      "epoch": 27.01,
      "learning_rate": 4.979959731291045e-06,
      "loss": 3.357,
      "step": 479000
    },
    {
      "epoch": 27.04,
      "learning_rate": 4.932916846997724e-06,
      "loss": 3.3454,
      "step": 479500
    },
    {
      "epoch": 27.07,
      "learning_rate": 4.885873962704401e-06,
      "loss": 3.3438,
      "step": 480000
    },
    {
      "epoch": 27.1,
      "learning_rate": 4.83883107841108e-06,
      "loss": 3.3425,
      "step": 480500
    },
    {
      "epoch": 27.13,
      "learning_rate": 4.791788194117758e-06,
      "loss": 3.3451,
      "step": 481000
    },
    {
      "epoch": 27.16,
      "learning_rate": 4.744745309824436e-06,
      "loss": 3.3429,
      "step": 481500
    },
    {
      "epoch": 27.18,
      "learning_rate": 4.697702425531114e-06,
      "loss": 3.3475,
      "step": 482000
    },
    {
      "epoch": 27.21,
      "learning_rate": 4.6506595412377925e-06,
      "loss": 3.3515,
      "step": 482500
    },
    {
      "epoch": 27.24,
      "learning_rate": 4.60361665694447e-06,
      "loss": 3.3481,
      "step": 483000
    },
    {
      "epoch": 27.27,
      "learning_rate": 4.556573772651149e-06,
      "loss": 3.3485,
      "step": 483500
    },
    {
      "epoch": 27.3,
      "learning_rate": 4.5095308883578265e-06,
      "loss": 3.3448,
      "step": 484000
    },
    {
      "epoch": 27.33,
      "learning_rate": 4.462488004064506e-06,
      "loss": 3.347,
      "step": 484500
    },
    {
      "epoch": 27.35,
      "learning_rate": 4.415445119771184e-06,
      "loss": 3.3505,
      "step": 485000
    },
    {
      "epoch": 27.38,
      "learning_rate": 4.368402235477862e-06,
      "loss": 3.3507,
      "step": 485500
    },
    {
      "epoch": 27.41,
      "learning_rate": 4.32135935118454e-06,
      "loss": 3.3483,
      "step": 486000
    },
    {
      "epoch": 27.44,
      "learning_rate": 4.2743164668912185e-06,
      "loss": 3.3499,
      "step": 486500
    },
    {
      "epoch": 27.47,
      "learning_rate": 4.227273582597896e-06,
      "loss": 3.3531,
      "step": 487000
    },
    {
      "epoch": 27.49,
      "learning_rate": 4.180230698304575e-06,
      "loss": 3.3495,
      "step": 487500
    },
    {
      "epoch": 27.52,
      "learning_rate": 4.133187814011253e-06,
      "loss": 3.3527,
      "step": 488000
    },
    {
      "epoch": 27.55,
      "learning_rate": 4.086144929717931e-06,
      "loss": 3.3534,
      "step": 488500
    },
    {
      "epoch": 27.58,
      "learning_rate": 4.03910204542461e-06,
      "loss": 3.3518,
      "step": 489000
    },
    {
      "epoch": 27.61,
      "learning_rate": 3.992059161131287e-06,
      "loss": 3.352,
      "step": 489500
    },
    {
      "epoch": 27.64,
      "learning_rate": 3.945016276837966e-06,
      "loss": 3.3569,
      "step": 490000
    },
    {
      "epoch": 27.66,
      "learning_rate": 3.897973392544644e-06,
      "loss": 3.3542,
      "step": 490500
    },
    {
      "epoch": 27.69,
      "learning_rate": 3.850930508251322e-06,
      "loss": 3.352,
      "step": 491000
    },
    {
      "epoch": 27.72,
      "learning_rate": 3.8038876239580004e-06,
      "loss": 3.3476,
      "step": 491500
    },
    {
      "epoch": 27.75,
      "learning_rate": 3.7568447396646786e-06,
      "loss": 3.3581,
      "step": 492000
    },
    {
      "epoch": 27.78,
      "learning_rate": 3.7098018553713567e-06,
      "loss": 3.3593,
      "step": 492500
    },
    {
      "epoch": 27.8,
      "learning_rate": 3.662758971078035e-06,
      "loss": 3.3519,
      "step": 493000
    },
    {
      "epoch": 27.83,
      "learning_rate": 3.615716086784713e-06,
      "loss": 3.3515,
      "step": 493500
    },
    {
      "epoch": 27.86,
      "learning_rate": 3.568673202491391e-06,
      "loss": 3.3579,
      "step": 494000
    },
    {
      "epoch": 27.89,
      "learning_rate": 3.5216303181980693e-06,
      "loss": 3.3567,
      "step": 494500
    },
    {
      "epoch": 27.92,
      "learning_rate": 3.4745874339047474e-06,
      "loss": 3.3516,
      "step": 495000
    },
    {
      "epoch": 27.95,
      "learning_rate": 3.4275445496114256e-06,
      "loss": 3.3578,
      "step": 495500
    },
    {
      "epoch": 27.97,
      "learning_rate": 3.3805016653181037e-06,
      "loss": 3.3517,
      "step": 496000
    },
    {
      "epoch": 28.0,
      "eval_loss": 3.542843818664551,
      "eval_runtime": 859.162,
      "eval_samples_per_second": 265.174,
      "eval_steps_per_second": 33.147,
      "step": 496468
    },
    {
      "epoch": 28.0,
      "learning_rate": 3.333458781024782e-06,
      "loss": 3.3555,
      "step": 496500
    },
    {
      "epoch": 28.03,
      "learning_rate": 3.2864158967314604e-06,
      "loss": 3.3329,
      "step": 497000
    },
    {
      "epoch": 28.06,
      "learning_rate": 3.2393730124381386e-06,
      "loss": 3.3364,
      "step": 497500
    },
    {
      "epoch": 28.09,
      "learning_rate": 3.192330128144817e-06,
      "loss": 3.3337,
      "step": 498000
    },
    {
      "epoch": 28.11,
      "learning_rate": 3.1452872438514953e-06,
      "loss": 3.34,
      "step": 498500
    },
    {
      "epoch": 28.14,
      "learning_rate": 3.0982443595581734e-06,
      "loss": 3.3379,
      "step": 499000
    },
    {
      "epoch": 28.17,
      "learning_rate": 3.0512014752648516e-06,
      "loss": 3.3386,
      "step": 499500
    },
    {
      "epoch": 28.2,
      "learning_rate": 3.0041585909715297e-06,
      "loss": 3.335,
      "step": 500000
    },
    {
      "epoch": 28.23,
      "learning_rate": 2.957115706678208e-06,
      "loss": 3.3351,
      "step": 500500
    },
    {
      "epoch": 28.26,
      "learning_rate": 2.910072822384886e-06,
      "loss": 3.3398,
      "step": 501000
    },
    {
      "epoch": 28.28,
      "learning_rate": 2.8630299380915646e-06,
      "loss": 3.3323,
      "step": 501500
    },
    {
      "epoch": 28.31,
      "learning_rate": 2.8159870537982427e-06,
      "loss": 3.339,
      "step": 502000
    },
    {
      "epoch": 28.34,
      "learning_rate": 2.768944169504921e-06,
      "loss": 3.3374,
      "step": 502500
    },
    {
      "epoch": 28.37,
      "learning_rate": 2.721901285211599e-06,
      "loss": 3.3367,
      "step": 503000
    },
    {
      "epoch": 28.4,
      "learning_rate": 2.674858400918277e-06,
      "loss": 3.3337,
      "step": 503500
    },
    {
      "epoch": 28.42,
      "learning_rate": 2.6278155166249553e-06,
      "loss": 3.3371,
      "step": 504000
    },
    {
      "epoch": 28.45,
      "learning_rate": 2.5807726323316335e-06,
      "loss": 3.3464,
      "step": 504500
    },
    {
      "epoch": 28.48,
      "learning_rate": 2.5337297480383116e-06,
      "loss": 3.3459,
      "step": 505000
    },
    {
      "epoch": 28.51,
      "learning_rate": 2.48668686374499e-06,
      "loss": 3.344,
      "step": 505500
    },
    {
      "epoch": 28.54,
      "learning_rate": 2.4396439794516683e-06,
      "loss": 3.3355,
      "step": 506000
    },
    {
      "epoch": 28.57,
      "learning_rate": 2.3926010951583465e-06,
      "loss": 3.3448,
      "step": 506500
    },
    {
      "epoch": 28.59,
      "learning_rate": 2.3455582108650246e-06,
      "loss": 3.343,
      "step": 507000
    },
    {
      "epoch": 28.62,
      "learning_rate": 2.2985153265717028e-06,
      "loss": 3.3461,
      "step": 507500
    },
    {
      "epoch": 28.65,
      "learning_rate": 2.2514724422783813e-06,
      "loss": 3.3407,
      "step": 508000
    },
    {
      "epoch": 28.68,
      "learning_rate": 2.2044295579850595e-06,
      "loss": 3.3378,
      "step": 508500
    },
    {
      "epoch": 28.71,
      "learning_rate": 2.1573866736917376e-06,
      "loss": 3.3437,
      "step": 509000
    },
    {
      "epoch": 28.73,
      "learning_rate": 2.1103437893984158e-06,
      "loss": 3.3464,
      "step": 509500
    },
    {
      "epoch": 28.76,
      "learning_rate": 2.063300905105094e-06,
      "loss": 3.3434,
      "step": 510000
    },
    {
      "epoch": 28.79,
      "learning_rate": 2.016258020811772e-06,
      "loss": 3.3374,
      "step": 510500
    },
    {
      "epoch": 28.82,
      "learning_rate": 1.9692151365184502e-06,
      "loss": 3.3488,
      "step": 511000
    },
    {
      "epoch": 28.85,
      "learning_rate": 1.9221722522251284e-06,
      "loss": 3.3462,
      "step": 511500
    },
    {
      "epoch": 28.88,
      "learning_rate": 1.8751293679318067e-06,
      "loss": 3.3398,
      "step": 512000
    },
    {
      "epoch": 28.9,
      "learning_rate": 1.8280864836384849e-06,
      "loss": 3.3429,
      "step": 512500
    },
    {
      "epoch": 28.93,
      "learning_rate": 1.781043599345163e-06,
      "loss": 3.3382,
      "step": 513000
    },
    {
      "epoch": 28.96,
      "learning_rate": 1.7340007150518412e-06,
      "loss": 3.3414,
      "step": 513500
    },
    {
      "epoch": 28.99,
      "learning_rate": 1.6869578307585193e-06,
      "loss": 3.3375,
      "step": 514000
    },
    {
      "epoch": 29.0,
      "eval_loss": 3.540179967880249,
      "eval_runtime": 860.1608,
      "eval_samples_per_second": 264.866,
      "eval_steps_per_second": 33.109,
      "step": 514199
    },
    {
      "epoch": 29.02,
      "learning_rate": 1.6399149464651979e-06,
      "loss": 3.336,
      "step": 514500
    },
    {
      "epoch": 29.05,
      "learning_rate": 1.592872062171876e-06,
      "loss": 3.3316,
      "step": 515000
    },
    {
      "epoch": 29.07,
      "learning_rate": 1.5458291778785542e-06,
      "loss": 3.3263,
      "step": 515500
    },
    {
      "epoch": 29.1,
      "learning_rate": 1.4987862935852325e-06,
      "loss": 3.3269,
      "step": 516000
    },
    {
      "epoch": 29.13,
      "learning_rate": 1.4517434092919107e-06,
      "loss": 3.3329,
      "step": 516500
    },
    {
      "epoch": 29.16,
      "learning_rate": 1.4047005249985888e-06,
      "loss": 3.3321,
      "step": 517000
    },
    {
      "epoch": 29.19,
      "learning_rate": 1.357657640705267e-06,
      "loss": 3.3296,
      "step": 517500
    },
    {
      "epoch": 29.21,
      "learning_rate": 1.310614756411945e-06,
      "loss": 3.3256,
      "step": 518000
    },
    {
      "epoch": 29.24,
      "learning_rate": 1.2635718721186233e-06,
      "loss": 3.3269,
      "step": 518500
    },
    {
      "epoch": 29.27,
      "learning_rate": 1.2165289878253016e-06,
      "loss": 3.33,
      "step": 519000
    },
    {
      "epoch": 29.3,
      "learning_rate": 1.16948610353198e-06,
      "loss": 3.3332,
      "step": 519500
    },
    {
      "epoch": 29.33,
      "learning_rate": 1.1224432192386581e-06,
      "loss": 3.3308,
      "step": 520000
    },
    {
      "epoch": 29.36,
      "learning_rate": 1.0754003349453363e-06,
      "loss": 3.3305,
      "step": 520500
    },
    {
      "epoch": 29.38,
      "learning_rate": 1.0283574506520144e-06,
      "loss": 3.3304,
      "step": 521000
    },
    {
      "epoch": 29.41,
      "learning_rate": 9.813145663586925e-07,
      "loss": 3.3361,
      "step": 521500
    },
    {
      "epoch": 29.44,
      "learning_rate": 9.342716820653708e-07,
      "loss": 3.3296,
      "step": 522000
    },
    {
      "epoch": 29.47,
      "learning_rate": 8.872287977720489e-07,
      "loss": 3.3292,
      "step": 522500
    },
    {
      "epoch": 29.5,
      "learning_rate": 8.401859134787273e-07,
      "loss": 3.329,
      "step": 523000
    },
    {
      "epoch": 29.52,
      "learning_rate": 7.931430291854055e-07,
      "loss": 3.3334,
      "step": 523500
    },
    {
      "epoch": 29.55,
      "learning_rate": 7.461001448920837e-07,
      "loss": 3.3317,
      "step": 524000
    },
    {
      "epoch": 29.58,
      "learning_rate": 6.990572605987618e-07,
      "loss": 3.3288,
      "step": 524500
    },
    {
      "epoch": 29.61,
      "learning_rate": 6.5201437630544e-07,
      "loss": 3.3311,
      "step": 525000
    },
    {
      "epoch": 29.64,
      "learning_rate": 6.049714920121182e-07,
      "loss": 3.3321,
      "step": 525500
    },
    {
      "epoch": 29.67,
      "learning_rate": 5.579286077187965e-07,
      "loss": 3.3318,
      "step": 526000
    },
    {
      "epoch": 29.69,
      "learning_rate": 5.108857234254746e-07,
      "loss": 3.3331,
      "step": 526500
    },
    {
      "epoch": 29.72,
      "learning_rate": 4.6384283913215295e-07,
      "loss": 3.3286,
      "step": 527000
    },
    {
      "epoch": 29.75,
      "learning_rate": 4.167999548388311e-07,
      "loss": 3.3293,
      "step": 527500
    },
    {
      "epoch": 29.78,
      "learning_rate": 3.6975707054550934e-07,
      "loss": 3.3342,
      "step": 528000
    },
    {
      "epoch": 29.81,
      "learning_rate": 3.227141862521875e-07,
      "loss": 3.3296,
      "step": 528500
    },
    {
      "epoch": 29.83,
      "learning_rate": 2.7567130195886574e-07,
      "loss": 3.3293,
      "step": 529000
    },
    {
      "epoch": 29.86,
      "learning_rate": 2.2862841766554391e-07,
      "loss": 3.3345,
      "step": 529500
    },
    {
      "epoch": 29.89,
      "learning_rate": 1.8158553337222214e-07,
      "loss": 3.329,
      "step": 530000
    }
  ],
  "max_steps": 531930,
  "num_train_epochs": 30,
  "total_flos": 2.215731804585984e+18,
  "trial_name": null,
  "trial_params": null
}