{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 1624, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0006157635467980296, |
|
"grad_norm": 7.724584736238262, |
|
"learning_rate": 1.8404907975460124e-08, |
|
"loss": 0.9586, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.003078817733990148, |
|
"grad_norm": 8.79591767451402, |
|
"learning_rate": 9.202453987730061e-08, |
|
"loss": 1.0017, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.006157635467980296, |
|
"grad_norm": 7.712094187091605, |
|
"learning_rate": 1.8404907975460122e-07, |
|
"loss": 1.0138, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.009236453201970444, |
|
"grad_norm": 6.686984831397533, |
|
"learning_rate": 2.7607361963190183e-07, |
|
"loss": 0.9935, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.012315270935960592, |
|
"grad_norm": 2.9705498717633567, |
|
"learning_rate": 3.6809815950920245e-07, |
|
"loss": 0.9693, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01539408866995074, |
|
"grad_norm": 5.230925406141613, |
|
"learning_rate": 4.601226993865031e-07, |
|
"loss": 0.9517, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01847290640394089, |
|
"grad_norm": 2.0252746766726393, |
|
"learning_rate": 5.521472392638037e-07, |
|
"loss": 0.9302, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.021551724137931036, |
|
"grad_norm": 3.420398960371135, |
|
"learning_rate": 6.441717791411044e-07, |
|
"loss": 0.917, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.024630541871921183, |
|
"grad_norm": 2.4920184562580787, |
|
"learning_rate": 7.361963190184049e-07, |
|
"loss": 0.9173, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02770935960591133, |
|
"grad_norm": 1.7523436431972144, |
|
"learning_rate": 8.282208588957056e-07, |
|
"loss": 0.8939, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03078817733990148, |
|
"grad_norm": 1.5172341196305177, |
|
"learning_rate": 9.202453987730062e-07, |
|
"loss": 0.9125, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.033866995073891626, |
|
"grad_norm": 1.2637609259029952, |
|
"learning_rate": 1.0122699386503068e-06, |
|
"loss": 0.8659, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03694581280788178, |
|
"grad_norm": 1.405211777798568, |
|
"learning_rate": 1.1042944785276073e-06, |
|
"loss": 0.8606, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04002463054187192, |
|
"grad_norm": 1.400488357062075, |
|
"learning_rate": 1.1963190184049078e-06, |
|
"loss": 0.8914, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.04310344827586207, |
|
"grad_norm": 1.3965560372062285, |
|
"learning_rate": 1.2883435582822088e-06, |
|
"loss": 0.8638, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.046182266009852216, |
|
"grad_norm": 1.397481181493605, |
|
"learning_rate": 1.3803680981595093e-06, |
|
"loss": 0.8872, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.04926108374384237, |
|
"grad_norm": 1.3920893415057805, |
|
"learning_rate": 1.4723926380368098e-06, |
|
"loss": 0.8637, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05233990147783251, |
|
"grad_norm": 1.4291285911903127, |
|
"learning_rate": 1.5644171779141105e-06, |
|
"loss": 0.8743, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.05541871921182266, |
|
"grad_norm": 1.3542212432257583, |
|
"learning_rate": 1.6564417177914112e-06, |
|
"loss": 0.8523, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.058497536945812806, |
|
"grad_norm": 1.4178159427780204, |
|
"learning_rate": 1.7484662576687115e-06, |
|
"loss": 0.8696, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.06157635467980296, |
|
"grad_norm": 1.3221606943995998, |
|
"learning_rate": 1.8404907975460124e-06, |
|
"loss": 0.8772, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.06465517241379311, |
|
"grad_norm": 1.4394077687183169, |
|
"learning_rate": 1.932515337423313e-06, |
|
"loss": 0.8695, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.06773399014778325, |
|
"grad_norm": 1.3808456562566545, |
|
"learning_rate": 2.0245398773006137e-06, |
|
"loss": 0.8829, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.0708128078817734, |
|
"grad_norm": 1.4593385524523184, |
|
"learning_rate": 2.1165644171779144e-06, |
|
"loss": 0.8715, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.07389162561576355, |
|
"grad_norm": 1.4179679608588616, |
|
"learning_rate": 2.2085889570552147e-06, |
|
"loss": 0.8614, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.0769704433497537, |
|
"grad_norm": 1.4261678544854806, |
|
"learning_rate": 2.3006134969325154e-06, |
|
"loss": 0.8509, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.08004926108374384, |
|
"grad_norm": 1.5460976196498386, |
|
"learning_rate": 2.3926380368098157e-06, |
|
"loss": 0.8593, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.08312807881773399, |
|
"grad_norm": 1.481503657824365, |
|
"learning_rate": 2.4846625766871164e-06, |
|
"loss": 0.8706, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.08620689655172414, |
|
"grad_norm": 1.4651088227604514, |
|
"learning_rate": 2.5766871165644175e-06, |
|
"loss": 0.8782, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.08928571428571429, |
|
"grad_norm": 1.4218537682816195, |
|
"learning_rate": 2.668711656441718e-06, |
|
"loss": 0.858, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.09236453201970443, |
|
"grad_norm": 1.5217714939469515, |
|
"learning_rate": 2.7607361963190186e-06, |
|
"loss": 0.858, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.09544334975369458, |
|
"grad_norm": 1.4463329417962474, |
|
"learning_rate": 2.852760736196319e-06, |
|
"loss": 0.8901, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.09852216748768473, |
|
"grad_norm": 1.2738127091018583, |
|
"learning_rate": 2.9447852760736196e-06, |
|
"loss": 0.851, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.10160098522167488, |
|
"grad_norm": 1.4212078637635643, |
|
"learning_rate": 2.999986128612664e-06, |
|
"loss": 0.8679, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.10467980295566502, |
|
"grad_norm": 1.4528112933674504, |
|
"learning_rate": 2.9998300784514776e-06, |
|
"loss": 0.8659, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.10775862068965517, |
|
"grad_norm": 1.3669524004218505, |
|
"learning_rate": 2.99950065699352e-06, |
|
"loss": 0.8814, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.11083743842364532, |
|
"grad_norm": 1.2653588333266155, |
|
"learning_rate": 2.9989979023179235e-06, |
|
"loss": 0.8585, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.11391625615763547, |
|
"grad_norm": 1.29184019357291, |
|
"learning_rate": 2.9983218725400893e-06, |
|
"loss": 0.8508, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.11699507389162561, |
|
"grad_norm": 1.2992358486439284, |
|
"learning_rate": 2.9974726458049776e-06, |
|
"loss": 0.8633, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.12007389162561577, |
|
"grad_norm": 1.250920595649257, |
|
"learning_rate": 2.9964503202780676e-06, |
|
"loss": 0.8468, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.12315270935960591, |
|
"grad_norm": 1.3222082464366094, |
|
"learning_rate": 2.9952550141340154e-06, |
|
"loss": 0.8722, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.12623152709359606, |
|
"grad_norm": 1.5229405032439445, |
|
"learning_rate": 2.99388686554299e-06, |
|
"loss": 0.8768, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.12931034482758622, |
|
"grad_norm": 1.3539647424442673, |
|
"learning_rate": 2.9923460326547038e-06, |
|
"loss": 0.8416, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.13238916256157635, |
|
"grad_norm": 2.1043494775741354, |
|
"learning_rate": 2.990632693580131e-06, |
|
"loss": 0.8574, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.1354679802955665, |
|
"grad_norm": 1.4039066575750099, |
|
"learning_rate": 2.988747046370918e-06, |
|
"loss": 0.8629, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.13854679802955666, |
|
"grad_norm": 1.4041519802548768, |
|
"learning_rate": 2.986689308996492e-06, |
|
"loss": 0.8723, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.1416256157635468, |
|
"grad_norm": 2.804730066260758, |
|
"learning_rate": 2.984459719318862e-06, |
|
"loss": 0.8578, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.14470443349753695, |
|
"grad_norm": 1.3160437065661477, |
|
"learning_rate": 2.9820585350651265e-06, |
|
"loss": 0.8485, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.1477832512315271, |
|
"grad_norm": 1.2698953454345538, |
|
"learning_rate": 2.9794860337976802e-06, |
|
"loss": 0.8587, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.15086206896551724, |
|
"grad_norm": 1.9733415697734495, |
|
"learning_rate": 2.9767425128821288e-06, |
|
"loss": 0.8506, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1539408866995074, |
|
"grad_norm": 1.3672039273596184, |
|
"learning_rate": 2.9738282894529177e-06, |
|
"loss": 0.8665, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.15701970443349753, |
|
"grad_norm": 1.3462677423269411, |
|
"learning_rate": 2.9707437003766704e-06, |
|
"loss": 0.8573, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.16009852216748768, |
|
"grad_norm": 1.395722464948162, |
|
"learning_rate": 2.96748910221325e-06, |
|
"loss": 0.8646, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.16317733990147784, |
|
"grad_norm": 1.2949662167486484, |
|
"learning_rate": 2.964064871174544e-06, |
|
"loss": 0.8594, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.16625615763546797, |
|
"grad_norm": 1.3986879793877978, |
|
"learning_rate": 2.9604714030809755e-06, |
|
"loss": 0.85, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.16933497536945813, |
|
"grad_norm": 1.3544807529329361, |
|
"learning_rate": 2.9567091133157484e-06, |
|
"loss": 0.8615, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.1724137931034483, |
|
"grad_norm": 1.25058167504149, |
|
"learning_rate": 2.9527784367768305e-06, |
|
"loss": 0.8561, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.17549261083743842, |
|
"grad_norm": 1.2827316707560608, |
|
"learning_rate": 2.948679827826687e-06, |
|
"loss": 0.8632, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.17857142857142858, |
|
"grad_norm": 1.3580739609598804, |
|
"learning_rate": 2.9444137602397515e-06, |
|
"loss": 0.8823, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.1816502463054187, |
|
"grad_norm": 1.2894476992215949, |
|
"learning_rate": 2.9399807271476674e-06, |
|
"loss": 0.854, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.18472906403940886, |
|
"grad_norm": 1.3479537928129113, |
|
"learning_rate": 2.935381240982281e-06, |
|
"loss": 0.8585, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.18780788177339902, |
|
"grad_norm": 1.294266030285128, |
|
"learning_rate": 2.9306158334164066e-06, |
|
"loss": 0.8503, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.19088669950738915, |
|
"grad_norm": 1.2555453597545074, |
|
"learning_rate": 2.9256850553023724e-06, |
|
"loss": 0.868, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.1939655172413793, |
|
"grad_norm": 1.1788928865051034, |
|
"learning_rate": 2.920589476608343e-06, |
|
"loss": 0.8339, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.19704433497536947, |
|
"grad_norm": 1.2770185785021044, |
|
"learning_rate": 2.9153296863524315e-06, |
|
"loss": 0.8524, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.2001231527093596, |
|
"grad_norm": 1.394262786635702, |
|
"learning_rate": 2.90990629253462e-06, |
|
"loss": 0.8679, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.20320197044334976, |
|
"grad_norm": 1.275454411248854, |
|
"learning_rate": 2.9043199220664704e-06, |
|
"loss": 0.8456, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.2062807881773399, |
|
"grad_norm": 1.1750409774056632, |
|
"learning_rate": 2.8985712206986627e-06, |
|
"loss": 0.8614, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.20935960591133004, |
|
"grad_norm": 1.2682207809915187, |
|
"learning_rate": 2.8926608529463473e-06, |
|
"loss": 0.8547, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.2124384236453202, |
|
"grad_norm": 1.2484760929773475, |
|
"learning_rate": 2.8865895020123326e-06, |
|
"loss": 0.856, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.21551724137931033, |
|
"grad_norm": 1.218507223565624, |
|
"learning_rate": 2.880357869708111e-06, |
|
"loss": 0.8828, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.2185960591133005, |
|
"grad_norm": 1.2015850857754256, |
|
"learning_rate": 2.8739666763727315e-06, |
|
"loss": 0.8602, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.22167487684729065, |
|
"grad_norm": 1.3408884610418483, |
|
"learning_rate": 2.8674166607895357e-06, |
|
"loss": 0.8563, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.22475369458128078, |
|
"grad_norm": 1.4836285692174822, |
|
"learning_rate": 2.8607085801007565e-06, |
|
"loss": 0.8471, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.22783251231527094, |
|
"grad_norm": 1.3404651597215993, |
|
"learning_rate": 2.85384320972e-06, |
|
"loss": 0.8482, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.2309113300492611, |
|
"grad_norm": 1.5386066904678533, |
|
"learning_rate": 2.846821343242608e-06, |
|
"loss": 0.8389, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.23399014778325122, |
|
"grad_norm": 1.2754166515854897, |
|
"learning_rate": 2.839643792353928e-06, |
|
"loss": 0.8653, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.23706896551724138, |
|
"grad_norm": 1.3945185547743246, |
|
"learning_rate": 2.832311386735483e-06, |
|
"loss": 0.8808, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.24014778325123154, |
|
"grad_norm": 1.2744156648350857, |
|
"learning_rate": 2.824824973969069e-06, |
|
"loss": 0.8558, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.24322660098522167, |
|
"grad_norm": 1.405344843136268, |
|
"learning_rate": 2.817185419438777e-06, |
|
"loss": 0.856, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.24630541871921183, |
|
"grad_norm": 1.3542416852004673, |
|
"learning_rate": 2.8093936062309614e-06, |
|
"loss": 0.8386, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.24938423645320196, |
|
"grad_norm": 1.2804821522839576, |
|
"learning_rate": 2.8014504350321594e-06, |
|
"loss": 0.874, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.2524630541871921, |
|
"grad_norm": 1.403073424101837, |
|
"learning_rate": 2.7933568240249776e-06, |
|
"loss": 0.8525, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.2555418719211823, |
|
"grad_norm": 1.5114916625601154, |
|
"learning_rate": 2.7851137087819562e-06, |
|
"loss": 0.8657, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.25862068965517243, |
|
"grad_norm": 1.3288954210806725, |
|
"learning_rate": 2.776722042157421e-06, |
|
"loss": 0.8539, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.2616995073891626, |
|
"grad_norm": 1.2388602665884483, |
|
"learning_rate": 2.768182794177341e-06, |
|
"loss": 0.8492, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2647783251231527, |
|
"grad_norm": 1.3100863403911909, |
|
"learning_rate": 2.7594969519271988e-06, |
|
"loss": 0.84, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.26785714285714285, |
|
"grad_norm": 1.3687219418507923, |
|
"learning_rate": 2.7506655194378874e-06, |
|
"loss": 0.8514, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.270935960591133, |
|
"grad_norm": 1.4724892872074864, |
|
"learning_rate": 2.7416895175696533e-06, |
|
"loss": 0.8641, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.27401477832512317, |
|
"grad_norm": 1.2885096742121314, |
|
"learning_rate": 2.7325699838940894e-06, |
|
"loss": 0.8671, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.2770935960591133, |
|
"grad_norm": 1.2454446036705014, |
|
"learning_rate": 2.723307972574199e-06, |
|
"loss": 0.8634, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2801724137931034, |
|
"grad_norm": 1.2916820035058567, |
|
"learning_rate": 2.713904554242539e-06, |
|
"loss": 0.8541, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2832512315270936, |
|
"grad_norm": 1.2593373941499395, |
|
"learning_rate": 2.7043608158774645e-06, |
|
"loss": 0.8535, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.28633004926108374, |
|
"grad_norm": 1.2902647841096262, |
|
"learning_rate": 2.6946778606774777e-06, |
|
"loss": 0.8534, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.2894088669950739, |
|
"grad_norm": 1.2821496105379782, |
|
"learning_rate": 2.684856807933706e-06, |
|
"loss": 0.858, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.29248768472906406, |
|
"grad_norm": 1.2703981820641408, |
|
"learning_rate": 2.6748987929005187e-06, |
|
"loss": 0.8494, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.2955665024630542, |
|
"grad_norm": 1.2473762414475023, |
|
"learning_rate": 2.664804966664298e-06, |
|
"loss": 0.872, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.2986453201970443, |
|
"grad_norm": 1.440650000265737, |
|
"learning_rate": 2.6545764960103823e-06, |
|
"loss": 0.8612, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.3017241379310345, |
|
"grad_norm": 1.2075401918303994, |
|
"learning_rate": 2.6442145632881894e-06, |
|
"loss": 0.8573, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.30480295566502463, |
|
"grad_norm": 1.1592052019959331, |
|
"learning_rate": 2.6337203662745465e-06, |
|
"loss": 0.8655, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.3078817733990148, |
|
"grad_norm": 1.2734539628875496, |
|
"learning_rate": 2.623095118035235e-06, |
|
"loss": 0.8672, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.31096059113300495, |
|
"grad_norm": 1.2334639159935359, |
|
"learning_rate": 2.612340046784765e-06, |
|
"loss": 0.8678, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.31403940886699505, |
|
"grad_norm": 1.2822243726091629, |
|
"learning_rate": 2.601456395744403e-06, |
|
"loss": 0.8604, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.3171182266009852, |
|
"grad_norm": 1.31932161927138, |
|
"learning_rate": 2.590445422998462e-06, |
|
"loss": 0.8774, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.32019704433497537, |
|
"grad_norm": 1.2448157244019422, |
|
"learning_rate": 2.579308401348876e-06, |
|
"loss": 0.8539, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.3232758620689655, |
|
"grad_norm": 1.276792729621205, |
|
"learning_rate": 2.56804661816807e-06, |
|
"loss": 0.8426, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.3263546798029557, |
|
"grad_norm": 1.1505036776360344, |
|
"learning_rate": 2.556661375250149e-06, |
|
"loss": 0.8579, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.3294334975369458, |
|
"grad_norm": 1.1370655194463635, |
|
"learning_rate": 2.5451539886604165e-06, |
|
"loss": 0.8439, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.33251231527093594, |
|
"grad_norm": 1.2780922824871068, |
|
"learning_rate": 2.533525788583248e-06, |
|
"loss": 0.8554, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.3355911330049261, |
|
"grad_norm": 1.2281556867963181, |
|
"learning_rate": 2.5217781191683284e-06, |
|
"loss": 0.8439, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.33866995073891626, |
|
"grad_norm": 1.1944897494951232, |
|
"learning_rate": 2.509912338375275e-06, |
|
"loss": 0.8389, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.3417487684729064, |
|
"grad_norm": 1.2161826106487, |
|
"learning_rate": 2.497929817816667e-06, |
|
"loss": 0.8658, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.3448275862068966, |
|
"grad_norm": 1.2343890699461388, |
|
"learning_rate": 2.4858319425994978e-06, |
|
"loss": 0.8615, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.3479064039408867, |
|
"grad_norm": 1.3096521558859537, |
|
"learning_rate": 2.4736201111650593e-06, |
|
"loss": 0.8627, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.35098522167487683, |
|
"grad_norm": 1.260420116901076, |
|
"learning_rate": 2.4612957351272963e-06, |
|
"loss": 0.8572, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.354064039408867, |
|
"grad_norm": 1.2711909674880961, |
|
"learning_rate": 2.448860239109627e-06, |
|
"loss": 0.85, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.35714285714285715, |
|
"grad_norm": 1.3045345199158591, |
|
"learning_rate": 2.4363150605802704e-06, |
|
"loss": 0.8242, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3602216748768473, |
|
"grad_norm": 1.255053722802097, |
|
"learning_rate": 2.423661649686081e-06, |
|
"loss": 0.8664, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3633004926108374, |
|
"grad_norm": 1.1808467142096963, |
|
"learning_rate": 2.41090146908492e-06, |
|
"loss": 0.8254, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.36637931034482757, |
|
"grad_norm": 1.2161111833295437, |
|
"learning_rate": 2.398035993776582e-06, |
|
"loss": 0.8303, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.3694581280788177, |
|
"grad_norm": 1.2018738392776112, |
|
"learning_rate": 2.385066710932294e-06, |
|
"loss": 0.8471, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.3725369458128079, |
|
"grad_norm": 1.3104602005273063, |
|
"learning_rate": 2.3719951197228068e-06, |
|
"loss": 0.8396, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.37561576354679804, |
|
"grad_norm": 1.3053912734059634, |
|
"learning_rate": 2.3588227311451007e-06, |
|
"loss": 0.8557, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3786945812807882, |
|
"grad_norm": 1.1887756973639412, |
|
"learning_rate": 2.3455510678477217e-06, |
|
"loss": 0.8428, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.3817733990147783, |
|
"grad_norm": 1.3202665606343256, |
|
"learning_rate": 2.3321816639547747e-06, |
|
"loss": 0.8443, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.38485221674876846, |
|
"grad_norm": 1.481406724728173, |
|
"learning_rate": 2.3187160648885864e-06, |
|
"loss": 0.8455, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.3879310344827586, |
|
"grad_norm": 1.253872927504699, |
|
"learning_rate": 2.305155827191066e-06, |
|
"loss": 0.8424, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.3910098522167488, |
|
"grad_norm": 1.1863474981605258, |
|
"learning_rate": 2.291502518343774e-06, |
|
"loss": 0.8657, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.39408866995073893, |
|
"grad_norm": 1.3010619531118812, |
|
"learning_rate": 2.2777577165867354e-06, |
|
"loss": 0.8527, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.39716748768472904, |
|
"grad_norm": 1.2446913565784508, |
|
"learning_rate": 2.2639230107360033e-06, |
|
"loss": 0.8465, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.4002463054187192, |
|
"grad_norm": 1.2678544206625766, |
|
"learning_rate": 2.25e-06, |
|
"loss": 0.8398, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.40332512315270935, |
|
"grad_norm": 1.2805775413961258, |
|
"learning_rate": 2.235990293794659e-06, |
|
"loss": 0.8698, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.4064039408866995, |
|
"grad_norm": 1.2080762381092112, |
|
"learning_rate": 2.2218955115573864e-06, |
|
"loss": 0.8551, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.40948275862068967, |
|
"grad_norm": 1.31904016779892, |
|
"learning_rate": 2.2077172825598645e-06, |
|
"loss": 0.8429, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.4125615763546798, |
|
"grad_norm": 1.1884365693687557, |
|
"learning_rate": 2.1934572457197163e-06, |
|
"loss": 0.8592, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.41564039408866993, |
|
"grad_norm": 1.143061537231462, |
|
"learning_rate": 2.179117049411057e-06, |
|
"loss": 0.8619, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.4187192118226601, |
|
"grad_norm": 1.2386292522406654, |
|
"learning_rate": 2.164698351273952e-06, |
|
"loss": 0.8545, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.42179802955665024, |
|
"grad_norm": 1.2021340390709248, |
|
"learning_rate": 2.150202818022805e-06, |
|
"loss": 0.8501, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.4248768472906404, |
|
"grad_norm": 1.2781965578814818, |
|
"learning_rate": 2.1356321252536947e-06, |
|
"loss": 0.843, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.42795566502463056, |
|
"grad_norm": 1.2463234389480125, |
|
"learning_rate": 2.1209879572506836e-06, |
|
"loss": 0.8532, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.43103448275862066, |
|
"grad_norm": 1.2490176056060265, |
|
"learning_rate": 2.10627200679113e-06, |
|
"loss": 0.8512, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.4341133004926108, |
|
"grad_norm": 1.186593391495473, |
|
"learning_rate": 2.09148597495001e-06, |
|
"loss": 0.8642, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.437192118226601, |
|
"grad_norm": 1.3071569875293647, |
|
"learning_rate": 2.0766315709032837e-06, |
|
"loss": 0.8312, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.44027093596059114, |
|
"grad_norm": 1.2158014479042887, |
|
"learning_rate": 2.0617105117303247e-06, |
|
"loss": 0.8503, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.4433497536945813, |
|
"grad_norm": 1.2188719938005892, |
|
"learning_rate": 2.046724522215437e-06, |
|
"loss": 0.8381, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.44642857142857145, |
|
"grad_norm": 1.2816897559505145, |
|
"learning_rate": 2.03167533464848e-06, |
|
"loss": 0.8531, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.44950738916256155, |
|
"grad_norm": 1.1545846489999156, |
|
"learning_rate": 2.016564688624627e-06, |
|
"loss": 0.8556, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.4525862068965517, |
|
"grad_norm": 1.247432677442672, |
|
"learning_rate": 2.001394330843276e-06, |
|
"loss": 0.8273, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.45566502463054187, |
|
"grad_norm": 1.200421987011754, |
|
"learning_rate": 1.9861660149061435e-06, |
|
"loss": 0.8559, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.45874384236453203, |
|
"grad_norm": 1.242386621339582, |
|
"learning_rate": 1.9708815011145605e-06, |
|
"loss": 0.847, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.4618226600985222, |
|
"grad_norm": 1.2714576314445647, |
|
"learning_rate": 1.9555425562659878e-06, |
|
"loss": 0.8493, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.4649014778325123, |
|
"grad_norm": 1.199306019483452, |
|
"learning_rate": 1.940150953449791e-06, |
|
"loss": 0.8256, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.46798029556650245, |
|
"grad_norm": 1.176599940688529, |
|
"learning_rate": 1.924708471842276e-06, |
|
"loss": 0.8414, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.4710591133004926, |
|
"grad_norm": 1.1674248964748613, |
|
"learning_rate": 1.90921689650103e-06, |
|
"loss": 0.8429, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.47413793103448276, |
|
"grad_norm": 1.2904494723192372, |
|
"learning_rate": 1.8936780181585799e-06, |
|
"loss": 0.8361, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.4772167487684729, |
|
"grad_norm": 1.2504397812966646, |
|
"learning_rate": 1.8780936330153935e-06, |
|
"loss": 0.8677, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4802955665024631, |
|
"grad_norm": 1.1554267622763779, |
|
"learning_rate": 1.86246554253225e-06, |
|
"loss": 0.8502, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4833743842364532, |
|
"grad_norm": 1.2494756999598795, |
|
"learning_rate": 1.8467955532220017e-06, |
|
"loss": 0.8431, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.48645320197044334, |
|
"grad_norm": 1.1864925645414461, |
|
"learning_rate": 1.831085476440753e-06, |
|
"loss": 0.849, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.4895320197044335, |
|
"grad_norm": 1.230711251312914, |
|
"learning_rate": 1.8153371281784756e-06, |
|
"loss": 0.8516, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.49261083743842365, |
|
"grad_norm": 1.2701213116535017, |
|
"learning_rate": 1.7995523288490959e-06, |
|
"loss": 0.8649, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.4956896551724138, |
|
"grad_norm": 1.2920442137078154, |
|
"learning_rate": 1.783732903080062e-06, |
|
"loss": 0.8495, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.4987684729064039, |
|
"grad_norm": 1.2610776640382462, |
|
"learning_rate": 1.7678806795014293e-06, |
|
"loss": 0.8486, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.5018472906403941, |
|
"grad_norm": 1.2512197750575502, |
|
"learning_rate": 1.751997490534482e-06, |
|
"loss": 0.8433, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.5049261083743842, |
|
"grad_norm": 1.2693463293367837, |
|
"learning_rate": 1.7360851721799163e-06, |
|
"loss": 0.8159, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.5080049261083743, |
|
"grad_norm": 1.1915234367340757, |
|
"learning_rate": 1.7201455638056097e-06, |
|
"loss": 0.8367, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.5110837438423645, |
|
"grad_norm": 1.3350984034693405, |
|
"learning_rate": 1.7041805079340006e-06, |
|
"loss": 0.8412, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.5141625615763546, |
|
"grad_norm": 1.1953853505995866, |
|
"learning_rate": 1.6881918500291052e-06, |
|
"loss": 0.8608, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.5172413793103449, |
|
"grad_norm": 1.2261829318707587, |
|
"learning_rate": 1.6721814382831911e-06, |
|
"loss": 0.8235, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.520320197044335, |
|
"grad_norm": 1.197786936841144, |
|
"learning_rate": 1.6561511234031394e-06, |
|
"loss": 0.8364, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.5233990147783252, |
|
"grad_norm": 1.2488334159401087, |
|
"learning_rate": 1.6401027583965135e-06, |
|
"loss": 0.847, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5264778325123153, |
|
"grad_norm": 1.1650388115198271, |
|
"learning_rate": 1.624038198357361e-06, |
|
"loss": 0.852, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.5295566502463054, |
|
"grad_norm": 1.1509354844941455, |
|
"learning_rate": 1.6079593002517785e-06, |
|
"loss": 0.875, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.5326354679802956, |
|
"grad_norm": 1.2308644335652692, |
|
"learning_rate": 1.5918679227032564e-06, |
|
"loss": 0.8358, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.5357142857142857, |
|
"grad_norm": 1.1735480926744941, |
|
"learning_rate": 1.575765925777834e-06, |
|
"loss": 0.8408, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.5387931034482759, |
|
"grad_norm": 1.2034817032001748, |
|
"learning_rate": 1.559655170769084e-06, |
|
"loss": 0.8405, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.541871921182266, |
|
"grad_norm": 1.268978613523728, |
|
"learning_rate": 1.543537519982963e-06, |
|
"loss": 0.847, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.5449507389162561, |
|
"grad_norm": 1.2241899601063064, |
|
"learning_rate": 1.5274148365225372e-06, |
|
"loss": 0.8394, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.5480295566502463, |
|
"grad_norm": 1.1676337312724008, |
|
"learning_rate": 1.5112889840726194e-06, |
|
"loss": 0.8622, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5511083743842364, |
|
"grad_norm": 1.1301543137797185, |
|
"learning_rate": 1.4951618266843384e-06, |
|
"loss": 0.8266, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.5541871921182266, |
|
"grad_norm": 1.30141347624095, |
|
"learning_rate": 1.4790352285596656e-06, |
|
"loss": 0.8408, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.5572660098522167, |
|
"grad_norm": 1.3012499210796624, |
|
"learning_rate": 1.4629110538359251e-06, |
|
"loss": 0.8329, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.5603448275862069, |
|
"grad_norm": 1.2275901091379853, |
|
"learning_rate": 1.4467911663703118e-06, |
|
"loss": 0.8341, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.5634236453201971, |
|
"grad_norm": 1.2421267817837207, |
|
"learning_rate": 1.4306774295244372e-06, |
|
"loss": 0.8484, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.5665024630541872, |
|
"grad_norm": 1.1849317684107135, |
|
"learning_rate": 1.4145717059489405e-06, |
|
"loss": 0.845, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.5695812807881774, |
|
"grad_norm": 1.1560168622553235, |
|
"learning_rate": 1.3984758573681741e-06, |
|
"loss": 0.8343, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.5726600985221675, |
|
"grad_norm": 1.303736696697277, |
|
"learning_rate": 1.3823917443649994e-06, |
|
"loss": 0.8146, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.5757389162561576, |
|
"grad_norm": 1.2387568449101967, |
|
"learning_rate": 1.3663212261657191e-06, |
|
"loss": 0.845, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.5788177339901478, |
|
"grad_norm": 1.2382013938085306, |
|
"learning_rate": 1.3502661604251562e-06, |
|
"loss": 0.8403, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.5818965517241379, |
|
"grad_norm": 1.1875665599598455, |
|
"learning_rate": 1.3342284030119233e-06, |
|
"loss": 0.8313, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.5849753694581281, |
|
"grad_norm": 1.1428289684636799, |
|
"learning_rate": 1.3182098077938954e-06, |
|
"loss": 0.8334, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.5880541871921182, |
|
"grad_norm": 1.2318379752097235, |
|
"learning_rate": 1.3022122264239134e-06, |
|
"loss": 0.8584, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.5911330049261084, |
|
"grad_norm": 1.1908093263154107, |
|
"learning_rate": 1.286237508125744e-06, |
|
"loss": 0.8573, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.5942118226600985, |
|
"grad_norm": 1.1684422268700045, |
|
"learning_rate": 1.2702874994803206e-06, |
|
"loss": 0.8376, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.5972906403940886, |
|
"grad_norm": 1.2315278737517545, |
|
"learning_rate": 1.254364044212291e-06, |
|
"loss": 0.8505, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.6003694581280788, |
|
"grad_norm": 1.2388539336501045, |
|
"learning_rate": 1.2384689829768929e-06, |
|
"loss": 0.824, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.603448275862069, |
|
"grad_norm": 1.246236595179206, |
|
"learning_rate": 1.2226041531471835e-06, |
|
"loss": 0.8553, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.6065270935960592, |
|
"grad_norm": 1.3256259396928607, |
|
"learning_rate": 1.2067713886016547e-06, |
|
"loss": 0.8266, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.6096059113300493, |
|
"grad_norm": 1.1362791976484927, |
|
"learning_rate": 1.1909725195122443e-06, |
|
"loss": 0.8394, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.6126847290640394, |
|
"grad_norm": 1.166355873497428, |
|
"learning_rate": 1.1752093721327784e-06, |
|
"loss": 0.8277, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.6157635467980296, |
|
"grad_norm": 1.1743428317131432, |
|
"learning_rate": 1.1594837685878725e-06, |
|
"loss": 0.8183, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.6188423645320197, |
|
"grad_norm": 1.2512344679138039, |
|
"learning_rate": 1.143797526662299e-06, |
|
"loss": 0.8492, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.6219211822660099, |
|
"grad_norm": 1.1784283535469242, |
|
"learning_rate": 1.1281524595908653e-06, |
|
"loss": 0.8268, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.625, |
|
"grad_norm": 1.1906881964438814, |
|
"learning_rate": 1.1125503758488158e-06, |
|
"loss": 0.8516, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.6280788177339901, |
|
"grad_norm": 1.2048898820900105, |
|
"learning_rate": 1.0969930789427798e-06, |
|
"loss": 0.8505, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.6311576354679803, |
|
"grad_norm": 1.2384331414468177, |
|
"learning_rate": 1.0814823672023007e-06, |
|
"loss": 0.8327, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.6342364532019704, |
|
"grad_norm": 1.2370825731431196, |
|
"learning_rate": 1.0660200335719569e-06, |
|
"loss": 0.8577, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.6373152709359606, |
|
"grad_norm": 1.1890634236003874, |
|
"learning_rate": 1.0506078654041095e-06, |
|
"loss": 0.8402, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.6403940886699507, |
|
"grad_norm": 1.1933853331056958, |
|
"learning_rate": 1.0352476442522963e-06, |
|
"loss": 0.8606, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.6434729064039408, |
|
"grad_norm": 1.2675635086457242, |
|
"learning_rate": 1.0199411456652932e-06, |
|
"loss": 0.8239, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.646551724137931, |
|
"grad_norm": 1.1317105500451243, |
|
"learning_rate": 1.004690138981871e-06, |
|
"loss": 0.8607, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.6496305418719212, |
|
"grad_norm": 1.1674980504418897, |
|
"learning_rate": 9.89496387126274e-07, |
|
"loss": 0.8396, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.6527093596059114, |
|
"grad_norm": 1.2991830801026518, |
|
"learning_rate": 9.74361646404432e-07, |
|
"loss": 0.8159, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.6557881773399015, |
|
"grad_norm": 1.2506097073385514, |
|
"learning_rate": 9.592876663009462e-07, |
|
"loss": 0.8434, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.6588669950738916, |
|
"grad_norm": 1.185318025796248, |
|
"learning_rate": 9.442761892768561e-07, |
|
"loss": 0.8189, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6619458128078818, |
|
"grad_norm": 1.1985852310220102, |
|
"learning_rate": 9.293289505682223e-07, |
|
"loss": 0.8503, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.6650246305418719, |
|
"grad_norm": 1.1993709021835215, |
|
"learning_rate": 9.144476779855462e-07, |
|
"loss": 0.8644, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.6681034482758621, |
|
"grad_norm": 1.1947284213216074, |
|
"learning_rate": 8.99634091714042e-07, |
|
"loss": 0.8465, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.6711822660098522, |
|
"grad_norm": 1.1476627282557448, |
|
"learning_rate": 8.848899041147947e-07, |
|
"loss": 0.8372, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.6742610837438424, |
|
"grad_norm": 1.1027722973387861, |
|
"learning_rate": 8.702168195268239e-07, |
|
"loss": 0.8319, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.6773399014778325, |
|
"grad_norm": 1.1582755840301646, |
|
"learning_rate": 8.556165340700687e-07, |
|
"loss": 0.833, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.6804187192118226, |
|
"grad_norm": 1.2919754975931053, |
|
"learning_rate": 8.410907354493304e-07, |
|
"loss": 0.827, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.6834975369458128, |
|
"grad_norm": 1.1351158697269301, |
|
"learning_rate": 8.266411027591801e-07, |
|
"loss": 0.8461, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.6865763546798029, |
|
"grad_norm": 1.1950786235402677, |
|
"learning_rate": 8.12269306289868e-07, |
|
"loss": 0.8359, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.6896551724137931, |
|
"grad_norm": 1.2839040815642278, |
|
"learning_rate": 7.979770073342484e-07, |
|
"loss": 0.8451, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.6927339901477833, |
|
"grad_norm": 1.2286952134329439, |
|
"learning_rate": 7.837658579957422e-07, |
|
"loss": 0.8289, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.6958128078817734, |
|
"grad_norm": 1.1443853696759385, |
|
"learning_rate": 7.696375009973643e-07, |
|
"loss": 0.8217, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.6988916256157636, |
|
"grad_norm": 1.1889401437521319, |
|
"learning_rate": 7.555935694918371e-07, |
|
"loss": 0.8449, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.7019704433497537, |
|
"grad_norm": 1.1789396438426583, |
|
"learning_rate": 7.41635686872804e-07, |
|
"loss": 0.8419, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.7050492610837439, |
|
"grad_norm": 1.2331607811782397, |
|
"learning_rate": 7.277654665871785e-07, |
|
"loss": 0.8083, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.708128078817734, |
|
"grad_norm": 1.2248130017166632, |
|
"learning_rate": 7.139845119486371e-07, |
|
"loss": 0.8161, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.7112068965517241, |
|
"grad_norm": 1.1673298869963107, |
|
"learning_rate": 7.00294415952286e-07, |
|
"loss": 0.8377, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.7142857142857143, |
|
"grad_norm": 1.2028852767764227, |
|
"learning_rate": 6.866967610905234e-07, |
|
"loss": 0.8386, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.7173645320197044, |
|
"grad_norm": 1.2317394913921158, |
|
"learning_rate": 6.731931191701088e-07, |
|
"loss": 0.8309, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.7204433497536946, |
|
"grad_norm": 1.1573011339095831, |
|
"learning_rate": 6.597850511304739e-07, |
|
"loss": 0.8271, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.7235221674876847, |
|
"grad_norm": 1.2372202934272833, |
|
"learning_rate": 6.464741068632883e-07, |
|
"loss": 0.8266, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.7266009852216748, |
|
"grad_norm": 1.151607214843666, |
|
"learning_rate": 6.332618250332988e-07, |
|
"loss": 0.8136, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.729679802955665, |
|
"grad_norm": 1.9235923929261696, |
|
"learning_rate": 6.201497329004718e-07, |
|
"loss": 0.8591, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.7327586206896551, |
|
"grad_norm": 1.196046809628109, |
|
"learning_rate": 6.071393461434488e-07, |
|
"loss": 0.8331, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.7358374384236454, |
|
"grad_norm": 1.1981116626841852, |
|
"learning_rate": 5.942321686843444e-07, |
|
"loss": 0.8279, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.7389162561576355, |
|
"grad_norm": 1.1554442972192485, |
|
"learning_rate": 5.814296925149026e-07, |
|
"loss": 0.8507, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.7419950738916257, |
|
"grad_norm": 1.2386249887563705, |
|
"learning_rate": 5.687333975240304e-07, |
|
"loss": 0.8363, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.7450738916256158, |
|
"grad_norm": 1.2412252117484952, |
|
"learning_rate": 5.561447513267311e-07, |
|
"loss": 0.8452, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.7481527093596059, |
|
"grad_norm": 1.1498554373981902, |
|
"learning_rate": 5.436652090944596e-07, |
|
"loss": 0.8224, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.7512315270935961, |
|
"grad_norm": 1.1413456236749904, |
|
"learning_rate": 5.312962133869093e-07, |
|
"loss": 0.8401, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.7543103448275862, |
|
"grad_norm": 1.2683847422835812, |
|
"learning_rate": 5.19039193985266e-07, |
|
"loss": 0.8245, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.7573891625615764, |
|
"grad_norm": 1.1722812031534011, |
|
"learning_rate": 5.068955677269281e-07, |
|
"loss": 0.8383, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.7604679802955665, |
|
"grad_norm": 1.112650732199805, |
|
"learning_rate": 4.948667383417332e-07, |
|
"loss": 0.8508, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.7635467980295566, |
|
"grad_norm": 1.2242225043847113, |
|
"learning_rate": 4.829540962896927e-07, |
|
"loss": 0.8402, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7666256157635468, |
|
"grad_norm": 1.1909265717652961, |
|
"learning_rate": 4.7115901860026634e-07, |
|
"loss": 0.8439, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.7697044334975369, |
|
"grad_norm": 1.1870700244025632, |
|
"learning_rate": 4.594828687131814e-07, |
|
"loss": 0.8339, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.7727832512315271, |
|
"grad_norm": 1.2048136606527728, |
|
"learning_rate": 4.4792699632083043e-07, |
|
"loss": 0.8238, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.7758620689655172, |
|
"grad_norm": 1.218329342603074, |
|
"learning_rate": 4.36492737212255e-07, |
|
"loss": 0.8395, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.7789408866995073, |
|
"grad_norm": 1.2254982042270262, |
|
"learning_rate": 4.251814131187349e-07, |
|
"loss": 0.8227, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.7820197044334976, |
|
"grad_norm": 1.201055974202684, |
|
"learning_rate": 4.13994331561004e-07, |
|
"loss": 0.8227, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.7850985221674877, |
|
"grad_norm": 1.1075936645522768, |
|
"learning_rate": 4.0293278569811197e-07, |
|
"loss": 0.821, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.7881773399014779, |
|
"grad_norm": 1.1826513329616417, |
|
"learning_rate": 3.9199805417793833e-07, |
|
"loss": 0.838, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.791256157635468, |
|
"grad_norm": 1.1905828475039764, |
|
"learning_rate": 3.8119140098939074e-07, |
|
"loss": 0.8275, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.7943349753694581, |
|
"grad_norm": 1.1823163936900083, |
|
"learning_rate": 3.705140753162973e-07, |
|
"loss": 0.8552, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.7974137931034483, |
|
"grad_norm": 1.1726764171251989, |
|
"learning_rate": 3.5996731139300476e-07, |
|
"loss": 0.8298, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.8004926108374384, |
|
"grad_norm": 1.1733827657285574, |
|
"learning_rate": 3.495523283617106e-07, |
|
"loss": 0.8385, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.8035714285714286, |
|
"grad_norm": 1.1548336967321098, |
|
"learning_rate": 3.39270330131538e-07, |
|
"loss": 0.8288, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.8066502463054187, |
|
"grad_norm": 1.3225748675948077, |
|
"learning_rate": 3.2912250523937e-07, |
|
"loss": 0.8431, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.8097290640394089, |
|
"grad_norm": 1.2203880304737789, |
|
"learning_rate": 3.1911002671246164e-07, |
|
"loss": 0.8359, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.812807881773399, |
|
"grad_norm": 1.1635933717026323, |
|
"learning_rate": 3.092340519328474e-07, |
|
"loss": 0.8692, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.8158866995073891, |
|
"grad_norm": 1.246775297732715, |
|
"learning_rate": 2.9949572250355176e-07, |
|
"loss": 0.8133, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.8189655172413793, |
|
"grad_norm": 1.0986912775205508, |
|
"learning_rate": 2.8989616411662826e-07, |
|
"loss": 0.8441, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.8220443349753694, |
|
"grad_norm": 1.169380610686736, |
|
"learning_rate": 2.8043648642303716e-07, |
|
"loss": 0.8247, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.8251231527093597, |
|
"grad_norm": 1.1817068971461555, |
|
"learning_rate": 2.7111778290437465e-07, |
|
"loss": 0.8469, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.8282019704433498, |
|
"grad_norm": 1.165591743669198, |
|
"learning_rate": 2.619411307464747e-07, |
|
"loss": 0.8349, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.8312807881773399, |
|
"grad_norm": 1.2640850790656615, |
|
"learning_rate": 2.529075907148916e-07, |
|
"loss": 0.8406, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.8343596059113301, |
|
"grad_norm": 1.1927946924138935, |
|
"learning_rate": 2.440182070322818e-07, |
|
"loss": 0.8403, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.8374384236453202, |
|
"grad_norm": 1.2480501406180602, |
|
"learning_rate": 2.352740072577002e-07, |
|
"loss": 0.8292, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.8405172413793104, |
|
"grad_norm": 1.181685915033771, |
|
"learning_rate": 2.2667600216781757e-07, |
|
"loss": 0.8202, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.8435960591133005, |
|
"grad_norm": 1.30446620812208, |
|
"learning_rate": 2.182251856400826e-07, |
|
"loss": 0.8334, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.8466748768472906, |
|
"grad_norm": 1.3663542827209243, |
|
"learning_rate": 2.0992253453783595e-07, |
|
"loss": 0.8557, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.8497536945812808, |
|
"grad_norm": 1.2325789044380537, |
|
"learning_rate": 2.0176900859738906e-07, |
|
"loss": 0.8417, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.8528325123152709, |
|
"grad_norm": 1.262532082314722, |
|
"learning_rate": 1.93765550317087e-07, |
|
"loss": 0.8197, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.8559113300492611, |
|
"grad_norm": 1.1713865158884151, |
|
"learning_rate": 1.8591308484835833e-07, |
|
"loss": 0.8603, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.8589901477832512, |
|
"grad_norm": 1.1577049466829008, |
|
"learning_rate": 1.7821251988877556e-07, |
|
"loss": 0.8388, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.8620689655172413, |
|
"grad_norm": 1.2469180931897679, |
|
"learning_rate": 1.706647455771302e-07, |
|
"loss": 0.8434, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.8651477832512315, |
|
"grad_norm": 1.236966787026265, |
|
"learning_rate": 1.6327063439053675e-07, |
|
"loss": 0.8381, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.8682266009852216, |
|
"grad_norm": 1.2012445829877945, |
|
"learning_rate": 1.56031041043582e-07, |
|
"loss": 0.838, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.8713054187192119, |
|
"grad_norm": 1.2587646510638395, |
|
"learning_rate": 1.4894680238952318e-07, |
|
"loss": 0.8465, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.874384236453202, |
|
"grad_norm": 1.1884863239984578, |
|
"learning_rate": 1.4201873732355343e-07, |
|
"loss": 0.8392, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.8774630541871922, |
|
"grad_norm": 1.2366520156475107, |
|
"learning_rate": 1.3524764668814305e-07, |
|
"loss": 0.849, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.8805418719211823, |
|
"grad_norm": 1.2477328107078396, |
|
"learning_rate": 1.2863431318046615e-07, |
|
"loss": 0.8666, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.8836206896551724, |
|
"grad_norm": 1.1572068680725016, |
|
"learning_rate": 1.2217950126192523e-07, |
|
"loss": 0.8382, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.8866995073891626, |
|
"grad_norm": 1.1300521771674885, |
|
"learning_rate": 1.158839570697861e-07, |
|
"loss": 0.845, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.8897783251231527, |
|
"grad_norm": 1.158079158149626, |
|
"learning_rate": 1.0974840833092603e-07, |
|
"loss": 0.8225, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.8928571428571429, |
|
"grad_norm": 1.24229865374301, |
|
"learning_rate": 1.0377356427771567e-07, |
|
"loss": 0.8397, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.895935960591133, |
|
"grad_norm": 1.1992240511175045, |
|
"learning_rate": 9.796011556603418e-08, |
|
"loss": 0.828, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.8990147783251231, |
|
"grad_norm": 1.2150252181899364, |
|
"learning_rate": 9.230873419543373e-08, |
|
"loss": 0.837, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.9020935960591133, |
|
"grad_norm": 1.232372254864163, |
|
"learning_rate": 8.682007343146198e-08, |
|
"loss": 0.8361, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.9051724137931034, |
|
"grad_norm": 1.1760952608145245, |
|
"learning_rate": 8.14947677301468e-08, |
|
"loss": 0.837, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.9082512315270936, |
|
"grad_norm": 1.1647514203380553, |
|
"learning_rate": 7.633343266465731e-08, |
|
"loss": 0.8263, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.9113300492610837, |
|
"grad_norm": 1.109297673263934, |
|
"learning_rate": 7.133666485414858e-08, |
|
"loss": 0.836, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.9144088669950738, |
|
"grad_norm": 1.2140360761876554, |
|
"learning_rate": 6.650504189479417e-08, |
|
"loss": 0.8109, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.9174876847290641, |
|
"grad_norm": 1.1851881393830357, |
|
"learning_rate": 6.183912229302135e-08, |
|
"loss": 0.8556, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.9205665024630542, |
|
"grad_norm": 1.2139054216696445, |
|
"learning_rate": 5.7339445400949763e-08, |
|
"loss": 0.82, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.9236453201970444, |
|
"grad_norm": 1.1684560907583603, |
|
"learning_rate": 5.3006531354045596e-08, |
|
"loss": 0.8435, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.9267241379310345, |
|
"grad_norm": 1.1344224210764826, |
|
"learning_rate": 4.8840881010998536e-08, |
|
"loss": 0.8245, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.9298029556650246, |
|
"grad_norm": 1.1550860518730384, |
|
"learning_rate": 4.4842975895823926e-08, |
|
"loss": 0.8538, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.9328817733990148, |
|
"grad_norm": 1.208021844650614, |
|
"learning_rate": 4.1013278142201904e-08, |
|
"loss": 0.8433, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.9359605911330049, |
|
"grad_norm": 1.1428227153271917, |
|
"learning_rate": 3.7352230440058534e-08, |
|
"loss": 0.848, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.9390394088669951, |
|
"grad_norm": 1.156453897693646, |
|
"learning_rate": 3.386025598439185e-08, |
|
"loss": 0.8419, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.9421182266009852, |
|
"grad_norm": 1.1782380725430572, |
|
"learning_rate": 3.053775842635453e-08, |
|
"loss": 0.8388, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.9451970443349754, |
|
"grad_norm": 1.2305725491852646, |
|
"learning_rate": 2.7385121826593363e-08, |
|
"loss": 0.8413, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.9482758620689655, |
|
"grad_norm": 1.2052501974714247, |
|
"learning_rate": 2.4402710610854582e-08, |
|
"loss": 0.8246, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.9513546798029556, |
|
"grad_norm": 1.1451901965086437, |
|
"learning_rate": 2.159086952785827e-08, |
|
"loss": 0.8216, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.9544334975369458, |
|
"grad_norm": 1.223720355889366, |
|
"learning_rate": 1.894992360944786e-08, |
|
"loss": 0.8243, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.9575123152709359, |
|
"grad_norm": 1.1654986160551557, |
|
"learning_rate": 1.648017813301739e-08, |
|
"loss": 0.8346, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.9605911330049262, |
|
"grad_norm": 1.185817379490152, |
|
"learning_rate": 1.4181918586225029e-08, |
|
"loss": 0.8289, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.9636699507389163, |
|
"grad_norm": 1.0858902044429564, |
|
"learning_rate": 1.2055410633990515e-08, |
|
"loss": 0.8487, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.9667487684729064, |
|
"grad_norm": 1.1956389597775392, |
|
"learning_rate": 1.0100900087787357e-08, |
|
"loss": 0.8301, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.9698275862068966, |
|
"grad_norm": 1.061385774108055, |
|
"learning_rate": 8.318612877227571e-09, |
|
"loss": 0.8522, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.9729064039408867, |
|
"grad_norm": 1.1543403896164468, |
|
"learning_rate": 6.708755023946245e-09, |
|
"loss": 0.8269, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.9759852216748769, |
|
"grad_norm": 1.1713134315197617, |
|
"learning_rate": 5.271512617786078e-09, |
|
"loss": 0.8472, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.979064039408867, |
|
"grad_norm": 1.1439940501727845, |
|
"learning_rate": 4.007051795287098e-09, |
|
"loss": 0.8214, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.9821428571428571, |
|
"grad_norm": 1.1517573735882696, |
|
"learning_rate": 2.9155187204820134e-09, |
|
"loss": 0.8461, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.9852216748768473, |
|
"grad_norm": 1.28422286593181, |
|
"learning_rate": 1.997039568000403e-09, |
|
"loss": 0.8107, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.9883004926108374, |
|
"grad_norm": 1.1825383865033323, |
|
"learning_rate": 1.2517205084837112e-09, |
|
"loss": 0.8388, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.9913793103448276, |
|
"grad_norm": 1.212203466455243, |
|
"learning_rate": 6.796476963130683e-10, |
|
"loss": 0.8353, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.9944581280788177, |
|
"grad_norm": 1.2340921903407838, |
|
"learning_rate": 2.8088725964958705e-10, |
|
"loss": 0.8475, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.9975369458128078, |
|
"grad_norm": 1.1861846694894687, |
|
"learning_rate": 5.548529279081338e-11, |
|
"loss": 0.8177, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.8360112905502319, |
|
"eval_runtime": 942.1064, |
|
"eval_samples_per_second": 24.529, |
|
"eval_steps_per_second": 1.534, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1624, |
|
"total_flos": 1147278378860544.0, |
|
"train_loss": 0.8499554973737947, |
|
"train_runtime": 34684.9056, |
|
"train_samples_per_second": 5.993, |
|
"train_steps_per_second": 0.047 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1624, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1147278378860544.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |