{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9997655182451062,
  "eval_steps": 500,
  "global_step": 966,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0010349539526346855,
      "grad_norm": 1.046875,
      "learning_rate": 2.061855670103093e-06,
      "loss": 1.7433,
      "step": 1
    },
    {
      "epoch": 0.005174769763173427,
      "grad_norm": 0.59765625,
      "learning_rate": 1.0309278350515464e-05,
      "loss": 1.7444,
      "step": 5
    },
    {
      "epoch": 0.010349539526346855,
      "grad_norm": 1.0,
      "learning_rate": 2.0618556701030927e-05,
      "loss": 1.7363,
      "step": 10
    },
    {
      "epoch": 0.015524309289520283,
      "grad_norm": 0.6484375,
      "learning_rate": 3.0927835051546395e-05,
      "loss": 1.7131,
      "step": 15
    },
    {
      "epoch": 0.02069907905269371,
      "grad_norm": 0.89453125,
      "learning_rate": 4.1237113402061855e-05,
      "loss": 1.7097,
      "step": 20
    },
    {
      "epoch": 0.02587384881586714,
      "grad_norm": 3.3125,
      "learning_rate": 5.1546391752577315e-05,
      "loss": 1.6876,
      "step": 25
    },
    {
      "epoch": 0.031048618579040566,
      "grad_norm": 0.72265625,
      "learning_rate": 6.185567010309279e-05,
      "loss": 1.552,
      "step": 30
    },
    {
      "epoch": 0.036223388342213995,
      "grad_norm": 0.353515625,
      "learning_rate": 7.216494845360825e-05,
      "loss": 1.5049,
      "step": 35
    },
    {
      "epoch": 0.04139815810538742,
      "grad_norm": 1.578125,
      "learning_rate": 8.247422680412371e-05,
      "loss": 1.4914,
      "step": 40
    },
    {
      "epoch": 0.04657292786856085,
      "grad_norm": 0.408203125,
      "learning_rate": 9.278350515463918e-05,
      "loss": 1.4412,
      "step": 45
    },
    {
      "epoch": 0.05174769763173428,
      "grad_norm": 0.197265625,
      "learning_rate": 0.00010309278350515463,
      "loss": 1.4186,
      "step": 50
    },
    {
      "epoch": 0.0569224673949077,
      "grad_norm": 0.26171875,
      "learning_rate": 0.0001134020618556701,
      "loss": 1.4002,
      "step": 55
    },
    {
      "epoch": 0.06209723715808113,
      "grad_norm": 0.1552734375,
      "learning_rate": 0.00012371134020618558,
      "loss": 1.3963,
      "step": 60
    },
    {
      "epoch": 0.06727200692125455,
      "grad_norm": 0.16015625,
      "learning_rate": 0.00013402061855670103,
      "loss": 1.3791,
      "step": 65
    },
    {
      "epoch": 0.07244677668442799,
      "grad_norm": 0.1640625,
      "learning_rate": 0.0001443298969072165,
      "loss": 1.3596,
      "step": 70
    },
    {
      "epoch": 0.07762154644760141,
      "grad_norm": 0.1484375,
      "learning_rate": 0.00015463917525773197,
      "loss": 1.3376,
      "step": 75
    },
    {
      "epoch": 0.08279631621077484,
      "grad_norm": 0.08935546875,
      "learning_rate": 0.00016494845360824742,
      "loss": 1.3465,
      "step": 80
    },
    {
      "epoch": 0.08797108597394827,
      "grad_norm": 0.09521484375,
      "learning_rate": 0.0001752577319587629,
      "loss": 1.3416,
      "step": 85
    },
    {
      "epoch": 0.0931458557371217,
      "grad_norm": 0.076171875,
      "learning_rate": 0.00018556701030927837,
      "loss": 1.329,
      "step": 90
    },
    {
      "epoch": 0.09832062550029512,
      "grad_norm": 0.0869140625,
      "learning_rate": 0.00019587628865979381,
      "loss": 1.3077,
      "step": 95
    },
    {
      "epoch": 0.10349539526346856,
      "grad_norm": 0.0869140625,
      "learning_rate": 0.00019999411876613578,
      "loss": 1.3115,
      "step": 100
    },
    {
      "epoch": 0.10867016502664198,
      "grad_norm": 0.07568359375,
      "learning_rate": 0.00019995818039769561,
      "loss": 1.3185,
      "step": 105
    },
    {
      "epoch": 0.1138449347898154,
      "grad_norm": 0.10302734375,
      "learning_rate": 0.00019988958274085367,
      "loss": 1.2811,
      "step": 110
    },
    {
      "epoch": 0.11901970455298884,
      "grad_norm": 0.09375,
      "learning_rate": 0.0001997883482084895,
      "loss": 1.3017,
      "step": 115
    },
    {
      "epoch": 0.12419447431616226,
      "grad_norm": 0.0771484375,
      "learning_rate": 0.0001996545098769134,
      "loss": 1.2927,
      "step": 120
    },
    {
      "epoch": 0.1293692440793357,
      "grad_norm": 0.091796875,
      "learning_rate": 0.0001994881114750593,
      "loss": 1.3015,
      "step": 125
    },
    {
      "epoch": 0.1345440138425091,
      "grad_norm": 0.09228515625,
      "learning_rate": 0.00019928920737019733,
      "loss": 1.2854,
      "step": 130
    },
    {
      "epoch": 0.13971878360568254,
      "grad_norm": 0.126953125,
      "learning_rate": 0.00019905786255017044,
      "loss": 1.2995,
      "step": 135
    },
    {
      "epoch": 0.14489355336885598,
      "grad_norm": 0.1728515625,
      "learning_rate": 0.00019879415260216088,
      "loss": 1.2739,
      "step": 140
    },
    {
      "epoch": 0.1500683231320294,
      "grad_norm": 0.11328125,
      "learning_rate": 0.00019849816368799355,
      "loss": 1.2802,
      "step": 145
    },
    {
      "epoch": 0.15524309289520283,
      "grad_norm": 0.08740234375,
      "learning_rate": 0.0001981699925159847,
      "loss": 1.3024,
      "step": 150
    },
    {
      "epoch": 0.16041786265837626,
      "grad_norm": 0.07421875,
      "learning_rate": 0.0001978097463093441,
      "loss": 1.2642,
      "step": 155
    },
    {
      "epoch": 0.16559263242154967,
      "grad_norm": 0.06982421875,
      "learning_rate": 0.00019741754277114232,
      "loss": 1.2516,
      "step": 160
    },
    {
      "epoch": 0.1707674021847231,
      "grad_norm": 0.072265625,
      "learning_rate": 0.00019699351004585352,
      "loss": 1.2675,
      "step": 165
    },
    {
      "epoch": 0.17594217194789655,
      "grad_norm": 0.07958984375,
      "learning_rate": 0.00019653778667748695,
      "loss": 1.2646,
      "step": 170
    },
    {
      "epoch": 0.18111694171106996,
      "grad_norm": 0.095703125,
      "learning_rate": 0.00019605052156432043,
      "loss": 1.2773,
      "step": 175
    },
    {
      "epoch": 0.1862917114742434,
      "grad_norm": 0.07763671875,
      "learning_rate": 0.000195531873910251,
      "loss": 1.2653,
      "step": 180
    },
    {
      "epoch": 0.19146648123741683,
      "grad_norm": 0.10595703125,
      "learning_rate": 0.00019498201317277828,
      "loss": 1.2572,
      "step": 185
    },
    {
      "epoch": 0.19664125100059024,
      "grad_norm": 0.0654296875,
      "learning_rate": 0.0001944011190076376,
      "loss": 1.2737,
      "step": 190
    },
    {
      "epoch": 0.20181602076376368,
      "grad_norm": 0.0869140625,
      "learning_rate": 0.00019378938121010128,
      "loss": 1.2848,
      "step": 195
    },
    {
      "epoch": 0.2069907905269371,
      "grad_norm": 0.08154296875,
      "learning_rate": 0.00019314699965296696,
      "loss": 1.2814,
      "step": 200
    },
    {
      "epoch": 0.21216556029011052,
      "grad_norm": 0.337890625,
      "learning_rate": 0.000192474184221253,
      "loss": 1.2685,
      "step": 205
    },
    {
      "epoch": 0.21734033005328396,
      "grad_norm": 0.07275390625,
      "learning_rate": 0.00019177115474362322,
      "loss": 1.2611,
      "step": 210
    },
    {
      "epoch": 0.2225150998164574,
      "grad_norm": 0.06982421875,
      "learning_rate": 0.000191038140920562,
      "loss": 1.2541,
      "step": 215
    },
    {
      "epoch": 0.2276898695796308,
      "grad_norm": 0.11083984375,
      "learning_rate": 0.0001902753822493248,
      "loss": 1.2657,
      "step": 220
    },
    {
      "epoch": 0.23286463934280424,
      "grad_norm": 0.09912109375,
      "learning_rate": 0.00018948312794568673,
      "loss": 1.2687,
      "step": 225
    },
    {
      "epoch": 0.23803940910597768,
      "grad_norm": 0.08349609375,
      "learning_rate": 0.0001886616368625171,
      "loss": 1.2779,
      "step": 230
    },
    {
      "epoch": 0.2432141788691511,
      "grad_norm": 0.08935546875,
      "learning_rate": 0.00018781117740520386,
      "loss": 1.2647,
      "step": 235
    },
    {
      "epoch": 0.24838894863232452,
      "grad_norm": 0.0810546875,
      "learning_rate": 0.00018693202744395827,
      "loss": 1.244,
      "step": 240
    },
    {
      "epoch": 0.25356371839549796,
      "grad_norm": 0.12451171875,
      "learning_rate": 0.00018602447422302597,
      "loss": 1.2408,
      "step": 245
    },
    {
      "epoch": 0.2587384881586714,
      "grad_norm": 0.1318359375,
      "learning_rate": 0.00018508881426683614,
      "loss": 1.2875,
      "step": 250
    },
    {
      "epoch": 0.2639132579218448,
      "grad_norm": 0.076171875,
      "learning_rate": 0.00018412535328311814,
      "loss": 1.259,
      "step": 255
    },
    {
      "epoch": 0.2690880276850182,
      "grad_norm": 0.09033203125,
      "learning_rate": 0.00018313440606301763,
      "loss": 1.2646,
      "step": 260
    },
    {
      "epoch": 0.27426279744819165,
      "grad_norm": 0.083984375,
      "learning_rate": 0.00018211629637824515,
      "loss": 1.2452,
      "step": 265
    },
    {
      "epoch": 0.2794375672113651,
      "grad_norm": 0.08740234375,
      "learning_rate": 0.00018107135687529043,
      "loss": 1.2608,
      "step": 270
    },
    {
      "epoch": 0.2846123369745385,
      "grad_norm": 0.09765625,
      "learning_rate": 0.00017999992896673665,
      "loss": 1.2508,
      "step": 275
    },
    {
      "epoch": 0.28978710673771196,
      "grad_norm": 0.11962890625,
      "learning_rate": 0.00017890236271971122,
      "loss": 1.2413,
      "step": 280
    },
    {
      "epoch": 0.29496187650088534,
      "grad_norm": 0.1015625,
      "learning_rate": 0.00017777901674150828,
      "loss": 1.269,
      "step": 285
    },
    {
      "epoch": 0.3001366462640588,
      "grad_norm": 0.0859375,
      "learning_rate": 0.00017663025806242123,
      "loss": 1.2453,
      "step": 290
    },
    {
      "epoch": 0.3053114160272322,
      "grad_norm": 0.07421875,
      "learning_rate": 0.00017545646201582303,
      "loss": 1.2591,
      "step": 295
    },
    {
      "epoch": 0.31048618579040566,
      "grad_norm": 0.08935546875,
      "learning_rate": 0.00017425801211553373,
      "loss": 1.2555,
      "step": 300
    },
    {
      "epoch": 0.3156609555535791,
      "grad_norm": 0.07080078125,
      "learning_rate": 0.0001730352999305152,
      "loss": 1.2656,
      "step": 305
    },
    {
      "epoch": 0.32083572531675253,
      "grad_norm": 0.154296875,
      "learning_rate": 0.00017178872495693398,
      "loss": 1.2608,
      "step": 310
    },
    {
      "epoch": 0.3260104950799259,
      "grad_norm": 0.076171875,
      "learning_rate": 0.00017051869448763408,
      "loss": 1.2787,
      "step": 315
    },
    {
      "epoch": 0.33118526484309935,
      "grad_norm": 0.1513671875,
      "learning_rate": 0.00016922562347906238,
      "loss": 1.2887,
      "step": 320
    },
    {
      "epoch": 0.3363600346062728,
      "grad_norm": 0.11962890625,
      "learning_rate": 0.00016790993441569022,
      "loss": 1.2624,
      "step": 325
    },
    {
      "epoch": 0.3415348043694462,
      "grad_norm": 0.07470703125,
      "learning_rate": 0.00016657205717197495,
      "loss": 1.2616,
      "step": 330
    },
    {
      "epoch": 0.34670957413261966,
      "grad_norm": 0.1162109375,
      "learning_rate": 0.00016521242887190764,
      "loss": 1.2643,
      "step": 335
    },
    {
      "epoch": 0.3518843438957931,
      "grad_norm": 0.123046875,
      "learning_rate": 0.0001638314937461915,
      "loss": 1.2767,
      "step": 340
    },
    {
      "epoch": 0.3570591136589665,
      "grad_norm": 0.0986328125,
      "learning_rate": 0.00016242970298709867,
      "loss": 1.2648,
      "step": 345
    },
    {
      "epoch": 0.3622338834221399,
      "grad_norm": 0.12109375,
      "learning_rate": 0.00016100751460105243,
      "loss": 1.2409,
      "step": 350
    },
    {
      "epoch": 0.36740865318531335,
      "grad_norm": 0.0908203125,
      "learning_rate": 0.000159565393258983,
      "loss": 1.2392,
      "step": 355
    },
    {
      "epoch": 0.3725834229484868,
      "grad_norm": 0.0849609375,
      "learning_rate": 0.00015810381014450556,
      "loss": 1.2709,
      "step": 360
    },
    {
      "epoch": 0.3777581927116602,
      "grad_norm": 0.0908203125,
      "learning_rate": 0.00015662324279997115,
      "loss": 1.2556,
      "step": 365
    },
    {
      "epoch": 0.38293296247483366,
      "grad_norm": 0.10498046875,
      "learning_rate": 0.000155124174970439,
      "loss": 1.2623,
      "step": 370
    },
    {
      "epoch": 0.38810773223800704,
      "grad_norm": 0.08935546875,
      "learning_rate": 0.00015360709644562297,
      "loss": 1.2489,
      "step": 375
    },
    {
      "epoch": 0.3932825020011805,
      "grad_norm": 0.07861328125,
      "learning_rate": 0.0001520725028998629,
      "loss": 1.2771,
      "step": 380
    },
    {
      "epoch": 0.3984572717643539,
      "grad_norm": 0.10693359375,
      "learning_rate": 0.00015052089573017283,
      "loss": 1.2523,
      "step": 385
    },
    {
      "epoch": 0.40363204152752735,
      "grad_norm": 0.0869140625,
      "learning_rate": 0.00014895278189242016,
      "loss": 1.241,
      "step": 390
    },
    {
      "epoch": 0.4088068112907008,
      "grad_norm": 0.10302734375,
      "learning_rate": 0.00014736867373568794,
      "loss": 1.2578,
      "step": 395
    },
    {
      "epoch": 0.4139815810538742,
      "grad_norm": 0.0810546875,
      "learning_rate": 0.00014576908883487548,
      "loss": 1.2625,
      "step": 400
    },
    {
      "epoch": 0.4191563508170476,
      "grad_norm": 0.08935546875,
      "learning_rate": 0.0001441545498215912,
      "loss": 1.2606,
      "step": 405
    },
    {
      "epoch": 0.42433112058022104,
      "grad_norm": 0.10009765625,
      "learning_rate": 0.00014252558421339354,
      "loss": 1.2321,
      "step": 410
    },
    {
      "epoch": 0.4295058903433945,
      "grad_norm": 0.0810546875,
      "learning_rate": 0.00014088272424143546,
      "loss": 1.2494,
      "step": 415
    },
    {
      "epoch": 0.4346806601065679,
      "grad_norm": 0.0771484375,
      "learning_rate": 0.0001392265066765686,
      "loss": 1.2588,
      "step": 420
    },
    {
      "epoch": 0.43985542986974135,
      "grad_norm": 0.07958984375,
      "learning_rate": 0.00013755747265396467,
      "loss": 1.2534,
      "step": 425
    },
    {
      "epoch": 0.4450301996329148,
      "grad_norm": 0.091796875,
      "learning_rate": 0.00013587616749631036,
      "loss": 1.2393,
      "step": 430
    },
    {
      "epoch": 0.4502049693960882,
      "grad_norm": 0.0849609375,
      "learning_rate": 0.00013418314053563455,
      "loss": 1.2526,
      "step": 435
    },
    {
      "epoch": 0.4553797391592616,
      "grad_norm": 0.078125,
      "learning_rate": 0.00013247894493382506,
      "loss": 1.2459,
      "step": 440
    },
    {
      "epoch": 0.46055450892243505,
      "grad_norm": 0.080078125,
      "learning_rate": 0.00013076413750189467,
      "loss": 1.2582,
      "step": 445
    },
    {
      "epoch": 0.4657292786856085,
      "grad_norm": 0.10888671875,
      "learning_rate": 0.00012903927851805414,
      "loss": 1.2537,
      "step": 450
    },
    {
      "epoch": 0.4709040484487819,
      "grad_norm": 0.08447265625,
      "learning_rate": 0.00012730493154465311,
      "loss": 1.2601,
      "step": 455
    },
    {
      "epoch": 0.47607881821195536,
      "grad_norm": 0.0869140625,
      "learning_rate": 0.0001255616632440475,
      "loss": 1.2612,
      "step": 460
    },
    {
      "epoch": 0.48125358797512874,
      "grad_norm": 0.08740234375,
      "learning_rate": 0.00012381004319345402,
      "loss": 1.2555,
      "step": 465
    },
    {
      "epoch": 0.4864283577383022,
      "grad_norm": 0.17578125,
      "learning_rate": 0.00012205064369885291,
      "loss": 1.247,
      "step": 470
    },
    {
      "epoch": 0.4916031275014756,
      "grad_norm": 0.09228515625,
      "learning_rate": 0.00012028403960799821,
      "loss": 1.2467,
      "step": 475
    },
    {
      "epoch": 0.49677789726464905,
      "grad_norm": 0.0791015625,
      "learning_rate": 0.00011851080812259838,
      "loss": 1.2369,
      "step": 480
    },
    {
      "epoch": 0.5019526670278225,
      "grad_norm": 0.08203125,
      "learning_rate": 0.00011673152860972724,
      "loss": 1.2644,
      "step": 485
    },
    {
      "epoch": 0.5071274367909959,
      "grad_norm": 0.0966796875,
      "learning_rate": 0.00011494678241252781,
      "loss": 1.2345,
      "step": 490
    },
    {
      "epoch": 0.5123022065541694,
      "grad_norm": 0.30078125,
      "learning_rate": 0.00011315715266027014,
      "loss": 1.2556,
      "step": 495
    },
    {
      "epoch": 0.5174769763173428,
      "grad_norm": 0.0810546875,
      "learning_rate": 0.00011136322407782602,
      "loss": 1.2403,
      "step": 500
    },
    {
      "epoch": 0.5226517460805162,
      "grad_norm": 0.0751953125,
      "learning_rate": 0.00010956558279462187,
      "loss": 1.2345,
      "step": 505
    },
    {
      "epoch": 0.5278265158436896,
      "grad_norm": 0.099609375,
      "learning_rate": 0.00010776481615313315,
      "loss": 1.2298,
      "step": 510
    },
    {
      "epoch": 0.533001285606863,
      "grad_norm": 0.087890625,
      "learning_rate": 0.00010596151251698199,
      "loss": 1.2475,
      "step": 515
    },
    {
      "epoch": 0.5381760553700364,
      "grad_norm": 0.08056640625,
      "learning_rate": 0.0001041562610787017,
      "loss": 1.2542,
      "step": 520
    },
    {
      "epoch": 0.5433508251332099,
      "grad_norm": 0.1025390625,
      "learning_rate": 0.00010234965166723003,
      "loss": 1.2534,
      "step": 525
    },
    {
      "epoch": 0.5485255948963833,
      "grad_norm": 0.1474609375,
      "learning_rate": 0.00010054227455519458,
      "loss": 1.2598,
      "step": 530
    },
    {
      "epoch": 0.5537003646595567,
      "grad_norm": 0.1953125,
      "learning_rate": 9.873472026605346e-05,
      "loss": 1.2551,
      "step": 535
    },
    {
      "epoch": 0.5588751344227302,
      "grad_norm": 0.0771484375,
      "learning_rate": 9.692757938115376e-05,
      "loss": 1.2553,
      "step": 540
    },
    {
      "epoch": 0.5640499041859036,
      "grad_norm": 0.10888671875,
      "learning_rate": 9.512144234677116e-05,
      "loss": 1.2584,
      "step": 545
    },
    {
      "epoch": 0.569224673949077,
      "grad_norm": 0.08642578125,
      "learning_rate": 9.331689928119381e-05,
      "loss": 1.2492,
      "step": 550
    },
    {
      "epoch": 0.5743994437122505,
      "grad_norm": 0.07177734375,
      "learning_rate": 9.151453978191311e-05,
      "loss": 1.2315,
      "step": 555
    },
    {
      "epoch": 0.5795742134754239,
      "grad_norm": 0.0830078125,
      "learning_rate": 8.9714952732985e-05,
      "loss": 1.2255,
      "step": 560
    },
    {
      "epoch": 0.5847489832385974,
      "grad_norm": 0.08984375,
      "learning_rate": 8.791872611262393e-05,
      "loss": 1.2269,
      "step": 565
    },
    {
      "epoch": 0.5899237530017707,
      "grad_norm": 0.0791015625,
      "learning_rate": 8.612644680109319e-05,
      "loss": 1.2481,
      "step": 570
    },
    {
      "epoch": 0.5950985227649441,
      "grad_norm": 0.07958984375,
      "learning_rate": 8.433870038895377e-05,
      "loss": 1.2304,
      "step": 575
    },
    {
      "epoch": 0.6002732925281176,
      "grad_norm": 0.09716796875,
      "learning_rate": 8.255607098573454e-05,
      "loss": 1.2507,
      "step": 580
    },
    {
      "epoch": 0.605448062291291,
      "grad_norm": 0.1162109375,
      "learning_rate": 8.077914102908661e-05,
      "loss": 1.2652,
      "step": 585
    },
    {
      "epoch": 0.6106228320544644,
      "grad_norm": 0.07470703125,
      "learning_rate": 7.90084910944835e-05,
      "loss": 1.2474,
      "step": 590
    },
    {
      "epoch": 0.6157976018176379,
      "grad_norm": 0.1806640625,
      "learning_rate": 7.724469970553027e-05,
      "loss": 1.2387,
      "step": 595
    },
    {
      "epoch": 0.6209723715808113,
      "grad_norm": 0.0712890625,
      "learning_rate": 7.548834314494255e-05,
      "loss": 1.236,
      "step": 600
    },
    {
      "epoch": 0.6261471413439847,
      "grad_norm": 0.09423828125,
      "learning_rate": 7.37399952662581e-05,
      "loss": 1.2304,
      "step": 605
    },
    {
      "epoch": 0.6313219111071582,
      "grad_norm": 0.08154296875,
      "learning_rate": 7.200022730634187e-05,
      "loss": 1.229,
      "step": 610
    },
    {
      "epoch": 0.6364966808703316,
      "grad_norm": 0.1552734375,
      "learning_rate": 7.026960769874605e-05,
      "loss": 1.2483,
      "step": 615
    },
    {
      "epoch": 0.6416714506335051,
      "grad_norm": 0.07080078125,
      "learning_rate": 6.85487018879861e-05,
      "loss": 1.2515,
      "step": 620
    },
    {
      "epoch": 0.6468462203966785,
      "grad_norm": 0.140625,
      "learning_rate": 6.683807214479323e-05,
      "loss": 1.2388,
      "step": 625
    },
    {
      "epoch": 0.6520209901598518,
      "grad_norm": 0.10009765625,
      "learning_rate": 6.513827738240407e-05,
      "loss": 1.2316,
      "step": 630
    },
    {
      "epoch": 0.6571957599230253,
      "grad_norm": 0.08203125,
      "learning_rate": 6.344987297394713e-05,
      "loss": 1.2409,
      "step": 635
    },
    {
      "epoch": 0.6623705296861987,
      "grad_norm": 0.11767578125,
      "learning_rate": 6.177341057098607e-05,
      "loss": 1.2661,
      "step": 640
    },
    {
      "epoch": 0.6675452994493721,
      "grad_norm": 0.08056640625,
      "learning_rate": 6.010943792327875e-05,
      "loss": 1.2272,
      "step": 645
    },
    {
      "epoch": 0.6727200692125456,
      "grad_norm": 0.1142578125,
      "learning_rate": 5.845849869981137e-05,
      "loss": 1.2615,
      "step": 650
    },
    {
      "epoch": 0.677894838975719,
      "grad_norm": 0.07275390625,
      "learning_rate": 5.682113231116557e-05,
      "loss": 1.2653,
      "step": 655
    },
    {
      "epoch": 0.6830696087388924,
      "grad_norm": 0.0751953125,
      "learning_rate": 5.519787373327725e-05,
      "loss": 1.2355,
      "step": 660
    },
    {
      "epoch": 0.6882443785020659,
      "grad_norm": 0.0859375,
      "learning_rate": 5.358925333264403e-05,
      "loss": 1.2322,
      "step": 665
    },
    {
      "epoch": 0.6934191482652393,
      "grad_norm": 0.08349609375,
      "learning_rate": 5.1995796693038844e-05,
      "loss": 1.2549,
      "step": 670
    },
    {
      "epoch": 0.6985939180284128,
      "grad_norm": 0.0927734375,
      "learning_rate": 5.0418024443786395e-05,
      "loss": 1.2155,
      "step": 675
    },
    {
      "epoch": 0.7037686877915862,
      "grad_norm": 0.078125,
      "learning_rate": 4.885645208965779e-05,
      "loss": 1.2497,
      "step": 680
    },
    {
      "epoch": 0.7089434575547596,
      "grad_norm": 0.072265625,
      "learning_rate": 4.7311589842440415e-05,
      "loss": 1.2692,
      "step": 685
    },
    {
      "epoch": 0.714118227317933,
      "grad_norm": 0.07275390625,
      "learning_rate": 4.578394245423626e-05,
      "loss": 1.2593,
      "step": 690
    },
    {
      "epoch": 0.7192929970811064,
      "grad_norm": 0.0712890625,
      "learning_rate": 4.427400905254483e-05,
      "loss": 1.2458,
      "step": 695
    },
    {
      "epoch": 0.7244677668442798,
      "grad_norm": 0.09033203125,
      "learning_rate": 4.278228297718307e-05,
      "loss": 1.2285,
      "step": 700
    },
    {
      "epoch": 0.7296425366074533,
      "grad_norm": 0.09423828125,
      "learning_rate": 4.1309251619097154e-05,
      "loss": 1.243,
      "step": 705
    },
    {
      "epoch": 0.7348173063706267,
      "grad_norm": 0.0791015625,
      "learning_rate": 3.985539626111708e-05,
      "loss": 1.2492,
      "step": 710
    },
    {
      "epoch": 0.7399920761338001,
      "grad_norm": 0.10693359375,
      "learning_rate": 3.842119192070762e-05,
      "loss": 1.2401,
      "step": 715
    },
    {
      "epoch": 0.7451668458969736,
      "grad_norm": 0.12158203125,
      "learning_rate": 3.700710719476594e-05,
      "loss": 1.2563,
      "step": 720
    },
    {
      "epoch": 0.750341615660147,
      "grad_norm": 0.08740234375,
      "learning_rate": 3.561360410651713e-05,
      "loss": 1.2479,
      "step": 725
    },
    {
      "epoch": 0.7555163854233204,
      "grad_norm": 0.07373046875,
      "learning_rate": 3.424113795455779e-05,
      "loss": 1.2366,
      "step": 730
    },
    {
      "epoch": 0.7606911551864939,
      "grad_norm": 0.12255859375,
      "learning_rate": 3.289015716409631e-05,
      "loss": 1.2184,
      "step": 735
    },
    {
      "epoch": 0.7658659249496673,
      "grad_norm": 0.0693359375,
      "learning_rate": 3.156110314043933e-05,
      "loss": 1.2454,
      "step": 740
    },
    {
      "epoch": 0.7710406947128408,
      "grad_norm": 0.095703125,
      "learning_rate": 3.025441012477126e-05,
      "loss": 1.2323,
      "step": 745
    },
    {
      "epoch": 0.7762154644760141,
      "grad_norm": 0.1806640625,
      "learning_rate": 2.89705050522753e-05,
      "loss": 1.2344,
      "step": 750
    },
    {
      "epoch": 0.7813902342391875,
      "grad_norm": 0.06689453125,
      "learning_rate": 2.7709807412640743e-05,
      "loss": 1.2428,
      "step": 755
    },
    {
      "epoch": 0.786565004002361,
      "grad_norm": 0.1279296875,
      "learning_rate": 2.6472729113003615e-05,
      "loss": 1.2585,
      "step": 760
    },
    {
      "epoch": 0.7917397737655344,
      "grad_norm": 0.06884765625,
      "learning_rate": 2.5259674343364104e-05,
      "loss": 1.2397,
      "step": 765
    },
    {
      "epoch": 0.7969145435287078,
      "grad_norm": 0.15625,
      "learning_rate": 2.4071039444526046e-05,
      "loss": 1.2581,
      "step": 770
    },
    {
      "epoch": 0.8020893132918813,
      "grad_norm": 0.07177734375,
      "learning_rate": 2.290721277860064e-05,
      "loss": 1.2582,
      "step": 775
    },
    {
      "epoch": 0.8072640830550547,
      "grad_norm": 0.09716796875,
      "learning_rate": 2.176857460211693e-05,
      "loss": 1.2389,
      "step": 780
    },
    {
      "epoch": 0.8124388528182281,
      "grad_norm": 0.09716796875,
      "learning_rate": 2.0655496941780994e-05,
      "loss": 1.245,
      "step": 785
    },
    {
      "epoch": 0.8176136225814016,
      "grad_norm": 0.091796875,
      "learning_rate": 1.9568343472923524e-05,
      "loss": 1.2398,
      "step": 790
    },
    {
      "epoch": 0.822788392344575,
      "grad_norm": 0.10205078125,
      "learning_rate": 1.8507469400676735e-05,
      "loss": 1.2408,
      "step": 795
    },
    {
      "epoch": 0.8279631621077485,
      "grad_norm": 0.076171875,
      "learning_rate": 1.7473221343918055e-05,
      "loss": 1.2355,
      "step": 800
    },
    {
      "epoch": 0.8331379318709219,
      "grad_norm": 0.1826171875,
      "learning_rate": 1.6465937222019745e-05,
      "loss": 1.2552,
      "step": 805
    },
    {
      "epoch": 0.8383127016340952,
      "grad_norm": 0.08544921875,
      "learning_rate": 1.5485946144440354e-05,
      "loss": 1.2461,
      "step": 810
    },
    {
      "epoch": 0.8434874713972687,
      "grad_norm": 0.07861328125,
      "learning_rate": 1.4533568303195333e-05,
      "loss": 1.2449,
      "step": 815
    },
    {
      "epoch": 0.8486622411604421,
      "grad_norm": 0.06591796875,
      "learning_rate": 1.3609114868240536e-05,
      "loss": 1.2424,
      "step": 820
    },
    {
      "epoch": 0.8538370109236155,
      "grad_norm": 0.0986328125,
      "learning_rate": 1.2712887885803937e-05,
      "loss": 1.2218,
      "step": 825
    },
    {
      "epoch": 0.859011780686789,
      "grad_norm": 0.06982421875,
      "learning_rate": 1.184518017969829e-05,
      "loss": 1.2556,
      "step": 830
    },
    {
      "epoch": 0.8641865504499624,
      "grad_norm": 0.10498046875,
      "learning_rate": 1.1006275255646681e-05,
      "loss": 1.2493,
      "step": 835
    },
    {
      "epoch": 0.8693613202131358,
      "grad_norm": 0.0751953125,
      "learning_rate": 1.0196447208653193e-05,
      "loss": 1.2509,
      "step": 840
    },
    {
      "epoch": 0.8745360899763093,
      "grad_norm": 0.10009765625,
      "learning_rate": 9.415960633447674e-06,
      "loss": 1.2322,
      "step": 845
    },
    {
      "epoch": 0.8797108597394827,
      "grad_norm": 0.0908203125,
      "learning_rate": 8.665070538035036e-06,
      "loss": 1.2422,
      "step": 850
    },
    {
      "epoch": 0.8848856295026561,
      "grad_norm": 0.109375,
      "learning_rate": 7.944022260376416e-06,
      "loss": 1.238,
      "step": 855
    },
    {
      "epoch": 0.8900603992658296,
      "grad_norm": 0.10205078125,
      "learning_rate": 7.253051388230248e-06,
      "loss": 1.25,
      "step": 860
    },
    {
      "epoch": 0.895235169029003,
      "grad_norm": 0.07177734375,
      "learning_rate": 6.592383682178593e-06,
      "loss": 1.2485,
      "step": 865
    },
    {
      "epoch": 0.9004099387921763,
      "grad_norm": 0.06689453125,
      "learning_rate": 5.96223500186458e-06,
      "loss": 1.2503,
      "step": 870
    },
    {
      "epoch": 0.9055847085553498,
      "grad_norm": 0.10400390625,
      "learning_rate": 5.362811235464726e-06,
      "loss": 1.2552,
      "step": 875
    },
    {
      "epoch": 0.9107594783185232,
      "grad_norm": 0.07373046875,
      "learning_rate": 4.794308232419065e-06,
      "loss": 1.2271,
      "step": 880
    },
    {
      "epoch": 0.9159342480816967,
      "grad_norm": 0.11865234375,
      "learning_rate": 4.256911739441505e-06,
      "loss": 1.2295,
      "step": 885
    },
    {
      "epoch": 0.9211090178448701,
      "grad_norm": 0.068359375,
      "learning_rate": 3.7507973398307584e-06,
      "loss": 1.2511,
      "step": 890
    },
    {
      "epoch": 0.9262837876080435,
      "grad_norm": 0.07177734375,
      "learning_rate": 3.276130396102217e-06,
      "loss": 1.2527,
      "step": 895
    },
    {
      "epoch": 0.931458557371217,
      "grad_norm": 0.2080078125,
      "learning_rate": 2.8330659959589946e-06,
      "loss": 1.2444,
      "step": 900
    },
    {
      "epoch": 0.9366333271343904,
      "grad_norm": 0.06640625,
      "learning_rate": 2.421748901620369e-06,
      "loss": 1.2495,
      "step": 905
    },
    {
      "epoch": 0.9418080968975638,
      "grad_norm": 0.06884765625,
      "learning_rate": 2.0423135025235294e-06,
      "loss": 1.2631,
      "step": 910
    },
    {
      "epoch": 0.9469828666607373,
      "grad_norm": 0.08154296875,
      "learning_rate": 1.6948837714146793e-06,
      "loss": 1.2336,
      "step": 915
    },
    {
      "epoch": 0.9521576364239107,
      "grad_norm": 0.0712890625,
      "learning_rate": 1.3795732238434556e-06,
      "loss": 1.28,
      "step": 920
    },
    {
      "epoch": 0.9573324061870842,
      "grad_norm": 0.107421875,
      "learning_rate": 1.0964848810740113e-06,
      "loss": 1.2429,
      "step": 925
    },
    {
      "epoch": 0.9625071759502575,
      "grad_norm": 0.07958984375,
      "learning_rate": 8.457112364250064e-07,
      "loss": 1.2278,
      "step": 930
    },
    {
      "epoch": 0.9676819457134309,
      "grad_norm": 0.10302734375,
      "learning_rate": 6.273342250492254e-07,
      "loss": 1.2675,
      "step": 935
    },
    {
      "epoch": 0.9728567154766044,
      "grad_norm": 0.09326171875,
      "learning_rate": 4.4142519716300344e-07,
      "loss": 1.2405,
      "step": 940
    },
    {
      "epoch": 0.9780314852397778,
      "grad_norm": 0.072265625,
      "learning_rate": 2.8804489473395203e-07,
      "loss": 1.2512,
      "step": 945
    },
    {
      "epoch": 0.9832062550029512,
      "grad_norm": 0.0859375,
      "learning_rate": 1.6724343163483368e-07,
      "loss": 1.2416,
      "step": 950
    },
    {
      "epoch": 0.9883810247661247,
      "grad_norm": 0.1171875,
      "learning_rate": 7.906027726981568e-08,
      "loss": 1.2292,
      "step": 955
    },
    {
      "epoch": 0.9935557945292981,
      "grad_norm": 0.08935546875,
      "learning_rate": 2.3524243678685332e-08,
      "loss": 1.2187,
      "step": 960
    },
    {
      "epoch": 0.9987305642924715,
      "grad_norm": 0.07080078125,
      "learning_rate": 6.534761230914122e-10,
      "loss": 1.2557,
      "step": 965
    },
    {
      "epoch": 0.9997655182451062,
      "eval_loss": 1.2492531538009644,
      "eval_runtime": 2450.1313,
      "eval_samples_per_second": 5.585,
      "eval_steps_per_second": 5.585,
      "step": 966
    },
    {
      "epoch": 0.9997655182451062,
      "step": 966,
      "total_flos": 3.041046347630248e+18,
      "train_loss": 1.1233504201067654,
      "train_runtime": 77534.6649,
      "train_samples_per_second": 1.595,
      "train_steps_per_second": 0.012
    }
  ],
  "logging_steps": 5,
  "max_steps": 966,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 20,
  "total_flos": 3.041046347630248e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|