{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 58.02469135802469,
  "eval_steps": 500,
  "global_step": 4700,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.6172839506172839,
      "grad_norm": 62.88630294799805,
      "learning_rate": 9.916000000000001e-06,
      "loss": 6.6856,
      "step": 50
    },
    {
      "epoch": 1.2345679012345678,
      "grad_norm": 387.6339111328125,
      "learning_rate": 9.818000000000002e-06,
      "loss": 5.5869,
      "step": 100
    },
    {
      "epoch": 1.8518518518518519,
      "grad_norm": 245.18975830078125,
      "learning_rate": 9.718e-06,
      "loss": 5.159,
      "step": 150
    },
    {
      "epoch": 2.4691358024691357,
      "grad_norm": 330.3158264160156,
      "learning_rate": 9.618e-06,
      "loss": 4.0494,
      "step": 200
    },
    {
      "epoch": 3.0864197530864197,
      "grad_norm": 138.14215087890625,
      "learning_rate": 9.518000000000001e-06,
      "loss": 3.6313,
      "step": 250
    },
    {
      "epoch": 3.7037037037037037,
      "grad_norm": 166.69479370117188,
      "learning_rate": 9.418e-06,
      "loss": 3.3391,
      "step": 300
    },
    {
      "epoch": 4.320987654320987,
      "grad_norm": 107.54407501220703,
      "learning_rate": 9.318e-06,
      "loss": 3.3295,
      "step": 350
    },
    {
      "epoch": 4.938271604938271,
      "grad_norm": 293.8923034667969,
      "learning_rate": 9.218e-06,
      "loss": 3.0956,
      "step": 400
    },
    {
      "epoch": 5.555555555555555,
      "grad_norm": 226.05360412597656,
      "learning_rate": 9.118000000000001e-06,
      "loss": 3.0271,
      "step": 450
    },
    {
      "epoch": 6.172839506172839,
      "grad_norm": 352.8487854003906,
      "learning_rate": 9.018e-06,
      "loss": 3.0627,
      "step": 500
    },
    {
      "epoch": 6.790123456790123,
      "grad_norm": 97.6641616821289,
      "learning_rate": 8.918000000000002e-06,
      "loss": 3.0312,
      "step": 550
    },
    {
      "epoch": 7.407407407407407,
      "grad_norm": 90.92312622070312,
      "learning_rate": 8.818000000000001e-06,
      "loss": 3.0228,
      "step": 600
    },
    {
      "epoch": 8.024691358024691,
      "grad_norm": 241.7625732421875,
      "learning_rate": 8.718e-06,
      "loss": 3.0873,
      "step": 650
    },
    {
      "epoch": 8.641975308641975,
      "grad_norm": 162.14071655273438,
      "learning_rate": 8.618000000000001e-06,
      "loss": 2.9582,
      "step": 700
    },
    {
      "epoch": 9.25925925925926,
      "grad_norm": 128.74766540527344,
      "learning_rate": 8.518e-06,
      "loss": 2.9787,
      "step": 750
    },
    {
      "epoch": 9.876543209876543,
      "grad_norm": 164.0232391357422,
      "learning_rate": 8.418000000000001e-06,
      "loss": 2.969,
      "step": 800
    },
    {
      "epoch": 10.493827160493828,
      "grad_norm": 110.58142852783203,
      "learning_rate": 8.318e-06,
      "loss": 2.884,
      "step": 850
    },
    {
      "epoch": 11.11111111111111,
      "grad_norm": 111.70948791503906,
      "learning_rate": 8.218e-06,
      "loss": 2.8216,
      "step": 900
    },
    {
      "epoch": 11.728395061728396,
      "grad_norm": 111.31181335449219,
      "learning_rate": 8.118000000000001e-06,
      "loss": 2.8621,
      "step": 950
    },
    {
      "epoch": 12.345679012345679,
      "grad_norm": 84.1104507446289,
      "learning_rate": 8.018e-06,
      "loss": 2.7653,
      "step": 1000
    },
    {
      "epoch": 12.962962962962964,
      "grad_norm": 85.67730712890625,
      "learning_rate": 7.918e-06,
      "loss": 2.8418,
      "step": 1050
    },
    {
      "epoch": 13.580246913580247,
      "grad_norm": 567.8911743164062,
      "learning_rate": 7.818e-06,
      "loss": 2.789,
      "step": 1100
    },
    {
      "epoch": 14.197530864197532,
      "grad_norm": 98.47197723388672,
      "learning_rate": 7.718000000000001e-06,
      "loss": 2.7996,
      "step": 1150
    },
    {
      "epoch": 14.814814814814815,
      "grad_norm": 86.87311553955078,
      "learning_rate": 7.618000000000001e-06,
      "loss": 2.7087,
      "step": 1200
    },
    {
      "epoch": 15.432098765432098,
      "grad_norm": 115.806640625,
      "learning_rate": 7.518000000000001e-06,
      "loss": 2.6898,
      "step": 1250
    },
    {
      "epoch": 16.049382716049383,
      "grad_norm": 108.76448059082031,
      "learning_rate": 7.418000000000001e-06,
      "loss": 2.7533,
      "step": 1300
    },
    {
      "epoch": 16.666666666666668,
      "grad_norm": 137.66285705566406,
      "learning_rate": 7.318000000000001e-06,
      "loss": 2.6817,
      "step": 1350
    },
    {
      "epoch": 17.28395061728395,
      "grad_norm": 167.23983764648438,
      "learning_rate": 7.218e-06,
      "loss": 2.6612,
      "step": 1400
    },
    {
      "epoch": 17.901234567901234,
      "grad_norm": 80.27250671386719,
      "learning_rate": 7.118e-06,
      "loss": 2.7053,
      "step": 1450
    },
    {
      "epoch": 18.51851851851852,
      "grad_norm": 87.89417266845703,
      "learning_rate": 7.018e-06,
      "loss": 2.6016,
      "step": 1500
    },
    {
      "epoch": 19.135802469135804,
      "grad_norm": 244.07081604003906,
      "learning_rate": 6.9180000000000005e-06,
      "loss": 2.6377,
      "step": 1550
    },
    {
      "epoch": 19.753086419753085,
      "grad_norm": 66.56304931640625,
      "learning_rate": 6.818e-06,
      "loss": 2.5969,
      "step": 1600
    },
    {
      "epoch": 20.37037037037037,
      "grad_norm": 146.2065887451172,
      "learning_rate": 6.718e-06,
      "loss": 2.5849,
      "step": 1650
    },
    {
      "epoch": 20.987654320987655,
      "grad_norm": 105.38662719726562,
      "learning_rate": 6.618000000000001e-06,
      "loss": 2.588,
      "step": 1700
    },
    {
      "epoch": 21.604938271604937,
      "grad_norm": 172.37232971191406,
      "learning_rate": 6.518000000000001e-06,
      "loss": 2.5201,
      "step": 1750
    },
    {
      "epoch": 22.22222222222222,
      "grad_norm": 1186.192626953125,
      "learning_rate": 6.418000000000001e-06,
      "loss": 2.5618,
      "step": 1800
    },
    {
      "epoch": 22.839506172839506,
      "grad_norm": 181.00384521484375,
      "learning_rate": 6.318000000000001e-06,
      "loss": 2.496,
      "step": 1850
    },
    {
      "epoch": 23.45679012345679,
      "grad_norm": 90.32185363769531,
      "learning_rate": 6.2180000000000004e-06,
      "loss": 2.5254,
      "step": 1900
    },
    {
      "epoch": 24.074074074074073,
      "grad_norm": 117.39266967773438,
      "learning_rate": 6.1180000000000005e-06,
      "loss": 2.4665,
      "step": 1950
    },
    {
      "epoch": 24.691358024691358,
      "grad_norm": 117.4901123046875,
      "learning_rate": 6.018000000000001e-06,
      "loss": 2.4541,
      "step": 2000
    },
    {
      "epoch": 25.308641975308642,
      "grad_norm": 112.75492095947266,
      "learning_rate": 5.918000000000001e-06,
      "loss": 2.4716,
      "step": 2050
    },
    {
      "epoch": 25.925925925925927,
      "grad_norm": 75.71508026123047,
      "learning_rate": 5.818e-06,
      "loss": 2.4643,
      "step": 2100
    },
    {
      "epoch": 26.54320987654321,
      "grad_norm": 87.87818145751953,
      "learning_rate": 5.718e-06,
      "loss": 2.452,
      "step": 2150
    },
    {
      "epoch": 27.160493827160494,
      "grad_norm": 119.02462005615234,
      "learning_rate": 5.618e-06,
      "loss": 2.4431,
      "step": 2200
    },
    {
      "epoch": 27.77777777777778,
      "grad_norm": 95.6996841430664,
      "learning_rate": 5.518e-06,
      "loss": 2.396,
      "step": 2250
    },
    {
      "epoch": 28.395061728395063,
      "grad_norm": 82.37759399414062,
      "learning_rate": 5.420000000000001e-06,
      "loss": 2.4554,
      "step": 2300
    },
    {
      "epoch": 29.012345679012345,
      "grad_norm": 63.04928207397461,
      "learning_rate": 5.320000000000001e-06,
      "loss": 2.3868,
      "step": 2350
    },
    {
      "epoch": 29.62962962962963,
      "grad_norm": 85.531494140625,
      "learning_rate": 5.220000000000001e-06,
      "loss": 2.4088,
      "step": 2400
    },
    {
      "epoch": 30.246913580246915,
      "grad_norm": 148.06163024902344,
      "learning_rate": 5.12e-06,
      "loss": 2.4479,
      "step": 2450
    },
    {
      "epoch": 30.864197530864196,
      "grad_norm": 120.42308807373047,
      "learning_rate": 5.02e-06,
      "loss": 2.4293,
      "step": 2500
    },
    {
      "epoch": 31.48148148148148,
      "grad_norm": 47.70389938354492,
      "learning_rate": 4.92e-06,
      "loss": 2.3727,
      "step": 2550
    },
    {
      "epoch": 32.098765432098766,
      "grad_norm": 67.6549301147461,
      "learning_rate": 4.8200000000000004e-06,
      "loss": 2.3938,
      "step": 2600
    },
    {
      "epoch": 32.71604938271605,
      "grad_norm": 71.10252380371094,
      "learning_rate": 4.7200000000000005e-06,
      "loss": 2.3758,
      "step": 2650
    },
    {
      "epoch": 33.333333333333336,
      "grad_norm": 69.4305419921875,
      "learning_rate": 4.620000000000001e-06,
      "loss": 2.3855,
      "step": 2700
    },
    {
      "epoch": 33.95061728395062,
      "grad_norm": 58.54874038696289,
      "learning_rate": 4.520000000000001e-06,
      "loss": 2.3638,
      "step": 2750
    },
    {
      "epoch": 34.5679012345679,
      "grad_norm": 135.54541015625,
      "learning_rate": 4.42e-06,
      "loss": 2.3785,
      "step": 2800
    },
    {
      "epoch": 35.18518518518518,
      "grad_norm": 191.87705993652344,
      "learning_rate": 4.32e-06,
      "loss": 2.3247,
      "step": 2850
    },
    {
      "epoch": 35.80246913580247,
      "grad_norm": 101.19218444824219,
      "learning_rate": 4.22e-06,
      "loss": 2.3322,
      "step": 2900
    },
    {
      "epoch": 36.41975308641975,
      "grad_norm": 97.59493255615234,
      "learning_rate": 4.12e-06,
      "loss": 2.3189,
      "step": 2950
    },
    {
      "epoch": 37.03703703703704,
      "grad_norm": 50.98225784301758,
      "learning_rate": 4.0200000000000005e-06,
      "loss": 2.3337,
      "step": 3000
    },
    {
      "epoch": 37.65432098765432,
      "grad_norm": 78.88179016113281,
      "learning_rate": 3.920000000000001e-06,
      "loss": 2.3707,
      "step": 3050
    },
    {
      "epoch": 38.27160493827161,
      "grad_norm": 136.63319396972656,
      "learning_rate": 3.820000000000001e-06,
      "loss": 2.2602,
      "step": 3100
    },
    {
      "epoch": 38.888888888888886,
      "grad_norm": 87.68150329589844,
      "learning_rate": 3.7200000000000004e-06,
      "loss": 2.3111,
      "step": 3150
    },
    {
      "epoch": 39.50617283950617,
      "grad_norm": 179.4459686279297,
      "learning_rate": 3.62e-06,
      "loss": 2.3092,
      "step": 3200
    },
    {
      "epoch": 40.123456790123456,
      "grad_norm": 76.10836791992188,
      "learning_rate": 3.52e-06,
      "loss": 2.3029,
      "step": 3250
    },
    {
      "epoch": 40.74074074074074,
      "grad_norm": 85.23873901367188,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 2.2699,
      "step": 3300
    },
    {
      "epoch": 41.358024691358025,
      "grad_norm": 156.77699279785156,
      "learning_rate": 3.3200000000000004e-06,
      "loss": 2.2795,
      "step": 3350
    },
    {
      "epoch": 41.97530864197531,
      "grad_norm": 87.01202392578125,
      "learning_rate": 3.2200000000000005e-06,
      "loss": 2.2961,
      "step": 3400
    },
    {
      "epoch": 42.592592592592595,
      "grad_norm": 208.42947387695312,
      "learning_rate": 3.12e-06,
      "loss": 2.2692,
      "step": 3450
    },
    {
      "epoch": 43.20987654320987,
      "grad_norm": 82.78223419189453,
      "learning_rate": 3.0200000000000003e-06,
      "loss": 2.2482,
      "step": 3500
    },
    {
      "epoch": 43.82716049382716,
      "grad_norm": 110.6844482421875,
      "learning_rate": 2.92e-06,
      "loss": 2.2755,
      "step": 3550
    },
    {
      "epoch": 44.44444444444444,
      "grad_norm": 57.944942474365234,
      "learning_rate": 2.82e-06,
      "loss": 2.302,
      "step": 3600
    },
    {
      "epoch": 45.06172839506173,
      "grad_norm": 72.0767593383789,
      "learning_rate": 2.7200000000000002e-06,
      "loss": 2.2305,
      "step": 3650
    },
    {
      "epoch": 45.67901234567901,
      "grad_norm": 154.40426635742188,
      "learning_rate": 2.6200000000000003e-06,
      "loss": 2.2532,
      "step": 3700
    },
    {
      "epoch": 46.2962962962963,
      "grad_norm": 112.4802017211914,
      "learning_rate": 2.52e-06,
      "loss": 2.2596,
      "step": 3750
    },
    {
      "epoch": 46.91358024691358,
      "grad_norm": 97.9261245727539,
      "learning_rate": 2.42e-06,
      "loss": 2.2296,
      "step": 3800
    },
    {
      "epoch": 47.53086419753087,
      "grad_norm": 86.22404479980469,
      "learning_rate": 2.3200000000000002e-06,
      "loss": 2.2086,
      "step": 3850
    },
    {
      "epoch": 48.148148148148145,
      "grad_norm": 118.13029479980469,
      "learning_rate": 2.2200000000000003e-06,
      "loss": 2.2208,
      "step": 3900
    },
    {
      "epoch": 48.76543209876543,
      "grad_norm": 83.22029113769531,
      "learning_rate": 2.12e-06,
      "loss": 2.2228,
      "step": 3950
    },
    {
      "epoch": 49.382716049382715,
      "grad_norm": 46.90211868286133,
      "learning_rate": 2.02e-06,
      "loss": 2.2279,
      "step": 4000
    },
    {
      "epoch": 50.0,
      "grad_norm": 666.5877685546875,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 2.2356,
      "step": 4050
    },
    {
      "epoch": 50.617283950617285,
      "grad_norm": 73.89546966552734,
      "learning_rate": 1.8200000000000002e-06,
      "loss": 2.2332,
      "step": 4100
    },
    {
      "epoch": 51.23456790123457,
      "grad_norm": 205.4402313232422,
      "learning_rate": 1.72e-06,
      "loss": 2.1635,
      "step": 4150
    },
    {
      "epoch": 51.851851851851855,
      "grad_norm": 46.229618072509766,
      "learning_rate": 1.6200000000000002e-06,
      "loss": 2.1759,
      "step": 4200
    },
    {
      "epoch": 52.46913580246913,
      "grad_norm": 58.453006744384766,
      "learning_rate": 1.52e-06,
      "loss": 2.1893,
      "step": 4250
    },
    {
      "epoch": 53.08641975308642,
      "grad_norm": 102.3731689453125,
      "learning_rate": 1.4220000000000001e-06,
      "loss": 2.2028,
      "step": 4300
    },
    {
      "epoch": 53.7037037037037,
      "grad_norm": 82.2330093383789,
      "learning_rate": 1.3220000000000002e-06,
      "loss": 2.2168,
      "step": 4350
    },
    {
      "epoch": 54.32098765432099,
      "grad_norm": 197.8307342529297,
      "learning_rate": 1.2220000000000001e-06,
      "loss": 2.2205,
      "step": 4400
    },
    {
      "epoch": 54.93827160493827,
      "grad_norm": 108.3754653930664,
      "learning_rate": 1.122e-06,
      "loss": 2.1846,
      "step": 4450
    },
    {
      "epoch": 55.55555555555556,
      "grad_norm": 87.77450561523438,
      "learning_rate": 1.0220000000000001e-06,
      "loss": 2.1763,
      "step": 4500
    },
    {
      "epoch": 56.17283950617284,
      "grad_norm": 93.21278381347656,
      "learning_rate": 9.220000000000001e-07,
      "loss": 2.2124,
      "step": 4550
    },
    {
      "epoch": 56.79012345679013,
      "grad_norm": 65.71028137207031,
      "learning_rate": 8.22e-07,
      "loss": 2.1856,
      "step": 4600
    },
    {
      "epoch": 57.407407407407405,
      "grad_norm": 84.39445495605469,
      "learning_rate": 7.22e-07,
      "loss": 2.1913,
      "step": 4650
    },
    {
      "epoch": 58.02469135802469,
      "grad_norm": 124.40509033203125,
      "learning_rate": 6.22e-07,
      "loss": 2.177,
      "step": 4700
    }
  ],
  "logging_steps": 50,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 62,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.7965428811776e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}