{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9990403071017274,
  "eval_steps": 40.0,
  "global_step": 347,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 1.1999938524606463e-05, "loss": 2.6336, "step": 1 },
    { "epoch": 0.01, "learning_rate": 1.1999754099685592e-05, "loss": 2.287, "step": 2 },
    { "epoch": 0.01, "learning_rate": 1.1999446729016585e-05, "loss": 1.9992, "step": 3 },
    { "epoch": 0.01, "learning_rate": 1.1999016418898025e-05, "loss": 1.9326, "step": 4 },
    { "epoch": 0.01, "learning_rate": 1.1998463178147731e-05, "loss": 1.773, "step": 5 },
    { "epoch": 0.02, "learning_rate": 1.1997787018102607e-05, "loss": 1.6502, "step": 6 },
    { "epoch": 0.02, "learning_rate": 1.1996987952618384e-05, "loss": 1.5299, "step": 7 },
    { "epoch": 0.02, "learning_rate": 1.1996065998069355e-05, "loss": 1.5532, "step": 8 },
    { "epoch": 0.03, "learning_rate": 1.1995021173348024e-05, "loss": 1.4142, "step": 9 },
    { "epoch": 0.03, "learning_rate": 1.1993853499864725e-05, "loss": 1.5215, "step": 10 },
    { "epoch": 0.03, "learning_rate": 1.1992563001547192e-05, "loss": 1.4892, "step": 11 },
    { "epoch": 0.03, "learning_rate": 1.1991149704840053e-05, "loss": 1.5194, "step": 12 },
    { "epoch": 0.04, "learning_rate": 1.1989613638704296e-05, "loss": 1.6177, "step": 13 },
    { "epoch": 0.04, "learning_rate": 1.1987954834616683e-05, "loss": 1.468, "step": 14 },
    { "epoch": 0.04, "learning_rate": 1.1986173326569089e-05, "loss": 1.4379, "step": 15 },
    { "epoch": 0.05, "learning_rate": 1.1984269151067816e-05, "loss": 1.4556, "step": 16 },
    { "epoch": 0.05, "learning_rate": 1.1982242347132847e-05, "loss": 1.4937, "step": 17 },
    { "epoch": 0.05, "learning_rate": 1.1980092956297035e-05, "loss": 1.581, "step": 18 },
    { "epoch": 0.05, "learning_rate": 1.1977821022605263e-05, "loss": 1.4928, "step": 19 },
    { "epoch": 0.06, "learning_rate": 1.1975426592613539e-05, "loss": 1.4856, "step": 20 },
    { "epoch": 0.06, "learning_rate": 1.1972909715388035e-05, "loss": 1.5337, "step": 21 },
    { "epoch": 0.06, "learning_rate": 1.1970270442504094e-05, "loss": 1.4193, "step": 22 },
    { "epoch": 0.07, "learning_rate": 1.196750882804516e-05, "loss": 1.5631, "step": 23 },
    { "epoch": 0.07, "learning_rate": 1.196462492860168e-05, "loss": 1.4632, "step": 24 },
    { "epoch": 0.07, "learning_rate": 1.1961618803269935e-05, "loss": 1.5262, "step": 25 },
    { "epoch": 0.07, "learning_rate": 1.195849051365084e-05, "loss": 1.4775, "step": 26 },
    { "epoch": 0.08, "learning_rate": 1.1955240123848675e-05, "loss": 1.2899, "step": 27 },
    { "epoch": 0.08, "learning_rate": 1.1951867700469767e-05, "loss": 1.5403, "step": 28 },
    { "epoch": 0.08, "learning_rate": 1.194837331262114e-05, "loss": 1.4858, "step": 29 },
    { "epoch": 0.09, "learning_rate": 1.1944757031909076e-05, "loss": 1.4674, "step": 30 },
    { "epoch": 0.09, "learning_rate": 1.1941018932437674e-05, "loss": 1.4887, "step": 31 },
    { "epoch": 0.09, "learning_rate": 1.193715909080731e-05, "loss": 1.5058, "step": 32 },
    { "epoch": 0.1, "learning_rate": 1.1933177586113079e-05, "loss": 1.3734, "step": 33 },
    { "epoch": 0.1, "learning_rate": 1.192907449994317e-05, "loss": 1.4409, "step": 34 },
    { "epoch": 0.1, "learning_rate": 1.1924849916377196e-05, "loss": 1.3174, "step": 35 },
    { "epoch": 0.1, "learning_rate": 1.1920503921984469e-05, "loss": 1.4628, "step": 36 },
    { "epoch": 0.11, "learning_rate": 1.1916036605822227e-05, "loss": 1.3974, "step": 37 },
    { "epoch": 0.11, "learning_rate": 1.191144805943381e-05, "loss": 1.5108, "step": 38 },
    { "epoch": 0.11, "learning_rate": 1.1906738376846786e-05, "loss": 1.4599, "step": 39 },
    { "epoch": 0.12, "learning_rate": 1.1901907654571013e-05, "loss": 1.5997, "step": 40 },
    { "epoch": 0.12, "learning_rate": 1.1896955991596679e-05, "loss": 1.3063, "step": 41 },
    { "epoch": 0.12, "learning_rate": 1.189188348939226e-05, "loss": 1.4545, "step": 42 },
    { "epoch": 0.12, "learning_rate": 1.1886690251902445e-05, "loss": 1.3852, "step": 43 },
    { "epoch": 0.13, "learning_rate": 1.188137638554601e-05, "loss": 1.4043, "step": 44 },
    { "epoch": 0.13, "learning_rate": 1.1875941999213624e-05, "loss": 1.3884, "step": 45 },
    { "epoch": 0.13, "learning_rate": 1.187038720426564e-05, "loss": 1.4439, "step": 46 },
    { "epoch": 0.14, "learning_rate": 1.1864712114529786e-05, "loss": 1.4599, "step": 47 },
    { "epoch": 0.14, "learning_rate": 1.1858916846298859e-05, "loss": 1.4984, "step": 48 },
    { "epoch": 0.14, "learning_rate": 1.1853001518328323e-05, "loss": 1.3452, "step": 49 },
    { "epoch": 0.14, "learning_rate": 1.1846966251833882e-05, "loss": 1.3936, "step": 50 },
    { "epoch": 0.15, "learning_rate": 1.1840811170488998e-05, "loss": 1.4235, "step": 51 },
    { "epoch": 0.15, "learning_rate": 1.1834536400422352e-05, "loss": 1.4595, "step": 52 },
    { "epoch": 0.15, "learning_rate": 1.1828142070215266e-05, "loss": 1.4447, "step": 53 },
    { "epoch": 0.16, "learning_rate": 1.1821628310899061e-05, "loss": 1.5234, "step": 54 },
    { "epoch": 0.16, "learning_rate": 1.1814995255952373e-05, "loss": 1.4176, "step": 55 },
    { "epoch": 0.16, "learning_rate": 1.1808243041298429e-05, "loss": 1.4023, "step": 56 },
    { "epoch": 0.16, "learning_rate": 1.1801371805302244e-05, "loss": 1.4635, "step": 57 },
    { "epoch": 0.17, "learning_rate": 1.1794381688767794e-05, "loss": 1.4292, "step": 58 },
    { "epoch": 0.17, "learning_rate": 1.1787272834935138e-05, "loss": 1.4894, "step": 59 },
    { "epoch": 0.17, "learning_rate": 1.1780045389477471e-05, "loss": 1.2991, "step": 60 },
    { "epoch": 0.18, "learning_rate": 1.1772699500498142e-05, "loss": 1.4466, "step": 61 },
    { "epoch": 0.18, "learning_rate": 1.1765235318527625e-05, "loss": 1.41, "step": 62 },
    { "epoch": 0.18, "learning_rate": 1.1757652996520427e-05, "loss": 1.3542, "step": 63 },
    { "epoch": 0.18, "learning_rate": 1.174995268985196e-05, "loss": 1.4283, "step": 64 },
    { "epoch": 0.19, "learning_rate": 1.1742134556315348e-05, "loss": 1.4417, "step": 65 },
    { "epoch": 0.19, "learning_rate": 1.1734198756118204e-05, "loss": 1.4124, "step": 66 },
    { "epoch": 0.19, "learning_rate": 1.1726145451879344e-05, "loss": 1.4461, "step": 67 },
    { "epoch": 0.2, "learning_rate": 1.1717974808625447e-05, "loss": 1.3768, "step": 68 },
    { "epoch": 0.2, "learning_rate": 1.1709686993787686e-05, "loss": 1.4599, "step": 69 },
    { "epoch": 0.2, "learning_rate": 1.1701282177198285e-05, "loss": 1.5219, "step": 70 },
    { "epoch": 0.2, "learning_rate": 1.1692760531087049e-05, "loss": 1.4588, "step": 71 },
    { "epoch": 0.21, "learning_rate": 1.1684122230077822e-05, "loss": 1.3674, "step": 72 },
    { "epoch": 0.21, "learning_rate": 1.1675367451184928e-05, "loss": 1.422, "step": 73 },
    { "epoch": 0.21, "learning_rate": 1.1666496373809524e-05, "loss": 1.471, "step": 74 },
    { "epoch": 0.22, "learning_rate": 1.1657509179735934e-05, "loss": 1.5206, "step": 75 },
    { "epoch": 0.22, "learning_rate": 1.164840605312792e-05, "loss": 1.4817, "step": 76 },
    { "epoch": 0.22, "learning_rate": 1.1639187180524918e-05, "loss": 1.4353, "step": 77 },
    { "epoch": 0.22, "learning_rate": 1.1629852750838196e-05, "loss": 1.4303, "step": 78 },
    { "epoch": 0.23, "learning_rate": 1.1620402955347004e-05, "loss": 1.4781, "step": 79 },
    { "epoch": 0.23, "learning_rate": 1.1610837987694637e-05, "loss": 1.4428, "step": 80 },
    { "epoch": 0.23, "learning_rate": 1.1601158043884482e-05, "loss": 1.4312, "step": 81 },
    { "epoch": 0.24, "learning_rate": 1.159136332227599e-05, "loss": 1.4551, "step": 82 },
    { "epoch": 0.24, "learning_rate": 1.1581454023580616e-05, "loss": 1.4503, "step": 83 },
    { "epoch": 0.24, "learning_rate": 1.1571430350857704e-05, "loss": 1.4528, "step": 84 },
    { "epoch": 0.24, "learning_rate": 1.156129250951033e-05, "loss": 1.3285, "step": 85 },
    { "epoch": 0.25, "learning_rate": 1.1551040707281093e-05, "loss": 1.4585, "step": 86 },
    { "epoch": 0.25, "learning_rate": 1.1540675154247845e-05, "loss": 1.304, "step": 87 },
    { "epoch": 0.25, "learning_rate": 1.153019606281941e-05, "loss": 1.4546, "step": 88 },
    { "epoch": 0.26, "learning_rate": 1.1519603647731208e-05, "loss": 1.3858, "step": 89 },
    { "epoch": 0.26, "learning_rate": 1.1508898126040866e-05, "loss": 1.3672, "step": 90 },
    { "epoch": 0.26, "learning_rate": 1.1498079717123774e-05, "loss": 1.3506, "step": 91 },
    { "epoch": 0.26, "learning_rate": 1.1487148642668576e-05, "loss": 1.4802, "step": 92 },
    { "epoch": 0.27, "learning_rate": 1.1476105126672646e-05, "loss": 1.41, "step": 93 },
    { "epoch": 0.27, "learning_rate": 1.1464949395437477e-05, "loss": 1.4322, "step": 94 },
    { "epoch": 0.27, "learning_rate": 1.145368167756406e-05, "loss": 1.394, "step": 95 },
    { "epoch": 0.28, "learning_rate": 1.144230220394819e-05, "loss": 1.42, "step": 96 },
    { "epoch": 0.28, "learning_rate": 1.1430811207775741e-05, "loss": 1.3121, "step": 97 },
    { "epoch": 0.28, "learning_rate": 1.1419208924517886e-05, "loss": 1.3474, "step": 98 },
    { "epoch": 0.29, "learning_rate": 1.1407495591926263e-05, "loss": 1.2316, "step": 99 },
    { "epoch": 0.29, "learning_rate": 1.1395671450028118e-05, "loss": 1.4253, "step": 100 },
    { "epoch": 0.29, "learning_rate": 1.138373674112138e-05, "loss": 1.4087, "step": 101 },
    { "epoch": 0.29, "learning_rate": 1.1371691709769685e-05, "loss": 1.3573, "step": 102 },
    { "epoch": 0.3, "learning_rate": 1.1359536602797386e-05, "loss": 1.4106, "step": 103 },
    { "epoch": 0.3, "learning_rate": 1.1347271669284475e-05, "loss": 1.3924, "step": 104 },
    { "epoch": 0.3, "learning_rate": 1.133489716056149e-05, "loss": 1.4377, "step": 105 },
    { "epoch": 0.31, "learning_rate": 1.1322413330204362e-05, "loss": 1.4068, "step": 106 },
    { "epoch": 0.31, "learning_rate": 1.1309820434029221e-05, "loss": 1.3437, "step": 107 },
    { "epoch": 0.31, "learning_rate": 1.1297118730087148e-05, "loss": 1.4283, "step": 108 },
    { "epoch": 0.31, "learning_rate": 1.1284308478658893e-05, "loss": 1.4738, "step": 109 },
    { "epoch": 0.32, "learning_rate": 1.127138994224954e-05, "loss": 1.4097, "step": 110 },
    { "epoch": 0.32, "learning_rate": 1.1258363385583122e-05, "loss": 1.3924, "step": 111 },
    { "epoch": 0.32, "learning_rate": 1.1245229075597209e-05, "loss": 1.4808, "step": 112 },
    { "epoch": 0.33, "learning_rate": 1.1231987281437423e-05, "loss": 1.3838, "step": 113 },
    { "epoch": 0.33, "learning_rate": 1.1218638274451936e-05, "loss": 1.4327, "step": 114 },
    { "epoch": 0.33, "learning_rate": 1.1205182328185897e-05, "loss": 1.3547, "step": 115 },
    { "epoch": 0.33, "learning_rate": 1.1191619718375839e-05, "loss": 1.3291, "step": 116 },
    { "epoch": 0.34, "learning_rate": 1.117795072294402e-05, "loss": 1.304, "step": 117 },
    { "epoch": 0.34, "learning_rate": 1.116417562199273e-05, "loss": 1.3836, "step": 118 },
    { "epoch": 0.34, "learning_rate": 1.1150294697798553e-05, "loss": 1.4498, "step": 119 },
    { "epoch": 0.35, "learning_rate": 1.1136308234806587e-05, "loss": 1.5027, "step": 120 },
    { "epoch": 0.35, "learning_rate": 1.1122216519624596e-05, "loss": 1.418, "step": 121 },
    { "epoch": 0.35, "learning_rate": 1.1108019841017165e-05, "loss": 1.3934, "step": 122 },
    { "epoch": 0.35, "learning_rate": 1.1093718489899758e-05, "loss": 1.3492, "step": 123 },
    { "epoch": 0.36, "learning_rate": 1.1079312759332772e-05, "loss": 1.3846, "step": 124 },
    { "epoch": 0.36, "learning_rate": 1.106480294451553e-05, "loss": 1.402, "step": 125 },
    { "epoch": 0.36, "learning_rate": 1.1050189342780217e-05, "loss": 1.4242, "step": 126 },
    { "epoch": 0.37, "learning_rate": 1.103547225358581e-05, "loss": 1.4199, "step": 127 },
    { "epoch": 0.37, "learning_rate": 1.1020651978511924e-05, "loss": 1.4253, "step": 128 },
    { "epoch": 0.37, "learning_rate": 1.1005728821252641e-05, "loss": 1.4173, "step": 129 },
    { "epoch": 0.37, "learning_rate": 1.0990703087610283e-05, "loss": 1.4704, "step": 130 },
    { "epoch": 0.38, "learning_rate": 1.0975575085489143e-05, "loss": 1.4215, "step": 131 },
    { "epoch": 0.38, "learning_rate": 1.0960345124889188e-05, "loss": 1.4109, "step": 132 },
    { "epoch": 0.38, "learning_rate": 1.0945013517899686e-05, "loss": 1.4212, "step": 133 },
    { "epoch": 0.39, "learning_rate": 1.092958057869283e-05, "loss": 1.393, "step": 134 },
    { "epoch": 0.39, "learning_rate": 1.0914046623517292e-05, "loss": 1.3462, "step": 135 },
    { "epoch": 0.39, "learning_rate": 1.089841197069174e-05, "loss": 1.4998, "step": 136 },
    { "epoch": 0.39, "learning_rate": 1.0882676940598322e-05, "loss": 1.4839, "step": 137 },
    { "epoch": 0.4, "learning_rate": 1.0866841855676088e-05, "loss": 1.4445, "step": 138 },
    { "epoch": 0.4, "learning_rate": 1.08509070404144e-05, "loss": 1.419, "step": 139 },
    { "epoch": 0.4, "learning_rate": 1.0834872821346274e-05, "loss": 1.3788, "step": 140 },
    { "epoch": 0.41, "learning_rate": 1.0818739527041681e-05, "loss": 1.4787, "step": 141 },
    { "epoch": 0.41, "learning_rate": 1.0802507488100828e-05, "loss": 1.3911, "step": 142 },
    { "epoch": 0.41, "learning_rate": 1.0786177037147378e-05, "loss": 1.343, "step": 143 },
    { "epoch": 0.41, "learning_rate": 1.0769748508821627e-05, "loss": 1.4525, "step": 144 },
    { "epoch": 0.42, "learning_rate": 1.075322223977366e-05, "loss": 1.4751, "step": 145 },
    { "epoch": 0.42, "learning_rate": 1.0736598568656436e-05, "loss": 1.3912, "step": 146 },
    { "epoch": 0.42, "learning_rate": 1.071987783611887e-05, "loss": 1.3316, "step": 147 },
    { "epoch": 0.43, "learning_rate": 1.0703060384798828e-05, "loss": 1.465, "step": 148 },
    { "epoch": 0.43, "learning_rate": 1.0686146559316125e-05, "loss": 1.3809, "step": 149 },
    { "epoch": 0.43, "learning_rate": 1.066913670626545e-05, "loss": 1.4012, "step": 150 },
    { "epoch": 0.43, "learning_rate": 1.0652031174209279e-05, "loss": 1.5069, "step": 151 },
    { "epoch": 0.44, "learning_rate": 1.0634830313670714e-05, "loss": 1.3887, "step": 152 },
    { "epoch": 0.44, "learning_rate": 1.061753447712631e-05, "loss": 1.3444, "step": 153 },
    { "epoch": 0.44, "learning_rate": 1.0600144018998857e-05, "loss": 1.5074, "step": 154 },
    { "epoch": 0.45, "learning_rate": 1.0582659295650102e-05, "loss": 1.426, "step": 155 },
    { "epoch": 0.45, "learning_rate": 1.0565080665373467e-05, "loss": 1.3974, "step": 156 },
    { "epoch": 0.45, "learning_rate": 1.0547408488386689e-05, "loss": 1.507, "step": 157 },
    { "epoch": 0.45, "learning_rate": 1.0529643126824442e-05, "loss": 1.2848, "step": 158 },
    { "epoch": 0.46, "learning_rate": 1.0511784944730928e-05, "loss": 1.3705, "step": 159 },
    { "epoch": 0.46, "learning_rate": 1.04938343080524e-05, "loss": 1.372, "step": 160 },
    { "epoch": 0.46, "learning_rate": 1.0475791584629683e-05, "loss": 1.3737, "step": 161 },
    { "epoch": 0.47, "learning_rate": 1.0457657144190614e-05, "loss": 1.4055, "step": 162 },
    { "epoch": 0.47, "learning_rate": 1.0439431358342477e-05, "loss": 1.4183, "step": 163 },
    { "epoch": 0.47, "learning_rate": 1.0421114600564397e-05, "loss": 1.3938, "step": 164 },
    { "epoch": 0.48, "learning_rate": 1.0402707246199669e-05, "loss": 1.4279, "step": 165 },
    { "epoch": 0.48, "learning_rate": 1.0384209672448079e-05, "loss": 1.3658, "step": 166 },
    { "epoch": 0.48, "learning_rate": 1.0365622258358166e-05, "loss": 1.4269, "step": 167 },
    { "epoch": 0.48, "learning_rate": 1.0346945384819466e-05, "loss": 1.3599, "step": 168 },
    { "epoch": 0.49, "learning_rate": 1.0328179434554692e-05, "loss": 1.3039, "step": 169 },
    { "epoch": 0.49, "learning_rate": 1.0309324792111906e-05, "loss": 1.4862, "step": 170 },
    { "epoch": 0.49, "learning_rate": 1.0290381843856627e-05, "loss": 1.4285, "step": 171 },
    { "epoch": 0.5, "learning_rate": 1.0271350977963922e-05, "loss": 1.3973, "step": 172 },
    { "epoch": 0.5, "learning_rate": 1.0252232584410449e-05, "loss": 1.2789, "step": 173 },
    { "epoch": 0.5, "learning_rate": 1.0233027054966462e-05, "loss": 1.3758, "step": 174 },
    { "epoch": 0.5, "learning_rate": 1.0213734783187791e-05, "loss": 1.343, "step": 175 },
    { "epoch": 0.51, "learning_rate": 1.0194356164407765e-05, "loss": 1.4214, "step": 176 },
    { "epoch": 0.51, "learning_rate": 1.0174891595729128e-05, "loss": 1.3872, "step": 177 },
    { "epoch": 0.51, "learning_rate": 1.015534147601588e-05, "loss": 1.3549, "step": 178 },
    { "epoch": 0.52, "learning_rate": 1.0135706205885127e-05, "loss": 1.4698, "step": 179 },
    { "epoch": 0.52, "learning_rate": 1.0115986187698855e-05, "loss": 1.4067, "step": 180 },
    { "epoch": 0.52, "learning_rate": 1.0096181825555686e-05, "loss": 1.4373, "step": 181 },
    { "epoch": 0.52, "learning_rate": 1.007629352528261e-05, "loss": 1.3448, "step": 182 },
    { "epoch": 0.53, "learning_rate": 1.0056321694426653e-05, "loss": 1.474, "step": 183 },
    { "epoch": 0.53, "learning_rate": 1.0036266742246536e-05, "loss": 1.3803, "step": 184 },
    { "epoch": 0.53, "learning_rate": 1.0016129079704286e-05, "loss": 1.337, "step": 185 },
    { "epoch": 0.54, "learning_rate": 9.995909119456813e-06, "loss": 1.3436, "step": 186 },
    { "epoch": 0.54, "learning_rate": 9.975607275847452e-06, "loss": 1.2478, "step": 187 },
    { "epoch": 0.54, "learning_rate": 9.95522396489748e-06, "loss": 1.3558, "step": 188 },
    { "epoch": 0.54, "learning_rate": 9.934759604297585e-06, "loss": 1.332, "step": 189 },
    { "epoch": 0.55, "learning_rate": 9.91421461339931e-06, "loss": 1.3528, "step": 190 },
    { "epoch": 0.55, "learning_rate": 9.893589413206449e-06, "loss": 1.4392, "step": 191 },
    { "epoch": 0.55, "learning_rate": 9.87288442636644e-06, "loss": 1.4533, "step": 192 },
    { "epoch": 0.56, "learning_rate": 9.852100077161686e-06, "loss": 1.3014, "step": 193 },
    { "epoch": 0.56, "learning_rate": 9.831236791500866e-06, "loss": 1.4457, "step": 194 },
    { "epoch": 0.56, "learning_rate": 9.810294996910218e-06, "loss": 1.3272, "step": 195 },
    { "epoch": 0.56, "learning_rate": 9.789275122524762e-06, "loss": 1.4885, "step": 196 },
    { "epoch": 0.57, "learning_rate": 9.76817759907951e-06, "loss": 1.3802, "step": 197 },
    { "epoch": 0.57, "learning_rate": 9.747002858900654e-06, "loss": 1.3995, "step": 198 },
    { "epoch": 0.57, "learning_rate": 9.725751335896684e-06, "loss": 1.3889, "step": 199 },
    { "epoch": 0.58, "learning_rate": 9.704423465549514e-06, "loss": 1.404, "step": 200 },
    { "epoch": 0.58, "learning_rate": 9.683019684905555e-06, "loss": 1.4303, "step": 201 },
    { "epoch": 0.58, "learning_rate": 9.66154043256675e-06, "loss": 1.4106, "step": 202 },
    { "epoch": 0.58, "learning_rate": 9.639986148681595e-06, "loss": 1.3556, "step": 203 },
    { "epoch": 0.59, "learning_rate": 9.618357274936122e-06, "loss": 1.3358, "step": 204 },
    { "epoch": 0.59, "learning_rate": 9.596654254544837e-06, "loss": 1.3653, "step": 205 },
    { "epoch": 0.59, "learning_rate": 9.574877532241646e-06, "loss": 1.3943, "step": 206 },
    { "epoch": 0.6, "learning_rate": 9.553027554270741e-06, "loss": 1.2431, "step": 207 },
    { "epoch": 0.6, "learning_rate": 9.531104768377454e-06, "loss": 1.4285, "step": 208 },
    { "epoch": 0.6, "learning_rate": 9.509109623799082e-06, "loss": 1.4397, "step": 209 },
    { "epoch": 0.6, "learning_rate": 9.48704257125568e-06, "loss": 1.3748, "step": 210 },
    { "epoch": 0.61, "learning_rate": 9.464904062940827e-06, "loss": 1.4576, "step": 211 },
    { "epoch": 0.61, "learning_rate": 9.442694552512365e-06, "loss": 1.3955, "step": 212 },
    { "epoch": 0.61, "learning_rate": 9.420414495083084e-06, "loss": 1.5999, "step": 213 },
    { "epoch": 0.62, "learning_rate": 9.398064347211423e-06, "loss": 1.3066, "step": 214 },
    { "epoch": 0.62, "learning_rate": 9.37564456689209e-06, "loss": 1.4238, "step": 215 },
    { "epoch": 0.62, "learning_rate": 9.353155613546693e-06, "loss": 1.3093, "step": 216 },
    { "epoch": 0.62, "learning_rate": 9.330597948014321e-06, "loss": 1.3458, "step": 217 },
    { "epoch": 0.63, "learning_rate": 9.30797203254209e-06, "loss": 1.2949, "step": 218 },
    { "epoch": 0.63, "learning_rate": 9.28527833077569e-06, "loss": 1.3479, "step": 219 },
    { "epoch": 0.63, "learning_rate": 9.26251730774987e-06, "loss": 1.4428, "step": 220 },
    { "epoch": 0.64, "learning_rate": 9.23968942987891e-06, "loss": 1.4494, "step": 221 },
    { "epoch": 0.64, "learning_rate": 9.216795164947067e-06, "loss": 1.513, "step": 222 },
    { "epoch": 0.64, "learning_rate": 9.193834982098994e-06, "loss": 1.3999, "step": 223 },
    { "epoch": 0.64, "learning_rate": 9.170809351830119e-06, "loss": 1.4124, "step": 224 },
    { "epoch": 0.65, "learning_rate": 9.147718745976995e-06, "loss": 1.4184, "step": 225 },
    { "epoch": 0.65, "learning_rate": 9.124563637707654e-06, "loss": 1.4625, "step": 226 },
    { "epoch": 0.65, "learning_rate": 9.101344501511893e-06, "loss": 1.3812, "step": 227 },
    { "epoch": 0.66, "learning_rate": 9.078061813191557e-06, "loss": 1.3352, "step": 228 },
    { "epoch": 0.66, "learning_rate": 9.054716049850788e-06, "loss": 1.373, "step": 229 },
    { "epoch": 0.66, "learning_rate": 9.03130768988625e-06, "loss": 1.4279, "step": 230 },
    { "epoch": 0.67, "learning_rate": 9.007837212977322e-06, "loss": 1.3979, "step": 231 },
    { "epoch": 0.67, "learning_rate": 8.984305100076274e-06, "loss": 1.3694, "step": 232 },
    { "epoch": 0.67, "learning_rate": 8.960711833398402e-06, "loss": 1.4064, "step": 233 },
    { "epoch": 0.67, "learning_rate": 8.937057896412163e-06, "loss": 1.4104, "step": 234 },
    { "epoch": 0.68, "learning_rate": 8.913343773829249e-06, "loss": 1.3844, "step": 235 },
    { "epoch": 0.68, "learning_rate": 8.889569951594666e-06, "loss": 1.3653, "step": 236 },
    { "epoch": 0.68, "learning_rate": 8.865736916876774e-06, "loss": 1.427, "step": 237 },
    { "epoch": 0.69, "learning_rate": 8.841845158057302e-06, "loss": 1.3435, "step": 238 },
    { "epoch": 0.69, "learning_rate": 8.817895164721344e-06, "loss": 1.4427, "step": 239 },
    { "epoch": 0.69, "learning_rate": 8.793887427647316e-06, "loss": 1.3507, "step": 240 },
    { "epoch": 0.69, "learning_rate": 8.76982243879692e-06, "loss": 1.424, "step": 241 },
    { "epoch": 0.7, "learning_rate": 8.745700691305039e-06, "loss": 1.4637, "step": 242 },
    { "epoch": 0.7, "learning_rate": 8.721522679469645e-06, "loss": 1.3721, "step": 243 },
    { "epoch": 0.7, "learning_rate": 8.697288898741672e-06, "loss": 1.3589, "step": 244 },
    { "epoch": 0.71, "learning_rate": 8.672999845714856e-06, "loss": 1.4479, "step": 245 },
    { "epoch": 0.71, "learning_rate": 8.648656018115557e-06, "loss": 1.4105, "step": 246 },
    { "epoch": 0.71, "learning_rate": 8.624257914792573e-06, "loss": 1.3511, "step": 247 },
    { "epoch": 0.71, "learning_rate": 8.599806035706903e-06, "loss": 1.3963, "step": 248 },
    { "epoch": 0.72, "learning_rate": 8.575300881921513e-06, "loss": 1.3543, "step": 249 },
    { "epoch": 0.72, "learning_rate": 8.550742955591057e-06, "loss": 1.4107, "step": 250 },
    { "epoch": 0.72, "learning_rate": 8.526132759951598e-06, "loss": 1.399, "step": 251 },
    { "epoch": 0.73, "learning_rate": 8.501470799310294e-06, "loss": 1.4452, "step": 252 },
    { "epoch": 0.73, "learning_rate": 8.476757579035049e-06, "loss": 1.4045, "step": 253 },
    { "epoch": 0.73, "learning_rate": 8.451993605544183e-06, "loss": 1.4063, "step": 254 },
    { "epoch": 0.73, "learning_rate": 8.427179386296034e-06, "loss": 1.3813, "step": 255 },
    { "epoch": 0.74, "learning_rate": 8.402315429778566e-06, "loss": 1.4301, "step": 256 },
    { "epoch": 0.74, "learning_rate": 8.377402245498948e-06, "loss": 1.361, "step": 257 },
    { "epoch": 0.74, "learning_rate": 8.352440343973117e-06, "loss": 1.4488, "step": 258 },
    { "epoch": 0.75, "learning_rate": 8.327430236715313e-06, "loss": 1.4259, "step": 259 },
    { "epoch": 0.75, "learning_rate": 8.3023724362276e-06, "loss": 1.3858, "step": 260 },
    { "epoch": 0.75, "learning_rate": 8.277267455989356e-06, "loss": 1.429, "step": 261 },
    { "epoch": 0.75, "learning_rate": 8.252115810446766e-06, "loss": 1.4905, "step": 262 },
    { "epoch": 0.76, "learning_rate": 8.22691801500226e-06, "loss": 1.4224, "step": 263 },
    { "epoch": 0.76, "learning_rate": 8.201674586003973e-06, "loss": 1.4441, "step": 264 },
    { "epoch": 0.76, "learning_rate": 8.176386040735146e-06, "loss": 1.3486, "step": 265 },
    { "epoch": 0.77, "learning_rate": 8.15105289740354e-06, "loss": 1.3677, "step": 266 },
    { "epoch": 0.77, "learning_rate": 8.125675675130805e-06, "loss": 1.4015, "step": 267 },
    { "epoch": 0.77, "learning_rate": 8.100254893941847e-06, "loss": 1.434, "step": 268 },
    { "epoch": 0.77, "learning_rate": 8.07479107475418e-06, "loss": 1.4285, "step": 269 },
    { "epoch": 0.78, "learning_rate": 8.049284739367235e-06, "loss": 1.3749, "step": 270 },
    { "epoch": 0.78, "learning_rate": 8.023736410451683e-06, "loss": 1.3624, "step": 271 },
    { "epoch": 0.78, "learning_rate": 7.998146611538716e-06, "loss": 1.4756, "step": 272 },
    { "epoch": 0.79, "learning_rate": 7.972515867009318e-06, "loss": 1.4431, "step": 273 },
    { "epoch": 0.79, "learning_rate": 7.946844702083526e-06, "loss": 1.376, "step": 274 },
    { "epoch": 0.79, "learning_rate": 7.921133642809661e-06, "loss": 1.3855, "step": 275 },
    { "epoch": 0.79, "learning_rate": 7.895383216053555e-06, "loss": 1.3976, "step": 276 },
    { "epoch": 0.8, "learning_rate": 7.869593949487746e-06, "loss": 1.3488, "step": 277 },
    { "epoch": 0.8, "learning_rate": 7.843766371580668e-06, "loss": 1.2903, "step": 278 },
    { "epoch": 0.8, "learning_rate": 7.817901011585828e-06, "loss": 1.3089, "step": 279 },
    { "epoch": 0.81, "learning_rate": 7.791998399530957e-06, "loss": 1.3641, "step": 280 },
    { "epoch": 0.81, "learning_rate": 7.766059066207142e-06, "loss": 1.3597, "step": 281 },
    { "epoch": 0.81, "learning_rate": 7.740083543157959e-06, "loss": 1.3307, "step": 282 },
    { "epoch": 0.81, "learning_rate": 7.714072362668576e-06, "loss": 1.3195, "step": 283 },
    { "epoch": 0.82, "learning_rate": 7.68802605775484e-06, "loss": 1.4422, "step": 284 },
    { "epoch": 0.82, "learning_rate": 7.661945162152376e-06, "loss": 1.3965, "step": 285 },
    { "epoch": 0.82, "learning_rate": 7.635830210305614e-06, "loss": 1.4662, "step": 286 },
    { "epoch": 0.83, "learning_rate": 7.609681737356874e-06, "loss": 1.3645, "step": 287 },
    { "epoch": 0.83, "learning_rate": 7.5835002791353755e-06, "loss": 1.3993, "step": 288 },
    { "epoch": 0.83, "learning_rate": 7.557286372146269e-06, "loss": 1.3555, "step": 289 },
    { "epoch": 0.83, "learning_rate": 7.531040553559637e-06, "loss": 1.4398, "step": 290 },
    { "epoch": 0.84, "learning_rate": 7.5047633611994865e-06, "loss": 1.3965, "step": 291 },
    { "epoch": 0.84, "learning_rate": 7.478455333532731e-06, "loss": 1.3462, "step": 292 },
    { "epoch": 0.84, "learning_rate": 7.45211700965816e-06, "loss": 1.3702, "step": 293 },
    { "epoch": 0.85, "learning_rate": 7.425748929295378e-06, "loss": 1.3787, "step": 294 },
    { "epoch": 0.85, "learning_rate": 7.399351632773757e-06, "loss": 1.3006, "step": 295 },
    { "epoch": 0.85, "learning_rate": 7.372925661021363e-06, "loss": 1.2651, "step": 296 },
    { "epoch": 0.86, "learning_rate": 7.346471555553865e-06, "loss": 1.3216, "step": 297 },
    { "epoch": 0.86, "learning_rate": 7.319989858463447e-06, "loss": 1.4034, "step": 298 },
    { "epoch": 0.86, "learning_rate": 7.2934811124076895e-06, "loss": 1.2659, "step": 299 },
    { "epoch": 0.86, "learning_rate": 7.26694586059846e-06, "loss": 1.3787, "step": 300 },
    { "epoch": 0.87, "learning_rate": 7.240384646790773e-06, "loss": 1.3123, "step": 301 },
    { "epoch": 0.87, "learning_rate": 7.213798015271654e-06, "loss": 1.4925, "step": 302 },
    { "epoch": 0.87, "learning_rate": 7.18718651084898e-06, "loss": 1.3573, "step": 303 },
    { "epoch": 0.88, "learning_rate": 7.160550678840318e-06, "loss": 1.4239, "step": 304 },
    { "epoch": 0.88, "learning_rate": 7.13389106506176e-06, "loss": 1.3931, "step": 305 },
    { "epoch": 0.88, "learning_rate": 7.107208215816714e-06, "loss": 1.2298, "step": 306 },
    { "epoch": 0.88, "learning_rate": 7.0805026778847374e-06, "loss": 1.3927, "step": 307 },
    { "epoch": 0.89, "learning_rate": 7.053774998510315e-06, "loss": 1.3206, "step": 308 },
    { "epoch": 0.89, "learning_rate": 7.027025725391647e-06, "loss": 1.4185, "step": 309 },
    { "epoch": 0.89, "learning_rate": 7.000255406669432e-06, "loss": 1.3403, "step": 310 },
    { "epoch": 0.9, "learning_rate": 6.973464590915631e-06, "loss": 1.3842, "step": 311 },
    { "epoch": 0.9, "learning_rate": 6.9466538271222195e-06, "loss": 1.437, "step": 312 },
    { "epoch": 0.9, "learning_rate": 6.9198236646899554e-06, "loss": 1.3241, "step": 313 },
    { "epoch": 0.9, "learning_rate": 6.8929746534170995e-06, "loss": 1.4092, "step": 314 },
    { "epoch": 0.91, "learning_rate": 6.866107343488164e-06, "loss": 1.3778, "step": 315 },
    { "epoch": 0.91, "learning_rate": 6.839222285462635e-06, "loss": 1.3312, "step": 316 },
    { "epoch": 0.91, "learning_rate": 6.812320030263681e-06, "loss": 1.3811, "step": 317 },
    { "epoch": 0.92, "learning_rate": 6.7854011291668854e-06, "loss": 1.3, "step": 318 },
    { "epoch": 0.92, "learning_rate": 6.75846613378892e-06, "loss": 1.2676, "step": 319 },
    { "epoch": 0.92, "learning_rate": 6.7315155960762695e-06, "loss": 1.3647, "step": 320 },
    { "epoch": 0.92, "learning_rate": 6.704550068293903e-06, "loss": 1.323, "step": 321 },
    { "epoch": 0.93, "learning_rate": 6.677570103013964e-06, "loss": 1.3604, "step": 322 },
    { "epoch": 0.93, "learning_rate": 6.650576253104449e-06, "loss": 1.3911, "step": 323 },
    { "epoch": 0.93, "learning_rate": 6.623569071717872e-06, "loss": 1.3921, "step": 324 },
    { "epoch": 0.94, "learning_rate": 6.596549112279932e-06, "loss": 1.376, "step": 325 },
    { "epoch": 0.94, "learning_rate": 6.56951692847818e-06, "loss": 1.4246, "step": 326 },
    { "epoch": 0.94, "learning_rate": 6.54247307425066e-06, "loss": 1.2893, "step": 327 },
    { "epoch": 0.94, "learning_rate": 6.5154181037745655e-06, "loss": 1.3598, "step": 328 },
    { "epoch": 0.95, "learning_rate": 6.488352571454883e-06, "loss": 1.5025, "step": 329 },
    { "epoch": 0.95, "learning_rate": 6.461277031913027e-06, "loss": 1.4079, "step": 330 },
    { "epoch": 0.95, "learning_rate": 6.434192039975484e-06, "loss": 1.4089, "step": 331 },
    { "epoch": 0.96, "learning_rate": 6.407098150662432e-06, "loss": 1.3471, "step": 332 },
    { "epoch": 0.96, "learning_rate": 6.379995919176371e-06, "loss": 1.398, "step": 333 },
    { "epoch": 0.96, "learning_rate": 6.352885900890754e-06, "loss": 1.2381, "step": 334 },
    { "epoch": 0.96, "learning_rate": 6.3257686513385925e-06, "loss": 1.3302, "step": 335 },
    { "epoch": 0.97, "learning_rate": 6.298644726201083e-06, "loss": 1.3804, "step": 336 },
    { "epoch": 0.97, "learning_rate": 6.271514681296216e-06, "loss": 1.3909, "step": 337 },
    { "epoch": 0.97, "learning_rate": 6.2443790725673895e-06, "loss": 1.4171, "step": 338 },
    { "epoch": 0.98, "learning_rate": 6.217238456072008e-06, "loss": 1.4425, "step": 339 },
    { "epoch": 0.98, "learning_rate": 6.190093387970102e-06, "loss": 1.3923, "step": 340 },
    { "epoch": 0.98, "learning_rate": 6.1629444245129175e-06, "loss": 1.361, "step": 341 },
    { "epoch": 0.98, "learning_rate": 6.135792122031526e-06, "loss": 1.3116, "step": 342 },
    { "epoch": 0.99, "learning_rate": 6.10863703692542e-06, "loss": 1.3277, "step": 343 },
    { "epoch": 0.99, "learning_rate": 6.081479725651116e-06, "loss": 1.4442, "step": 344 },
    { "epoch": 0.99, "learning_rate": 6.054320744710744e-06, "loss": 1.4354, "step": 345 },
    { "epoch": 1.0, "learning_rate": 6.027160650640654e-06, "loss": 1.3192, "step": 346 },
    { "epoch": 1.0, "learning_rate": 6e-06, "loss": 1.359, "step": 347 },
    { "epoch": 1.0, "eval_loss": 1.359147548675537, "eval_runtime": 99.243, "eval_samples_per_second": 9.31, "eval_steps_per_second": 1.169, "step": 347 }
  ],
  "logging_steps": 1.0,
  "max_steps": 694,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100.0,
  "total_flos": 1.9566296450924544e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}