{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9986154911160678,
  "eval_steps": 500,
  "global_step": 812,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.2714, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.306765580733931e-06, |
|
"loss": 1.2731, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.826061944859854e-06, |
|
"loss": 1.2597, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.613531161467863e-06, |
|
"loss": 1.2539, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1e-05, |
|
"loss": 1.195, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.1132827525593786e-05, |
|
"loss": 1.2244, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2090619551221676e-05, |
|
"loss": 1.2474, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2920296742201793e-05, |
|
"loss": 1.228, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.3652123889719709e-05, |
|
"loss": 1.2463, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.4306765580733935e-05, |
|
"loss": 1.2238, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.4898961024049785e-05, |
|
"loss": 1.1884, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.543959310632772e-05, |
|
"loss": 1.2167, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.5936926411670824e-05, |
|
"loss": 1.2671, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.639738513195561e-05, |
|
"loss": 1.3037, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.6826061944859853e-05, |
|
"loss": 1.2237, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.7227062322935725e-05, |
|
"loss": 1.2199, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.7603744277225883e-05, |
|
"loss": 1.2303, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.7958889470453637e-05, |
|
"loss": 1.2202, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.8294828004351506e-05, |
|
"loss": 1.1969, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.8613531161467863e-05, |
|
"loss": 1.3012, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.891668149608153e-05, |
|
"loss": 1.233, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9205726604783716e-05, |
|
"loss": 1.2878, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.94819209346638e-05, |
|
"loss": 1.2422, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9746358687061647e-05, |
|
"loss": 1.2239, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2e-05, |
|
"loss": 1.2178, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2e-05, |
|
"loss": 1.2117, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.997458703939009e-05, |
|
"loss": 1.1493, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.994917407878018e-05, |
|
"loss": 1.1968, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.992376111817027e-05, |
|
"loss": 1.212, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9898348157560356e-05, |
|
"loss": 1.1543, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9872935196950447e-05, |
|
"loss": 1.2125, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9847522236340535e-05, |
|
"loss": 1.144, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9822109275730623e-05, |
|
"loss": 1.216, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9796696315120714e-05, |
|
"loss": 1.164, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9771283354510802e-05, |
|
"loss": 1.1887, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.974587039390089e-05, |
|
"loss": 1.2175, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.972045743329098e-05, |
|
"loss": 1.1421, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.969504447268107e-05, |
|
"loss": 1.1392, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.966963151207116e-05, |
|
"loss": 1.2004, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9644218551461248e-05, |
|
"loss": 1.1202, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9618805590851335e-05, |
|
"loss": 1.1558, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9593392630241426e-05, |
|
"loss": 1.1174, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9567979669631514e-05, |
|
"loss": 1.1082, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9542566709021602e-05, |
|
"loss": 1.1064, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9517153748411693e-05, |
|
"loss": 1.1546, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.949174078780178e-05, |
|
"loss": 1.0872, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.946632782719187e-05, |
|
"loss": 1.092, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.944091486658196e-05, |
|
"loss": 1.1259, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9415501905972048e-05, |
|
"loss": 1.1238, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9390088945362135e-05, |
|
"loss": 1.0482, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9364675984752227e-05, |
|
"loss": 1.0974, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9339263024142314e-05, |
|
"loss": 1.044, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9313850063532402e-05, |
|
"loss": 1.0646, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9288437102922493e-05, |
|
"loss": 1.0501, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.926302414231258e-05, |
|
"loss": 1.1105, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.923761118170267e-05, |
|
"loss": 1.0487, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.921219822109276e-05, |
|
"loss": 1.0501, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9186785260482848e-05, |
|
"loss": 1.1042, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9161372299872936e-05, |
|
"loss": 1.0771, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9135959339263027e-05, |
|
"loss": 1.053, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9110546378653115e-05, |
|
"loss": 1.0649, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9085133418043206e-05, |
|
"loss": 1.1182, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9059720457433294e-05, |
|
"loss": 1.0695, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.903430749682338e-05, |
|
"loss": 1.0546, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9008894536213473e-05, |
|
"loss": 1.045, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.898348157560356e-05, |
|
"loss": 1.0576, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8958068614993648e-05, |
|
"loss": 1.0122, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.893265565438374e-05, |
|
"loss": 1.0233, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8907242693773827e-05, |
|
"loss": 0.9565, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8881829733163915e-05, |
|
"loss": 1.0739, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8856416772554006e-05, |
|
"loss": 1.0457, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8831003811944094e-05, |
|
"loss": 1.0778, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.880559085133418e-05, |
|
"loss": 1.0676, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8780177890724273e-05, |
|
"loss": 0.9931, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.875476493011436e-05, |
|
"loss": 1.1367, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8729351969504448e-05, |
|
"loss": 1.033, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.870393900889454e-05, |
|
"loss": 1.0783, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8678526048284627e-05, |
|
"loss": 1.044, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8653113087674715e-05, |
|
"loss": 1.0701, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8627700127064806e-05, |
|
"loss": 1.0607, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8602287166454894e-05, |
|
"loss": 1.0216, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8576874205844982e-05, |
|
"loss": 1.0275, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8551461245235073e-05, |
|
"loss": 0.993, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.852604828462516e-05, |
|
"loss": 1.0136, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8500635324015252e-05, |
|
"loss": 1.0212, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.847522236340534e-05, |
|
"loss": 0.9655, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8449809402795427e-05, |
|
"loss": 0.9201, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.842439644218552e-05, |
|
"loss": 1.0115, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8398983481575606e-05, |
|
"loss": 0.9478, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8373570520965694e-05, |
|
"loss": 1.0191, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8348157560355785e-05, |
|
"loss": 0.9982, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8322744599745873e-05, |
|
"loss": 1.0078, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.829733163913596e-05, |
|
"loss": 0.9897, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8271918678526052e-05, |
|
"loss": 0.9662, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.824650571791614e-05, |
|
"loss": 0.9811, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8221092757306228e-05, |
|
"loss": 1.0212, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.819567979669632e-05, |
|
"loss": 0.988, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8170266836086407e-05, |
|
"loss": 0.9707, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8144853875476494e-05, |
|
"loss": 0.947, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8119440914866586e-05, |
|
"loss": 0.9997, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8094027954256673e-05, |
|
"loss": 1.0062, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.806861499364676e-05, |
|
"loss": 0.9728, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8043202033036852e-05, |
|
"loss": 1.0327, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.801778907242694e-05, |
|
"loss": 0.9417, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7992376111817028e-05, |
|
"loss": 1.0016, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.796696315120712e-05, |
|
"loss": 1.0234, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7941550190597207e-05, |
|
"loss": 0.9502, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7916137229987295e-05, |
|
"loss": 1.0206, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7890724269377386e-05, |
|
"loss": 0.9693, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7865311308767473e-05, |
|
"loss": 0.9469, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.783989834815756e-05, |
|
"loss": 1.0111, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7814485387547652e-05, |
|
"loss": 1.0061, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.778907242693774e-05, |
|
"loss": 0.9767, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7763659466327828e-05, |
|
"loss": 0.9918, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.773824650571792e-05, |
|
"loss": 0.9654, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7712833545108007e-05, |
|
"loss": 0.9538, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7687420584498095e-05, |
|
"loss": 0.95, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7662007623888186e-05, |
|
"loss": 0.962, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7636594663278274e-05, |
|
"loss": 0.921, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.761118170266836e-05, |
|
"loss": 1.039, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7585768742058453e-05, |
|
"loss": 0.9869, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.756035578144854e-05, |
|
"loss": 0.9948, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7534942820838628e-05, |
|
"loss": 0.9721, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.750952986022872e-05, |
|
"loss": 0.9617, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7484116899618807e-05, |
|
"loss": 0.9478, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7458703939008895e-05, |
|
"loss": 0.9601, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7433290978398986e-05, |
|
"loss": 0.9106, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7407878017789074e-05, |
|
"loss": 0.957, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.738246505717916e-05, |
|
"loss": 0.9968, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7357052096569253e-05, |
|
"loss": 0.9375, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.733163913595934e-05, |
|
"loss": 0.9716, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.730622617534943e-05, |
|
"loss": 0.9396, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.728081321473952e-05, |
|
"loss": 1.0079, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7255400254129607e-05, |
|
"loss": 0.9845, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7229987293519695e-05, |
|
"loss": 0.9652, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7204574332909786e-05, |
|
"loss": 0.9005, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7179161372299874e-05, |
|
"loss": 0.9646, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7153748411689962e-05, |
|
"loss": 0.9395, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7128335451080053e-05, |
|
"loss": 0.9821, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.710292249047014e-05, |
|
"loss": 1.0239, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.707750952986023e-05, |
|
"loss": 0.9199, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.705209656925032e-05, |
|
"loss": 0.9691, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7026683608640408e-05, |
|
"loss": 0.96, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7001270648030495e-05, |
|
"loss": 0.9668, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6975857687420586e-05, |
|
"loss": 0.9206, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6950444726810674e-05, |
|
"loss": 0.9471, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6925031766200762e-05, |
|
"loss": 0.8846, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6899618805590853e-05, |
|
"loss": 0.9551, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.687420584498094e-05, |
|
"loss": 0.9718, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.684879288437103e-05, |
|
"loss": 0.9214, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.682337992376112e-05, |
|
"loss": 0.9782, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.6797966963151208e-05, |
|
"loss": 0.9211, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6772554002541296e-05, |
|
"loss": 0.9679, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6747141041931387e-05, |
|
"loss": 1.0061, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6721728081321474e-05, |
|
"loss": 0.9618, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6696315120711562e-05, |
|
"loss": 0.9819, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6670902160101653e-05, |
|
"loss": 0.9368, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.664548919949174e-05, |
|
"loss": 0.9408, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.662007623888183e-05, |
|
"loss": 0.975, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.659466327827192e-05, |
|
"loss": 0.9002, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6569250317662008e-05, |
|
"loss": 0.9365, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6543837357052096e-05, |
|
"loss": 0.9229, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6518424396442187e-05, |
|
"loss": 0.9767, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6493011435832275e-05, |
|
"loss": 0.9028, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6467598475222362e-05, |
|
"loss": 0.9277, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6442185514612454e-05, |
|
"loss": 0.8942, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.641677255400254e-05, |
|
"loss": 0.9188, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.639135959339263e-05, |
|
"loss": 0.922, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.636594663278272e-05, |
|
"loss": 0.9868, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6340533672172808e-05, |
|
"loss": 1.0601, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6315120711562896e-05, |
|
"loss": 0.9662, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6289707750952987e-05, |
|
"loss": 0.8814, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6264294790343075e-05, |
|
"loss": 0.9154, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6238881829733166e-05, |
|
"loss": 0.9105, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6213468869123254e-05, |
|
"loss": 0.9316, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.618805590851334e-05, |
|
"loss": 0.9826, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6162642947903433e-05, |
|
"loss": 0.9463, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.613722998729352e-05, |
|
"loss": 0.9222, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6111817026683608e-05, |
|
"loss": 0.9472, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.60864040660737e-05, |
|
"loss": 0.993, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6060991105463787e-05, |
|
"loss": 0.9811, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6035578144853875e-05, |
|
"loss": 0.9373, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6010165184243966e-05, |
|
"loss": 0.9427, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5984752223634054e-05, |
|
"loss": 0.9631, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5959339263024142e-05, |
|
"loss": 0.9885, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5933926302414233e-05, |
|
"loss": 0.9676, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.590851334180432e-05, |
|
"loss": 0.9811, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.588310038119441e-05, |
|
"loss": 0.9626, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.58576874205845e-05, |
|
"loss": 0.9309, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5832274459974587e-05, |
|
"loss": 0.8741, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5806861499364675e-05, |
|
"loss": 0.948, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5781448538754766e-05, |
|
"loss": 0.9777, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5756035578144854e-05, |
|
"loss": 0.9125, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5730622617534942e-05, |
|
"loss": 0.8633, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5705209656925033e-05, |
|
"loss": 0.9841, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.567979669631512e-05, |
|
"loss": 0.9001, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5654383735705212e-05, |
|
"loss": 0.8847, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.56289707750953e-05, |
|
"loss": 0.9182, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5603557814485388e-05, |
|
"loss": 0.8687, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.557814485387548e-05, |
|
"loss": 0.8971, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5552731893265567e-05, |
|
"loss": 0.8979, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5527318932655654e-05, |
|
"loss": 0.9612, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5501905972045746e-05, |
|
"loss": 0.8594, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5476493011435833e-05, |
|
"loss": 0.9126, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.545108005082592e-05, |
|
"loss": 0.964, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5425667090216012e-05, |
|
"loss": 0.9724, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.54002541296061e-05, |
|
"loss": 0.8997, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5374841168996188e-05, |
|
"loss": 0.9879, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.534942820838628e-05, |
|
"loss": 0.9244, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5324015247776367e-05, |
|
"loss": 0.9082, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5298602287166455e-05, |
|
"loss": 0.9517, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5273189326556546e-05, |
|
"loss": 0.9154, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5247776365946634e-05, |
|
"loss": 0.9438, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5222363405336721e-05, |
|
"loss": 0.9317, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5196950444726812e-05, |
|
"loss": 0.9221, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.51715374841169e-05, |
|
"loss": 0.9335, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5146124523506988e-05, |
|
"loss": 0.9308, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.512071156289708e-05, |
|
"loss": 0.8981, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5095298602287167e-05, |
|
"loss": 0.9704, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5069885641677258e-05, |
|
"loss": 0.8496, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5044472681067346e-05, |
|
"loss": 0.9529, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.5019059720457434e-05, |
|
"loss": 0.9458, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4993646759847525e-05, |
|
"loss": 0.9852, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4968233799237613e-05, |
|
"loss": 0.9493, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.49428208386277e-05, |
|
"loss": 0.9727, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4917407878017792e-05, |
|
"loss": 0.8589, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.489199491740788e-05, |
|
"loss": 0.9732, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4866581956797967e-05, |
|
"loss": 0.8912, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4841168996188058e-05, |
|
"loss": 1.0013, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4815756035578146e-05, |
|
"loss": 0.9351, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4790343074968234e-05, |
|
"loss": 0.9378, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4764930114358325e-05, |
|
"loss": 0.9673, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4739517153748413e-05, |
|
"loss": 0.9116, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.47141041931385e-05, |
|
"loss": 0.8993, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4688691232528592e-05, |
|
"loss": 0.8393, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.466327827191868e-05, |
|
"loss": 0.9409, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4637865311308767e-05, |
|
"loss": 0.892, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4612452350698859e-05, |
|
"loss": 0.9137, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4587039390088946e-05, |
|
"loss": 0.9525, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4561626429479036e-05, |
|
"loss": 0.9344, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4536213468869125e-05, |
|
"loss": 0.9165, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4510800508259213e-05, |
|
"loss": 0.9044, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4485387547649303e-05, |
|
"loss": 0.9522, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4459974587039392e-05, |
|
"loss": 0.9193, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.443456162642948e-05, |
|
"loss": 0.9556, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.440914866581957e-05, |
|
"loss": 0.9401, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4383735705209659e-05, |
|
"loss": 0.9868, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4358322744599746e-05, |
|
"loss": 0.8796, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4332909783989836e-05, |
|
"loss": 0.9548, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4307496823379925e-05, |
|
"loss": 0.9783, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4282083862770013e-05, |
|
"loss": 0.9485, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4256670902160103e-05, |
|
"loss": 0.937, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4231257941550192e-05, |
|
"loss": 0.9146, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.420584498094028e-05, |
|
"loss": 0.9922, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.418043202033037e-05, |
|
"loss": 0.9094, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4155019059720459e-05, |
|
"loss": 0.9522, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4129606099110547e-05, |
|
"loss": 0.9145, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4104193138500636e-05, |
|
"loss": 0.9051, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4078780177890726e-05, |
|
"loss": 0.9556, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4053367217280813e-05, |
|
"loss": 0.9747, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4027954256670903e-05, |
|
"loss": 0.9377, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4002541296060992e-05, |
|
"loss": 0.927, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3977128335451082e-05, |
|
"loss": 0.8943, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.395171537484117e-05, |
|
"loss": 0.9463, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3926302414231259e-05, |
|
"loss": 0.869, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3900889453621349e-05, |
|
"loss": 0.9611, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3875476493011436e-05, |
|
"loss": 0.934, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3850063532401526e-05, |
|
"loss": 0.9094, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3824650571791615e-05, |
|
"loss": 0.9084, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3799237611181703e-05, |
|
"loss": 0.9296, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3773824650571793e-05, |
|
"loss": 0.9067, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3748411689961882e-05, |
|
"loss": 0.872, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.372299872935197e-05, |
|
"loss": 0.9226, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.369758576874206e-05, |
|
"loss": 0.8842, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3672172808132149e-05, |
|
"loss": 0.9245, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3646759847522237e-05, |
|
"loss": 0.9446, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3621346886912326e-05, |
|
"loss": 1.0056, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3595933926302415e-05, |
|
"loss": 0.8877, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3570520965692503e-05, |
|
"loss": 0.9689, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3545108005082593e-05, |
|
"loss": 0.9429, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3519695044472682e-05, |
|
"loss": 0.901, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.349428208386277e-05, |
|
"loss": 0.9467, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.346886912325286e-05, |
|
"loss": 0.8649, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3443456162642949e-05, |
|
"loss": 0.8985, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3418043202033037e-05, |
|
"loss": 0.8989, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3392630241423128e-05, |
|
"loss": 0.9367, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3367217280813216e-05, |
|
"loss": 0.9243, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3341804320203303e-05, |
|
"loss": 0.8774, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3316391359593395e-05, |
|
"loss": 0.933, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3290978398983482e-05, |
|
"loss": 0.9223, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.326556543837357e-05, |
|
"loss": 0.9663, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3240152477763661e-05, |
|
"loss": 0.8719, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3214739517153749e-05, |
|
"loss": 0.9156, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3189326556543837e-05, |
|
"loss": 0.9526, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3163913595933928e-05, |
|
"loss": 0.9774, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3138500635324016e-05, |
|
"loss": 0.9079, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3113087674714104e-05, |
|
"loss": 0.9008, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3087674714104195e-05, |
|
"loss": 0.8739, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3062261753494283e-05, |
|
"loss": 0.9269, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.303684879288437e-05, |
|
"loss": 0.8676, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3011435832274462e-05, |
|
"loss": 0.9807, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.298602287166455e-05, |
|
"loss": 0.9186, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2960609911054637e-05, |
|
"loss": 0.9592, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2935196950444728e-05, |
|
"loss": 0.9295, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2909783989834816e-05, |
|
"loss": 0.9173, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2884371029224904e-05, |
|
"loss": 0.8829, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2858958068614995e-05, |
|
"loss": 0.9033, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2833545108005083e-05, |
|
"loss": 0.959, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2808132147395174e-05, |
|
"loss": 0.9271, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2782719186785262e-05, |
|
"loss": 0.9262, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.275730622617535e-05, |
|
"loss": 0.8733, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.273189326556544e-05, |
|
"loss": 0.9586, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2706480304955528e-05, |
|
"loss": 0.916, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2681067344345616e-05, |
|
"loss": 0.9345, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2655654383735707e-05, |
|
"loss": 0.8661, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2630241423125795e-05, |
|
"loss": 0.9207, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2604828462515883e-05, |
|
"loss": 0.9101, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2579415501905974e-05, |
|
"loss": 0.9176, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2554002541296062e-05, |
|
"loss": 0.9579, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.252858958068615e-05, |
|
"loss": 0.8747, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2503176620076241e-05, |
|
"loss": 0.9396, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2477763659466329e-05, |
|
"loss": 0.9039, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2452350698856416e-05, |
|
"loss": 0.9159, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2426937738246508e-05, |
|
"loss": 0.8512, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2401524777636595e-05, |
|
"loss": 0.9494, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2376111817026683e-05, |
|
"loss": 0.9306, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2350698856416774e-05, |
|
"loss": 0.9746, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2325285895806862e-05, |
|
"loss": 0.9767, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.229987293519695e-05, |
|
"loss": 0.932, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2274459974587041e-05, |
|
"loss": 0.8571, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2249047013977129e-05, |
|
"loss": 0.8923, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.222363405336722e-05, |
|
"loss": 0.9398, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2198221092757308e-05, |
|
"loss": 0.8615, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2172808132147396e-05, |
|
"loss": 0.9352, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2147395171537487e-05, |
|
"loss": 0.9105, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2121982210927575e-05, |
|
"loss": 0.9337, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2096569250317662e-05, |
|
"loss": 0.9261, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2071156289707754e-05, |
|
"loss": 0.9338, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2045743329097841e-05, |
|
"loss": 0.9012, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2020330368487929e-05, |
|
"loss": 0.8984, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.199491740787802e-05, |
|
"loss": 0.9349, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1969504447268108e-05, |
|
"loss": 0.9448, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1944091486658196e-05, |
|
"loss": 0.8743, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1918678526048287e-05, |
|
"loss": 0.8607, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1893265565438375e-05, |
|
"loss": 0.8932, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1867852604828463e-05, |
|
"loss": 0.9856, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1842439644218554e-05, |
|
"loss": 0.9591, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1817026683608641e-05, |
|
"loss": 0.9081, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.179161372299873e-05, |
|
"loss": 0.9478, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.176620076238882e-05, |
|
"loss": 0.9173, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1740787801778908e-05, |
|
"loss": 0.9244, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1715374841168996e-05, |
|
"loss": 0.9295, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1689961880559087e-05, |
|
"loss": 0.8945, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1664548919949175e-05, |
|
"loss": 0.9214, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1639135959339264e-05, |
|
"loss": 0.9372, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1613722998729354e-05, |
|
"loss": 0.8794, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1588310038119442e-05, |
|
"loss": 0.9333, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1562897077509531e-05, |
|
"loss": 0.9037, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.153748411689962e-05, |
|
"loss": 0.9029, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1512071156289708e-05, |
|
"loss": 0.9323, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1486658195679798e-05, |
|
"loss": 0.9496, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1461245235069887e-05, |
|
"loss": 0.9513, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1435832274459975e-05, |
|
"loss": 0.9046, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1410419313850065e-05, |
|
"loss": 0.8966, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1385006353240154e-05, |
|
"loss": 0.875, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1359593392630242e-05, |
|
"loss": 0.9163, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1334180432020331e-05, |
|
"loss": 0.8779, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.130876747141042e-05, |
|
"loss": 0.8819, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1283354510800509e-05, |
|
"loss": 0.9248, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1257941550190598e-05, |
|
"loss": 0.8654, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1232528589580688e-05, |
|
"loss": 0.9177, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1207115628970775e-05, |
|
"loss": 0.9775, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1181702668360865e-05, |
|
"loss": 0.9444, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1156289707750954e-05, |
|
"loss": 0.9062, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1130876747141044e-05, |
|
"loss": 0.9265, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1105463786531132e-05, |
|
"loss": 0.9046, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1080050825921221e-05, |
|
"loss": 0.9663, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.105463786531131e-05, |
|
"loss": 0.9418, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1029224904701398e-05, |
|
"loss": 0.9097, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1003811944091488e-05, |
|
"loss": 0.9414, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0978398983481577e-05, |
|
"loss": 1.0006, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0952986022871665e-05, |
|
"loss": 0.8908, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0927573062261754e-05, |
|
"loss": 0.897, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0902160101651844e-05, |
|
"loss": 0.9224, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0876747141041932e-05, |
|
"loss": 0.8534, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0851334180432021e-05, |
|
"loss": 0.9053, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.082592121982211e-05, |
|
"loss": 0.8869, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0800508259212198e-05, |
|
"loss": 0.9186, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0775095298602288e-05, |
|
"loss": 0.9, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0749682337992377e-05, |
|
"loss": 0.925, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0724269377382465e-05, |
|
"loss": 0.9087, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0698856416772555e-05, |
|
"loss": 0.893, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0673443456162644e-05, |
|
"loss": 0.9098, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0648030495552732e-05, |
|
"loss": 0.8968, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0622617534942821e-05, |
|
"loss": 0.935, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0597204574332911e-05, |
|
"loss": 0.9474, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0571791613722999e-05, |
|
"loss": 0.8925, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.054637865311309e-05, |
|
"loss": 0.8647, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0520965692503178e-05, |
|
"loss": 0.956, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0495552731893265e-05, |
|
"loss": 0.8782, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0470139771283357e-05, |
|
"loss": 0.9174, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0444726810673444e-05, |
|
"loss": 0.9159, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0419313850063532e-05, |
|
"loss": 0.9495, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0393900889453623e-05, |
|
"loss": 0.8777, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0368487928843711e-05, |
|
"loss": 0.9107, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0343074968233799e-05, |
|
"loss": 0.8912, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.031766200762389e-05, |
|
"loss": 0.9073, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0292249047013978e-05, |
|
"loss": 0.8863, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0266836086404066e-05, |
|
"loss": 0.9456, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0241423125794157e-05, |
|
"loss": 0.898, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0216010165184245e-05, |
|
"loss": 0.9213, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0190597204574332e-05, |
|
"loss": 0.9026, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0165184243964423e-05, |
|
"loss": 0.8689, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0139771283354511e-05, |
|
"loss": 0.9149, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0114358322744599e-05, |
|
"loss": 0.9501, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.008894536213469e-05, |
|
"loss": 0.9594, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0063532401524778e-05, |
|
"loss": 0.9156, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0038119440914866e-05, |
|
"loss": 0.9151, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0012706480304957e-05, |
|
"loss": 0.8682, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.987293519695045e-06, |
|
"loss": 0.8766, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.961880559085134e-06, |
|
"loss": 0.8751, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.936467598475224e-06, |
|
"loss": 0.8936, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.911054637865311e-06, |
|
"loss": 0.866, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.885641677255401e-06, |
|
"loss": 0.9239, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.86022871664549e-06, |
|
"loss": 0.8912, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.83481575603558e-06, |
|
"loss": 0.9041, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.809402795425668e-06, |
|
"loss": 0.8626, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.783989834815757e-06, |
|
"loss": 0.8652, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.758576874205847e-06, |
|
"loss": 0.8612, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.733163913595934e-06, |
|
"loss": 0.9531, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.707750952986024e-06, |
|
"loss": 0.9018, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.682337992376113e-06, |
|
"loss": 0.8632, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.656925031766201e-06, |
|
"loss": 0.9136, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.63151207115629e-06, |
|
"loss": 0.9225, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.60609911054638e-06, |
|
"loss": 0.9339, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.580686149936468e-06, |
|
"loss": 0.9091, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.555273189326557e-06, |
|
"loss": 0.8808, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.529860228716647e-06, |
|
"loss": 0.8709, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.504447268106736e-06, |
|
"loss": 0.9267, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.479034307496824e-06, |
|
"loss": 0.9194, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.453621346886914e-06, |
|
"loss": 0.9038, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.428208386277003e-06, |
|
"loss": 0.9462, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.40279542566709e-06, |
|
"loss": 0.9147, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.37738246505718e-06, |
|
"loss": 0.9617, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.35196950444727e-06, |
|
"loss": 0.8755, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.326556543837358e-06, |
|
"loss": 0.925, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.301143583227447e-06, |
|
"loss": 0.925, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.275730622617536e-06, |
|
"loss": 0.954, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.250317662007626e-06, |
|
"loss": 0.9654, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.224904701397714e-06, |
|
"loss": 0.8622, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.199491740787803e-06, |
|
"loss": 0.8318, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.174078780177893e-06, |
|
"loss": 0.8957, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.14866581956798e-06, |
|
"loss": 0.8662, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.12325285895807e-06, |
|
"loss": 0.9615, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.09783989834816e-06, |
|
"loss": 0.8987, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.072426937738247e-06, |
|
"loss": 0.9358, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.047013977128337e-06, |
|
"loss": 0.8759, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.021601016518426e-06, |
|
"loss": 0.9103, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.996188055908514e-06, |
|
"loss": 0.9523, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.970775095298603e-06, |
|
"loss": 0.9293, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.945362134688693e-06, |
|
"loss": 0.9543, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.91994917407878e-06, |
|
"loss": 0.8727, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.89453621346887e-06, |
|
"loss": 0.9201, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.86912325285896e-06, |
|
"loss": 0.8523, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.843710292249047e-06, |
|
"loss": 0.8547, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.818297331639137e-06, |
|
"loss": 0.9097, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.792884371029226e-06, |
|
"loss": 0.8848, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.767471410419314e-06, |
|
"loss": 0.8989, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.742058449809404e-06, |
|
"loss": 0.9416, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.716645489199493e-06, |
|
"loss": 0.9277, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.69123252858958e-06, |
|
"loss": 0.8788, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.66581956797967e-06, |
|
"loss": 0.9104, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.64040660736976e-06, |
|
"loss": 0.8802, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.614993646759848e-06, |
|
"loss": 0.9053, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.589580686149937e-06, |
|
"loss": 0.9369, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.564167725540027e-06, |
|
"loss": 0.9132, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.538754764930114e-06, |
|
"loss": 0.8774, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.513341804320204e-06, |
|
"loss": 0.8787, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.487928843710293e-06, |
|
"loss": 0.8996, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.462515883100381e-06, |
|
"loss": 0.885, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.43710292249047e-06, |
|
"loss": 0.9656, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.41168996188056e-06, |
|
"loss": 0.9367, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.386277001270648e-06, |
|
"loss": 0.9401, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.360864040660737e-06, |
|
"loss": 0.8978, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.335451080050827e-06, |
|
"loss": 0.8811, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.310038119440914e-06, |
|
"loss": 0.9526, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.284625158831004e-06, |
|
"loss": 0.8767, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.259212198221093e-06, |
|
"loss": 0.9223, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.233799237611181e-06, |
|
"loss": 0.8753, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.20838627700127e-06, |
|
"loss": 0.9929, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.18297331639136e-06, |
|
"loss": 0.8708, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.157560355781448e-06, |
|
"loss": 0.9428, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.132147395171537e-06, |
|
"loss": 0.9729, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.106734434561627e-06, |
|
"loss": 0.9213, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.081321473951716e-06, |
|
"loss": 0.8691, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.055908513341804e-06, |
|
"loss": 0.9184, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.030495552731894e-06, |
|
"loss": 0.8902, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 8.005082592121983e-06, |
|
"loss": 0.9462, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.979669631512071e-06, |
|
"loss": 0.9047, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.95425667090216e-06, |
|
"loss": 0.9615, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.92884371029225e-06, |
|
"loss": 0.8991, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.903430749682338e-06, |
|
"loss": 0.9686, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.878017789072427e-06, |
|
"loss": 0.8724, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.852604828462517e-06, |
|
"loss": 0.8979, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.827191867852606e-06, |
|
"loss": 0.8641, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.801778907242694e-06, |
|
"loss": 0.9039, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.776365946632783e-06, |
|
"loss": 0.8821, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.750952986022873e-06, |
|
"loss": 0.962, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.72554002541296e-06, |
|
"loss": 0.9361, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.70012706480305e-06, |
|
"loss": 0.94, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.67471410419314e-06, |
|
"loss": 0.8967, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.649301143583227e-06, |
|
"loss": 0.917, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.623888182973317e-06, |
|
"loss": 0.8491, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.598475222363406e-06, |
|
"loss": 0.9025, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.573062261753494e-06, |
|
"loss": 0.9148, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.5476493011435835e-06, |
|
"loss": 0.9138, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.522236340533673e-06, |
|
"loss": 0.9163, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.4968233799237624e-06, |
|
"loss": 0.8739, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.47141041931385e-06, |
|
"loss": 0.981, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.44599745870394e-06, |
|
"loss": 0.8234, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.420584498094029e-06, |
|
"loss": 0.9664, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.395171537484117e-06, |
|
"loss": 0.9184, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.369758576874206e-06, |
|
"loss": 0.9116, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.344345616264296e-06, |
|
"loss": 0.9227, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.318932655654384e-06, |
|
"loss": 0.903, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.293519695044473e-06, |
|
"loss": 0.9024, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.268106734434563e-06, |
|
"loss": 0.9481, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.242693773824651e-06, |
|
"loss": 0.8902, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.21728081321474e-06, |
|
"loss": 0.8884, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.191867852604829e-06, |
|
"loss": 0.8949, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.166454891994918e-06, |
|
"loss": 0.925, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.141041931385007e-06, |
|
"loss": 0.907, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.115628970775096e-06, |
|
"loss": 0.8819, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.090216010165185e-06, |
|
"loss": 0.9301, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.064803049555273e-06, |
|
"loss": 0.9156, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.039390088945363e-06, |
|
"loss": 0.9251, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.0139771283354514e-06, |
|
"loss": 0.8643, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 6.988564167725541e-06, |
|
"loss": 0.8884, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.9631512071156295e-06, |
|
"loss": 0.9649, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.937738246505718e-06, |
|
"loss": 0.8903, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.912325285895808e-06, |
|
"loss": 0.8832, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.886912325285896e-06, |
|
"loss": 0.9229, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.861499364675985e-06, |
|
"loss": 0.8849, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.836086404066074e-06, |
|
"loss": 0.9353, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.810673443456163e-06, |
|
"loss": 0.8864, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.785260482846252e-06, |
|
"loss": 0.8743, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.759847522236341e-06, |
|
"loss": 0.8957, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.73443456162643e-06, |
|
"loss": 0.929, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.709021601016518e-06, |
|
"loss": 0.9177, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.683608640406608e-06, |
|
"loss": 0.9294, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.658195679796697e-06, |
|
"loss": 0.9406, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.632782719186785e-06, |
|
"loss": 0.8592, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.6073697585768746e-06, |
|
"loss": 0.9485, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.581956797966964e-06, |
|
"loss": 0.9701, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.556543837357052e-06, |
|
"loss": 0.8681, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.531130876747141e-06, |
|
"loss": 0.8554, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.505717916137231e-06, |
|
"loss": 0.9324, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.4803049555273186e-06, |
|
"loss": 0.8902, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.454891994917408e-06, |
|
"loss": 0.9581, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.4294790343074975e-06, |
|
"loss": 0.9389, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.404066073697587e-06, |
|
"loss": 0.8816, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.378653113087675e-06, |
|
"loss": 0.9035, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.353240152477764e-06, |
|
"loss": 0.9314, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.327827191867854e-06, |
|
"loss": 0.8932, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.3024142312579415e-06, |
|
"loss": 0.9151, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.277001270648031e-06, |
|
"loss": 0.9135, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.2515883100381204e-06, |
|
"loss": 0.9079, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.226175349428208e-06, |
|
"loss": 0.9491, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.200762388818298e-06, |
|
"loss": 0.9074, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.175349428208387e-06, |
|
"loss": 0.9579, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.149936467598475e-06, |
|
"loss": 0.8935, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.1245235069885644e-06, |
|
"loss": 0.912, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.099110546378654e-06, |
|
"loss": 0.8942, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.073697585768743e-06, |
|
"loss": 0.8501, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 6.048284625158831e-06, |
|
"loss": 0.9035, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 6.022871664548921e-06, |
|
"loss": 0.9279, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.99745870393901e-06, |
|
"loss": 0.9095, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.972045743329098e-06, |
|
"loss": 0.9216, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.946632782719187e-06, |
|
"loss": 0.8513, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.921219822109277e-06, |
|
"loss": 0.9466, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.895806861499365e-06, |
|
"loss": 0.9125, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.870393900889454e-06, |
|
"loss": 0.8851, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.844980940279544e-06, |
|
"loss": 0.8598, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.819567979669632e-06, |
|
"loss": 0.8558, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.794155019059721e-06, |
|
"loss": 0.8894, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.76874205844981e-06, |
|
"loss": 0.9182, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.743329097839899e-06, |
|
"loss": 0.9183, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.7179161372299876e-06, |
|
"loss": 0.8607, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.692503176620077e-06, |
|
"loss": 0.8763, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.667090216010166e-06, |
|
"loss": 0.9085, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.641677255400254e-06, |
|
"loss": 0.915, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.616264294790344e-06, |
|
"loss": 0.9223, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.590851334180432e-06, |
|
"loss": 0.8925, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.565438373570522e-06, |
|
"loss": 0.8922, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.5400254129606105e-06, |
|
"loss": 0.9387, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.514612452350699e-06, |
|
"loss": 0.9709, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.489199491740789e-06, |
|
"loss": 0.8944, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.463786531130877e-06, |
|
"loss": 0.8472, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.438373570520966e-06, |
|
"loss": 0.8898, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.412960609911055e-06, |
|
"loss": 0.8997, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.387547649301144e-06, |
|
"loss": 0.8993, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.362134688691233e-06, |
|
"loss": 0.8603, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.336721728081322e-06, |
|
"loss": 0.898, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.311308767471411e-06, |
|
"loss": 0.9143, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.285895806861499e-06, |
|
"loss": 0.8645, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.260482846251589e-06, |
|
"loss": 0.8627, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.235069885641678e-06, |
|
"loss": 0.8801, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.209656925031766e-06, |
|
"loss": 0.9951, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.1842439644218555e-06, |
|
"loss": 0.8789, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.158831003811945e-06, |
|
"loss": 0.9284, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.133418043202033e-06, |
|
"loss": 0.933, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.108005082592122e-06, |
|
"loss": 0.8823, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.082592121982212e-06, |
|
"loss": 0.8636, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.0571791613722995e-06, |
|
"loss": 0.8895, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.031766200762389e-06, |
|
"loss": 0.9017, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 5.0063532401524785e-06, |
|
"loss": 0.916, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.980940279542567e-06, |
|
"loss": 0.9312, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.955527318932656e-06, |
|
"loss": 0.8643, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.930114358322745e-06, |
|
"loss": 0.8955, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.904701397712834e-06, |
|
"loss": 0.9124, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.879288437102923e-06, |
|
"loss": 0.8894, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.853875476493012e-06, |
|
"loss": 0.8905, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.8284625158831006e-06, |
|
"loss": 0.9247, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.80304955527319e-06, |
|
"loss": 0.8916, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.777636594663279e-06, |
|
"loss": 0.8566, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.752223634053368e-06, |
|
"loss": 0.9239, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.726810673443457e-06, |
|
"loss": 0.8816, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.701397712833545e-06, |
|
"loss": 0.8868, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.675984752223635e-06, |
|
"loss": 0.9185, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.6505717916137235e-06, |
|
"loss": 0.8751, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.625158831003813e-06, |
|
"loss": 0.8937, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.599745870393902e-06, |
|
"loss": 0.9348, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.57433290978399e-06, |
|
"loss": 0.9084, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.54891994917408e-06, |
|
"loss": 0.8584, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.523506988564168e-06, |
|
"loss": 0.8494, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.498094027954257e-06, |
|
"loss": 0.9062, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.4726810673443464e-06, |
|
"loss": 0.8797, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.447268106734435e-06, |
|
"loss": 0.9349, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.421855146124524e-06, |
|
"loss": 0.8694, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.396442185514613e-06, |
|
"loss": 0.8947, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.371029224904702e-06, |
|
"loss": 0.9076, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.34561626429479e-06, |
|
"loss": 0.898, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.32020330368488e-06, |
|
"loss": 0.8875, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.2947903430749685e-06, |
|
"loss": 0.9392, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.269377382465057e-06, |
|
"loss": 0.9018, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.243964421855147e-06, |
|
"loss": 0.9598, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.218551461245235e-06, |
|
"loss": 0.8319, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.193138500635324e-06, |
|
"loss": 0.9278, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.167725540025413e-06, |
|
"loss": 0.8702, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.142312579415502e-06, |
|
"loss": 0.8546, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.116899618805591e-06, |
|
"loss": 0.8479, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.09148665819568e-06, |
|
"loss": 0.9314, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.066073697585769e-06, |
|
"loss": 0.899, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.040660736975858e-06, |
|
"loss": 0.8866, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.015247776365947e-06, |
|
"loss": 0.8611, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.9898348157560354e-06, |
|
"loss": 0.9076, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.964421855146125e-06, |
|
"loss": 0.9072, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.9390088945362135e-06, |
|
"loss": 0.9632, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.913595933926303e-06, |
|
"loss": 0.8682, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.888182973316392e-06, |
|
"loss": 0.9311, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.86277001270648e-06, |
|
"loss": 0.9267, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.83735705209657e-06, |
|
"loss": 0.903, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.8119440914866584e-06, |
|
"loss": 0.9127, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.786531130876747e-06, |
|
"loss": 0.9416, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.7611181702668365e-06, |
|
"loss": 0.8622, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.735705209656925e-06, |
|
"loss": 0.8965, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.7102922490470146e-06, |
|
"loss": 0.9384, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.684879288437103e-06, |
|
"loss": 0.8916, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.659466327827192e-06, |
|
"loss": 0.8651, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.6340533672172813e-06, |
|
"loss": 0.9288, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.60864040660737e-06, |
|
"loss": 0.8598, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.583227445997459e-06, |
|
"loss": 0.8746, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.557814485387548e-06, |
|
"loss": 0.9762, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.5324015247776367e-06, |
|
"loss": 0.8691, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.5069885641677257e-06, |
|
"loss": 0.8146, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.4815756035578148e-06, |
|
"loss": 0.8794, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.456162642947904e-06, |
|
"loss": 0.9462, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.4307496823379925e-06, |
|
"loss": 0.9538, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.4053367217280815e-06, |
|
"loss": 0.8914, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.3799237611181706e-06, |
|
"loss": 0.9203, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.354510800508259e-06, |
|
"loss": 0.8648, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.3290978398983487e-06, |
|
"loss": 0.8671, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.3036848792884373e-06, |
|
"loss": 0.9238, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.278271918678526e-06, |
|
"loss": 0.861, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.2528589580686154e-06, |
|
"loss": 0.8817, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.227445997458704e-06, |
|
"loss": 0.8849, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.2020330368487935e-06, |
|
"loss": 0.9165, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.176620076238882e-06, |
|
"loss": 0.8903, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.1512071156289707e-06, |
|
"loss": 0.8739, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.1257941550190602e-06, |
|
"loss": 0.9493, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.100381194409149e-06, |
|
"loss": 0.8879, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.0749682337992375e-06, |
|
"loss": 0.9267, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.049555273189327e-06, |
|
"loss": 0.8913, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.0241423125794156e-06, |
|
"loss": 0.9016, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.998729351969505e-06, |
|
"loss": 0.8867, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.9733163913595937e-06, |
|
"loss": 0.891, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.9479034307496823e-06, |
|
"loss": 0.9081, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.922490470139772e-06, |
|
"loss": 0.9216, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.8970775095298604e-06, |
|
"loss": 0.8596, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.8716645489199495e-06, |
|
"loss": 0.9209, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.8462515883100385e-06, |
|
"loss": 0.9025, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.820838627700127e-06, |
|
"loss": 0.9032, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.795425667090216e-06, |
|
"loss": 0.9328, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.7700127064803053e-06, |
|
"loss": 0.9124, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.7445997458703943e-06, |
|
"loss": 0.9091, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.719186785260483e-06, |
|
"loss": 0.9216, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.693773824650572e-06, |
|
"loss": 0.8862, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.668360864040661e-06, |
|
"loss": 0.8991, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.6429479034307497e-06, |
|
"loss": 0.8646, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.617534942820839e-06, |
|
"loss": 0.8904, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.5921219822109278e-06, |
|
"loss": 0.9024, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.5667090216010164e-06, |
|
"loss": 0.8931, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.541296060991106e-06, |
|
"loss": 0.9145, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.5158831003811945e-06, |
|
"loss": 0.9098, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.4904701397712835e-06, |
|
"loss": 0.9412, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.4650571791613726e-06, |
|
"loss": 0.9008, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.4396442185514616e-06, |
|
"loss": 0.9302, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.4142312579415503e-06, |
|
"loss": 0.9403, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.3888182973316393e-06, |
|
"loss": 0.9213, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.3634053367217284e-06, |
|
"loss": 0.9323, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.3379923761118174e-06, |
|
"loss": 0.8698, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.3125794155019065e-06, |
|
"loss": 0.9137, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.287166454891995e-06, |
|
"loss": 0.8676, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.261753494282084e-06, |
|
"loss": 0.9802, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.2363405336721732e-06, |
|
"loss": 0.9065, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.210927573062262e-06, |
|
"loss": 0.9105, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.185514612452351e-06, |
|
"loss": 0.9486, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.16010165184244e-06, |
|
"loss": 0.855, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.1346886912325286e-06, |
|
"loss": 0.8991, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.1092757306226176e-06, |
|
"loss": 0.9375, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.0838627700127067e-06, |
|
"loss": 0.8984, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.0584498094027953e-06, |
|
"loss": 0.8584, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.0330368487928844e-06, |
|
"loss": 0.9366, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 2.0076238881829734e-06, |
|
"loss": 0.8602, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9822109275730625e-06, |
|
"loss": 0.8626, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9567979669631515e-06, |
|
"loss": 0.8412, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.93138500635324e-06, |
|
"loss": 0.89, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.9059720457433292e-06, |
|
"loss": 0.922, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.8805590851334182e-06, |
|
"loss": 0.9653, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.8551461245235073e-06, |
|
"loss": 0.8704, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.829733163913596e-06, |
|
"loss": 0.9589, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.804320203303685e-06, |
|
"loss": 0.9157, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.778907242693774e-06, |
|
"loss": 0.9152, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.7534942820838629e-06, |
|
"loss": 0.9113, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.728081321473952e-06, |
|
"loss": 0.9362, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.7026683608640408e-06, |
|
"loss": 0.9425, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.6772554002541296e-06, |
|
"loss": 0.9335, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.6518424396442186e-06, |
|
"loss": 0.8774, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.6264294790343077e-06, |
|
"loss": 0.9053, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.6010165184243967e-06, |
|
"loss": 0.8887, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.5756035578144854e-06, |
|
"loss": 0.9499, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.5501905972045744e-06, |
|
"loss": 0.8797, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.5247776365946635e-06, |
|
"loss": 0.8921, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.4993646759847525e-06, |
|
"loss": 0.8819, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.4739517153748412e-06, |
|
"loss": 0.9942, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.4485387547649302e-06, |
|
"loss": 0.9063, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.4231257941550193e-06, |
|
"loss": 0.954, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.397712833545108e-06, |
|
"loss": 0.9585, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.3722998729351972e-06, |
|
"loss": 0.8951, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.346886912325286e-06, |
|
"loss": 0.8915, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.3214739517153748e-06, |
|
"loss": 0.8698, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.2960609911054639e-06, |
|
"loss": 0.9078, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.270648030495553e-06, |
|
"loss": 0.9327, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.2452350698856418e-06, |
|
"loss": 0.8728, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.2198221092757308e-06, |
|
"loss": 0.8542, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.1944091486658197e-06, |
|
"loss": 0.9182, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.1689961880559087e-06, |
|
"loss": 0.8977, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.1435832274459976e-06, |
|
"loss": 0.9195, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.1181702668360866e-06, |
|
"loss": 0.865, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.0927573062261754e-06, |
|
"loss": 0.8574, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.0673443456162643e-06, |
|
"loss": 0.8613, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.0419313850063533e-06, |
|
"loss": 0.9322, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.0165184243964422e-06, |
|
"loss": 0.8892, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.911054637865312e-07, |
|
"loss": 0.9579, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.6569250317662e-07, |
|
"loss": 0.8679, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.402795425667091e-07, |
|
"loss": 0.918, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.14866581956798e-07, |
|
"loss": 0.9623, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 8.89453621346887e-07, |
|
"loss": 0.8996, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.64040660736976e-07, |
|
"loss": 0.8026, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.386277001270648e-07, |
|
"loss": 0.9083, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.132147395171538e-07, |
|
"loss": 0.8908, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.878017789072427e-07, |
|
"loss": 0.8784, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.623888182973317e-07, |
|
"loss": 0.9214, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.369758576874206e-07, |
|
"loss": 0.9088, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.115628970775096e-07, |
|
"loss": 0.9421, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.861499364675986e-07, |
|
"loss": 0.9967, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.607369758576874e-07, |
|
"loss": 0.864, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.353240152477765e-07, |
|
"loss": 0.8443, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.099110546378654e-07, |
|
"loss": 0.9093, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.844980940279544e-07, |
|
"loss": 0.9415, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.590851334180433e-07, |
|
"loss": 0.893, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.336721728081321e-07, |
|
"loss": 0.88, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.082592121982211e-07, |
|
"loss": 0.9082, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.8284625158831e-07, |
|
"loss": 0.8675, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.57433290978399e-07, |
|
"loss": 0.913, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.32020330368488e-07, |
|
"loss": 0.9341, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.066073697585769e-07, |
|
"loss": 0.9193, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.8119440914866587e-07, |
|
"loss": 0.8515, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.557814485387548e-07, |
|
"loss": 0.8694, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.303684879288437e-07, |
|
"loss": 0.8902, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.049555273189327e-07, |
|
"loss": 0.8532, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.7954256670902165e-07, |
|
"loss": 0.9143, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.5412960609911054e-07, |
|
"loss": 0.9194, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.287166454891995e-07, |
|
"loss": 0.8586, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.0330368487928846e-07, |
|
"loss": 0.8637, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.778907242693774e-07, |
|
"loss": 0.9221, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.5247776365946635e-07, |
|
"loss": 0.8892, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.2706480304955527e-07, |
|
"loss": 0.8547, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0165184243964423e-07, |
|
"loss": 0.8664, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 7.623888182973318e-08, |
|
"loss": 0.9031, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.0825921219822115e-08, |
|
"loss": 0.9136, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 2.5412960609911058e-08, |
|
"loss": 0.8778, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 812, |
|
"total_flos": 2.2403489925190124e+18, |
|
"train_loss": 0.9405938908090732, |
|
"train_runtime": 9197.6465, |
|
"train_samples_per_second": 11.308, |
|
"train_steps_per_second": 0.088 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 812, |
|
"num_train_epochs": 2, |
|
"save_steps": 4000, |
|
"total_flos": 2.2403489925190124e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|