{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9956709956709957,
  "eval_steps": 500,
  "global_step": 115,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1e-05,
      "loss": 2.0642,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 2e-05,
      "loss": 2.0704,
      "step": 2
    },
    {
      "epoch": 0.03,
      "learning_rate": 3e-05,
      "loss": 2.0758,
      "step": 3
    },
    {
      "epoch": 0.03,
      "learning_rate": 4e-05,
      "loss": 2.0031,
      "step": 4
    },
    {
      "epoch": 0.04,
      "learning_rate": 5e-05,
      "loss": 2.0466,
      "step": 5
    },
    {
      "epoch": 0.05,
      "learning_rate": 6e-05,
      "loss": 2.027,
      "step": 6
    },
    {
      "epoch": 0.06,
      "learning_rate": 7e-05,
      "loss": 2.0737,
      "step": 7
    },
    {
      "epoch": 0.07,
      "learning_rate": 8e-05,
      "loss": 2.1039,
      "step": 8
    },
    {
      "epoch": 0.08,
      "learning_rate": 9e-05,
      "loss": 2.0212,
      "step": 9
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001,
      "loss": 2.047,
      "step": 10
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.999780139628657e-05,
      "loss": 2.0435,
      "step": 11
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.99912057785006e-05,
      "loss": 2.0305,
      "step": 12
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.998021372668808e-05,
      "loss": 1.9602,
      "step": 13
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.996482620753565e-05,
      "loss": 2.0138,
      "step": 14
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.994504457428558e-05,
      "loss": 2.065,
      "step": 15
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.992087056661677e-05,
      "loss": 2.0051,
      "step": 16
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.989230631049171e-05,
      "loss": 2.008,
      "step": 17
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.985935431796962e-05,
      "loss": 1.9578,
      "step": 18
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.982201748698542e-05,
      "loss": 1.9591,
      "step": 19
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.978029910109491e-05,
      "loss": 1.9495,
      "step": 20
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.973420282918601e-05,
      "loss": 1.9121,
      "step": 21
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.968373272515612e-05,
      "loss": 1.9639,
      "step": 22
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.962889322755555e-05,
      "loss": 1.9431,
      "step": 23
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.956968915919725e-05,
      "loss": 1.9283,
      "step": 24
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.950612572673255e-05,
      "loss": 1.9112,
      "step": 25
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.943820852019344e-05,
      "loss": 1.9942,
      "step": 26
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.936594351250082e-05,
      "loss": 1.9536,
      "step": 27
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.928933705893924e-05,
      "loss": 1.9364,
      "step": 28
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.920839589659803e-05,
      "loss": 1.9485,
      "step": 29
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.91231271437788e-05,
      "loss": 1.9743,
      "step": 30
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.903353829936943e-05,
      "loss": 1.9492,
      "step": 31
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.893963724218455e-05,
      "loss": 1.9612,
      "step": 32
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.884143223027266e-05,
      "loss": 1.8852,
      "step": 33
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.873893190018995e-05,
      "loss": 1.9104,
      "step": 34
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.863214526624065e-05,
      "loss": 1.9762,
      "step": 35
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.852108171968436e-05,
      "loss": 1.9602,
      "step": 36
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.840575102791013e-05,
      "loss": 1.9283,
      "step": 37
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.828616333357743e-05,
      "loss": 1.8863,
      "step": 38
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.816232915372423e-05,
      "loss": 1.9776,
      "step": 39
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.8034259378842e-05,
      "loss": 1.8874,
      "step": 40
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.790196527191811e-05,
      "loss": 1.8779,
      "step": 41
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.776545846744509e-05,
      "loss": 1.9399,
      "step": 42
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.762475097039767e-05,
      "loss": 1.8758,
      "step": 43
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.747985515517683e-05,
      "loss": 1.903,
      "step": 44
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.733078376452171e-05,
      "loss": 1.9438,
      "step": 45
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.717754990838881e-05,
      "loss": 1.885,
      "step": 46
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.702016706279913e-05,
      "loss": 1.9278,
      "step": 47
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.685864906865303e-05,
      "loss": 1.9612,
      "step": 48
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.669301013051297e-05,
      "loss": 1.9352,
      "step": 49
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.652326481535435e-05,
      "loss": 1.9198,
      "step": 50
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.634942805128433e-05,
      "loss": 1.9552,
      "step": 51
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.617151512622917e-05,
      "loss": 1.9767,
      "step": 52
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.598954168658955e-05,
      "loss": 1.9933,
      "step": 53
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.580352373586467e-05,
      "loss": 1.9751,
      "step": 54
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.561347763324484e-05,
      "loss": 1.8376,
      "step": 55
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.541942009217273e-05,
      "loss": 1.9485,
      "step": 56
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.522136817887353e-05,
      "loss": 1.885,
      "step": 57
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.501933931085416e-05,
      "loss": 1.9241,
      "step": 58
    },
    {
      "epoch": 0.51,
      "learning_rate": 9.481335125537138e-05,
      "loss": 1.8931,
      "step": 59
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.460342212786932e-05,
      "loss": 1.9544,
      "step": 60
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.43895703903864e-05,
      "loss": 1.8637,
      "step": 61
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.417181484993154e-05,
      "loss": 1.964,
      "step": 62
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.395017465683036e-05,
      "loss": 1.8605,
      "step": 63
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.372466930304091e-05,
      "loss": 1.9388,
      "step": 64
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.349531862043952e-05,
      "loss": 1.9195,
      "step": 65
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.32621427790767e-05,
      "loss": 1.9328,
      "step": 66
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.302516228540327e-05,
      "loss": 1.8901,
      "step": 67
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.278439798046697e-05,
      "loss": 1.9296,
      "step": 68
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.253987103807958e-05,
      "loss": 1.9282,
      "step": 69
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.229160296295488e-05,
      "loss": 1.9448,
      "step": 70
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.203961558881731e-05,
      "loss": 1.9211,
      "step": 71
    },
    {
      "epoch": 0.62,
      "learning_rate": 9.178393107648193e-05,
      "loss": 1.8658,
      "step": 72
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.15245719119055e-05,
      "loss": 1.9358,
      "step": 73
    },
    {
      "epoch": 0.64,
      "learning_rate": 9.126156090420888e-05,
      "loss": 1.9446,
      "step": 74
    },
    {
      "epoch": 0.65,
      "learning_rate": 9.099492118367123e-05,
      "loss": 1.8403,
      "step": 75
    },
    {
      "epoch": 0.66,
      "learning_rate": 9.072467619969572e-05,
      "loss": 1.9857,
      "step": 76
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.878,
      "step": 77
    },
    {
      "epoch": 0.68,
      "learning_rate": 9.017346582226289e-05,
      "loss": 1.9484,
      "step": 78
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.98925489045329e-05,
      "loss": 1.8543,
      "step": 79
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.960812367055646e-05,
      "loss": 1.8613,
      "step": 80
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.93202151338687e-05,
      "loss": 1.887,
      "step": 81
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.902884861434065e-05,
      "loss": 1.8897,
      "step": 82
    },
    {
      "epoch": 0.72,
      "learning_rate": 8.873404973595285e-05,
      "loss": 1.93,
      "step": 83
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.843584442454158e-05,
      "loss": 1.951,
      "step": 84
    },
    {
      "epoch": 0.74,
      "learning_rate": 8.81342589055191e-05,
      "loss": 1.9591,
      "step": 85
    },
    {
      "epoch": 0.74,
      "learning_rate": 8.782931970156707e-05,
      "loss": 1.8649,
      "step": 86
    },
    {
      "epoch": 0.75,
      "learning_rate": 8.752105363030414e-05,
      "loss": 1.9752,
      "step": 87
    },
    {
      "epoch": 0.76,
      "learning_rate": 8.720948780192746e-05,
      "loss": 1.9189,
      "step": 88
    },
    {
      "epoch": 0.77,
      "learning_rate": 8.689464961682852e-05,
      "loss": 1.9188,
      "step": 89
    },
    {
      "epoch": 0.78,
      "learning_rate": 8.657656676318346e-05,
      "loss": 1.9376,
      "step": 90
    },
    {
      "epoch": 0.79,
      "learning_rate": 8.625526721451798e-05,
      "loss": 1.8662,
      "step": 91
    },
    {
      "epoch": 0.8,
      "learning_rate": 8.593077922724733e-05,
      "loss": 1.8113,
      "step": 92
    },
    {
      "epoch": 0.81,
      "learning_rate": 8.560313133819125e-05,
      "loss": 1.8575,
      "step": 93
    },
    {
      "epoch": 0.81,
      "learning_rate": 8.527235236206436e-05,
      "loss": 1.8991,
      "step": 94
    },
    {
      "epoch": 0.82,
      "learning_rate": 8.493847138894209e-05,
      "loss": 1.8475,
      "step": 95
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.46015177817023e-05,
      "loss": 1.8795,
      "step": 96
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.426152117344313e-05,
      "loss": 1.8596,
      "step": 97
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.391851146487675e-05,
      "loss": 1.9481,
      "step": 98
    },
    {
      "epoch": 0.86,
      "learning_rate": 8.357251882169994e-05,
      "loss": 1.9213,
      "step": 99
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.322357367194109e-05,
      "loss": 1.9243,
      "step": 100
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.28717067032843e-05,
      "loss": 1.8764,
      "step": 101
    },
    {
      "epoch": 0.88,
      "learning_rate": 8.251694886037052e-05,
      "loss": 1.9728,
      "step": 102
    },
    {
      "epoch": 0.89,
      "learning_rate": 8.215933134207618e-05,
      "loss": 1.884,
      "step": 103
    },
    {
      "epoch": 0.9,
      "learning_rate": 8.179888559876943e-05,
      "loss": 1.9589,
      "step": 104
    },
    {
      "epoch": 0.91,
      "learning_rate": 8.143564332954425e-05,
      "loss": 2.017,
      "step": 105
    },
    {
      "epoch": 0.92,
      "learning_rate": 8.106963647943274e-05,
      "loss": 1.9672,
      "step": 106
    },
    {
      "epoch": 0.93,
      "learning_rate": 8.070089723659566e-05,
      "loss": 1.8711,
      "step": 107
    },
    {
      "epoch": 0.94,
      "learning_rate": 8.032945802949179e-05,
      "loss": 1.9211,
      "step": 108
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.995535152402591e-05,
      "loss": 1.9019,
      "step": 109
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.957861062067614e-05,
      "loss": 1.8887,
      "step": 110
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.919926845160037e-05,
      "loss": 1.9923,
      "step": 111
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.881735837772274e-05,
      "loss": 1.8621,
      "step": 112
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.843291398579946e-05,
      "loss": 1.9584,
      "step": 113
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.804596908546529e-05,
      "loss": 1.943,
      "step": 114
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.765655770625997e-05,
      "loss": 1.8953,
      "step": 115
    }
  ],
  "logging_steps": 1,
  "max_steps": 345,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 115,
  "total_flos": 2.6099686427118797e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}