{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8547008547008547,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008547008547008548,
      "grad_norm": 45.01607894897461,
      "learning_rate": 5e-06,
      "loss": 1.9033,
      "step": 1
    },
    {
      "epoch": 0.017094017094017096,
      "grad_norm": 28.609533309936523,
      "learning_rate": 1e-05,
      "loss": 1.8163,
      "step": 2
    },
    {
      "epoch": 0.02564102564102564,
      "grad_norm": 58.36526870727539,
      "learning_rate": 9.89795918367347e-06,
      "loss": 2.5933,
      "step": 3
    },
    {
      "epoch": 0.03418803418803419,
      "grad_norm": 39.587982177734375,
      "learning_rate": 9.795918367346939e-06,
      "loss": 1.7991,
      "step": 4
    },
    {
      "epoch": 0.042735042735042736,
      "grad_norm": 90.51757049560547,
      "learning_rate": 9.693877551020408e-06,
      "loss": 2.6511,
      "step": 5
    },
    {
      "epoch": 0.05128205128205128,
      "grad_norm": 33.15379333496094,
      "learning_rate": 9.591836734693878e-06,
      "loss": 2.0931,
      "step": 6
    },
    {
      "epoch": 0.05982905982905983,
      "grad_norm": 45.139949798583984,
      "learning_rate": 9.489795918367348e-06,
      "loss": 2.0398,
      "step": 7
    },
    {
      "epoch": 0.06837606837606838,
      "grad_norm": 63.17831039428711,
      "learning_rate": 9.387755102040818e-06,
      "loss": 2.0816,
      "step": 8
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 24.13720703125,
      "learning_rate": 9.285714285714288e-06,
      "loss": 1.3469,
      "step": 9
    },
    {
      "epoch": 0.08547008547008547,
      "grad_norm": 130.69837951660156,
      "learning_rate": 9.183673469387756e-06,
      "loss": 1.8606,
      "step": 10
    },
    {
      "epoch": 0.09401709401709402,
      "grad_norm": 41.71373748779297,
      "learning_rate": 9.081632653061225e-06,
      "loss": 2.0109,
      "step": 11
    },
    {
      "epoch": 0.10256410256410256,
      "grad_norm": 41.24937438964844,
      "learning_rate": 8.979591836734695e-06,
      "loss": 1.929,
      "step": 12
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 51.960426330566406,
      "learning_rate": 8.877551020408163e-06,
      "loss": 1.866,
      "step": 13
    },
    {
      "epoch": 0.11965811965811966,
      "grad_norm": 42.9802360534668,
      "learning_rate": 8.775510204081633e-06,
      "loss": 1.8252,
      "step": 14
    },
    {
      "epoch": 0.1282051282051282,
      "grad_norm": 50.58588409423828,
      "learning_rate": 8.673469387755103e-06,
      "loss": 2.1206,
      "step": 15
    },
    {
      "epoch": 0.13675213675213677,
      "grad_norm": 98.52228546142578,
      "learning_rate": 8.571428571428571e-06,
      "loss": 2.1851,
      "step": 16
    },
    {
      "epoch": 0.1452991452991453,
      "grad_norm": 187.91094970703125,
      "learning_rate": 8.469387755102042e-06,
      "loss": 2.2665,
      "step": 17
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 44.042022705078125,
      "learning_rate": 8.36734693877551e-06,
      "loss": 2.1688,
      "step": 18
    },
    {
      "epoch": 0.1623931623931624,
      "grad_norm": 36.91716766357422,
      "learning_rate": 8.26530612244898e-06,
      "loss": 2.0421,
      "step": 19
    },
    {
      "epoch": 0.17094017094017094,
      "grad_norm": 27.246334075927734,
      "learning_rate": 8.16326530612245e-06,
      "loss": 1.9751,
      "step": 20
    },
    {
      "epoch": 0.1794871794871795,
      "grad_norm": 32.8808479309082,
      "learning_rate": 8.06122448979592e-06,
      "loss": 2.1734,
      "step": 21
    },
    {
      "epoch": 0.18803418803418803,
      "grad_norm": 33.08450698852539,
      "learning_rate": 7.959183673469388e-06,
      "loss": 2.3046,
      "step": 22
    },
    {
      "epoch": 0.19658119658119658,
      "grad_norm": 34.17445373535156,
      "learning_rate": 7.857142857142858e-06,
      "loss": 2.122,
      "step": 23
    },
    {
      "epoch": 0.20512820512820512,
      "grad_norm": 55.36966323852539,
      "learning_rate": 7.755102040816327e-06,
      "loss": 1.6824,
      "step": 24
    },
    {
      "epoch": 0.21367521367521367,
      "grad_norm": 50.512596130371094,
      "learning_rate": 7.653061224489796e-06,
      "loss": 2.1274,
      "step": 25
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 32.29758834838867,
      "learning_rate": 7.551020408163265e-06,
      "loss": 1.8956,
      "step": 26
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 26.03915786743164,
      "learning_rate": 7.448979591836736e-06,
      "loss": 1.8013,
      "step": 27
    },
    {
      "epoch": 0.23931623931623933,
      "grad_norm": 29.17389678955078,
      "learning_rate": 7.346938775510205e-06,
      "loss": 2.0998,
      "step": 28
    },
    {
      "epoch": 0.24786324786324787,
      "grad_norm": 25.569332122802734,
      "learning_rate": 7.244897959183675e-06,
      "loss": 0.8649,
      "step": 29
    },
    {
      "epoch": 0.2564102564102564,
      "grad_norm": 24.709575653076172,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 1.4284,
      "step": 30
    },
    {
      "epoch": 0.26495726495726496,
      "grad_norm": 40.75468063354492,
      "learning_rate": 7.0408163265306125e-06,
      "loss": 2.4333,
      "step": 31
    },
    {
      "epoch": 0.27350427350427353,
      "grad_norm": 28.9516544342041,
      "learning_rate": 6.938775510204082e-06,
      "loss": 1.8231,
      "step": 32
    },
    {
      "epoch": 0.28205128205128205,
      "grad_norm": 31.57037925720215,
      "learning_rate": 6.836734693877551e-06,
      "loss": 1.9139,
      "step": 33
    },
    {
      "epoch": 0.2905982905982906,
      "grad_norm": 28.325578689575195,
      "learning_rate": 6.734693877551021e-06,
      "loss": 2.2195,
      "step": 34
    },
    {
      "epoch": 0.29914529914529914,
      "grad_norm": 39.064449310302734,
      "learning_rate": 6.63265306122449e-06,
      "loss": 2.1197,
      "step": 35
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 26.608673095703125,
      "learning_rate": 6.530612244897959e-06,
      "loss": 1.6402,
      "step": 36
    },
    {
      "epoch": 0.3162393162393162,
      "grad_norm": 24.13069725036621,
      "learning_rate": 6.4285714285714295e-06,
      "loss": 2.3671,
      "step": 37
    },
    {
      "epoch": 0.3247863247863248,
      "grad_norm": 25.87238311767578,
      "learning_rate": 6.326530612244899e-06,
      "loss": 1.5874,
      "step": 38
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 29.62607765197754,
      "learning_rate": 6.224489795918368e-06,
      "loss": 1.6643,
      "step": 39
    },
    {
      "epoch": 0.3418803418803419,
      "grad_norm": 30.726987838745117,
      "learning_rate": 6.122448979591837e-06,
      "loss": 1.583,
      "step": 40
    },
    {
      "epoch": 0.3504273504273504,
      "grad_norm": 26.581026077270508,
      "learning_rate": 6.020408163265307e-06,
      "loss": 2.0036,
      "step": 41
    },
    {
      "epoch": 0.358974358974359,
      "grad_norm": 26.827810287475586,
      "learning_rate": 5.918367346938776e-06,
      "loss": 1.4689,
      "step": 42
    },
    {
      "epoch": 0.36752136752136755,
      "grad_norm": 25.676605224609375,
      "learning_rate": 5.816326530612246e-06,
      "loss": 1.875,
      "step": 43
    },
    {
      "epoch": 0.37606837606837606,
      "grad_norm": 29.468841552734375,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 1.5171,
      "step": 44
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 23.370439529418945,
      "learning_rate": 5.6122448979591834e-06,
      "loss": 1.1047,
      "step": 45
    },
    {
      "epoch": 0.39316239316239315,
      "grad_norm": 24.33774757385254,
      "learning_rate": 5.510204081632653e-06,
      "loss": 1.6296,
      "step": 46
    },
    {
      "epoch": 0.4017094017094017,
      "grad_norm": 30.425437927246094,
      "learning_rate": 5.408163265306123e-06,
      "loss": 1.478,
      "step": 47
    },
    {
      "epoch": 0.41025641025641024,
      "grad_norm": 19.86203956604004,
      "learning_rate": 5.306122448979593e-06,
      "loss": 1.846,
      "step": 48
    },
    {
      "epoch": 0.4188034188034188,
      "grad_norm": 31.836505889892578,
      "learning_rate": 5.204081632653062e-06,
      "loss": 2.0027,
      "step": 49
    },
    {
      "epoch": 0.42735042735042733,
      "grad_norm": 30.679075241088867,
      "learning_rate": 5.1020408163265315e-06,
      "loss": 1.6028,
      "step": 50
    },
    {
      "epoch": 0.4358974358974359,
      "grad_norm": 25.78423309326172,
      "learning_rate": 5e-06,
      "loss": 1.2363,
      "step": 51
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 27.172372817993164,
      "learning_rate": 4.897959183673469e-06,
      "loss": 1.6288,
      "step": 52
    },
    {
      "epoch": 0.452991452991453,
      "grad_norm": 28.951725006103516,
      "learning_rate": 4.795918367346939e-06,
      "loss": 1.8334,
      "step": 53
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 46.81833267211914,
      "learning_rate": 4.693877551020409e-06,
      "loss": 1.1823,
      "step": 54
    },
    {
      "epoch": 0.4700854700854701,
      "grad_norm": 26.226045608520508,
      "learning_rate": 4.591836734693878e-06,
      "loss": 1.8236,
      "step": 55
    },
    {
      "epoch": 0.47863247863247865,
      "grad_norm": 22.57502555847168,
      "learning_rate": 4.489795918367348e-06,
      "loss": 1.7485,
      "step": 56
    },
    {
      "epoch": 0.48717948717948717,
      "grad_norm": 38.75823974609375,
      "learning_rate": 4.3877551020408165e-06,
      "loss": 2.315,
      "step": 57
    },
    {
      "epoch": 0.49572649572649574,
      "grad_norm": 30.443164825439453,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 1.4465,
      "step": 58
    },
    {
      "epoch": 0.5042735042735043,
      "grad_norm": 30.137744903564453,
      "learning_rate": 4.183673469387755e-06,
      "loss": 1.8649,
      "step": 59
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 30.10780143737793,
      "learning_rate": 4.081632653061225e-06,
      "loss": 1.2203,
      "step": 60
    },
    {
      "epoch": 0.5213675213675214,
      "grad_norm": 30.356447219848633,
      "learning_rate": 3.979591836734694e-06,
      "loss": 1.5023,
      "step": 61
    },
    {
      "epoch": 0.5299145299145299,
      "grad_norm": 23.183887481689453,
      "learning_rate": 3.877551020408164e-06,
      "loss": 2.1429,
      "step": 62
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 23.465574264526367,
      "learning_rate": 3.7755102040816327e-06,
      "loss": 2.482,
      "step": 63
    },
    {
      "epoch": 0.5470085470085471,
      "grad_norm": 27.807279586791992,
      "learning_rate": 3.6734693877551024e-06,
      "loss": 1.5094,
      "step": 64
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 24.647205352783203,
      "learning_rate": 3.5714285714285718e-06,
      "loss": 1.3724,
      "step": 65
    },
    {
      "epoch": 0.5641025641025641,
      "grad_norm": 23.685543060302734,
      "learning_rate": 3.469387755102041e-06,
      "loss": 1.5686,
      "step": 66
    },
    {
      "epoch": 0.5726495726495726,
      "grad_norm": 44.072669982910156,
      "learning_rate": 3.3673469387755105e-06,
      "loss": 2.7541,
      "step": 67
    },
    {
      "epoch": 0.5811965811965812,
      "grad_norm": 29.146209716796875,
      "learning_rate": 3.2653061224489794e-06,
      "loss": 1.6415,
      "step": 68
    },
    {
      "epoch": 0.5897435897435898,
      "grad_norm": 31.696557998657227,
      "learning_rate": 3.1632653061224496e-06,
      "loss": 1.7669,
      "step": 69
    },
    {
      "epoch": 0.5982905982905983,
      "grad_norm": 18.153284072875977,
      "learning_rate": 3.0612244897959185e-06,
      "loss": 1.763,
      "step": 70
    },
    {
      "epoch": 0.6068376068376068,
      "grad_norm": 17.51633644104004,
      "learning_rate": 2.959183673469388e-06,
      "loss": 1.7882,
      "step": 71
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 21.281356811523438,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.2255,
      "step": 72
    },
    {
      "epoch": 0.6239316239316239,
      "grad_norm": 30.624502182006836,
      "learning_rate": 2.7551020408163266e-06,
      "loss": 1.2902,
      "step": 73
    },
    {
      "epoch": 0.6324786324786325,
      "grad_norm": 32.38172149658203,
      "learning_rate": 2.6530612244897964e-06,
      "loss": 1.8834,
      "step": 74
    },
    {
      "epoch": 0.6410256410256411,
      "grad_norm": 29.069534301757812,
      "learning_rate": 2.5510204081632657e-06,
      "loss": 1.7467,
      "step": 75
    },
    {
      "epoch": 0.6495726495726496,
      "grad_norm": 56.46368408203125,
      "learning_rate": 2.4489795918367347e-06,
      "loss": 1.6087,
      "step": 76
    },
    {
      "epoch": 0.6581196581196581,
      "grad_norm": 29.552095413208008,
      "learning_rate": 2.3469387755102044e-06,
      "loss": 1.4008,
      "step": 77
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 30.940513610839844,
      "learning_rate": 2.244897959183674e-06,
      "loss": 1.1384,
      "step": 78
    },
    {
      "epoch": 0.6752136752136753,
      "grad_norm": 26.42818832397461,
      "learning_rate": 2.1428571428571427e-06,
      "loss": 1.6209,
      "step": 79
    },
    {
      "epoch": 0.6837606837606838,
      "grad_norm": 23.604707717895508,
      "learning_rate": 2.0408163265306125e-06,
      "loss": 1.2335,
      "step": 80
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 33.276771545410156,
      "learning_rate": 1.938775510204082e-06,
      "loss": 1.8955,
      "step": 81
    },
    {
      "epoch": 0.7008547008547008,
      "grad_norm": 17.27667808532715,
      "learning_rate": 1.8367346938775512e-06,
      "loss": 1.8708,
      "step": 82
    },
    {
      "epoch": 0.7094017094017094,
      "grad_norm": 34.4437255859375,
      "learning_rate": 1.7346938775510206e-06,
      "loss": 1.8406,
      "step": 83
    },
    {
      "epoch": 0.717948717948718,
      "grad_norm": 27.164016723632812,
      "learning_rate": 1.6326530612244897e-06,
      "loss": 1.6318,
      "step": 84
    },
    {
      "epoch": 0.7264957264957265,
      "grad_norm": 30.789165496826172,
      "learning_rate": 1.5306122448979593e-06,
      "loss": 2.1226,
      "step": 85
    },
    {
      "epoch": 0.7350427350427351,
      "grad_norm": 24.16193962097168,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 0.8153,
      "step": 86
    },
    {
      "epoch": 0.7435897435897436,
      "grad_norm": 19.538379669189453,
      "learning_rate": 1.3265306122448982e-06,
      "loss": 1.9163,
      "step": 87
    },
    {
      "epoch": 0.7521367521367521,
      "grad_norm": 26.798044204711914,
      "learning_rate": 1.2244897959183673e-06,
      "loss": 1.7253,
      "step": 88
    },
    {
      "epoch": 0.7606837606837606,
      "grad_norm": 26.220504760742188,
      "learning_rate": 1.122448979591837e-06,
      "loss": 1.2386,
      "step": 89
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 27.829275131225586,
      "learning_rate": 1.0204081632653063e-06,
      "loss": 2.0316,
      "step": 90
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 19.041147232055664,
      "learning_rate": 9.183673469387756e-07,
      "loss": 1.8049,
      "step": 91
    },
    {
      "epoch": 0.7863247863247863,
      "grad_norm": 18.105703353881836,
      "learning_rate": 8.163265306122449e-07,
      "loss": 1.5467,
      "step": 92
    },
    {
      "epoch": 0.7948717948717948,
      "grad_norm": 25.5799560546875,
      "learning_rate": 7.142857142857143e-07,
      "loss": 1.5628,
      "step": 93
    },
    {
      "epoch": 0.8034188034188035,
      "grad_norm": 24.031326293945312,
      "learning_rate": 6.122448979591837e-07,
      "loss": 1.8895,
      "step": 94
    },
    {
      "epoch": 0.811965811965812,
      "grad_norm": 14.778085708618164,
      "learning_rate": 5.102040816326531e-07,
      "loss": 1.4871,
      "step": 95
    },
    {
      "epoch": 0.8205128205128205,
      "grad_norm": 24.120147705078125,
      "learning_rate": 4.0816326530612243e-07,
      "loss": 1.507,
      "step": 96
    },
    {
      "epoch": 0.8290598290598291,
      "grad_norm": 19.402923583984375,
      "learning_rate": 3.0612244897959183e-07,
      "loss": 1.2183,
      "step": 97
    },
    {
      "epoch": 0.8376068376068376,
      "grad_norm": 26.27826499938965,
      "learning_rate": 2.0408163265306121e-07,
      "loss": 1.7308,
      "step": 98
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 23.33223533630371,
      "learning_rate": 1.0204081632653061e-07,
      "loss": 1.3037,
      "step": 99
    },
    {
      "epoch": 0.8547008547008547,
      "grad_norm": 24.819562911987305,
      "learning_rate": 0.0,
      "loss": 1.2484,
      "step": 100
    },
    {
      "epoch": 0.8547008547008547,
      "step": 100,
      "total_flos": 2305515375820800.0,
      "train_loss": 1.7708107882738113,
      "train_runtime": 5686.4644,
      "train_samples_per_second": 0.018,
      "train_steps_per_second": 0.018
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2305515375820800.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}