{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.8561484918793503,
  "eval_steps": 200,
  "global_step": 600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 0.5968947410583496,
      "learning_rate": 9.896800825593395e-05,
      "loss": 2.3935,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.6443502306938171,
      "learning_rate": 9.793601651186791e-05,
      "loss": 2.1763,
      "step": 20
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.4056578576564789,
      "learning_rate": 9.690402476780186e-05,
      "loss": 1.9601,
      "step": 30
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.24697279930114746,
      "learning_rate": 9.587203302373582e-05,
      "loss": 1.9009,
      "step": 40
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.24417832493782043,
      "learning_rate": 9.484004127966977e-05,
      "loss": 1.8698,
      "step": 50
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.19971579313278198,
      "learning_rate": 9.380804953560372e-05,
      "loss": 1.8502,
      "step": 60
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.21794500946998596,
      "learning_rate": 9.277605779153768e-05,
      "loss": 1.8139,
      "step": 70
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.20081552863121033,
      "learning_rate": 9.174406604747162e-05,
      "loss": 1.8246,
      "step": 80
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.2045777291059494,
      "learning_rate": 9.071207430340559e-05,
      "loss": 1.8009,
      "step": 90
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.21513579785823822,
      "learning_rate": 8.968008255933953e-05,
      "loss": 1.7745,
      "step": 100
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.2838321924209595,
      "learning_rate": 8.864809081527348e-05,
      "loss": 1.7831,
      "step": 110
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.19812746345996857,
      "learning_rate": 8.761609907120744e-05,
      "loss": 1.7554,
      "step": 120
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.30143260955810547,
      "learning_rate": 8.658410732714138e-05,
      "loss": 1.7707,
      "step": 130
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.21341949701309204,
      "learning_rate": 8.555211558307535e-05,
      "loss": 1.7476,
      "step": 140
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.24006041884422302,
      "learning_rate": 8.452012383900929e-05,
      "loss": 1.7653,
      "step": 150
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.25095027685165405,
      "learning_rate": 8.348813209494324e-05,
      "loss": 1.7438,
      "step": 160
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.2602318525314331,
      "learning_rate": 8.24561403508772e-05,
      "loss": 1.7728,
      "step": 170
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.26738253235816956,
      "learning_rate": 8.142414860681114e-05,
      "loss": 1.7433,
      "step": 180
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.25230053067207336,
      "learning_rate": 8.039215686274511e-05,
      "loss": 1.7304,
      "step": 190
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.2360549122095108,
      "learning_rate": 7.936016511867905e-05,
      "loss": 1.7297,
      "step": 200
    },
    {
      "epoch": 0.62,
      "eval_loss": 1.744746446609497,
      "eval_runtime": 294.0669,
      "eval_samples_per_second": 35.172,
      "eval_steps_per_second": 4.397,
      "step": 200
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.2905316948890686,
      "learning_rate": 6.74922600619195e-05,
      "loss": 1.727,
      "step": 210
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.25649935007095337,
      "learning_rate": 6.594427244582044e-05,
      "loss": 1.7224,
      "step": 220
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.23987528681755066,
      "learning_rate": 6.439628482972137e-05,
      "loss": 1.7389,
      "step": 230
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.2479698807001114,
      "learning_rate": 6.28482972136223e-05,
      "loss": 1.7255,
      "step": 240
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.25272852182388306,
      "learning_rate": 6.130030959752322e-05,
      "loss": 1.7354,
      "step": 250
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.2447136789560318,
      "learning_rate": 5.9752321981424155e-05,
      "loss": 1.7443,
      "step": 260
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.26579201221466064,
      "learning_rate": 5.8204334365325074e-05,
      "loss": 1.7462,
      "step": 270
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.31005680561065674,
      "learning_rate": 5.6656346749226006e-05,
      "loss": 1.7144,
      "step": 280
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.2663085460662842,
      "learning_rate": 5.510835913312694e-05,
      "loss": 1.7094,
      "step": 290
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.28601768612861633,
      "learning_rate": 5.3560371517027864e-05,
      "loss": 1.6838,
      "step": 300
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.2900325059890747,
      "learning_rate": 5.20123839009288e-05,
      "loss": 1.7042,
      "step": 310
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.28358617424964905,
      "learning_rate": 5.046439628482973e-05,
      "loss": 1.7057,
      "step": 320
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.3409838378429413,
      "learning_rate": 4.891640866873065e-05,
      "loss": 1.7228,
      "step": 330
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.28400272130966187,
      "learning_rate": 4.736842105263158e-05,
      "loss": 1.7157,
      "step": 340
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.33671486377716064,
      "learning_rate": 4.582043343653251e-05,
      "loss": 1.709,
      "step": 350
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.29089200496673584,
      "learning_rate": 4.427244582043344e-05,
      "loss": 1.7018,
      "step": 360
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.2736106514930725,
      "learning_rate": 4.2724458204334365e-05,
      "loss": 1.6888,
      "step": 370
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.272792786359787,
      "learning_rate": 4.11764705882353e-05,
      "loss": 1.7191,
      "step": 380
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.282695472240448,
      "learning_rate": 3.962848297213623e-05,
      "loss": 1.688,
      "step": 390
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.29477787017822266,
      "learning_rate": 3.8080495356037155e-05,
      "loss": 1.7292,
      "step": 400
    },
    {
      "epoch": 1.24,
      "eval_loss": 1.7159068584442139,
      "eval_runtime": 296.4339,
      "eval_samples_per_second": 34.891,
      "eval_steps_per_second": 4.362,
      "step": 400
    },
    {
      "epoch": 1.27,
      "grad_norm": 0.29601866006851196,
      "learning_rate": 3.653250773993808e-05,
      "loss": 1.7035,
      "step": 410
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.23885053396224976,
      "learning_rate": 3.498452012383901e-05,
      "loss": 1.6846,
      "step": 420
    },
    {
      "epoch": 1.33,
      "grad_norm": 0.2934260666370392,
      "learning_rate": 3.343653250773994e-05,
      "loss": 1.6728,
      "step": 430
    },
    {
      "epoch": 1.36,
      "grad_norm": 0.2752627730369568,
      "learning_rate": 3.188854489164087e-05,
      "loss": 1.7108,
      "step": 440
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.26525211334228516,
      "learning_rate": 3.0340557275541798e-05,
      "loss": 1.7035,
      "step": 450
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.3212621510028839,
      "learning_rate": 2.8792569659442727e-05,
      "loss": 1.7212,
      "step": 460
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.2639882564544678,
      "learning_rate": 2.7244582043343652e-05,
      "loss": 1.7058,
      "step": 470
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.3098365068435669,
      "learning_rate": 2.5696594427244585e-05,
      "loss": 1.6856,
      "step": 480
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.26433393359184265,
      "learning_rate": 2.4148606811145514e-05,
      "loss": 1.6728,
      "step": 490
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.30506235361099243,
      "learning_rate": 2.260061919504644e-05,
      "loss": 1.7096,
      "step": 500
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.28906792402267456,
      "learning_rate": 2.105263157894737e-05,
      "loss": 1.6829,
      "step": 510
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.3350558876991272,
      "learning_rate": 1.9504643962848298e-05,
      "loss": 1.7045,
      "step": 520
    },
    {
      "epoch": 1.64,
      "grad_norm": 0.29045575857162476,
      "learning_rate": 1.7956656346749227e-05,
      "loss": 1.6696,
      "step": 530
    },
    {
      "epoch": 1.67,
      "grad_norm": 0.2855425775051117,
      "learning_rate": 1.6408668730650156e-05,
      "loss": 1.6791,
      "step": 540
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.2906227111816406,
      "learning_rate": 1.4860681114551084e-05,
      "loss": 1.6755,
      "step": 550
    },
    {
      "epoch": 1.73,
      "grad_norm": 0.293618768453598,
      "learning_rate": 1.3312693498452014e-05,
      "loss": 1.6993,
      "step": 560
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.303643137216568,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 1.6825,
      "step": 570
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.29694506525993347,
      "learning_rate": 1.0216718266253871e-05,
      "loss": 1.6921,
      "step": 580
    },
    {
      "epoch": 1.83,
      "grad_norm": 0.28675130009651184,
      "learning_rate": 8.6687306501548e-06,
      "loss": 1.6908,
      "step": 590
    },
    {
      "epoch": 1.86,
      "grad_norm": 0.2960526943206787,
      "learning_rate": 7.120743034055728e-06,
      "loss": 1.6915,
      "step": 600
    },
    {
      "epoch": 1.86,
      "eval_loss": 1.707315444946289,
      "eval_runtime": 297.2939,
      "eval_samples_per_second": 34.79,
      "eval_steps_per_second": 4.349,
      "step": 600
    }
  ],
  "logging_steps": 10,
  "max_steps": 646,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 200,
  "total_flos": 2.6435633007034368e+17,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}