{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 733,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0068212824010914054,
      "grad_norm": 1.5664408206939697,
      "learning_rate": 5.405405405405406e-06,
      "loss": 0.8612,
      "num_tokens": 3753458.0,
      "step": 5
    },
    {
      "epoch": 0.013642564802182811,
      "grad_norm": 1.5331162214279175,
      "learning_rate": 1.2162162162162164e-05,
      "loss": 0.8129,
      "num_tokens": 7657459.0,
      "step": 10
    },
    {
      "epoch": 0.020463847203274217,
      "grad_norm": 0.7885063886642456,
      "learning_rate": 1.891891891891892e-05,
      "loss": 0.7307,
      "num_tokens": 11351827.0,
      "step": 15
    },
    {
      "epoch": 0.027285129604365622,
      "grad_norm": 0.5782262682914734,
      "learning_rate": 2.5675675675675675e-05,
      "loss": 0.686,
      "num_tokens": 15095356.0,
      "step": 20
    },
    {
      "epoch": 0.034106412005457026,
      "grad_norm": 0.49164527654647827,
      "learning_rate": 3.2432432432432436e-05,
      "loss": 0.6616,
      "num_tokens": 18878489.0,
      "step": 25
    },
    {
      "epoch": 0.040927694406548434,
      "grad_norm": 0.47542551159858704,
      "learning_rate": 3.918918918918919e-05,
      "loss": 0.6376,
      "num_tokens": 22607726.0,
      "step": 30
    },
    {
      "epoch": 0.047748976807639835,
      "grad_norm": 0.3627755045890808,
      "learning_rate": 4.594594594594595e-05,
      "loss": 0.6316,
      "num_tokens": 26596975.0,
      "step": 35
    },
    {
      "epoch": 0.054570259208731244,
      "grad_norm": 0.4120884835720062,
      "learning_rate": 4.999908316574644e-05,
      "loss": 0.5997,
      "num_tokens": 30372607.0,
      "step": 40
    },
    {
      "epoch": 0.061391541609822645,
      "grad_norm": 0.3788425028324127,
      "learning_rate": 4.998876963847189e-05,
      "loss": 0.6009,
      "num_tokens": 34180321.0,
      "step": 45
    },
    {
      "epoch": 0.06821282401091405,
      "grad_norm": 0.3772900700569153,
      "learning_rate": 4.996700181165029e-05,
      "loss": 0.6005,
      "num_tokens": 37904755.0,
      "step": 50
    },
    {
      "epoch": 0.07503410641200546,
      "grad_norm": 0.42741668224334717,
      "learning_rate": 4.993379077238036e-05,
      "loss": 0.601,
      "num_tokens": 41764548.0,
      "step": 55
    },
    {
      "epoch": 0.08185538881309687,
      "grad_norm": 0.4014478027820587,
      "learning_rate": 4.9889153436180295e-05,
      "loss": 0.5889,
      "num_tokens": 45475394.0,
      "step": 60
    },
    {
      "epoch": 0.08867667121418826,
      "grad_norm": 0.41353562474250793,
      "learning_rate": 4.983311253837213e-05,
      "loss": 0.5926,
      "num_tokens": 49295807.0,
      "step": 65
    },
    {
      "epoch": 0.09549795361527967,
      "grad_norm": 0.34759485721588135,
      "learning_rate": 4.9765696622501846e-05,
      "loss": 0.582,
      "num_tokens": 52931516.0,
      "step": 70
    },
    {
      "epoch": 0.10231923601637108,
      "grad_norm": 0.383974552154541,
      "learning_rate": 4.968694002580118e-05,
      "loss": 0.5839,
      "num_tokens": 56824861.0,
      "step": 75
    },
    {
      "epoch": 0.10914051841746249,
      "grad_norm": 0.4282758831977844,
      "learning_rate": 4.959688286169851e-05,
      "loss": 0.5676,
      "num_tokens": 60559854.0,
      "step": 80
    },
    {
      "epoch": 0.11596180081855388,
      "grad_norm": 0.4353692829608917,
      "learning_rate": 4.9495570999387685e-05,
      "loss": 0.5613,
      "num_tokens": 64468301.0,
      "step": 85
    },
    {
      "epoch": 0.12278308321964529,
      "grad_norm": 0.4118047058582306,
      "learning_rate": 4.9383056040465276e-05,
      "loss": 0.5793,
      "num_tokens": 68324862.0,
      "step": 90
    },
    {
      "epoch": 0.1296043656207367,
      "grad_norm": 0.36806777119636536,
      "learning_rate": 4.925939529264815e-05,
      "loss": 0.5747,
      "num_tokens": 72145111.0,
      "step": 95
    },
    {
      "epoch": 0.1364256480218281,
      "grad_norm": 0.3960418403148651,
      "learning_rate": 4.9124651740584684e-05,
      "loss": 0.561,
      "num_tokens": 76047530.0,
      "step": 100
    },
    {
      "epoch": 0.1432469304229195,
      "grad_norm": 0.39603474736213684,
      "learning_rate": 4.897889401377447e-05,
      "loss": 0.5629,
      "num_tokens": 80034081.0,
      "step": 105
    },
    {
      "epoch": 0.15006821282401092,
      "grad_norm": 0.34311339259147644,
      "learning_rate": 4.882219635161306e-05,
      "loss": 0.5667,
      "num_tokens": 83777176.0,
      "step": 110
    },
    {
      "epoch": 0.15688949522510232,
      "grad_norm": 0.4239673614501953,
      "learning_rate": 4.865463856557922e-05,
      "loss": 0.5656,
      "num_tokens": 87560813.0,
      "step": 115
    },
    {
      "epoch": 0.16371077762619374,
      "grad_norm": 0.37248337268829346,
      "learning_rate": 4.847630599858426e-05,
      "loss": 0.5547,
      "num_tokens": 91406508.0,
      "step": 120
    },
    {
      "epoch": 0.17053206002728513,
      "grad_norm": 0.3312360346317291,
      "learning_rate": 4.8287289481503954e-05,
      "loss": 0.5616,
      "num_tokens": 95296652.0,
      "step": 125
    },
    {
      "epoch": 0.17735334242837653,
      "grad_norm": 0.3752099871635437,
      "learning_rate": 4.8087685286915276e-05,
      "loss": 0.5569,
      "num_tokens": 99236540.0,
      "step": 130
    },
    {
      "epoch": 0.18417462482946795,
      "grad_norm": 0.3752726912498474,
      "learning_rate": 4.787759508006147e-05,
      "loss": 0.5565,
      "num_tokens": 103070715.0,
      "step": 135
    },
    {
      "epoch": 0.19099590723055934,
      "grad_norm": 0.40951457619667053,
      "learning_rate": 4.765712586707048e-05,
      "loss": 0.5694,
      "num_tokens": 106743213.0,
      "step": 140
    },
    {
      "epoch": 0.19781718963165076,
      "grad_norm": 0.39596056938171387,
      "learning_rate": 4.7426389940453065e-05,
      "loss": 0.5418,
      "num_tokens": 110676628.0,
      "step": 145
    },
    {
      "epoch": 0.20463847203274216,
      "grad_norm": 0.4065423309803009,
      "learning_rate": 4.718550482190837e-05,
      "loss": 0.5578,
      "num_tokens": 114351641.0,
      "step": 150
    },
    {
      "epoch": 0.21145975443383355,
      "grad_norm": 0.3387225568294525,
      "learning_rate": 4.6934593202466127e-05,
      "loss": 0.5424,
      "num_tokens": 118270253.0,
      "step": 155
    },
    {
      "epoch": 0.21828103683492497,
      "grad_norm": 0.3324713110923767,
      "learning_rate": 4.6673782879995896e-05,
      "loss": 0.5511,
      "num_tokens": 122130553.0,
      "step": 160
    },
    {
      "epoch": 0.22510231923601637,
      "grad_norm": 0.34876254200935364,
      "learning_rate": 4.640320669411526e-05,
      "loss": 0.5539,
      "num_tokens": 125907714.0,
      "step": 165
    },
    {
      "epoch": 0.23192360163710776,
      "grad_norm": 0.3520093858242035,
      "learning_rate": 4.612300245853004e-05,
      "loss": 0.5473,
      "num_tokens": 129778594.0,
      "step": 170
    },
    {
      "epoch": 0.23874488403819918,
      "grad_norm": 0.4421618580818176,
      "learning_rate": 4.5833312890841085e-05,
      "loss": 0.562,
      "num_tokens": 133567859.0,
      "step": 175
    },
    {
      "epoch": 0.24556616643929058,
      "grad_norm": 0.3951200544834137,
      "learning_rate": 4.553428553985329e-05,
      "loss": 0.5416,
      "num_tokens": 137318470.0,
      "step": 180
    },
    {
      "epoch": 0.252387448840382,
      "grad_norm": 0.37818825244903564,
      "learning_rate": 4.522607271042399e-05,
      "loss": 0.5366,
      "num_tokens": 140986549.0,
      "step": 185
    },
    {
      "epoch": 0.2592087312414734,
      "grad_norm": 0.43682247400283813,
      "learning_rate": 4.490883138588882e-05,
      "loss": 0.5479,
      "num_tokens": 144921526.0,
      "step": 190
    },
    {
      "epoch": 0.2660300136425648,
      "grad_norm": 0.32613101601600647,
      "learning_rate": 4.458272314810479e-05,
      "loss": 0.5358,
      "num_tokens": 148719256.0,
      "step": 195
    },
    {
      "epoch": 0.2728512960436562,
      "grad_norm": 0.31748515367507935,
      "learning_rate": 4.4247914095151086e-05,
      "loss": 0.5457,
      "num_tokens": 152583124.0,
      "step": 200
    },
    {
      "epoch": 0.27967257844474763,
      "grad_norm": 0.3875013589859009,
      "learning_rate": 4.390457475672966e-05,
      "loss": 0.5393,
      "num_tokens": 156451358.0,
      "step": 205
    },
    {
      "epoch": 0.286493860845839,
      "grad_norm": 0.33466753363609314,
      "learning_rate": 4.35528800073086e-05,
      "loss": 0.5408,
      "num_tokens": 160195441.0,
      "step": 210
    },
    {
      "epoch": 0.2933151432469304,
      "grad_norm": 0.3655596077442169,
      "learning_rate": 4.31930089770526e-05,
      "loss": 0.5442,
      "num_tokens": 164130734.0,
      "step": 215
    },
    {
      "epoch": 0.30013642564802184,
      "grad_norm": 0.35074111819267273,
      "learning_rate": 4.282514496058582e-05,
      "loss": 0.5236,
      "num_tokens": 167974083.0,
      "step": 220
    },
    {
      "epoch": 0.3069577080491132,
      "grad_norm": 0.3069048225879669,
      "learning_rate": 4.24494753236337e-05,
      "loss": 0.5363,
      "num_tokens": 171877159.0,
      "step": 225
    },
    {
      "epoch": 0.31377899045020463,
      "grad_norm": 0.3370104134082794,
      "learning_rate": 4.2066191407591125e-05,
      "loss": 0.5319,
      "num_tokens": 175825874.0,
      "step": 230
    },
    {
      "epoch": 0.32060027285129605,
      "grad_norm": 0.30355367064476013,
      "learning_rate": 4.1675488432065785e-05,
      "loss": 0.5242,
      "num_tokens": 179651504.0,
      "step": 235
    },
    {
      "epoch": 0.3274215552523875,
      "grad_norm": 0.29714441299438477,
      "learning_rate": 4.127756539544609e-05,
      "loss": 0.5368,
      "num_tokens": 183474332.0,
      "step": 240
    },
    {
      "epoch": 0.33424283765347884,
      "grad_norm": 0.34279632568359375,
      "learning_rate": 4.087262497354452e-05,
      "loss": 0.5453,
      "num_tokens": 187421939.0,
      "step": 245
    },
    {
      "epoch": 0.34106412005457026,
      "grad_norm": 0.3006781339645386,
      "learning_rate": 4.046087341636789e-05,
      "loss": 0.5278,
      "num_tokens": 191229072.0,
      "step": 250
    },
    {
      "epoch": 0.3478854024556617,
      "grad_norm": 0.30111509561538696,
      "learning_rate": 4.0042520443067176e-05,
      "loss": 0.529,
      "num_tokens": 195078991.0,
      "step": 255
    },
    {
      "epoch": 0.35470668485675305,
      "grad_norm": 0.3227461576461792,
      "learning_rate": 3.961777913512035e-05,
      "loss": 0.5181,
      "num_tokens": 198920961.0,
      "step": 260
    },
    {
      "epoch": 0.3615279672578445,
      "grad_norm": 0.30752789974212646,
      "learning_rate": 3.9186865827802724e-05,
      "loss": 0.5377,
      "num_tokens": 202602950.0,
      "step": 265
    },
    {
      "epoch": 0.3683492496589359,
      "grad_norm": 0.32034164667129517,
      "learning_rate": 3.875e-05,
      "loss": 0.5265,
      "num_tokens": 206455454.0,
      "step": 270
    },
    {
      "epoch": 0.37517053206002726,
      "grad_norm": 0.28475409746170044,
      "learning_rate": 3.830740416242014e-05,
      "loss": 0.5223,
      "num_tokens": 210274230.0,
      "step": 275
    },
    {
      "epoch": 0.3819918144611187,
      "grad_norm": 0.30314069986343384,
      "learning_rate": 3.7859303744261064e-05,
      "loss": 0.5281,
      "num_tokens": 213944648.0,
      "step": 280
    },
    {
      "epoch": 0.3888130968622101,
      "grad_norm": 0.28140079975128174,
      "learning_rate": 3.740592697839185e-05,
      "loss": 0.5328,
      "num_tokens": 217821264.0,
      "step": 285
    },
    {
      "epoch": 0.3956343792633015,
      "grad_norm": 0.26929807662963867,
      "learning_rate": 3.694750478510596e-05,
      "loss": 0.5284,
      "num_tokens": 221728874.0,
      "step": 290
    },
    {
      "epoch": 0.4024556616643929,
      "grad_norm": 0.26649391651153564,
      "learning_rate": 3.648427065450555e-05,
      "loss": 0.5196,
      "num_tokens": 225494536.0,
      "step": 295
    },
    {
      "epoch": 0.4092769440654843,
      "grad_norm": 0.29355040192604065,
      "learning_rate": 3.601646052757707e-05,
      "loss": 0.5188,
      "num_tokens": 229370762.0,
      "step": 300
    },
    {
      "epoch": 0.41609822646657574,
      "grad_norm": 0.3135109543800354,
      "learning_rate": 3.55443126760184e-05,
      "loss": 0.5343,
      "num_tokens": 233272121.0,
      "step": 305
    },
    {
      "epoch": 0.4229195088676671,
      "grad_norm": 0.3388509154319763,
      "learning_rate": 3.506806758087894e-05,
      "loss": 0.5319,
      "num_tokens": 237043352.0,
      "step": 310
    },
    {
      "epoch": 0.4297407912687585,
      "grad_norm": 0.30362528562545776,
      "learning_rate": 3.458796781007437e-05,
      "loss": 0.5266,
      "num_tokens": 240757499.0,
      "step": 315
    },
    {
      "epoch": 0.43656207366984995,
      "grad_norm": 0.28180354833602905,
      "learning_rate": 3.410425789483854e-05,
      "loss": 0.527,
      "num_tokens": 244605555.0,
      "step": 320
    },
    {
      "epoch": 0.4433833560709413,
      "grad_norm": 0.28672918677330017,
      "learning_rate": 3.3617184205175304e-05,
      "loss": 0.5334,
      "num_tokens": 248382993.0,
      "step": 325
    },
    {
      "epoch": 0.45020463847203274,
      "grad_norm": 0.33284738659858704,
      "learning_rate": 3.312699482437392e-05,
      "loss": 0.5206,
      "num_tokens": 252233466.0,
      "step": 330
    },
    {
      "epoch": 0.45702592087312416,
      "grad_norm": 0.28790685534477234,
      "learning_rate": 3.263393942265168e-05,
      "loss": 0.5273,
      "num_tokens": 256038440.0,
      "step": 335
    },
    {
      "epoch": 0.4638472032742155,
      "grad_norm": 0.25374555587768555,
      "learning_rate": 3.213826912998838e-05,
      "loss": 0.5197,
      "num_tokens": 260071254.0,
      "step": 340
    },
    {
      "epoch": 0.47066848567530695,
      "grad_norm": 0.26900210976600647,
      "learning_rate": 3.164023640821719e-05,
      "loss": 0.513,
      "num_tokens": 263897078.0,
      "step": 345
    },
    {
      "epoch": 0.47748976807639837,
      "grad_norm": 0.27621471881866455,
      "learning_rate": 3.114009492243721e-05,
      "loss": 0.5214,
      "num_tokens": 267702266.0,
      "step": 350
    },
    {
      "epoch": 0.4843110504774898,
      "grad_norm": 0.28301048278808594,
      "learning_rate": 3.063809941181321e-05,
      "loss": 0.531,
      "num_tokens": 271571889.0,
      "step": 355
    },
    {
      "epoch": 0.49113233287858116,
      "grad_norm": 0.28320661187171936,
      "learning_rate": 3.0134505559828203e-05,
      "loss": 0.5348,
      "num_tokens": 275444208.0,
      "step": 360
    },
    {
      "epoch": 0.4979536152796726,
      "grad_norm": 0.2856563925743103,
      "learning_rate": 2.9629569864055125e-05,
      "loss": 0.5128,
      "num_tokens": 279090959.0,
      "step": 365
    },
    {
      "epoch": 0.504774897680764,
      "grad_norm": 0.2746957540512085,
      "learning_rate": 2.9123549505513868e-05,
      "loss": 0.5148,
      "num_tokens": 283042351.0,
      "step": 370
    },
    {
      "epoch": 0.5115961800818554,
      "grad_norm": 0.27719607949256897,
      "learning_rate": 2.8616702217680134e-05,
      "loss": 0.5228,
      "num_tokens": 286947035.0,
      "step": 375
    },
    {
      "epoch": 0.5184174624829468,
      "grad_norm": 0.27276405692100525,
      "learning_rate": 2.810928615521303e-05,
      "loss": 0.5095,
      "num_tokens": 290627149.0,
      "step": 380
    },
    {
      "epoch": 0.5252387448840382,
      "grad_norm": 0.27591395378112793,
      "learning_rate": 2.7601559762468022e-05,
      "loss": 0.5187,
      "num_tokens": 294445695.0,
      "step": 385
    },
    {
      "epoch": 0.5320600272851296,
      "grad_norm": 0.27681204676628113,
      "learning_rate": 2.7093781641862387e-05,
      "loss": 0.5212,
      "num_tokens": 298235939.0,
      "step": 390
    },
    {
      "epoch": 0.538881309686221,
      "grad_norm": 0.2667984366416931,
      "learning_rate": 2.658621042216021e-05,
      "loss": 0.5054,
      "num_tokens": 301940350.0,
      "step": 395
    },
    {
      "epoch": 0.5457025920873124,
      "grad_norm": 0.6714840531349182,
      "learning_rate": 2.6079104626743845e-05,
      "loss": 0.5256,
      "num_tokens": 305677252.0,
      "step": 400
    },
    {
      "epoch": 0.5525238744884038,
      "grad_norm": 0.2727600634098053,
      "learning_rate": 2.5572722541939113e-05,
      "loss": 0.5248,
      "num_tokens": 309492997.0,
      "step": 405
    },
    {
      "epoch": 0.5593451568894953,
      "grad_norm": 0.28910964727401733,
      "learning_rate": 2.5067322085461315e-05,
      "loss": 0.5102,
      "num_tokens": 313260754.0,
      "step": 410
    },
    {
      "epoch": 0.5661664392905866,
      "grad_norm": 0.30671924352645874,
      "learning_rate": 2.4563160675048846e-05,
      "loss": 0.5153,
      "num_tokens": 317054734.0,
      "step": 415
    },
    {
      "epoch": 0.572987721691678,
      "grad_norm": 0.2649187743663788,
      "learning_rate": 2.406049509735156e-05,
      "loss": 0.5151,
      "num_tokens": 320934035.0,
      "step": 420
    },
    {
      "epoch": 0.5798090040927695,
      "grad_norm": 0.28437790274620056,
      "learning_rate": 2.355958137714056e-05,
      "loss": 0.5106,
      "num_tokens": 324620533.0,
      "step": 425
    },
    {
      "epoch": 0.5866302864938608,
      "grad_norm": 0.2717227041721344,
      "learning_rate": 2.3060674646906004e-05,
      "loss": 0.5152,
      "num_tokens": 328343683.0,
      "step": 430
    },
    {
      "epoch": 0.5934515688949522,
      "grad_norm": 0.3002876341342926,
      "learning_rate": 2.2564029016909416e-05,
      "loss": 0.5046,
      "num_tokens": 332273995.0,
      "step": 435
    },
    {
      "epoch": 0.6002728512960437,
      "grad_norm": 0.26823899149894714,
      "learning_rate": 2.2069897445756627e-05,
      "loss": 0.5026,
      "num_tokens": 336096910.0,
      "step": 440
    },
    {
      "epoch": 0.607094133697135,
      "grad_norm": 0.26657503843307495,
      "learning_rate": 2.1578531611557322e-05,
      "loss": 0.5156,
      "num_tokens": 339854518.0,
      "step": 445
    },
    {
      "epoch": 0.6139154160982264,
      "grad_norm": 0.2396160513162613,
      "learning_rate": 2.109018178373675e-05,
      "loss": 0.5144,
      "num_tokens": 343729017.0,
      "step": 450
    },
    {
      "epoch": 0.6207366984993179,
      "grad_norm": 0.25998055934906006,
      "learning_rate": 2.0605096695564973e-05,
      "loss": 0.5179,
      "num_tokens": 347564883.0,
      "step": 455
    },
    {
      "epoch": 0.6275579809004093,
      "grad_norm": 0.2405814677476883,
      "learning_rate": 2.0123523417468466e-05,
      "loss": 0.5112,
      "num_tokens": 351296309.0,
      "step": 460
    },
    {
      "epoch": 0.6343792633015006,
      "grad_norm": 0.23936094343662262,
      "learning_rate": 1.9645707231188742e-05,
      "loss": 0.5055,
      "num_tokens": 355112109.0,
      "step": 465
    },
    {
      "epoch": 0.6412005457025921,
      "grad_norm": 0.23796042799949646,
      "learning_rate": 1.9171891504851925e-05,
      "loss": 0.5243,
      "num_tokens": 358904864.0,
      "step": 470
    },
    {
      "epoch": 0.6480218281036835,
      "grad_norm": 0.2545931339263916,
      "learning_rate": 1.8702317569013094e-05,
      "loss": 0.5002,
      "num_tokens": 362651560.0,
      "step": 475
    },
    {
      "epoch": 0.654843110504775,
      "grad_norm": 0.23106758296489716,
      "learning_rate": 1.8237224593738327e-05,
      "loss": 0.5025,
      "num_tokens": 366319152.0,
      "step": 480
    },
    {
      "epoch": 0.6616643929058663,
      "grad_norm": 0.22513054311275482,
      "learning_rate": 1.7776849466787223e-05,
      "loss": 0.5168,
      "num_tokens": 370176088.0,
      "step": 485
    },
    {
      "epoch": 0.6684856753069577,
      "grad_norm": 0.23235899209976196,
      "learning_rate": 1.7321426672957896e-05,
      "loss": 0.5022,
      "num_tokens": 374011109.0,
      "step": 490
    },
    {
      "epoch": 0.6753069577080492,
      "grad_norm": 0.2550266683101654,
      "learning_rate": 1.6871188174655787e-05,
      "loss": 0.4954,
      "num_tokens": 377769452.0,
      "step": 495
    },
    {
      "epoch": 0.6821282401091405,
      "grad_norm": 0.23668882250785828,
      "learning_rate": 1.6426363293747334e-05,
      "loss": 0.4998,
      "num_tokens": 381536770.0,
      "step": 500
    },
    {
      "epoch": 0.6889495225102319,
      "grad_norm": 0.24165432155132294,
      "learning_rate": 1.598717859475846e-05,
      "loss": 0.5084,
      "num_tokens": 385264947.0,
      "step": 505
    },
    {
      "epoch": 0.6957708049113234,
      "grad_norm": 0.235661581158638,
      "learning_rate": 1.5553857769477553e-05,
      "loss": 0.5052,
      "num_tokens": 389008667.0,
      "step": 510
    },
    {
      "epoch": 0.7025920873124147,
      "grad_norm": 0.2295641452074051,
      "learning_rate": 1.5126621523021518e-05,
      "loss": 0.5097,
      "num_tokens": 392845122.0,
      "step": 515
    },
    {
      "epoch": 0.7094133697135061,
      "grad_norm": 0.259378582239151,
      "learning_rate": 1.4705687461423209e-05,
      "loss": 0.522,
      "num_tokens": 396569410.0,
      "step": 520
    },
    {
      "epoch": 0.7162346521145976,
      "grad_norm": 0.24214191734790802,
      "learning_rate": 1.4291269980797139e-05,
      "loss": 0.5062,
      "num_tokens": 400328978.0,
      "step": 525
    },
    {
      "epoch": 0.723055934515689,
      "grad_norm": 0.22774960100650787,
      "learning_rate": 1.3883580158140291e-05,
      "loss": 0.5002,
      "num_tokens": 404085025.0,
      "step": 530
    },
    {
      "epoch": 0.7298772169167803,
      "grad_norm": 0.21534228324890137,
      "learning_rate": 1.3482825643823293e-05,
      "loss": 0.5058,
      "num_tokens": 407976499.0,
      "step": 535
    },
    {
      "epoch": 0.7366984993178718,
      "grad_norm": 0.21766649186611176,
      "learning_rate": 1.3089210555827086e-05,
      "loss": 0.5116,
      "num_tokens": 411774021.0,
      "step": 540
    },
    {
      "epoch": 0.7435197817189632,
      "grad_norm": 0.21238680183887482,
      "learning_rate": 1.270293537577855e-05,
      "loss": 0.5145,
      "num_tokens": 415754417.0,
      "step": 545
    },
    {
      "epoch": 0.7503410641200545,
      "grad_norm": 0.22271254658699036,
      "learning_rate": 1.232419684683844e-05,
      "loss": 0.4992,
      "num_tokens": 419428701.0,
      "step": 550
    },
    {
      "epoch": 0.757162346521146,
      "grad_norm": 0.22422295808792114,
      "learning_rate": 1.1953187873493303e-05,
      "loss": 0.4998,
      "num_tokens": 423056747.0,
      "step": 555
    },
    {
      "epoch": 0.7639836289222374,
      "grad_norm": 0.2289811670780182,
      "learning_rate": 1.1590097423302684e-05,
      "loss": 0.4957,
      "num_tokens": 426771236.0,
      "step": 560
    },
    {
      "epoch": 0.7708049113233287,
      "grad_norm": 0.21832554042339325,
      "learning_rate": 1.1235110430651421e-05,
      "loss": 0.4956,
      "num_tokens": 430648058.0,
      "step": 565
    },
    {
      "epoch": 0.7776261937244202,
      "grad_norm": 0.23512613773345947,
      "learning_rate": 1.0888407702556284e-05,
      "loss": 0.4995,
      "num_tokens": 434432023.0,
      "step": 570
    },
    {
      "epoch": 0.7844474761255116,
      "grad_norm": 0.2369619607925415,
      "learning_rate": 1.0550165826574766e-05,
      "loss": 0.4993,
      "num_tokens": 438355231.0,
      "step": 575
    },
    {
      "epoch": 0.791268758526603,
      "grad_norm": 0.23256513476371765,
      "learning_rate": 1.0220557080862985e-05,
      "loss": 0.5145,
      "num_tokens": 442388385.0,
      "step": 580
    },
    {
      "epoch": 0.7980900409276944,
      "grad_norm": 0.2141689658164978,
      "learning_rate": 9.899749346428556e-06,
      "loss": 0.5017,
      "num_tokens": 446189045.0,
      "step": 585
    },
    {
      "epoch": 0.8049113233287858,
      "grad_norm": 0.2098773866891861,
      "learning_rate": 9.587906021623016e-06,
      "loss": 0.5158,
      "num_tokens": 450018716.0,
      "step": 590
    },
    {
      "epoch": 0.8117326057298773,
      "grad_norm": 0.23991893231868744,
      "learning_rate": 9.28518593891749e-06,
      "loss": 0.501,
      "num_tokens": 453809691.0,
      "step": 595
    },
    {
      "epoch": 0.8185538881309686,
      "grad_norm": 0.22266173362731934,
      "learning_rate": 8.99174328400385e-06,
      "loss": 0.4993,
      "num_tokens": 457645977.0,
      "step": 600
    },
    {
      "epoch": 0.82537517053206,
      "grad_norm": 0.2336045503616333,
      "learning_rate": 8.707727517262697e-06,
      "loss": 0.5047,
      "num_tokens": 461370305.0,
      "step": 605
    },
    {
      "epoch": 0.8321964529331515,
      "grad_norm": 0.20964659750461578,
      "learning_rate": 8.433283297638053e-06,
      "loss": 0.4989,
      "num_tokens": 465282670.0,
      "step": 610
    },
    {
      "epoch": 0.8390177353342428,
      "grad_norm": 0.21320775151252747,
      "learning_rate": 8.168550408957632e-06,
      "loss": 0.4967,
      "num_tokens": 469094631.0,
      "step": 615
    },
    {
      "epoch": 0.8458390177353342,
      "grad_norm": 0.22750438749790192,
      "learning_rate": 7.91366368873613e-06,
      "loss": 0.4941,
      "num_tokens": 472868071.0,
      "step": 620
    },
    {
      "epoch": 0.8526603001364257,
      "grad_norm": 0.225652813911438,
      "learning_rate": 7.66875295949791e-06,
      "loss": 0.5101,
      "num_tokens": 476693164.0,
      "step": 625
    },
    {
      "epoch": 0.859481582537517,
      "grad_norm": 0.2049553096294403,
      "learning_rate": 7.4339429626539e-06,
      "loss": 0.5098,
      "num_tokens": 480634992.0,
      "step": 630
    },
    {
      "epoch": 0.8663028649386084,
      "grad_norm": 0.2328362911939621,
      "learning_rate": 7.2093532949665715e-06,
      "loss": 0.5077,
      "num_tokens": 484452349.0,
      "step": 635
    },
    {
      "epoch": 0.8731241473396999,
      "grad_norm": 0.20552393794059753,
      "learning_rate": 6.995098347635173e-06,
      "loss": 0.493,
      "num_tokens": 488334317.0,
      "step": 640
    },
    {
      "epoch": 0.8799454297407913,
      "grad_norm": 0.20875284075737,
      "learning_rate": 6.791287248032431e-06,
      "loss": 0.4962,
      "num_tokens": 492162119.0,
      "step": 645
    },
    {
      "epoch": 0.8867667121418826,
      "grad_norm": 0.21407166123390198,
      "learning_rate": 6.598023804122194e-06,
      "loss": 0.5035,
      "num_tokens": 496169094.0,
      "step": 650
    },
    {
      "epoch": 0.8935879945429741,
      "grad_norm": 0.20252534747123718,
      "learning_rate": 6.415406451586528e-06,
      "loss": 0.499,
      "num_tokens": 500122283.0,
      "step": 655
    },
    {
      "epoch": 0.9004092769440655,
      "grad_norm": 0.20611464977264404,
      "learning_rate": 6.243528203689025e-06,
      "loss": 0.5028,
      "num_tokens": 504062446.0,
      "step": 660
    },
    {
      "epoch": 0.9072305593451568,
      "grad_norm": 0.2153824418783188,
      "learning_rate": 6.0824766039e-06,
      "loss": 0.499,
      "num_tokens": 507853633.0,
      "step": 665
    },
    {
      "epoch": 0.9140518417462483,
      "grad_norm": 0.20805135369300842,
      "learning_rate": 5.932333681307571e-06,
      "loss": 0.5058,
      "num_tokens": 511714869.0,
      "step": 670
    },
    {
      "epoch": 0.9208731241473397,
      "grad_norm": 0.2085290253162384,
      "learning_rate": 5.793175908837471e-06,
      "loss": 0.4964,
      "num_tokens": 515451192.0,
      "step": 675
    },
    {
      "epoch": 0.927694406548431,
      "grad_norm": 0.20749785006046295,
      "learning_rate": 5.665074164302742e-06,
      "loss": 0.506,
      "num_tokens": 519195745.0,
      "step": 680
    },
    {
      "epoch": 0.9345156889495225,
      "grad_norm": 0.19413329660892487,
      "learning_rate": 5.548093694303275e-06,
      "loss": 0.4915,
      "num_tokens": 523017567.0,
      "step": 685
    },
    {
      "epoch": 0.9413369713506139,
      "grad_norm": 0.21901412308216095,
      "learning_rate": 5.442294080993446e-06,
      "loss": 0.5056,
      "num_tokens": 526884969.0,
      "step": 690
    },
    {
      "epoch": 0.9481582537517054,
      "grad_norm": 0.20552243292331696,
      "learning_rate": 5.347729211734919e-06,
      "loss": 0.503,
      "num_tokens": 530678785.0,
      "step": 695
    },
    {
      "epoch": 0.9549795361527967,
      "grad_norm": 0.20113253593444824,
      "learning_rate": 5.264447251649954e-06,
      "loss": 0.5055,
      "num_tokens": 534460009.0,
      "step": 700
    },
    {
      "epoch": 0.9618008185538881,
      "grad_norm": 0.2043074369430542,
      "learning_rate": 5.192490619089267e-06,
      "loss": 0.4897,
      "num_tokens": 538338585.0,
      "step": 705
    },
    {
      "epoch": 0.9686221009549796,
      "grad_norm": 0.2070448249578476,
      "learning_rate": 5.1318959640269095e-06,
      "loss": 0.5002,
      "num_tokens": 542019550.0,
      "step": 710
    },
    {
      "epoch": 0.975443383356071,
      "grad_norm": 0.21117159724235535,
      "learning_rate": 5.082694149393189e-06,
      "loss": 0.5111,
      "num_tokens": 545767802.0,
      "step": 715
    },
    {
      "epoch": 0.9822646657571623,
      "grad_norm": 0.20505015552043915,
      "learning_rate": 5.044910235355121e-06,
      "loss": 0.497,
      "num_tokens": 549561878.0,
      "step": 720
    },
    {
      "epoch": 0.9890859481582538,
      "grad_norm": 0.19267314672470093,
      "learning_rate": 5.0185634665524255e-06,
      "loss": 0.493,
      "num_tokens": 553402412.0,
      "step": 725
    },
    {
      "epoch": 0.9959072305593452,
      "grad_norm": 0.18925239145755768,
      "learning_rate": 5.003667262295572e-06,
      "loss": 0.5012,
      "num_tokens": 557174057.0,
      "step": 730
    },
    {
      "epoch": 1.0,
      "num_tokens": 559480552.0,
      "step": 733,
      "total_flos": 1.0371853292601868e+19,
      "train_loss": 0.5346580615310357,
      "train_runtime": 5110.5186,
      "train_samples_per_second": 18.341,
      "train_steps_per_second": 0.143
    }
  ],
  "logging_steps": 5,
  "max_steps": 733,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0371853292601868e+19,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}