{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 733,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0068212824010914054,
      "grad_norm": 1.5700784921646118,
      "learning_rate": 5.405405405405406e-06,
      "loss": 0.8612,
      "num_tokens": 3753458.0,
      "step": 5
    },
    {
      "epoch": 0.013642564802182811,
      "grad_norm": 1.541254997253418,
      "learning_rate": 1.2162162162162164e-05,
      "loss": 0.813,
      "num_tokens": 7657459.0,
      "step": 10
    },
    {
      "epoch": 0.020463847203274217,
      "grad_norm": 0.8007029294967651,
      "learning_rate": 1.891891891891892e-05,
      "loss": 0.7308,
      "num_tokens": 11351827.0,
      "step": 15
    },
    {
      "epoch": 0.027285129604365622,
      "grad_norm": 0.5904110074043274,
      "learning_rate": 2.5675675675675675e-05,
      "loss": 0.6861,
      "num_tokens": 15095356.0,
      "step": 20
    },
    {
      "epoch": 0.034106412005457026,
      "grad_norm": 0.5023650527000427,
      "learning_rate": 3.2432432432432436e-05,
      "loss": 0.6616,
      "num_tokens": 18878489.0,
      "step": 25
    },
    {
      "epoch": 0.040927694406548434,
      "grad_norm": 0.4876738488674164,
      "learning_rate": 3.918918918918919e-05,
      "loss": 0.6376,
      "num_tokens": 22607726.0,
      "step": 30
    },
    {
      "epoch": 0.047748976807639835,
      "grad_norm": 0.36882731318473816,
      "learning_rate": 4.594594594594595e-05,
      "loss": 0.6317,
      "num_tokens": 26596975.0,
      "step": 35
    },
    {
      "epoch": 0.054570259208731244,
      "grad_norm": 0.4269203245639801,
      "learning_rate": 4.999908316574644e-05,
      "loss": 0.5998,
      "num_tokens": 30372607.0,
      "step": 40
    },
    {
      "epoch": 0.061391541609822645,
      "grad_norm": 0.39412692189216614,
      "learning_rate": 4.998876963847189e-05,
      "loss": 0.6009,
      "num_tokens": 34180321.0,
      "step": 45
    },
    {
      "epoch": 0.06821282401091405,
      "grad_norm": 0.4157862067222595,
      "learning_rate": 4.996700181165029e-05,
      "loss": 0.6006,
      "num_tokens": 37904755.0,
      "step": 50
    },
    {
      "epoch": 0.07503410641200546,
      "grad_norm": 0.5112420916557312,
      "learning_rate": 4.993379077238036e-05,
      "loss": 0.6012,
      "num_tokens": 41764548.0,
      "step": 55
    },
    {
      "epoch": 0.08185538881309687,
      "grad_norm": 0.4662182927131653,
      "learning_rate": 4.9889153436180295e-05,
      "loss": 0.5892,
      "num_tokens": 45475394.0,
      "step": 60
    },
    {
      "epoch": 0.08867667121418826,
      "grad_norm": 0.47541704773902893,
      "learning_rate": 4.983311253837213e-05,
      "loss": 0.5928,
      "num_tokens": 49295807.0,
      "step": 65
    },
    {
      "epoch": 0.09549795361527967,
      "grad_norm": 0.4185464084148407,
      "learning_rate": 4.9765696622501846e-05,
      "loss": 0.5823,
      "num_tokens": 52931516.0,
      "step": 70
    },
    {
      "epoch": 0.10231923601637108,
      "grad_norm": 0.4303089380264282,
      "learning_rate": 4.968694002580118e-05,
      "loss": 0.5842,
      "num_tokens": 56824861.0,
      "step": 75
    },
    {
      "epoch": 0.10914051841746249,
      "grad_norm": 0.4935140013694763,
      "learning_rate": 4.959688286169851e-05,
      "loss": 0.5678,
      "num_tokens": 60559854.0,
      "step": 80
    },
    {
      "epoch": 0.11596180081855388,
      "grad_norm": 0.4861179292201996,
      "learning_rate": 4.9495570999387685e-05,
      "loss": 0.5616,
      "num_tokens": 64468301.0,
      "step": 85
    },
    {
      "epoch": 0.12278308321964529,
      "grad_norm": 0.5267655849456787,
      "learning_rate": 4.9383056040465276e-05,
      "loss": 0.5797,
      "num_tokens": 68324862.0,
      "step": 90
    },
    {
      "epoch": 0.1296043656207367,
      "grad_norm": 0.38909223675727844,
      "learning_rate": 4.925939529264815e-05,
      "loss": 0.5749,
      "num_tokens": 72145111.0,
      "step": 95
    },
    {
      "epoch": 0.1364256480218281,
      "grad_norm": 0.40435904264450073,
      "learning_rate": 4.9124651740584684e-05,
      "loss": 0.5612,
      "num_tokens": 76047530.0,
      "step": 100
    },
    {
      "epoch": 0.1432469304229195,
      "grad_norm": 0.44939741492271423,
      "learning_rate": 4.897889401377447e-05,
      "loss": 0.5631,
      "num_tokens": 80034081.0,
      "step": 105
    },
    {
      "epoch": 0.15006821282401092,
      "grad_norm": 0.373078316450119,
      "learning_rate": 4.882219635161306e-05,
      "loss": 0.5669,
      "num_tokens": 83777176.0,
      "step": 110
    },
    {
      "epoch": 0.15688949522510232,
      "grad_norm": 0.49139291048049927,
      "learning_rate": 4.865463856557922e-05,
      "loss": 0.5657,
      "num_tokens": 87560813.0,
      "step": 115
    },
    {
      "epoch": 0.16371077762619374,
      "grad_norm": 0.4161722958087921,
      "learning_rate": 4.847630599858426e-05,
      "loss": 0.5548,
      "num_tokens": 91406508.0,
      "step": 120
    },
    {
      "epoch": 0.17053206002728513,
      "grad_norm": 0.34772634506225586,
      "learning_rate": 4.8287289481503954e-05,
      "loss": 0.5617,
      "num_tokens": 95296652.0,
      "step": 125
    },
    {
      "epoch": 0.17735334242837653,
      "grad_norm": 0.40422406792640686,
      "learning_rate": 4.8087685286915276e-05,
      "loss": 0.557,
      "num_tokens": 99236540.0,
      "step": 130
    },
    {
      "epoch": 0.18417462482946795,
      "grad_norm": 0.38424786925315857,
      "learning_rate": 4.787759508006147e-05,
      "loss": 0.5566,
      "num_tokens": 103070715.0,
      "step": 135
    },
    {
      "epoch": 0.19099590723055934,
      "grad_norm": 0.47076845169067383,
      "learning_rate": 4.765712586707048e-05,
      "loss": 0.5696,
      "num_tokens": 106743213.0,
      "step": 140
    },
    {
      "epoch": 0.19781718963165076,
      "grad_norm": 0.48413094878196716,
      "learning_rate": 4.7426389940453065e-05,
      "loss": 0.542,
      "num_tokens": 110676628.0,
      "step": 145
    },
    {
      "epoch": 0.20463847203274216,
      "grad_norm": 0.46264445781707764,
      "learning_rate": 4.718550482190837e-05,
      "loss": 0.558,
      "num_tokens": 114351641.0,
      "step": 150
    },
    {
      "epoch": 0.21145975443383355,
      "grad_norm": 0.39284056425094604,
      "learning_rate": 4.6934593202466127e-05,
      "loss": 0.5426,
      "num_tokens": 118270253.0,
      "step": 155
    },
    {
      "epoch": 0.21828103683492497,
      "grad_norm": 0.4010249674320221,
      "learning_rate": 4.6673782879995896e-05,
      "loss": 0.5514,
      "num_tokens": 122130553.0,
      "step": 160
    },
    {
      "epoch": 0.22510231923601637,
      "grad_norm": 0.38185256719589233,
      "learning_rate": 4.640320669411526e-05,
      "loss": 0.5541,
      "num_tokens": 125907714.0,
      "step": 165
    },
    {
      "epoch": 0.23192360163710776,
      "grad_norm": 0.36932316422462463,
      "learning_rate": 4.612300245853004e-05,
      "loss": 0.5474,
      "num_tokens": 129778594.0,
      "step": 170
    },
    {
      "epoch": 0.23874488403819918,
      "grad_norm": 0.4994874894618988,
      "learning_rate": 4.5833312890841085e-05,
      "loss": 0.5622,
      "num_tokens": 133567859.0,
      "step": 175
    },
    {
      "epoch": 0.24556616643929058,
      "grad_norm": 0.4484744369983673,
      "learning_rate": 4.553428553985329e-05,
      "loss": 0.5418,
      "num_tokens": 137318470.0,
      "step": 180
    },
    {
      "epoch": 0.252387448840382,
      "grad_norm": 0.402942031621933,
      "learning_rate": 4.522607271042399e-05,
      "loss": 0.5368,
      "num_tokens": 140986549.0,
      "step": 185
    },
    {
      "epoch": 0.2592087312414734,
      "grad_norm": 0.5010215044021606,
      "learning_rate": 4.490883138588882e-05,
      "loss": 0.5482,
      "num_tokens": 144921526.0,
      "step": 190
    },
    {
      "epoch": 0.2660300136425648,
      "grad_norm": 0.39415615797042847,
      "learning_rate": 4.458272314810479e-05,
      "loss": 0.536,
      "num_tokens": 148719256.0,
      "step": 195
    },
    {
      "epoch": 0.2728512960436562,
      "grad_norm": 0.386960506439209,
      "learning_rate": 4.4247914095151086e-05,
      "loss": 0.546,
      "num_tokens": 152583124.0,
      "step": 200
    },
    {
      "epoch": 0.27967257844474763,
      "grad_norm": 0.4125616252422333,
      "learning_rate": 4.390457475672966e-05,
      "loss": 0.5395,
      "num_tokens": 156451358.0,
      "step": 205
    },
    {
      "epoch": 0.286493860845839,
      "grad_norm": 0.3309045732021332,
      "learning_rate": 4.35528800073086e-05,
      "loss": 0.541,
      "num_tokens": 160195441.0,
      "step": 210
    },
    {
      "epoch": 0.2933151432469304,
      "grad_norm": 0.3817555010318756,
      "learning_rate": 4.31930089770526e-05,
      "loss": 0.5443,
      "num_tokens": 164130734.0,
      "step": 215
    },
    {
      "epoch": 0.30013642564802184,
      "grad_norm": 0.39029499888420105,
      "learning_rate": 4.282514496058582e-05,
      "loss": 0.5238,
      "num_tokens": 167974083.0,
      "step": 220
    },
    {
      "epoch": 0.3069577080491132,
      "grad_norm": 0.33578798174858093,
      "learning_rate": 4.24494753236337e-05,
      "loss": 0.5365,
      "num_tokens": 171877159.0,
      "step": 225
    },
    {
      "epoch": 0.31377899045020463,
      "grad_norm": 0.3513656258583069,
      "learning_rate": 4.2066191407591125e-05,
      "loss": 0.5321,
      "num_tokens": 175825874.0,
      "step": 230
    },
    {
      "epoch": 0.32060027285129605,
      "grad_norm": 0.3408605456352234,
      "learning_rate": 4.1675488432065785e-05,
      "loss": 0.5244,
      "num_tokens": 179651504.0,
      "step": 235
    },
    {
      "epoch": 0.3274215552523875,
      "grad_norm": 0.30909132957458496,
      "learning_rate": 4.127756539544609e-05,
      "loss": 0.537,
      "num_tokens": 183474332.0,
      "step": 240
    },
    {
      "epoch": 0.33424283765347884,
      "grad_norm": 0.4393291175365448,
      "learning_rate": 4.087262497354452e-05,
      "loss": 0.5456,
      "num_tokens": 187421939.0,
      "step": 245
    },
    {
      "epoch": 0.34106412005457026,
      "grad_norm": 0.37006404995918274,
      "learning_rate": 4.046087341636789e-05,
      "loss": 0.528,
      "num_tokens": 191229072.0,
      "step": 250
    },
    {
      "epoch": 0.3478854024556617,
      "grad_norm": 0.32528236508369446,
      "learning_rate": 4.0042520443067176e-05,
      "loss": 0.5293,
      "num_tokens": 195078991.0,
      "step": 255
    },
    {
      "epoch": 0.35470668485675305,
      "grad_norm": 0.339620977640152,
      "learning_rate": 3.961777913512035e-05,
      "loss": 0.5183,
      "num_tokens": 198920961.0,
      "step": 260
    },
    {
      "epoch": 0.3615279672578445,
      "grad_norm": 0.32565462589263916,
      "learning_rate": 3.9186865827802724e-05,
      "loss": 0.5379,
      "num_tokens": 202602950.0,
      "step": 265
    },
    {
      "epoch": 0.3683492496589359,
      "grad_norm": 0.3410382866859436,
      "learning_rate": 3.875e-05,
      "loss": 0.5267,
      "num_tokens": 206455454.0,
      "step": 270
    },
    {
      "epoch": 0.37517053206002726,
      "grad_norm": 0.30947548151016235,
      "learning_rate": 3.830740416242014e-05,
      "loss": 0.5225,
      "num_tokens": 210274230.0,
      "step": 275
    },
    {
      "epoch": 0.3819918144611187,
      "grad_norm": 0.3249005079269409,
      "learning_rate": 3.7859303744261064e-05,
      "loss": 0.5284,
      "num_tokens": 213944648.0,
      "step": 280
    },
    {
      "epoch": 0.3888130968622101,
      "grad_norm": 0.3326890766620636,
      "learning_rate": 3.740592697839185e-05,
      "loss": 0.5331,
      "num_tokens": 217821264.0,
      "step": 285
    },
    {
      "epoch": 0.3956343792633015,
      "grad_norm": 0.30306893587112427,
      "learning_rate": 3.694750478510596e-05,
      "loss": 0.5286,
      "num_tokens": 221728874.0,
      "step": 290
    },
    {
      "epoch": 0.4024556616643929,
      "grad_norm": 0.2941177189350128,
      "learning_rate": 3.648427065450555e-05,
      "loss": 0.5198,
      "num_tokens": 225494536.0,
      "step": 295
    },
    {
      "epoch": 0.4092769440654843,
      "grad_norm": 0.30446290969848633,
      "learning_rate": 3.601646052757707e-05,
      "loss": 0.519,
      "num_tokens": 229370762.0,
      "step": 300
    },
    {
      "epoch": 0.41609822646657574,
      "grad_norm": 0.33618056774139404,
      "learning_rate": 3.55443126760184e-05,
      "loss": 0.5345,
      "num_tokens": 233272121.0,
      "step": 305
    },
    {
      "epoch": 0.4229195088676671,
      "grad_norm": 0.39119410514831543,
      "learning_rate": 3.506806758087894e-05,
      "loss": 0.5321,
      "num_tokens": 237043352.0,
      "step": 310
    },
    {
      "epoch": 0.4297407912687585,
      "grad_norm": 0.36198490858078003,
      "learning_rate": 3.458796781007437e-05,
      "loss": 0.5268,
      "num_tokens": 240757499.0,
      "step": 315
    },
    {
      "epoch": 0.43656207366984995,
      "grad_norm": 0.3167111277580261,
      "learning_rate": 3.410425789483854e-05,
      "loss": 0.5271,
      "num_tokens": 244605555.0,
      "step": 320
    },
    {
      "epoch": 0.4433833560709413,
      "grad_norm": 0.3152565360069275,
      "learning_rate": 3.3617184205175304e-05,
      "loss": 0.5336,
      "num_tokens": 248382993.0,
      "step": 325
    },
    {
      "epoch": 0.45020463847203274,
      "grad_norm": 0.3747156262397766,
      "learning_rate": 3.312699482437392e-05,
      "loss": 0.5208,
      "num_tokens": 252233466.0,
      "step": 330
    },
    {
      "epoch": 0.45702592087312416,
      "grad_norm": 0.36773014068603516,
      "learning_rate": 3.263393942265168e-05,
      "loss": 0.5275,
      "num_tokens": 256038440.0,
      "step": 335
    },
    {
      "epoch": 0.4638472032742155,
      "grad_norm": 0.2980627715587616,
      "learning_rate": 3.213826912998838e-05,
      "loss": 0.5199,
      "num_tokens": 260071254.0,
      "step": 340
    },
    {
      "epoch": 0.47066848567530695,
      "grad_norm": 0.31149786710739136,
      "learning_rate": 3.164023640821719e-05,
      "loss": 0.5132,
      "num_tokens": 263897078.0,
      "step": 345
    },
    {
      "epoch": 0.47748976807639837,
      "grad_norm": 0.30857953429222107,
      "learning_rate": 3.114009492243721e-05,
      "loss": 0.5217,
      "num_tokens": 267702266.0,
      "step": 350
    },
    {
      "epoch": 0.4843110504774898,
      "grad_norm": 0.303462952375412,
      "learning_rate": 3.063809941181321e-05,
      "loss": 0.5313,
      "num_tokens": 271571889.0,
      "step": 355
    },
    {
      "epoch": 0.49113233287858116,
      "grad_norm": 0.33175215125083923,
      "learning_rate": 3.0134505559828203e-05,
      "loss": 0.535,
      "num_tokens": 275444208.0,
      "step": 360
    },
    {
      "epoch": 0.4979536152796726,
      "grad_norm": 0.31040850281715393,
      "learning_rate": 2.9629569864055125e-05,
      "loss": 0.513,
      "num_tokens": 279090959.0,
      "step": 365
    },
    {
      "epoch": 0.504774897680764,
      "grad_norm": 0.3118685483932495,
      "learning_rate": 2.9123549505513868e-05,
      "loss": 0.515,
      "num_tokens": 283042351.0,
      "step": 370
    },
    {
      "epoch": 0.5115961800818554,
      "grad_norm": 0.28737205266952515,
      "learning_rate": 2.8616702217680134e-05,
      "loss": 0.523,
      "num_tokens": 286947035.0,
      "step": 375
    },
    {
      "epoch": 0.5184174624829468,
      "grad_norm": 0.32293662428855896,
      "learning_rate": 2.810928615521303e-05,
      "loss": 0.5097,
      "num_tokens": 290627149.0,
      "step": 380
    },
    {
      "epoch": 0.5252387448840382,
      "grad_norm": 0.30941659212112427,
      "learning_rate": 2.7601559762468022e-05,
      "loss": 0.5189,
      "num_tokens": 294445695.0,
      "step": 385
    },
    {
      "epoch": 0.5320600272851296,
      "grad_norm": 0.3308981955051422,
      "learning_rate": 2.7093781641862387e-05,
      "loss": 0.5215,
      "num_tokens": 298235939.0,
      "step": 390
    },
    {
      "epoch": 0.538881309686221,
      "grad_norm": 0.3184700608253479,
      "learning_rate": 2.658621042216021e-05,
      "loss": 0.5057,
      "num_tokens": 301940350.0,
      "step": 395
    },
    {
      "epoch": 0.5457025920873124,
      "grad_norm": 2.0989575386047363,
      "learning_rate": 2.6079104626743845e-05,
      "loss": 0.5267,
      "num_tokens": 305677252.0,
      "step": 400
    },
    {
      "epoch": 0.5525238744884038,
      "grad_norm": 0.37106770277023315,
      "learning_rate": 2.5572722541939113e-05,
      "loss": 0.5256,
      "num_tokens": 309492997.0,
      "step": 405
    },
    {
      "epoch": 0.5593451568894953,
      "grad_norm": 0.3112622797489166,
      "learning_rate": 2.5067322085461315e-05,
      "loss": 0.5105,
      "num_tokens": 313260754.0,
      "step": 410
    },
    {
      "epoch": 0.5661664392905866,
      "grad_norm": 0.4030674695968628,
      "learning_rate": 2.4563160675048846e-05,
      "loss": 0.5157,
      "num_tokens": 317054734.0,
      "step": 415
    },
    {
      "epoch": 0.572987721691678,
      "grad_norm": 0.3257318139076233,
      "learning_rate": 2.406049509735156e-05,
      "loss": 0.5153,
      "num_tokens": 320934035.0,
      "step": 420
    },
    {
      "epoch": 0.5798090040927695,
      "grad_norm": 0.3117997646331787,
      "learning_rate": 2.355958137714056e-05,
      "loss": 0.5108,
      "num_tokens": 324620533.0,
      "step": 425
    },
    {
      "epoch": 0.5866302864938608,
      "grad_norm": 0.3129447400569916,
      "learning_rate": 2.3060674646906004e-05,
      "loss": 0.5155,
      "num_tokens": 328343683.0,
      "step": 430
    },
    {
      "epoch": 0.5934515688949522,
      "grad_norm": 0.3214550316333771,
      "learning_rate": 2.2564029016909416e-05,
      "loss": 0.5048,
      "num_tokens": 332273995.0,
      "step": 435
    },
    {
      "epoch": 0.6002728512960437,
      "grad_norm": 0.2830764055252075,
      "learning_rate": 2.2069897445756627e-05,
      "loss": 0.5028,
      "num_tokens": 336096910.0,
      "step": 440
    },
    {
      "epoch": 0.607094133697135,
      "grad_norm": 0.2954827547073364,
      "learning_rate": 2.1578531611557322e-05,
      "loss": 0.5159,
      "num_tokens": 339854518.0,
      "step": 445
    },
    {
      "epoch": 0.6139154160982264,
      "grad_norm": 0.25289270281791687,
      "learning_rate": 2.109018178373675e-05,
      "loss": 0.5146,
      "num_tokens": 343729017.0,
      "step": 450
    },
    {
      "epoch": 0.6207366984993179,
      "grad_norm": 0.2954281270503998,
      "learning_rate": 2.0605096695564973e-05,
      "loss": 0.5182,
      "num_tokens": 347564883.0,
      "step": 455
    },
    {
      "epoch": 0.6275579809004093,
      "grad_norm": 0.264874666929245,
      "learning_rate": 2.0123523417468466e-05,
      "loss": 0.5115,
      "num_tokens": 351296309.0,
      "step": 460
    },
    {
      "epoch": 0.6343792633015006,
      "grad_norm": 0.2828667461872101,
      "learning_rate": 1.9645707231188742e-05,
      "loss": 0.5058,
      "num_tokens": 355112109.0,
      "step": 465
    },
    {
      "epoch": 0.6412005457025921,
      "grad_norm": 0.2687091529369354,
      "learning_rate": 1.9171891504851925e-05,
      "loss": 0.5245,
      "num_tokens": 358904864.0,
      "step": 470
    },
    {
      "epoch": 0.6480218281036835,
      "grad_norm": 0.2940763533115387,
      "learning_rate": 1.8702317569013094e-05,
      "loss": 0.5004,
      "num_tokens": 362651560.0,
      "step": 475
    },
    {
      "epoch": 0.654843110504775,
      "grad_norm": 0.2619578540325165,
      "learning_rate": 1.8237224593738327e-05,
      "loss": 0.5027,
      "num_tokens": 366319152.0,
      "step": 480
    },
    {
      "epoch": 0.6616643929058663,
      "grad_norm": 0.2464180737733841,
      "learning_rate": 1.7776849466787223e-05,
      "loss": 0.517,
      "num_tokens": 370176088.0,
      "step": 485
    },
    {
      "epoch": 0.6684856753069577,
      "grad_norm": 0.2550261616706848,
      "learning_rate": 1.7321426672957896e-05,
      "loss": 0.5025,
      "num_tokens": 374011109.0,
      "step": 490
    },
    {
      "epoch": 0.6753069577080492,
      "grad_norm": 0.2865135073661804,
      "learning_rate": 1.6871188174655787e-05,
      "loss": 0.4956,
      "num_tokens": 377769452.0,
      "step": 495
    },
    {
      "epoch": 0.6821282401091405,
      "grad_norm": 0.25042206048965454,
      "learning_rate": 1.6426363293747334e-05,
      "loss": 0.5,
      "num_tokens": 381536770.0,
      "step": 500
    },
    {
      "epoch": 0.6889495225102319,
      "grad_norm": 0.26310259103775024,
      "learning_rate": 1.598717859475846e-05,
      "loss": 0.5086,
      "num_tokens": 385264947.0,
      "step": 505
    },
    {
      "epoch": 0.6957708049113234,
      "grad_norm": 0.26373496651649475,
      "learning_rate": 1.5553857769477553e-05,
      "loss": 0.5055,
      "num_tokens": 389008667.0,
      "step": 510
    },
    {
      "epoch": 0.7025920873124147,
      "grad_norm": 0.2504028379917145,
      "learning_rate": 1.5126621523021518e-05,
      "loss": 0.51,
      "num_tokens": 392845122.0,
      "step": 515
    },
    {
      "epoch": 0.7094133697135061,
      "grad_norm": 0.3016691505908966,
      "learning_rate": 1.4705687461423209e-05,
      "loss": 0.5223,
      "num_tokens": 396569410.0,
      "step": 520
    },
    {
      "epoch": 0.7162346521145976,
      "grad_norm": 0.26355814933776855,
      "learning_rate": 1.4291269980797139e-05,
      "loss": 0.5065,
      "num_tokens": 400328978.0,
      "step": 525
    },
    {
      "epoch": 0.723055934515689,
      "grad_norm": 0.258340448141098,
      "learning_rate": 1.3883580158140291e-05,
      "loss": 0.5004,
      "num_tokens": 404085025.0,
      "step": 530
    },
    {
      "epoch": 0.7298772169167803,
      "grad_norm": 0.23762258887290955,
      "learning_rate": 1.3482825643823293e-05,
      "loss": 0.5061,
      "num_tokens": 407976499.0,
      "step": 535
    },
    {
      "epoch": 0.7366984993178718,
      "grad_norm": 0.24250030517578125,
      "learning_rate": 1.3089210555827086e-05,
      "loss": 0.512,
      "num_tokens": 411774021.0,
      "step": 540
    },
    {
      "epoch": 0.7435197817189632,
      "grad_norm": 0.23150943219661713,
      "learning_rate": 1.270293537577855e-05,
      "loss": 0.5147,
      "num_tokens": 415754417.0,
      "step": 545
    },
    {
      "epoch": 0.7503410641200545,
      "grad_norm": 0.24009953439235687,
      "learning_rate": 1.232419684683844e-05,
      "loss": 0.4995,
      "num_tokens": 419428701.0,
      "step": 550
    },
    {
      "epoch": 0.757162346521146,
      "grad_norm": 0.24892768263816833,
      "learning_rate": 1.1953187873493303e-05,
      "loss": 0.5001,
      "num_tokens": 423056747.0,
      "step": 555
    },
    {
      "epoch": 0.7639836289222374,
      "grad_norm": 0.2480500489473343,
      "learning_rate": 1.1590097423302684e-05,
      "loss": 0.496,
      "num_tokens": 426771236.0,
      "step": 560
    },
    {
      "epoch": 0.7708049113233287,
      "grad_norm": 0.23765107989311218,
      "learning_rate": 1.1235110430651421e-05,
      "loss": 0.4959,
      "num_tokens": 430648058.0,
      "step": 565
    },
    {
      "epoch": 0.7776261937244202,
      "grad_norm": 0.25531235337257385,
      "learning_rate": 1.0888407702556284e-05,
      "loss": 0.4998,
      "num_tokens": 434432023.0,
      "step": 570
    },
    {
      "epoch": 0.7844474761255116,
      "grad_norm": 0.262312114238739,
      "learning_rate": 1.0550165826574766e-05,
      "loss": 0.4995,
      "num_tokens": 438355231.0,
      "step": 575
    },
    {
      "epoch": 0.791268758526603,
      "grad_norm": 0.25291720032691956,
      "learning_rate": 1.0220557080862985e-05,
      "loss": 0.5148,
      "num_tokens": 442388385.0,
      "step": 580
    },
    {
      "epoch": 0.7980900409276944,
      "grad_norm": 0.22738206386566162,
      "learning_rate": 9.899749346428556e-06,
      "loss": 0.502,
      "num_tokens": 446189045.0,
      "step": 585
    },
    {
      "epoch": 0.8049113233287858,
      "grad_norm": 0.2262069582939148,
      "learning_rate": 9.587906021623016e-06,
      "loss": 0.5161,
      "num_tokens": 450018716.0,
      "step": 590
    },
    {
      "epoch": 0.8117326057298773,
      "grad_norm": 0.2656482458114624,
      "learning_rate": 9.28518593891749e-06,
      "loss": 0.5013,
      "num_tokens": 453809691.0,
      "step": 595
    },
    {
      "epoch": 0.8185538881309686,
      "grad_norm": 0.2433101087808609,
      "learning_rate": 8.99174328400385e-06,
      "loss": 0.4996,
      "num_tokens": 457645977.0,
      "step": 600
    },
    {
      "epoch": 0.82537517053206,
      "grad_norm": 0.25572413206100464,
      "learning_rate": 8.707727517262697e-06,
      "loss": 0.5049,
      "num_tokens": 461370305.0,
      "step": 605
    },
    {
      "epoch": 0.8321964529331515,
      "grad_norm": 0.23688948154449463,
      "learning_rate": 8.433283297638053e-06,
      "loss": 0.4992,
      "num_tokens": 465282670.0,
      "step": 610
    },
    {
      "epoch": 0.8390177353342428,
      "grad_norm": 0.23922283947467804,
      "learning_rate": 8.168550408957632e-06,
      "loss": 0.4969,
      "num_tokens": 469094631.0,
      "step": 615
    },
    {
      "epoch": 0.8458390177353342,
      "grad_norm": 0.25452542304992676,
      "learning_rate": 7.91366368873613e-06,
      "loss": 0.4944,
      "num_tokens": 472868071.0,
      "step": 620
    },
    {
      "epoch": 0.8526603001364257,
      "grad_norm": 0.24521248042583466,
      "learning_rate": 7.66875295949791e-06,
      "loss": 0.5104,
      "num_tokens": 476693164.0,
      "step": 625
    },
    {
      "epoch": 0.859481582537517,
      "grad_norm": 0.2180357724428177,
      "learning_rate": 7.4339429626539e-06,
      "loss": 0.5101,
      "num_tokens": 480634992.0,
      "step": 630
    },
    {
      "epoch": 0.8663028649386084,
      "grad_norm": 0.25706538558006287,
      "learning_rate": 7.2093532949665715e-06,
      "loss": 0.508,
      "num_tokens": 484452349.0,
      "step": 635
    },
    {
      "epoch": 0.8731241473396999,
      "grad_norm": 0.22597377002239227,
      "learning_rate": 6.995098347635173e-06,
      "loss": 0.4933,
      "num_tokens": 488334317.0,
      "step": 640
    },
    {
      "epoch": 0.8799454297407913,
      "grad_norm": 0.23853003978729248,
      "learning_rate": 6.791287248032431e-06,
      "loss": 0.4965,
      "num_tokens": 492162119.0,
      "step": 645
    },
    {
      "epoch": 0.8867667121418826,
      "grad_norm": 0.2505284547805786,
      "learning_rate": 6.598023804122194e-06,
      "loss": 0.5037,
      "num_tokens": 496169094.0,
      "step": 650
    },
    {
      "epoch": 0.8935879945429741,
      "grad_norm": 0.22088280320167542,
      "learning_rate": 6.415406451586528e-06,
      "loss": 0.4993,
      "num_tokens": 500122283.0,
      "step": 655
    },
    {
      "epoch": 0.9004092769440655,
      "grad_norm": 0.21783269941806793,
      "learning_rate": 6.243528203689025e-06,
      "loss": 0.5031,
      "num_tokens": 504062446.0,
      "step": 660
    },
    {
      "epoch": 0.9072305593451568,
      "grad_norm": 0.22754983603954315,
      "learning_rate": 6.0824766039e-06,
      "loss": 0.4993,
      "num_tokens": 507853633.0,
      "step": 665
    },
    {
      "epoch": 0.9140518417462483,
      "grad_norm": 0.21411247551441193,
      "learning_rate": 5.932333681307571e-06,
      "loss": 0.5061,
      "num_tokens": 511714869.0,
      "step": 670
    },
    {
      "epoch": 0.9208731241473397,
      "grad_norm": 0.21759825944900513,
      "learning_rate": 5.793175908837471e-06,
      "loss": 0.4966,
      "num_tokens": 515451192.0,
      "step": 675
    },
    {
      "epoch": 0.927694406548431,
      "grad_norm": 0.21772339940071106,
      "learning_rate": 5.665074164302742e-06,
      "loss": 0.5063,
      "num_tokens": 519195745.0,
      "step": 680
    },
    {
      "epoch": 0.9345156889495225,
      "grad_norm": 0.20628990232944489,
      "learning_rate": 5.548093694303275e-06,
      "loss": 0.4918,
      "num_tokens": 523017567.0,
      "step": 685
    },
    {
      "epoch": 0.9413369713506139,
      "grad_norm": 0.23247572779655457,
      "learning_rate": 5.442294080993446e-06,
      "loss": 0.5059,
      "num_tokens": 526884969.0,
      "step": 690
    },
    {
      "epoch": 0.9481582537517054,
      "grad_norm": 0.21672718226909637,
      "learning_rate": 5.347729211734919e-06,
      "loss": 0.5033,
      "num_tokens": 530678785.0,
      "step": 695
    },
    {
      "epoch": 0.9549795361527967,
      "grad_norm": 0.21295787394046783,
      "learning_rate": 5.264447251649954e-06,
      "loss": 0.5057,
      "num_tokens": 534460009.0,
      "step": 700
    },
    {
      "epoch": 0.9618008185538881,
      "grad_norm": 0.21965418756008148,
      "learning_rate": 5.192490619089267e-06,
      "loss": 0.4899,
      "num_tokens": 538338585.0,
      "step": 705
    },
    {
      "epoch": 0.9686221009549796,
      "grad_norm": 0.21933406591415405,
      "learning_rate": 5.1318959640269095e-06,
      "loss": 0.5005,
      "num_tokens": 542019550.0,
      "step": 710
    },
    {
      "epoch": 0.975443383356071,
      "grad_norm": 0.2290937751531601,
      "learning_rate": 5.082694149393189e-06,
      "loss": 0.5113,
      "num_tokens": 545767802.0,
      "step": 715
    },
    {
      "epoch": 0.9822646657571623,
      "grad_norm": 0.23024672269821167,
      "learning_rate": 5.044910235355121e-06,
      "loss": 0.4973,
      "num_tokens": 549561878.0,
      "step": 720
    },
    {
      "epoch": 0.9890859481582538,
      "grad_norm": 0.20155958831310272,
      "learning_rate": 5.0185634665524255e-06,
      "loss": 0.4933,
      "num_tokens": 553402412.0,
      "step": 725
    },
    {
      "epoch": 0.9959072305593452,
      "grad_norm": 0.20187729597091675,
      "learning_rate": 5.003667262295572e-06,
      "loss": 0.5014,
      "num_tokens": 557174057.0,
      "step": 730
    },
    {
      "epoch": 1.0,
      "num_tokens": 559480552.0,
      "step": 733,
      "total_flos": 1.0371853292601868e+19,
      "train_loss": 0.5348884052664327,
      "train_runtime": 11645.0887,
      "train_samples_per_second": 8.049,
      "train_steps_per_second": 0.063
    }
  ],
  "logging_steps": 5,
  "max_steps": 733,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0371853292601868e+19,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}