{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9992229992229992,
  "eval_steps": 500,
  "global_step": 643,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001554001554001554,
      "grad_norm": 1.287060567597129,
      "learning_rate": 1e-05,
      "loss": 0.7171,
      "step": 1
    },
    {
      "epoch": 0.003108003108003108,
      "grad_norm": 1.290398939330793,
      "learning_rate": 9.999940321631158e-06,
      "loss": 0.6721,
      "step": 2
    },
    {
      "epoch": 0.004662004662004662,
      "grad_norm": 0.9929109717377168,
      "learning_rate": 9.999761287949237e-06,
      "loss": 0.4993,
      "step": 3
    },
    {
      "epoch": 0.006216006216006216,
      "grad_norm": 0.9031927531720302,
      "learning_rate": 9.99946290322801e-06,
      "loss": 0.7566,
      "step": 4
    },
    {
      "epoch": 0.00777000777000777,
      "grad_norm": 0.8214431188948244,
      "learning_rate": 9.999045174590324e-06,
      "loss": 0.6818,
      "step": 5
    },
    {
      "epoch": 0.009324009324009324,
      "grad_norm": 0.9593999539651701,
      "learning_rate": 9.998508112007925e-06,
      "loss": 0.6547,
      "step": 6
    },
    {
      "epoch": 0.010878010878010878,
      "grad_norm": 1.1099888789676076,
      "learning_rate": 9.997851728301219e-06,
      "loss": 0.6804,
      "step": 7
    },
    {
      "epoch": 0.012432012432012432,
      "grad_norm": 0.8564192310514048,
      "learning_rate": 9.99707603913897e-06,
      "loss": 0.5633,
      "step": 8
    },
    {
      "epoch": 0.013986013986013986,
      "grad_norm": 1.063236098586671,
      "learning_rate": 9.996181063037924e-06,
      "loss": 0.6966,
      "step": 9
    },
    {
      "epoch": 0.01554001554001554,
      "grad_norm": 0.9541051202078349,
      "learning_rate": 9.995166821362368e-06,
      "loss": 0.7143,
      "step": 10
    },
    {
      "epoch": 0.017094017094017096,
      "grad_norm": 0.9386326913828528,
      "learning_rate": 9.994033338323612e-06,
      "loss": 0.6527,
      "step": 11
    },
    {
      "epoch": 0.018648018648018648,
      "grad_norm": 0.9850267342487482,
      "learning_rate": 9.99278064097943e-06,
      "loss": 0.5105,
      "step": 12
    },
    {
      "epoch": 0.020202020202020204,
      "grad_norm": 0.9003120220786494,
      "learning_rate": 9.991408759233394e-06,
      "loss": 0.6594,
      "step": 13
    },
    {
      "epoch": 0.021756021756021756,
      "grad_norm": 0.9818892990706043,
      "learning_rate": 9.989917725834166e-06,
      "loss": 0.6786,
      "step": 14
    },
    {
      "epoch": 0.023310023310023312,
      "grad_norm": 1.3235201527859266,
      "learning_rate": 9.988307576374727e-06,
      "loss": 0.6224,
      "step": 15
    },
    {
      "epoch": 0.024864024864024864,
      "grad_norm": 0.9490358455901691,
      "learning_rate": 9.986578349291514e-06,
      "loss": 0.7105,
      "step": 16
    },
    {
      "epoch": 0.02641802641802642,
      "grad_norm": 1.0959823860120133,
      "learning_rate": 9.984730085863504e-06,
      "loss": 0.5076,
      "step": 17
    },
    {
      "epoch": 0.027972027972027972,
      "grad_norm": 1.1760066598209744,
      "learning_rate": 9.982762830211239e-06,
      "loss": 0.5839,
      "step": 18
    },
    {
      "epoch": 0.029526029526029528,
      "grad_norm": 0.9624079621780423,
      "learning_rate": 9.980676629295763e-06,
      "loss": 0.5875,
      "step": 19
    },
    {
      "epoch": 0.03108003108003108,
      "grad_norm": 1.1138209649690243,
      "learning_rate": 9.9784715329175e-06,
      "loss": 0.5983,
      "step": 20
    },
    {
      "epoch": 0.03263403263403263,
      "grad_norm": 0.7824885823731469,
      "learning_rate": 9.976147593715074e-06,
      "loss": 0.5242,
      "step": 21
    },
    {
      "epoch": 0.03418803418803419,
      "grad_norm": 1.0508935007482303,
      "learning_rate": 9.973704867164044e-06,
      "loss": 0.6223,
      "step": 22
    },
    {
      "epoch": 0.035742035742035744,
      "grad_norm": 0.8235018550902833,
      "learning_rate": 9.971143411575585e-06,
      "loss": 0.4938,
      "step": 23
    },
    {
      "epoch": 0.037296037296037296,
      "grad_norm": 1.2898619091079182,
      "learning_rate": 9.968463288095096e-06,
      "loss": 0.4851,
      "step": 24
    },
    {
      "epoch": 0.03885003885003885,
      "grad_norm": 0.9265755568894043,
      "learning_rate": 9.965664560700734e-06,
      "loss": 0.6658,
      "step": 25
    },
    {
      "epoch": 0.04040404040404041,
      "grad_norm": 1.33414254076587,
      "learning_rate": 9.962747296201891e-06,
      "loss": 0.5924,
      "step": 26
    },
    {
      "epoch": 0.04195804195804196,
      "grad_norm": 0.762599048893836,
      "learning_rate": 9.959711564237603e-06,
      "loss": 0.5684,
      "step": 27
    },
    {
      "epoch": 0.04351204351204351,
      "grad_norm": 1.0642367716149972,
      "learning_rate": 9.956557437274887e-06,
      "loss": 0.8413,
      "step": 28
    },
    {
      "epoch": 0.045066045066045064,
      "grad_norm": 0.9613084594171063,
      "learning_rate": 9.953284990607e-06,
      "loss": 0.6419,
      "step": 29
    },
    {
      "epoch": 0.046620046620046623,
      "grad_norm": 1.0408055633101505,
      "learning_rate": 9.949894302351653e-06,
      "loss": 0.7669,
      "step": 30
    },
    {
      "epoch": 0.048174048174048176,
      "grad_norm": 1.129431352855463,
      "learning_rate": 9.946385453449145e-06,
      "loss": 0.65,
      "step": 31
    },
    {
      "epoch": 0.04972804972804973,
      "grad_norm": 1.040149278900537,
      "learning_rate": 9.942758527660429e-06,
      "loss": 0.5691,
      "step": 32
    },
    {
      "epoch": 0.05128205128205128,
      "grad_norm": 1.185572202243125,
      "learning_rate": 9.93901361156511e-06,
      "loss": 0.671,
      "step": 33
    },
    {
      "epoch": 0.05283605283605284,
      "grad_norm": 1.279058377235717,
      "learning_rate": 9.935150794559379e-06,
      "loss": 0.7019,
      "step": 34
    },
    {
      "epoch": 0.05439005439005439,
      "grad_norm": 0.865984848234437,
      "learning_rate": 9.931170168853886e-06,
      "loss": 0.6164,
      "step": 35
    },
    {
      "epoch": 0.055944055944055944,
      "grad_norm": 1.8291545977352945,
      "learning_rate": 9.927071829471531e-06,
      "loss": 0.5272,
      "step": 36
    },
    {
      "epoch": 0.057498057498057496,
      "grad_norm": 0.8851885541597515,
      "learning_rate": 9.922855874245197e-06,
      "loss": 0.6185,
      "step": 37
    },
    {
      "epoch": 0.059052059052059055,
      "grad_norm": 0.9256948505546105,
      "learning_rate": 9.918522403815414e-06,
      "loss": 0.5428,
      "step": 38
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 0.9865980759954314,
      "learning_rate": 9.914071521627964e-06,
      "loss": 0.6704,
      "step": 39
    },
    {
      "epoch": 0.06216006216006216,
      "grad_norm": 1.0375856097276237,
      "learning_rate": 9.909503333931402e-06,
      "loss": 0.562,
      "step": 40
    },
    {
      "epoch": 0.06371406371406371,
      "grad_norm": 1.0277215273777662,
      "learning_rate": 9.904817949774524e-06,
      "loss": 0.5346,
      "step": 41
    },
    {
      "epoch": 0.06526806526806526,
      "grad_norm": 1.0255486122101665,
      "learning_rate": 9.900015481003762e-06,
      "loss": 0.5937,
      "step": 42
    },
    {
      "epoch": 0.06682206682206682,
      "grad_norm": 0.9991894136698592,
      "learning_rate": 9.895096042260517e-06,
      "loss": 0.6547,
      "step": 43
    },
    {
      "epoch": 0.06837606837606838,
      "grad_norm": 0.9734302555719793,
      "learning_rate": 9.890059750978425e-06,
      "loss": 0.64,
      "step": 44
    },
    {
      "epoch": 0.06993006993006994,
      "grad_norm": 1.0702916548112473,
      "learning_rate": 9.88490672738054e-06,
      "loss": 0.7681,
      "step": 45
    },
    {
      "epoch": 0.07148407148407149,
      "grad_norm": 0.7279123663050403,
      "learning_rate": 9.879637094476482e-06,
      "loss": 0.5674,
      "step": 46
    },
    {
      "epoch": 0.07303807303807304,
      "grad_norm": 1.0078086114529654,
      "learning_rate": 9.874250978059489e-06,
      "loss": 0.5571,
      "step": 47
    },
    {
      "epoch": 0.07459207459207459,
      "grad_norm": 0.8826041722896284,
      "learning_rate": 9.86874850670342e-06,
      "loss": 0.6377,
      "step": 48
    },
    {
      "epoch": 0.07614607614607614,
      "grad_norm": 0.8063692525626558,
      "learning_rate": 9.863129811759678e-06,
      "loss": 0.6031,
      "step": 49
    },
    {
      "epoch": 0.0777000777000777,
      "grad_norm": 0.9178388913448965,
      "learning_rate": 9.857395027354085e-06,
      "loss": 0.5568,
      "step": 50
    },
    {
      "epoch": 0.07925407925407925,
      "grad_norm": 1.0644585188170625,
      "learning_rate": 9.85154429038367e-06,
      "loss": 0.69,
      "step": 51
    },
    {
      "epoch": 0.08080808080808081,
      "grad_norm": 0.7927465990485753,
      "learning_rate": 9.845577740513409e-06,
      "loss": 0.5939,
      "step": 52
    },
    {
      "epoch": 0.08236208236208237,
      "grad_norm": 0.9039359445089957,
      "learning_rate": 9.83949552017289e-06,
      "loss": 0.5956,
      "step": 53
    },
    {
      "epoch": 0.08391608391608392,
      "grad_norm": 0.8329996086579473,
      "learning_rate": 9.833297774552905e-06,
      "loss": 0.5418,
      "step": 54
    },
    {
      "epoch": 0.08547008547008547,
      "grad_norm": 0.8481919356532074,
      "learning_rate": 9.826984651601998e-06,
      "loss": 0.5471,
      "step": 55
    },
    {
      "epoch": 0.08702408702408702,
      "grad_norm": 0.8509131135593907,
      "learning_rate": 9.820556302022916e-06,
      "loss": 0.5421,
      "step": 56
    },
    {
      "epoch": 0.08857808857808858,
      "grad_norm": 0.8554172361714503,
      "learning_rate": 9.814012879269031e-06,
      "loss": 0.6966,
      "step": 57
    },
    {
      "epoch": 0.09013209013209013,
      "grad_norm": 0.9899411098677792,
      "learning_rate": 9.80735453954066e-06,
      "loss": 0.524,
      "step": 58
    },
    {
      "epoch": 0.09168609168609168,
      "grad_norm": 0.7650857989975672,
      "learning_rate": 9.800581441781342e-06,
      "loss": 0.5388,
      "step": 59
    },
    {
      "epoch": 0.09324009324009325,
      "grad_norm": 0.8850444609719069,
      "learning_rate": 9.79369374767405e-06,
      "loss": 0.7299,
      "step": 60
    },
    {
      "epoch": 0.0947940947940948,
      "grad_norm": 0.8836895106385606,
      "learning_rate": 9.786691621637322e-06,
      "loss": 0.5961,
      "step": 61
    },
    {
      "epoch": 0.09634809634809635,
      "grad_norm": 0.9121728587330553,
      "learning_rate": 9.779575230821344e-06,
      "loss": 0.5426,
      "step": 62
    },
    {
      "epoch": 0.0979020979020979,
      "grad_norm": 0.7042151959264945,
      "learning_rate": 9.772344745103955e-06,
      "loss": 0.5818,
      "step": 63
    },
    {
      "epoch": 0.09945609945609946,
      "grad_norm": 0.9852547742106776,
      "learning_rate": 9.76500033708659e-06,
      "loss": 0.5852,
      "step": 64
    },
    {
      "epoch": 0.10101010101010101,
      "grad_norm": 0.9664556160856704,
      "learning_rate": 9.757542182090165e-06,
      "loss": 0.609,
      "step": 65
    },
    {
      "epoch": 0.10256410256410256,
      "grad_norm": 0.853406476434564,
      "learning_rate": 9.749970458150893e-06,
      "loss": 0.6294,
      "step": 66
    },
    {
      "epoch": 0.10411810411810411,
      "grad_norm": 0.7958625946346,
      "learning_rate": 9.742285346016024e-06,
      "loss": 0.566,
      "step": 67
    },
    {
      "epoch": 0.10567210567210568,
      "grad_norm": 0.9121076353990936,
      "learning_rate": 9.734487029139544e-06,
      "loss": 0.5667,
      "step": 68
    },
    {
      "epoch": 0.10722610722610723,
      "grad_norm": 1.0944470614224682,
      "learning_rate": 9.726575693677782e-06,
      "loss": 0.6562,
      "step": 69
    },
    {
      "epoch": 0.10878010878010878,
      "grad_norm": 0.9473072471718225,
      "learning_rate": 9.718551528484979e-06,
      "loss": 0.6156,
      "step": 70
    },
    {
      "epoch": 0.11033411033411034,
      "grad_norm": 0.9165875064014658,
      "learning_rate": 9.710414725108771e-06,
      "loss": 0.4945,
      "step": 71
    },
    {
      "epoch": 0.11188811188811189,
      "grad_norm": 0.7947044061935308,
      "learning_rate": 9.702165477785618e-06,
      "loss": 0.566,
      "step": 72
    },
    {
      "epoch": 0.11344211344211344,
      "grad_norm": 0.8217207231050921,
      "learning_rate": 9.69380398343617e-06,
      "loss": 0.5382,
      "step": 73
    },
    {
      "epoch": 0.11499611499611499,
      "grad_norm": 0.7072030756519719,
      "learning_rate": 9.685330441660564e-06,
      "loss": 0.6005,
      "step": 74
    },
    {
      "epoch": 0.11655011655011654,
      "grad_norm": 1.127735633700878,
      "learning_rate": 9.676745054733661e-06,
      "loss": 0.686,
      "step": 75
    },
    {
      "epoch": 0.11810411810411811,
      "grad_norm": 0.7567722308435004,
      "learning_rate": 9.668048027600217e-06,
      "loss": 0.5949,
      "step": 76
    },
    {
      "epoch": 0.11965811965811966,
      "grad_norm": 0.940904721274449,
      "learning_rate": 9.659239567869989e-06,
      "loss": 0.7208,
      "step": 77
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 0.8589933167401518,
      "learning_rate": 9.650319885812777e-06,
      "loss": 0.6428,
      "step": 78
    },
    {
      "epoch": 0.12276612276612277,
      "grad_norm": 0.7460567300212997,
      "learning_rate": 9.641289194353418e-06,
      "loss": 0.4566,
      "step": 79
    },
    {
      "epoch": 0.12432012432012432,
      "grad_norm": 0.9462477579247586,
      "learning_rate": 9.632147709066682e-06,
      "loss": 0.6661,
      "step": 80
    },
    {
      "epoch": 0.1258741258741259,
      "grad_norm": 1.0265019580385855,
      "learning_rate": 9.622895648172141e-06,
      "loss": 0.5686,
      "step": 81
    },
    {
      "epoch": 0.12742812742812742,
      "grad_norm": 0.8465826530732603,
      "learning_rate": 9.613533232528956e-06,
      "loss": 0.58,
      "step": 82
    },
    {
      "epoch": 0.128982128982129,
      "grad_norm": 1.1107578812568586,
      "learning_rate": 9.604060685630608e-06,
      "loss": 0.6289,
      "step": 83
    },
    {
      "epoch": 0.13053613053613053,
      "grad_norm": 0.9988960651039751,
      "learning_rate": 9.594478233599551e-06,
      "loss": 0.5435,
      "step": 84
    },
    {
      "epoch": 0.1320901320901321,
      "grad_norm": 0.9725142325555755,
      "learning_rate": 9.584786105181831e-06,
      "loss": 0.5997,
      "step": 85
    },
    {
      "epoch": 0.13364413364413363,
      "grad_norm": 0.9253748447907283,
      "learning_rate": 9.574984531741613e-06,
      "loss": 0.5761,
      "step": 86
    },
    {
      "epoch": 0.1351981351981352,
      "grad_norm": 0.7533627407586,
      "learning_rate": 9.565073747255665e-06,
      "loss": 0.4554,
      "step": 87
    },
    {
      "epoch": 0.13675213675213677,
      "grad_norm": 0.8354149018093497,
      "learning_rate": 9.555053988307764e-06,
      "loss": 0.6392,
      "step": 88
    },
    {
      "epoch": 0.1383061383061383,
      "grad_norm": 0.7922119412325215,
      "learning_rate": 9.544925494083062e-06,
      "loss": 0.5969,
      "step": 89
    },
    {
      "epoch": 0.13986013986013987,
      "grad_norm": 1.098491709946335,
      "learning_rate": 9.53468850636236e-06,
      "loss": 0.6687,
      "step": 90
    },
    {
      "epoch": 0.1414141414141414,
      "grad_norm": 1.1478661593087778,
      "learning_rate": 9.524343269516354e-06,
      "loss": 0.5643,
      "step": 91
    },
    {
      "epoch": 0.14296814296814297,
      "grad_norm": 1.1224670962595298,
      "learning_rate": 9.513890030499786e-06,
      "loss": 0.5974,
      "step": 92
    },
    {
      "epoch": 0.1445221445221445,
      "grad_norm": 0.809091939178162,
      "learning_rate": 9.503329038845556e-06,
      "loss": 0.5372,
      "step": 93
    },
    {
      "epoch": 0.14607614607614608,
      "grad_norm": 0.8198348581856645,
      "learning_rate": 9.492660546658771e-06,
      "loss": 0.4845,
      "step": 94
    },
    {
      "epoch": 0.14763014763014762,
      "grad_norm": 1.057619334118549,
      "learning_rate": 9.481884808610712e-06,
      "loss": 0.6607,
      "step": 95
    },
    {
      "epoch": 0.14918414918414918,
      "grad_norm": 0.908346525172346,
      "learning_rate": 9.471002081932767e-06,
      "loss": 0.6958,
      "step": 96
    },
    {
      "epoch": 0.15073815073815075,
      "grad_norm": 0.9282968819634952,
      "learning_rate": 9.460012626410286e-06,
      "loss": 0.7207,
      "step": 97
    },
    {
      "epoch": 0.1522921522921523,
      "grad_norm": 1.0629870155581145,
      "learning_rate": 9.448916704376384e-06,
      "loss": 0.5156,
      "step": 98
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 1.0854387649323378,
      "learning_rate": 9.437714580705671e-06,
      "loss": 0.6926,
      "step": 99
    },
    {
      "epoch": 0.1554001554001554,
      "grad_norm": 1.0036938568797955,
      "learning_rate": 9.426406522807932e-06,
      "loss": 0.7549,
      "step": 100
    },
    {
      "epoch": 0.15695415695415696,
      "grad_norm": 0.8940797944953299,
      "learning_rate": 9.414992800621749e-06,
      "loss": 0.48,
      "step": 101
    },
    {
      "epoch": 0.1585081585081585,
      "grad_norm": 0.9000106502572475,
      "learning_rate": 9.40347368660805e-06,
      "loss": 0.5819,
      "step": 102
    },
    {
      "epoch": 0.16006216006216006,
      "grad_norm": 1.1002281313656057,
      "learning_rate": 9.39184945574361e-06,
      "loss": 0.5819,
      "step": 103
    },
    {
      "epoch": 0.16161616161616163,
      "grad_norm": 1.0635278127825991,
      "learning_rate": 9.380120385514484e-06,
      "loss": 0.5034,
      "step": 104
    },
    {
      "epoch": 0.16317016317016317,
      "grad_norm": 0.8048515611001529,
      "learning_rate": 9.368286755909383e-06,
      "loss": 0.5868,
      "step": 105
    },
    {
      "epoch": 0.16472416472416473,
      "grad_norm": 0.7793016258380376,
      "learning_rate": 9.356348849412991e-06,
      "loss": 0.4159,
      "step": 106
    },
    {
      "epoch": 0.16627816627816627,
      "grad_norm": 0.8749226384769527,
      "learning_rate": 9.344306950999226e-06,
      "loss": 0.5689,
      "step": 107
    },
    {
      "epoch": 0.16783216783216784,
      "grad_norm": 0.7914152537538985,
      "learning_rate": 9.332161348124426e-06,
      "loss": 0.5214,
      "step": 108
    },
    {
      "epoch": 0.16938616938616938,
      "grad_norm": 0.8511015877493499,
      "learning_rate": 9.319912330720502e-06,
      "loss": 0.5067,
      "step": 109
    },
    {
      "epoch": 0.17094017094017094,
      "grad_norm": 0.7604252629336278,
      "learning_rate": 9.307560191188e-06,
      "loss": 0.6011,
      "step": 110
    },
    {
      "epoch": 0.17249417249417248,
      "grad_norm": 0.739722467231117,
      "learning_rate": 9.295105224389144e-06,
      "loss": 0.58,
      "step": 111
    },
    {
      "epoch": 0.17404817404817405,
      "grad_norm": 0.9930912809185233,
      "learning_rate": 9.282547727640767e-06,
      "loss": 0.5622,
      "step": 112
    },
    {
      "epoch": 0.17560217560217561,
      "grad_norm": 1.0011373833431905,
      "learning_rate": 9.269888000707243e-06,
      "loss": 0.4572,
      "step": 113
    },
    {
      "epoch": 0.17715617715617715,
      "grad_norm": 1.07232267999874,
      "learning_rate": 9.25712634579331e-06,
      "loss": 0.6382,
      "step": 114
    },
    {
      "epoch": 0.17871017871017872,
      "grad_norm": 0.989519175726022,
      "learning_rate": 9.244263067536872e-06,
      "loss": 0.5169,
      "step": 115
    },
    {
      "epoch": 0.18026418026418026,
      "grad_norm": 0.9692529023759858,
      "learning_rate": 9.23129847300171e-06,
      "loss": 0.5671,
      "step": 116
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 0.9086519385832078,
      "learning_rate": 9.218232871670168e-06,
      "loss": 0.4454,
      "step": 117
    },
    {
      "epoch": 0.18337218337218336,
      "grad_norm": 1.1029703568444662,
      "learning_rate": 9.205066575435754e-06,
      "loss": 0.5983,
      "step": 118
    },
    {
      "epoch": 0.18492618492618493,
      "grad_norm": 1.0557315125722626,
      "learning_rate": 9.191799898595706e-06,
      "loss": 0.5914,
      "step": 119
    },
    {
      "epoch": 0.1864801864801865,
      "grad_norm": 0.7020511039508586,
      "learning_rate": 9.178433157843474e-06,
      "loss": 0.5783,
      "step": 120
    },
    {
      "epoch": 0.18803418803418803,
      "grad_norm": 1.030598572867442,
      "learning_rate": 9.164966722611171e-06,
      "loss": 0.5806,
      "step": 121
    },
    {
      "epoch": 0.1895881895881896,
      "grad_norm": 1.0483043804107857,
      "learning_rate": 9.151400763311958e-06,
      "loss": 0.5605,
      "step": 122
    },
    {
      "epoch": 0.19114219114219114,
      "grad_norm": 0.8689865881103244,
      "learning_rate": 9.13773575483236e-06,
      "loss": 0.5631,
      "step": 123
    },
    {
      "epoch": 0.1926961926961927,
      "grad_norm": 0.7359655114483693,
      "learning_rate": 9.123971973024543e-06,
      "loss": 0.5697,
      "step": 124
    },
    {
      "epoch": 0.19425019425019424,
      "grad_norm": 0.831516688258424,
      "learning_rate": 9.110109746448527e-06,
      "loss": 0.5017,
      "step": 125
    },
    {
      "epoch": 0.1958041958041958,
      "grad_norm": 1.2080050795469695,
      "learning_rate": 9.09614940601434e-06,
      "loss": 0.5961,
      "step": 126
    },
    {
      "epoch": 0.19735819735819735,
      "grad_norm": 0.9332888068852607,
      "learning_rate": 9.08209128497412e-06,
      "loss": 0.6506,
      "step": 127
    },
    {
      "epoch": 0.1989121989121989,
      "grad_norm": 0.698648999430127,
      "learning_rate": 9.06793571891416e-06,
      "loss": 0.4766,
      "step": 128
    },
    {
      "epoch": 0.20046620046620048,
      "grad_norm": 1.0441318680416376,
      "learning_rate": 9.053683045746897e-06,
      "loss": 0.5247,
      "step": 129
    },
    {
      "epoch": 0.20202020202020202,
      "grad_norm": 0.8981679337660563,
      "learning_rate": 9.039333605702844e-06,
      "loss": 0.6338,
      "step": 130
    },
    {
      "epoch": 0.20357420357420358,
      "grad_norm": 0.8484981101489031,
      "learning_rate": 9.024887741322475e-06,
      "loss": 0.5684,
      "step": 131
    },
    {
      "epoch": 0.20512820512820512,
      "grad_norm": 1.0445078112703394,
      "learning_rate": 9.010345797448037e-06,
      "loss": 0.6309,
      "step": 132
    },
    {
      "epoch": 0.2066822066822067,
      "grad_norm": 0.6975235761912434,
      "learning_rate": 8.995708121215325e-06,
      "loss": 0.5138,
      "step": 133
    },
    {
      "epoch": 0.20823620823620823,
      "grad_norm": 0.7711316132241796,
      "learning_rate": 8.980975062045398e-06,
      "loss": 0.5182,
      "step": 134
    },
    {
      "epoch": 0.2097902097902098,
      "grad_norm": 1.010608943282339,
      "learning_rate": 8.96614697163623e-06,
      "loss": 0.684,
      "step": 135
    },
    {
      "epoch": 0.21134421134421136,
      "grad_norm": 0.9424855629079735,
      "learning_rate": 8.95122420395432e-06,
      "loss": 0.5894,
      "step": 136
    },
    {
      "epoch": 0.2128982128982129,
      "grad_norm": 0.8196370668961501,
      "learning_rate": 8.936207115226242e-06,
      "loss": 0.5738,
      "step": 137
    },
    {
      "epoch": 0.21445221445221446,
      "grad_norm": 0.7724030975377769,
      "learning_rate": 8.921096063930141e-06,
      "loss": 0.6508,
      "step": 138
    },
    {
      "epoch": 0.216006216006216,
      "grad_norm": 0.9383967814046783,
      "learning_rate": 8.905891410787174e-06,
      "loss": 0.5184,
      "step": 139
    },
    {
      "epoch": 0.21756021756021757,
      "grad_norm": 0.9234435984562406,
      "learning_rate": 8.8905935187529e-06,
      "loss": 0.6006,
      "step": 140
    },
    {
      "epoch": 0.2191142191142191,
      "grad_norm": 1.3623850981895826,
      "learning_rate": 8.875202753008614e-06,
      "loss": 0.5531,
      "step": 141
    },
    {
      "epoch": 0.22066822066822067,
      "grad_norm": 1.2508758031618654,
      "learning_rate": 8.859719480952637e-06,
      "loss": 0.7084,
      "step": 142
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.8432095945353565,
      "learning_rate": 8.844144072191537e-06,
      "loss": 0.5614,
      "step": 143
    },
    {
      "epoch": 0.22377622377622378,
      "grad_norm": 0.869685982949141,
      "learning_rate": 8.828476898531308e-06,
      "loss": 0.5273,
      "step": 144
    },
    {
      "epoch": 0.22533022533022534,
      "grad_norm": 1.0535269875683682,
      "learning_rate": 8.812718333968498e-06,
      "loss": 0.5677,
      "step": 145
    },
    {
      "epoch": 0.22688422688422688,
      "grad_norm": 0.7840965616384874,
      "learning_rate": 8.79686875468128e-06,
      "loss": 0.5804,
      "step": 146
    },
    {
      "epoch": 0.22843822843822845,
      "grad_norm": 0.8922758139793021,
      "learning_rate": 8.780928539020467e-06,
      "loss": 0.4879,
      "step": 147
    },
    {
      "epoch": 0.22999222999222999,
      "grad_norm": 1.0663700028063874,
      "learning_rate": 8.764898067500488e-06,
      "loss": 0.5255,
      "step": 148
    },
    {
      "epoch": 0.23154623154623155,
      "grad_norm": 1.1052154796655376,
      "learning_rate": 8.7487777227903e-06,
      "loss": 0.5823,
      "step": 149
    },
    {
      "epoch": 0.2331002331002331,
      "grad_norm": 0.8372838204749399,
      "learning_rate": 8.732567889704253e-06,
      "loss": 0.552,
      "step": 150
    },
    {
      "epoch": 0.23465423465423466,
      "grad_norm": 1.0383914168078336,
      "learning_rate": 8.716268955192908e-06,
      "loss": 0.4969,
      "step": 151
    },
    {
      "epoch": 0.23620823620823622,
      "grad_norm": 0.9310356859635174,
      "learning_rate": 8.699881308333794e-06,
      "loss": 0.6232,
      "step": 152
    },
    {
      "epoch": 0.23776223776223776,
      "grad_norm": 0.9542737630454637,
      "learning_rate": 8.683405340322123e-06,
      "loss": 0.647,
      "step": 153
    },
    {
      "epoch": 0.23931623931623933,
      "grad_norm": 0.7235983273059299,
      "learning_rate": 8.666841444461456e-06,
      "loss": 0.4978,
      "step": 154
    },
    {
      "epoch": 0.24087024087024086,
      "grad_norm": 0.9214338180679594,
      "learning_rate": 8.650190016154307e-06,
      "loss": 0.5112,
      "step": 155
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 0.7765319698353046,
      "learning_rate": 8.633451452892707e-06,
      "loss": 0.4189,
      "step": 156
    },
    {
      "epoch": 0.24397824397824397,
      "grad_norm": 0.8361437850631817,
      "learning_rate": 8.616626154248717e-06,
      "loss": 0.6564,
      "step": 157
    },
    {
      "epoch": 0.24553224553224554,
      "grad_norm": 1.1201137563120651,
      "learning_rate": 8.59971452186489e-06,
      "loss": 0.5598,
      "step": 158
    },
    {
      "epoch": 0.24708624708624707,
      "grad_norm": 0.8323491927700207,
      "learning_rate": 8.582716959444679e-06,
      "loss": 0.5835,
      "step": 159
    },
    {
      "epoch": 0.24864024864024864,
      "grad_norm": 0.9138271140965946,
      "learning_rate": 8.565633872742803e-06,
      "loss": 0.4858,
      "step": 160
    },
    {
      "epoch": 0.2501942501942502,
      "grad_norm": 0.7984227928741979,
      "learning_rate": 8.548465669555564e-06,
      "loss": 0.6801,
      "step": 161
    },
    {
      "epoch": 0.2517482517482518,
      "grad_norm": 1.0387416165235834,
      "learning_rate": 8.531212759711103e-06,
      "loss": 0.6349,
      "step": 162
    },
    {
      "epoch": 0.2533022533022533,
      "grad_norm": 0.8098250618756626,
      "learning_rate": 8.51387555505963e-06,
      "loss": 0.5217,
      "step": 163
    },
    {
      "epoch": 0.25485625485625485,
      "grad_norm": 0.7604279630540275,
      "learning_rate": 8.496454469463583e-06,
      "loss": 0.4321,
      "step": 164
    },
    {
      "epoch": 0.2564102564102564,
      "grad_norm": 0.7467153785853686,
      "learning_rate": 8.478949918787746e-06,
      "loss": 0.7456,
      "step": 165
    },
    {
      "epoch": 0.257964257964258,
      "grad_norm": 0.9217242408114555,
      "learning_rate": 8.461362320889338e-06,
      "loss": 0.4174,
      "step": 166
    },
    {
      "epoch": 0.2595182595182595,
      "grad_norm": 1.0701886438461339,
      "learning_rate": 8.443692095608019e-06,
      "loss": 0.7204,
      "step": 167
    },
    {
      "epoch": 0.26107226107226106,
      "grad_norm": 0.9940719591500394,
      "learning_rate": 8.425939664755874e-06,
      "loss": 0.5567,
      "step": 168
    },
    {
      "epoch": 0.26262626262626265,
      "grad_norm": 0.8026878231443845,
      "learning_rate": 8.408105452107353e-06,
      "loss": 0.4792,
      "step": 169
    },
    {
      "epoch": 0.2641802641802642,
      "grad_norm": 1.22440273921866,
      "learning_rate": 8.390189883389143e-06,
      "loss": 0.4696,
      "step": 170
    },
    {
      "epoch": 0.26573426573426573,
      "grad_norm": 0.8474717813756039,
      "learning_rate": 8.37219338627001e-06,
      "loss": 0.4837,
      "step": 171
    },
    {
      "epoch": 0.26728826728826727,
      "grad_norm": 0.9271171444493542,
      "learning_rate": 8.354116390350594e-06,
      "loss": 0.6195,
      "step": 172
    },
    {
      "epoch": 0.26884226884226886,
      "grad_norm": 0.9175517581827086,
      "learning_rate": 8.335959327153148e-06,
      "loss": 0.6171,
      "step": 173
    },
    {
      "epoch": 0.2703962703962704,
      "grad_norm": 1.1214384534629507,
      "learning_rate": 8.317722630111233e-06,
      "loss": 0.6122,
      "step": 174
    },
    {
      "epoch": 0.27195027195027194,
      "grad_norm": 1.0369916538716633,
      "learning_rate": 8.299406734559385e-06,
      "loss": 0.4738,
      "step": 175
    },
    {
      "epoch": 0.27350427350427353,
      "grad_norm": 0.9517993003576442,
      "learning_rate": 8.281012077722712e-06,
      "loss": 0.5938,
      "step": 176
    },
    {
      "epoch": 0.27505827505827507,
      "grad_norm": 0.7651148829258754,
      "learning_rate": 8.26253909870646e-06,
      "loss": 0.6886,
      "step": 177
    },
    {
      "epoch": 0.2766122766122766,
      "grad_norm": 0.9253215200171306,
      "learning_rate": 8.24398823848553e-06,
      "loss": 0.4333,
      "step": 178
    },
    {
      "epoch": 0.27816627816627815,
      "grad_norm": 1.527596309810129,
      "learning_rate": 8.225359939893954e-06,
      "loss": 0.6215,
      "step": 179
    },
    {
      "epoch": 0.27972027972027974,
      "grad_norm": 0.9172593952457445,
      "learning_rate": 8.206654647614323e-06,
      "loss": 0.5789,
      "step": 180
    },
    {
      "epoch": 0.2812742812742813,
      "grad_norm": 0.8527814414594325,
      "learning_rate": 8.18787280816717e-06,
      "loss": 0.6184,
      "step": 181
    },
    {
      "epoch": 0.2828282828282828,
      "grad_norm": 0.8865469839921362,
      "learning_rate": 8.169014869900308e-06,
      "loss": 0.6295,
      "step": 182
    },
    {
      "epoch": 0.28438228438228436,
      "grad_norm": 0.8517309651275252,
      "learning_rate": 8.150081282978139e-06,
      "loss": 0.5331,
      "step": 183
    },
    {
      "epoch": 0.28593628593628595,
      "grad_norm": 0.8315076118590468,
      "learning_rate": 8.131072499370897e-06,
      "loss": 0.5789,
      "step": 184
    },
    {
      "epoch": 0.2874902874902875,
      "grad_norm": 0.7203294922723644,
      "learning_rate": 8.111988972843859e-06,
      "loss": 0.4577,
      "step": 185
    },
    {
      "epoch": 0.289044289044289,
      "grad_norm": 0.8794205779087169,
      "learning_rate": 8.09283115894652e-06,
      "loss": 0.6125,
      "step": 186
    },
    {
      "epoch": 0.2905982905982906,
      "grad_norm": 0.9889975335181714,
      "learning_rate": 8.073599515001713e-06,
      "loss": 0.6116,
      "step": 187
    },
    {
      "epoch": 0.29215229215229216,
      "grad_norm": 0.9167264940797374,
      "learning_rate": 8.054294500094697e-06,
      "loss": 0.6634,
      "step": 188
    },
    {
      "epoch": 0.2937062937062937,
      "grad_norm": 1.1038199923144063,
      "learning_rate": 8.034916575062188e-06,
      "loss": 0.495,
      "step": 189
    },
    {
      "epoch": 0.29526029526029524,
      "grad_norm": 1.1290155229402656,
      "learning_rate": 8.015466202481371e-06,
      "loss": 0.6783,
      "step": 190
    },
    {
      "epoch": 0.29681429681429683,
      "grad_norm": 1.0055630496859111,
      "learning_rate": 7.995943846658852e-06,
      "loss": 0.5865,
      "step": 191
    },
    {
      "epoch": 0.29836829836829837,
      "grad_norm": 1.0372133424369208,
      "learning_rate": 7.976349973619567e-06,
      "loss": 0.5451,
      "step": 192
    },
    {
      "epoch": 0.2999222999222999,
      "grad_norm": 0.8040563490915533,
      "learning_rate": 7.956685051095672e-06,
      "loss": 0.5964,
      "step": 193
    },
    {
      "epoch": 0.3014763014763015,
      "grad_norm": 1.057546655101924,
      "learning_rate": 7.936949548515364e-06,
      "loss": 0.5565,
      "step": 194
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 0.8821680821336786,
      "learning_rate": 7.917143936991688e-06,
      "loss": 0.5606,
      "step": 195
    },
    {
      "epoch": 0.3045843045843046,
      "grad_norm": 0.8674907577741584,
      "learning_rate": 7.897268689311278e-06,
      "loss": 0.5862,
      "step": 196
    },
    {
      "epoch": 0.3061383061383061,
      "grad_norm": 0.8706444651270692,
      "learning_rate": 7.877324279923078e-06,
      "loss": 0.6849,
      "step": 197
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 1.4485468544220046,
      "learning_rate": 7.857311184927015e-06,
      "loss": 0.6806,
      "step": 198
    },
    {
      "epoch": 0.30924630924630925,
      "grad_norm": 1.2122402985477576,
      "learning_rate": 7.837229882062638e-06,
      "loss": 0.6029,
      "step": 199
    },
    {
      "epoch": 0.3108003108003108,
      "grad_norm": 0.6888280161460392,
      "learning_rate": 7.817080850697705e-06,
      "loss": 0.4127,
      "step": 200
    },
    {
      "epoch": 0.3123543123543124,
      "grad_norm": 0.82470002461982,
      "learning_rate": 7.796864571816745e-06,
      "loss": 0.5841,
      "step": 201
    },
    {
      "epoch": 0.3139083139083139,
      "grad_norm": 0.8090245690817658,
      "learning_rate": 7.77658152800958e-06,
      "loss": 0.5473,
      "step": 202
    },
    {
      "epoch": 0.31546231546231546,
      "grad_norm": 0.7971008205776199,
      "learning_rate": 7.756232203459794e-06,
      "loss": 0.5453,
      "step": 203
    },
    {
      "epoch": 0.317016317016317,
      "grad_norm": 0.8668563067845961,
      "learning_rate": 7.735817083933189e-06,
      "loss": 0.6337,
      "step": 204
    },
    {
      "epoch": 0.3185703185703186,
      "grad_norm": 0.998359039754979,
      "learning_rate": 7.715336656766176e-06,
      "loss": 0.4682,
      "step": 205
    },
    {
      "epoch": 0.3201243201243201,
      "grad_norm": 1.0072022163437613,
      "learning_rate": 7.69479141085415e-06,
      "loss": 0.6439,
      "step": 206
    },
    {
      "epoch": 0.32167832167832167,
      "grad_norm": 1.0428130086675327,
      "learning_rate": 7.674181836639819e-06,
      "loss": 0.5053,
      "step": 207
    },
    {
      "epoch": 0.32323232323232326,
      "grad_norm": 0.8220071235320535,
      "learning_rate": 7.653508426101488e-06,
      "loss": 0.5921,
      "step": 208
    },
    {
      "epoch": 0.3247863247863248,
      "grad_norm": 1.403320779877978,
      "learning_rate": 7.632771672741326e-06,
      "loss": 0.6189,
      "step": 209
    },
    {
      "epoch": 0.32634032634032634,
      "grad_norm": 1.0783911186573163,
      "learning_rate": 7.611972071573579e-06,
      "loss": 0.5385,
      "step": 210
    },
    {
      "epoch": 0.3278943278943279,
      "grad_norm": 0.6792518204386103,
      "learning_rate": 7.591110119112757e-06,
      "loss": 0.5257,
      "step": 211
    },
    {
      "epoch": 0.32944832944832947,
      "grad_norm": 0.9842199821582761,
      "learning_rate": 7.5701863133617735e-06,
      "loss": 0.5607,
      "step": 212
    },
    {
      "epoch": 0.331002331002331,
      "grad_norm": 0.8239855954978503,
      "learning_rate": 7.549201153800073e-06,
      "loss": 0.5418,
      "step": 213
    },
    {
      "epoch": 0.33255633255633255,
      "grad_norm": 0.7559206402903527,
      "learning_rate": 7.528155141371688e-06,
      "loss": 0.5555,
      "step": 214
    },
    {
      "epoch": 0.3341103341103341,
      "grad_norm": 0.809815004677253,
      "learning_rate": 7.507048778473296e-06,
      "loss": 0.55,
      "step": 215
    },
    {
      "epoch": 0.3356643356643357,
      "grad_norm": 0.8754773402756791,
      "learning_rate": 7.485882568942222e-06,
      "loss": 0.5637,
      "step": 216
    },
    {
      "epoch": 0.3372183372183372,
      "grad_norm": 0.6877512908870741,
      "learning_rate": 7.464657018044411e-06,
      "loss": 0.3471,
      "step": 217
    },
    {
      "epoch": 0.33877233877233875,
      "grad_norm": 1.0337878547530994,
      "learning_rate": 7.443372632462363e-06,
      "loss": 0.57,
      "step": 218
    },
    {
      "epoch": 0.34032634032634035,
      "grad_norm": 1.2498880965990447,
      "learning_rate": 7.422029920283044e-06,
      "loss": 0.5174,
      "step": 219
    },
    {
      "epoch": 0.3418803418803419,
      "grad_norm": 0.9357263067576875,
      "learning_rate": 7.400629390985753e-06,
      "loss": 0.5369,
      "step": 220
    },
    {
      "epoch": 0.3434343434343434,
      "grad_norm": 0.7849539728009569,
      "learning_rate": 7.379171555429965e-06,
      "loss": 0.3983,
      "step": 221
    },
    {
      "epoch": 0.34498834498834496,
      "grad_norm": 0.8659243256930758,
      "learning_rate": 7.357656925843125e-06,
      "loss": 0.4341,
      "step": 222
    },
    {
      "epoch": 0.34654234654234656,
      "grad_norm": 0.6240344673857208,
      "learning_rate": 7.336086015808439e-06,
      "loss": 0.5405,
      "step": 223
    },
    {
      "epoch": 0.3480963480963481,
      "grad_norm": 0.8177759212146966,
      "learning_rate": 7.314459340252593e-06,
      "loss": 0.5676,
      "step": 224
    },
    {
      "epoch": 0.34965034965034963,
      "grad_norm": 0.9093224937417566,
      "learning_rate": 7.2927774154334765e-06,
      "loss": 0.5168,
      "step": 225
    },
    {
      "epoch": 0.35120435120435123,
      "grad_norm": 1.0717234885263538,
      "learning_rate": 7.271040758927852e-06,
      "loss": 0.6916,
      "step": 226
    },
    {
      "epoch": 0.35275835275835277,
      "grad_norm": 0.9075665009935251,
      "learning_rate": 7.2492498896190015e-06,
      "loss": 0.6452,
      "step": 227
    },
    {
      "epoch": 0.3543123543123543,
      "grad_norm": 0.8136392828852748,
      "learning_rate": 7.227405327684339e-06,
      "loss": 0.4489,
      "step": 228
    },
    {
      "epoch": 0.35586635586635584,
      "grad_norm": 0.8666252959292928,
      "learning_rate": 7.205507594582994e-06,
      "loss": 0.4924,
      "step": 229
    },
    {
      "epoch": 0.35742035742035744,
      "grad_norm": 0.8422180634854972,
      "learning_rate": 7.183557213043365e-06,
      "loss": 0.5054,
      "step": 230
    },
    {
      "epoch": 0.358974358974359,
      "grad_norm": 1.0386621335508512,
      "learning_rate": 7.161554707050637e-06,
      "loss": 0.6398,
      "step": 231
    },
    {
      "epoch": 0.3605283605283605,
      "grad_norm": 0.8954415489915444,
      "learning_rate": 7.1395006018342774e-06,
      "loss": 0.4605,
      "step": 232
    },
    {
      "epoch": 0.3620823620823621,
      "grad_norm": 0.8414488989233131,
      "learning_rate": 7.117395423855496e-06,
      "loss": 0.6271,
      "step": 233
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.8451743034151997,
      "learning_rate": 7.09523970079468e-06,
      "loss": 0.4989,
      "step": 234
    },
    {
      "epoch": 0.3651903651903652,
      "grad_norm": 0.9767891822448627,
      "learning_rate": 7.073033961538793e-06,
      "loss": 0.5808,
      "step": 235
    },
    {
      "epoch": 0.3667443667443667,
      "grad_norm": 0.8524578887655285,
      "learning_rate": 7.050778736168757e-06,
      "loss": 0.5286,
      "step": 236
    },
    {
      "epoch": 0.3682983682983683,
      "grad_norm": 0.8483585297089656,
      "learning_rate": 7.028474555946787e-06,
      "loss": 0.5654,
      "step": 237
    },
    {
      "epoch": 0.36985236985236986,
      "grad_norm": 0.9267082807993728,
      "learning_rate": 7.006121953303724e-06,
      "loss": 0.4236,
      "step": 238
    },
    {
      "epoch": 0.3714063714063714,
      "grad_norm": 0.8583744764085101,
      "learning_rate": 6.983721461826312e-06,
      "loss": 0.5422,
      "step": 239
    },
    {
      "epoch": 0.372960372960373,
      "grad_norm": 0.9308888061772138,
      "learning_rate": 6.9612736162444695e-06,
      "loss": 0.647,
      "step": 240
    },
    {
      "epoch": 0.3745143745143745,
      "grad_norm": 0.8721712511886593,
      "learning_rate": 6.938778952418519e-06,
      "loss": 0.5396,
      "step": 241
    },
    {
      "epoch": 0.37606837606837606,
      "grad_norm": 1.0461384114911212,
      "learning_rate": 6.916238007326399e-06,
      "loss": 0.5289,
      "step": 242
    },
    {
      "epoch": 0.3776223776223776,
      "grad_norm": 0.9117669935532693,
      "learning_rate": 6.893651319050842e-06,
      "loss": 0.5766,
      "step": 243
    },
    {
      "epoch": 0.3791763791763792,
      "grad_norm": 0.8570178043159496,
      "learning_rate": 6.871019426766537e-06,
      "loss": 0.5391,
      "step": 244
    },
    {
      "epoch": 0.38073038073038074,
      "grad_norm": 0.8073022614374971,
      "learning_rate": 6.8483428707272456e-06,
      "loss": 0.5389,
      "step": 245
    },
    {
      "epoch": 0.3822843822843823,
      "grad_norm": 1.1407010013632595,
      "learning_rate": 6.825622192252922e-06,
      "loss": 0.3993,
      "step": 246
    },
    {
      "epoch": 0.3838383838383838,
      "grad_norm": 0.7096109551356327,
      "learning_rate": 6.802857933716774e-06,
      "loss": 0.5799,
      "step": 247
    },
    {
      "epoch": 0.3853923853923854,
      "grad_norm": 1.1253967474034594,
      "learning_rate": 6.7800506385323335e-06,
      "loss": 0.5872,
      "step": 248
    },
    {
      "epoch": 0.38694638694638694,
      "grad_norm": 0.9340957791894847,
      "learning_rate": 6.757200851140468e-06,
      "loss": 0.5624,
      "step": 249
    },
    {
      "epoch": 0.3885003885003885,
      "grad_norm": 1.1014437410465232,
      "learning_rate": 6.734309116996392e-06,
      "loss": 0.7582,
      "step": 250
    },
    {
      "epoch": 0.3900543900543901,
      "grad_norm": 0.8699905581797581,
      "learning_rate": 6.711375982556648e-06,
      "loss": 0.4406,
      "step": 251
    },
    {
      "epoch": 0.3916083916083916,
      "grad_norm": 1.173270780538771,
      "learning_rate": 6.688401995266061e-06,
      "loss": 0.6138,
      "step": 252
    },
    {
      "epoch": 0.39316239316239315,
      "grad_norm": 1.0081768329864775,
      "learning_rate": 6.665387703544661e-06,
      "loss": 0.6273,
      "step": 253
    },
    {
      "epoch": 0.3947163947163947,
      "grad_norm": 0.8819787015794277,
      "learning_rate": 6.642333656774607e-06,
      "loss": 0.4788,
      "step": 254
    },
    {
      "epoch": 0.3962703962703963,
      "grad_norm": 0.9122685441057363,
      "learning_rate": 6.61924040528706e-06,
      "loss": 0.7072,
      "step": 255
    },
    {
      "epoch": 0.3978243978243978,
      "grad_norm": 0.8791881328338158,
      "learning_rate": 6.596108500349054e-06,
      "loss": 0.6548,
      "step": 256
    },
    {
      "epoch": 0.39937839937839936,
      "grad_norm": 0.784551616184446,
      "learning_rate": 6.572938494150332e-06,
      "loss": 0.5124,
      "step": 257
    },
    {
      "epoch": 0.40093240093240096,
      "grad_norm": 1.0525063796221164,
      "learning_rate": 6.54973093979016e-06,
      "loss": 0.5484,
      "step": 258
    },
    {
      "epoch": 0.4024864024864025,
      "grad_norm": 0.835003041239398,
      "learning_rate": 6.526486391264137e-06,
      "loss": 0.5737,
      "step": 259
    },
    {
      "epoch": 0.40404040404040403,
      "grad_norm": 0.8834963101663964,
      "learning_rate": 6.503205403450957e-06,
      "loss": 0.5344,
      "step": 260
    },
    {
      "epoch": 0.40559440559440557,
      "grad_norm": 0.9417120521658039,
      "learning_rate": 6.479888532099175e-06,
      "loss": 0.6799,
      "step": 261
    },
    {
      "epoch": 0.40714840714840717,
      "grad_norm": 0.8617906130788676,
      "learning_rate": 6.4565363338139245e-06,
      "loss": 0.6621,
      "step": 262
    },
    {
      "epoch": 0.4087024087024087,
      "grad_norm": 1.0509044094888915,
      "learning_rate": 6.433149366043652e-06,
      "loss": 0.6012,
      "step": 263
    },
    {
      "epoch": 0.41025641025641024,
      "grad_norm": 1.1435254339427408,
      "learning_rate": 6.409728187066789e-06,
      "loss": 0.5096,
      "step": 264
    },
    {
      "epoch": 0.41181041181041184,
      "grad_norm": 1.1457940839749883,
      "learning_rate": 6.386273355978442e-06,
      "loss": 0.5401,
      "step": 265
    },
    {
      "epoch": 0.4133644133644134,
      "grad_norm": 2.0120271092743196,
      "learning_rate": 6.3627854326770326e-06,
      "loss": 0.5305,
      "step": 266
    },
    {
      "epoch": 0.4149184149184149,
      "grad_norm": 0.8333683535180947,
      "learning_rate": 6.339264977850943e-06,
      "loss": 0.4673,
      "step": 267
    },
    {
      "epoch": 0.41647241647241645,
      "grad_norm": 0.903528371130852,
      "learning_rate": 6.3157125529651205e-06,
      "loss": 0.5881,
      "step": 268
    },
    {
      "epoch": 0.41802641802641805,
      "grad_norm": 0.8406474914833879,
      "learning_rate": 6.292128720247692e-06,
      "loss": 0.5169,
      "step": 269
    },
    {
      "epoch": 0.4195804195804196,
      "grad_norm": 1.0599578458757946,
      "learning_rate": 6.268514042676519e-06,
      "loss": 0.7998,
      "step": 270
    },
    {
      "epoch": 0.4211344211344211,
      "grad_norm": 0.8286229383890666,
      "learning_rate": 6.244869083965777e-06,
      "loss": 0.5183,
      "step": 271
    },
    {
      "epoch": 0.4226884226884227,
      "grad_norm": 0.8288746254893705,
      "learning_rate": 6.221194408552494e-06,
      "loss": 0.5148,
      "step": 272
    },
    {
      "epoch": 0.42424242424242425,
      "grad_norm": 0.8954699370556662,
      "learning_rate": 6.197490581583078e-06,
      "loss": 0.6395,
      "step": 273
    },
    {
      "epoch": 0.4257964257964258,
      "grad_norm": 0.9215984961269827,
      "learning_rate": 6.173758168899814e-06,
      "loss": 0.5935,
      "step": 274
    },
    {
      "epoch": 0.42735042735042733,
      "grad_norm": 0.9225474961665954,
      "learning_rate": 6.149997737027377e-06,
      "loss": 0.5906,
      "step": 275
    },
    {
      "epoch": 0.4289044289044289,
      "grad_norm": 1.2221285719562045,
      "learning_rate": 6.126209853159293e-06,
      "loss": 0.8451,
      "step": 276
    },
    {
      "epoch": 0.43045843045843046,
      "grad_norm": 0.7995085563421066,
      "learning_rate": 6.102395085144406e-06,
      "loss": 0.5054,
      "step": 277
    },
    {
      "epoch": 0.432012432012432,
      "grad_norm": 1.0301315463512406,
      "learning_rate": 6.078554001473317e-06,
      "loss": 0.7042,
      "step": 278
    },
    {
      "epoch": 0.43356643356643354,
      "grad_norm": 0.6966173508409831,
      "learning_rate": 6.054687171264822e-06,
      "loss": 0.5011,
      "step": 279
    },
    {
      "epoch": 0.43512043512043513,
      "grad_norm": 0.8623621684914001,
      "learning_rate": 6.030795164252321e-06,
      "loss": 0.5034,
      "step": 280
    },
    {
      "epoch": 0.4366744366744367,
      "grad_norm": 0.998337745266573,
      "learning_rate": 6.006878550770213e-06,
      "loss": 0.6222,
      "step": 281
    },
    {
      "epoch": 0.4382284382284382,
      "grad_norm": 0.7872161913664657,
      "learning_rate": 5.982937901740296e-06,
      "loss": 0.6075,
      "step": 282
    },
    {
      "epoch": 0.4397824397824398,
      "grad_norm": 1.1000342755543842,
      "learning_rate": 5.958973788658115e-06,
      "loss": 0.5029,
      "step": 283
    },
    {
      "epoch": 0.44133644133644134,
      "grad_norm": 1.0312528408288124,
      "learning_rate": 5.934986783579349e-06,
      "loss": 0.5106,
      "step": 284
    },
    {
      "epoch": 0.4428904428904429,
      "grad_norm": 0.9770392362683729,
      "learning_rate": 5.91097745910613e-06,
      "loss": 0.4993,
      "step": 285
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.9659615323292092,
      "learning_rate": 5.886946388373387e-06,
      "loss": 0.5144,
      "step": 286
    },
    {
      "epoch": 0.445998445998446,
      "grad_norm": 1.0973377248982952,
      "learning_rate": 5.862894145035158e-06,
      "loss": 0.5517,
      "step": 287
    },
    {
      "epoch": 0.44755244755244755,
      "grad_norm": 1.0588242348941252,
      "learning_rate": 5.83882130325091e-06,
      "loss": 0.6267,
      "step": 288
    },
    {
      "epoch": 0.4491064491064491,
      "grad_norm": 0.8267393911257279,
      "learning_rate": 5.814728437671808e-06,
      "loss": 0.5095,
      "step": 289
    },
    {
      "epoch": 0.4506604506604507,
      "grad_norm": 0.9546909177431534,
      "learning_rate": 5.7906161234270234e-06,
      "loss": 0.5733,
      "step": 290
    },
    {
      "epoch": 0.4522144522144522,
      "grad_norm": 0.8620531179333135,
      "learning_rate": 5.766484936109987e-06,
      "loss": 0.6206,
      "step": 291
    },
    {
      "epoch": 0.45376845376845376,
      "grad_norm": 1.0724392623059507,
      "learning_rate": 5.7423354517646616e-06,
      "loss": 0.5701,
      "step": 292
    },
    {
      "epoch": 0.4553224553224553,
      "grad_norm": 1.103019835893421,
      "learning_rate": 5.718168246871775e-06,
      "loss": 0.5365,
      "step": 293
    },
    {
      "epoch": 0.4568764568764569,
      "grad_norm": 0.8804429160562605,
      "learning_rate": 5.69398389833508e-06,
      "loss": 0.4656,
      "step": 294
    },
    {
      "epoch": 0.45843045843045843,
      "grad_norm": 0.728501083025643,
      "learning_rate": 5.669782983467562e-06,
      "loss": 0.4538,
      "step": 295
    },
    {
      "epoch": 0.45998445998445997,
      "grad_norm": 0.89679619861348,
      "learning_rate": 5.645566079977673e-06,
      "loss": 0.5794,
      "step": 296
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.8540763123009817,
      "learning_rate": 5.621333765955529e-06,
      "loss": 0.6715,
      "step": 297
    },
    {
      "epoch": 0.4630924630924631,
      "grad_norm": 0.9215429357687969,
      "learning_rate": 5.5970866198591235e-06,
      "loss": 0.4903,
      "step": 298
    },
    {
      "epoch": 0.46464646464646464,
      "grad_norm": 0.971111182369051,
      "learning_rate": 5.572825220500505e-06,
      "loss": 0.5717,
      "step": 299
    },
    {
      "epoch": 0.4662004662004662,
      "grad_norm": 0.7525898375498171,
      "learning_rate": 5.548550147031971e-06,
      "loss": 0.5359,
      "step": 300
    },
    {
      "epoch": 0.4677544677544678,
      "grad_norm": 0.956681429994755,
      "learning_rate": 5.524261978932234e-06,
      "loss": 0.5076,
      "step": 301
    },
    {
      "epoch": 0.4693084693084693,
      "grad_norm": 0.8724302136744727,
      "learning_rate": 5.4999612959925995e-06,
      "loss": 0.5158,
      "step": 302
    },
    {
      "epoch": 0.47086247086247085,
      "grad_norm": 0.7690139881658553,
      "learning_rate": 5.475648678303112e-06,
      "loss": 0.6411,
      "step": 303
    },
    {
      "epoch": 0.47241647241647244,
      "grad_norm": 1.044841230716274,
      "learning_rate": 5.451324706238721e-06,
      "loss": 0.515,
      "step": 304
    },
    {
      "epoch": 0.473970473970474,
      "grad_norm": 0.7250530480039049,
      "learning_rate": 5.426989960445415e-06,
      "loss": 0.486,
      "step": 305
    },
    {
      "epoch": 0.4755244755244755,
      "grad_norm": 0.9555846587094475,
      "learning_rate": 5.402645021826367e-06,
      "loss": 0.4873,
      "step": 306
    },
    {
      "epoch": 0.47707847707847706,
      "grad_norm": 0.8949848630568665,
      "learning_rate": 5.3782904715280705e-06,
      "loss": 0.4881,
      "step": 307
    },
    {
      "epoch": 0.47863247863247865,
      "grad_norm": 0.9734141387829424,
      "learning_rate": 5.35392689092646e-06,
      "loss": 0.6362,
      "step": 308
    },
    {
      "epoch": 0.4801864801864802,
      "grad_norm": 0.9432639944738404,
      "learning_rate": 5.329554861613031e-06,
      "loss": 0.558,
      "step": 309
    },
    {
      "epoch": 0.48174048174048173,
      "grad_norm": 0.926110226229468,
      "learning_rate": 5.3051749653809685e-06,
      "loss": 0.6836,
      "step": 310
    },
    {
      "epoch": 0.48329448329448327,
      "grad_norm": 0.8306639970751976,
      "learning_rate": 5.2807877842112475e-06,
      "loss": 0.5398,
      "step": 311
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 0.8451961871886501,
      "learning_rate": 5.256393900258747e-06,
      "loss": 0.5038,
      "step": 312
    },
    {
      "epoch": 0.4864024864024864,
      "grad_norm": 0.92675087789733,
      "learning_rate": 5.231993895838348e-06,
      "loss": 0.5405,
      "step": 313
    },
    {
      "epoch": 0.48795648795648794,
      "grad_norm": 0.897235258445696,
      "learning_rate": 5.207588353411032e-06,
      "loss": 0.5597,
      "step": 314
    },
    {
      "epoch": 0.48951048951048953,
      "grad_norm": 0.8645857836074616,
      "learning_rate": 5.183177855569989e-06,
      "loss": 0.605,
      "step": 315
    },
    {
      "epoch": 0.49106449106449107,
      "grad_norm": 0.8652623075328669,
      "learning_rate": 5.158762985026694e-06,
      "loss": 0.6347,
      "step": 316
    },
    {
      "epoch": 0.4926184926184926,
      "grad_norm": 0.974238117866376,
      "learning_rate": 5.1343443245970095e-06,
      "loss": 0.5848,
      "step": 317
    },
    {
      "epoch": 0.49417249417249415,
      "grad_norm": 1.0702212280059262,
      "learning_rate": 5.10992245718726e-06,
      "loss": 0.7456,
      "step": 318
    },
    {
      "epoch": 0.49572649572649574,
      "grad_norm": 0.9332991045536498,
      "learning_rate": 5.085497965780335e-06,
      "loss": 0.5094,
      "step": 319
    },
    {
      "epoch": 0.4972804972804973,
      "grad_norm": 0.7666822911499076,
      "learning_rate": 5.061071433421754e-06,
      "loss": 0.4988,
      "step": 320
    },
    {
      "epoch": 0.4988344988344988,
      "grad_norm": 1.0306498703517015,
      "learning_rate": 5.0366434432057624e-06,
      "loss": 0.5804,
      "step": 321
    },
    {
      "epoch": 0.5003885003885004,
      "grad_norm": 0.8175426737536865,
      "learning_rate": 5.012214578261402e-06,
      "loss": 0.5078,
      "step": 322
    },
    {
      "epoch": 0.5019425019425019,
      "grad_norm": 1.1437884320473477,
      "learning_rate": 4.987785421738599e-06,
      "loss": 0.5265,
      "step": 323
    },
    {
      "epoch": 0.5034965034965035,
      "grad_norm": 0.9884279600512011,
      "learning_rate": 4.963356556794238e-06,
      "loss": 0.7306,
      "step": 324
    },
    {
      "epoch": 0.5050505050505051,
      "grad_norm": 1.0273572672750226,
      "learning_rate": 4.938928566578247e-06,
      "loss": 0.5403,
      "step": 325
    },
    {
      "epoch": 0.5066045066045066,
      "grad_norm": 0.9211672996812349,
      "learning_rate": 4.914502034219667e-06,
      "loss": 0.6839,
      "step": 326
    },
    {
      "epoch": 0.5081585081585082,
      "grad_norm": 0.8357279664928383,
      "learning_rate": 4.890077542812742e-06,
      "loss": 0.4687,
      "step": 327
    },
    {
      "epoch": 0.5097125097125097,
      "grad_norm": 1.01851904270436,
      "learning_rate": 4.865655675402993e-06,
      "loss": 0.5688,
      "step": 328
    },
    {
      "epoch": 0.5112665112665112,
      "grad_norm": 0.9530277794856284,
      "learning_rate": 4.841237014973305e-06,
      "loss": 0.6302,
      "step": 329
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 0.8945625499429839,
      "learning_rate": 4.8168221444300124e-06,
      "loss": 0.5748,
      "step": 330
    },
    {
      "epoch": 0.5143745143745144,
      "grad_norm": 0.8213408851658214,
      "learning_rate": 4.7924116465889684e-06,
      "loss": 0.4851,
      "step": 331
    },
    {
      "epoch": 0.515928515928516,
      "grad_norm": 0.8909942791600822,
      "learning_rate": 4.768006104161655e-06,
      "loss": 0.6238,
      "step": 332
    },
    {
      "epoch": 0.5174825174825175,
      "grad_norm": 0.8715309540453933,
      "learning_rate": 4.743606099741255e-06,
      "loss": 0.4499,
      "step": 333
    },
    {
      "epoch": 0.519036519036519,
      "grad_norm": 0.9267575769108474,
      "learning_rate": 4.719212215788753e-06,
      "loss": 0.5537,
      "step": 334
    },
    {
      "epoch": 0.5205905205905206,
      "grad_norm": 1.0242507045067903,
      "learning_rate": 4.6948250346190315e-06,
      "loss": 0.5579,
      "step": 335
    },
    {
      "epoch": 0.5221445221445221,
      "grad_norm": 0.8168789723510115,
      "learning_rate": 4.670445138386971e-06,
      "loss": 0.6091,
      "step": 336
    },
    {
      "epoch": 0.5236985236985237,
      "grad_norm": 0.9545835186230072,
      "learning_rate": 4.646073109073542e-06,
      "loss": 0.5246,
      "step": 337
    },
    {
      "epoch": 0.5252525252525253,
      "grad_norm": 1.072717728215519,
      "learning_rate": 4.621709528471931e-06,
      "loss": 0.4918,
      "step": 338
    },
    {
      "epoch": 0.5268065268065268,
      "grad_norm": 0.8183513806382199,
      "learning_rate": 4.5973549781736335e-06,
      "loss": 0.491,
      "step": 339
    },
    {
      "epoch": 0.5283605283605284,
      "grad_norm": 0.9683292063294122,
      "learning_rate": 4.573010039554587e-06,
      "loss": 0.6359,
      "step": 340
| }, | |
| { | |
| "epoch": 0.5299145299145299, | |
| "grad_norm": 1.0264839081727954, | |
| "learning_rate": 4.548675293761281e-06, | |
| "loss": 0.655, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.5314685314685315, | |
| "grad_norm": 0.7361166824699995, | |
| "learning_rate": 4.524351321696889e-06, | |
| "loss": 0.4948, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.533022533022533, | |
| "grad_norm": 1.1294660407885464, | |
| "learning_rate": 4.500038704007402e-06, | |
| "loss": 0.5751, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.5345765345765345, | |
| "grad_norm": 0.934407670174655, | |
| "learning_rate": 4.475738021067768e-06, | |
| "loss": 0.5434, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.5361305361305362, | |
| "grad_norm": 0.9178261235495052, | |
| "learning_rate": 4.451449852968031e-06, | |
| "loss": 0.5195, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.5376845376845377, | |
| "grad_norm": 0.9435990884434239, | |
| "learning_rate": 4.427174779499498e-06, | |
| "loss": 0.7334, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.5392385392385393, | |
| "grad_norm": 0.7911445940026536, | |
| "learning_rate": 4.402913380140878e-06, | |
| "loss": 0.5774, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.5407925407925408, | |
| "grad_norm": 1.0017027537663632, | |
| "learning_rate": 4.378666234044471e-06, | |
| "loss": 0.5454, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.5423465423465423, | |
| "grad_norm": 1.063814328831088, | |
| "learning_rate": 4.354433920022328e-06, | |
| "loss": 0.5098, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.5439005439005439, | |
| "grad_norm": 0.8748299244500228, | |
| "learning_rate": 4.3302170165324385e-06, | |
| "loss": 0.5449, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.5454545454545454, | |
| "grad_norm": 0.8369440203959997, | |
| "learning_rate": 4.306016101664921e-06, | |
| "loss": 0.5319, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.5470085470085471, | |
| "grad_norm": 1.255802499656197, | |
| "learning_rate": 4.281831753128226e-06, | |
| "loss": 0.4056, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.5485625485625486, | |
| "grad_norm": 1.4515615573152536, | |
| "learning_rate": 4.257664548235341e-06, | |
| "loss": 0.7249, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.5501165501165501, | |
| "grad_norm": 0.8907594786465234, | |
| "learning_rate": 4.233515063890013e-06, | |
| "loss": 0.6057, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.5516705516705517, | |
| "grad_norm": 1.1763649838296555, | |
| "learning_rate": 4.209383876572977e-06, | |
| "loss": 0.5774, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.5532245532245532, | |
| "grad_norm": 0.9284584032037477, | |
| "learning_rate": 4.1852715623281934e-06, | |
| "loss": 0.4249, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.5547785547785548, | |
| "grad_norm": 1.038926008258523, | |
| "learning_rate": 4.161178696749092e-06, | |
| "loss": 0.5849, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.5563325563325563, | |
| "grad_norm": 0.9584747895306955, | |
| "learning_rate": 4.1371058549648425e-06, | |
| "loss": 0.5939, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.5578865578865578, | |
| "grad_norm": 0.8432409707491679, | |
| "learning_rate": 4.1130536116266155e-06, | |
| "loss": 0.5545, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.5594405594405595, | |
| "grad_norm": 0.8374098243309068, | |
| "learning_rate": 4.089022540893871e-06, | |
| "loss": 0.5547, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.560994560994561, | |
| "grad_norm": 0.9847581566777266, | |
| "learning_rate": 4.0650132164206515e-06, | |
| "loss": 0.5344, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.5625485625485626, | |
| "grad_norm": 0.7534590052741679, | |
| "learning_rate": 4.041026211341886e-06, | |
| "loss": 0.3433, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.5641025641025641, | |
| "grad_norm": 0.8196558006004733, | |
| "learning_rate": 4.017062098259707e-06, | |
| "loss": 0.4477, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.5656565656565656, | |
| "grad_norm": 0.8447608563383764, | |
| "learning_rate": 3.9931214492297875e-06, | |
| "loss": 0.6335, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.5672105672105672, | |
| "grad_norm": 0.754934359719861, | |
| "learning_rate": 3.969204835747681e-06, | |
| "loss": 0.5477, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.5687645687645687, | |
| "grad_norm": 0.8638053344127238, | |
| "learning_rate": 3.945312828735179e-06, | |
| "loss": 0.6333, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.5703185703185704, | |
| "grad_norm": 0.8784017928990301, | |
| "learning_rate": 3.921445998526684e-06, | |
| "loss": 0.5041, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.5718725718725719, | |
| "grad_norm": 0.9029724643591418, | |
| "learning_rate": 3.897604914855596e-06, | |
| "loss": 0.5241, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.5734265734265734, | |
| "grad_norm": 0.9069716405204118, | |
| "learning_rate": 3.873790146840709e-06, | |
| "loss": 0.501, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.574980574980575, | |
| "grad_norm": 0.9342368491052493, | |
| "learning_rate": 3.8500022629726246e-06, | |
| "loss": 0.3958, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.5765345765345765, | |
| "grad_norm": 0.896012165148463, | |
| "learning_rate": 3.8262418311001884e-06, | |
| "loss": 0.6129, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.578088578088578, | |
| "grad_norm": 1.0159020474194678, | |
| "learning_rate": 3.8025094184169254e-06, | |
| "loss": 0.5708, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5796425796425796, | |
| "grad_norm": 0.8566891987710636, | |
| "learning_rate": 3.778805591447505e-06, | |
| "loss": 0.424, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5811965811965812, | |
| "grad_norm": 0.8831453832251531, | |
| "learning_rate": 3.7551309160342233e-06, | |
| "loss": 0.4455, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.5827505827505828, | |
| "grad_norm": 0.8421527973531183, | |
| "learning_rate": 3.731485957323483e-06, | |
| "loss": 0.5675, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5843045843045843, | |
| "grad_norm": 1.2979562207506874, | |
| "learning_rate": 3.707871279752309e-06, | |
| "loss": 0.6143, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.5858585858585859, | |
| "grad_norm": 0.995197386244874, | |
| "learning_rate": 3.68428744703488e-06, | |
| "loss": 0.4698, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5874125874125874, | |
| "grad_norm": 1.1717778711933917, | |
| "learning_rate": 3.6607350221490593e-06, | |
| "loss": 0.6312, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.5889665889665889, | |
| "grad_norm": 0.8374964354419717, | |
| "learning_rate": 3.6372145673229683e-06, | |
| "loss": 0.5194, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5905205905205905, | |
| "grad_norm": 0.7942047183235935, | |
| "learning_rate": 3.613726644021559e-06, | |
| "loss": 0.4238, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.5920745920745921, | |
| "grad_norm": 0.773794273550258, | |
| "learning_rate": 3.590271812933212e-06, | |
| "loss": 0.5011, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5936285936285937, | |
| "grad_norm": 0.8912405571204569, | |
| "learning_rate": 3.5668506339563502e-06, | |
| "loss": 0.4961, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5951825951825952, | |
| "grad_norm": 2.0686952933355705, | |
| "learning_rate": 3.5434636661860776e-06, | |
| "loss": 0.4724, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.5967365967365967, | |
| "grad_norm": 1.0721561025968982, | |
| "learning_rate": 3.5201114679008286e-06, | |
| "loss": 0.643, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.5982905982905983, | |
| "grad_norm": 0.9615898602828296, | |
| "learning_rate": 3.4967945965490434e-06, | |
| "loss": 0.4899, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.5998445998445998, | |
| "grad_norm": 0.8443768517564723, | |
| "learning_rate": 3.4735136087358646e-06, | |
| "loss": 0.5816, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.6013986013986014, | |
| "grad_norm": 0.8687572364511918, | |
| "learning_rate": 3.450269060209841e-06, | |
| "loss": 0.6585, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.602952602952603, | |
| "grad_norm": 0.7411207969719316, | |
| "learning_rate": 3.42706150584967e-06, | |
| "loss": 0.5498, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.6045066045066045, | |
| "grad_norm": 0.8947980443263261, | |
| "learning_rate": 3.4038914996509464e-06, | |
| "loss": 0.6219, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.6060606060606061, | |
| "grad_norm": 0.9457854814078657, | |
| "learning_rate": 3.3807595947129405e-06, | |
| "loss": 0.4819, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.6076146076146076, | |
| "grad_norm": 0.8076353901243662, | |
| "learning_rate": 3.357666343225396e-06, | |
| "loss": 0.4917, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.6091686091686092, | |
| "grad_norm": 0.9568999807001125, | |
| "learning_rate": 3.3346122964553407e-06, | |
| "loss": 0.3623, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.6107226107226107, | |
| "grad_norm": 0.9749020198956558, | |
| "learning_rate": 3.3115980047339415e-06, | |
| "loss": 0.6806, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.6122766122766122, | |
| "grad_norm": 0.9565776234695726, | |
| "learning_rate": 3.288624017443353e-06, | |
| "loss": 0.5648, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.6138306138306139, | |
| "grad_norm": 0.8372606567529206, | |
| "learning_rate": 3.265690883003609e-06, | |
| "loss": 0.6041, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.6153846153846154, | |
| "grad_norm": 0.9657923180658315, | |
| "learning_rate": 3.2427991488595334e-06, | |
| "loss": 0.5534, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.616938616938617, | |
| "grad_norm": 0.779796777254401, | |
| "learning_rate": 3.219949361467668e-06, | |
| "loss": 0.4895, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.6184926184926185, | |
| "grad_norm": 1.0023947035662037, | |
| "learning_rate": 3.197142066283225e-06, | |
| "loss": 0.5017, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.62004662004662, | |
| "grad_norm": 0.968677826181818, | |
| "learning_rate": 3.174377807747079e-06, | |
| "loss": 0.6008, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.6216006216006216, | |
| "grad_norm": 1.1511399987075677, | |
| "learning_rate": 3.1516571292727553e-06, | |
| "loss": 0.5573, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.6231546231546231, | |
| "grad_norm": 0.9562397655581797, | |
| "learning_rate": 3.128980573233465e-06, | |
| "loss": 0.678, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.6247086247086248, | |
| "grad_norm": 1.0314415028331958, | |
| "learning_rate": 3.1063486809491595e-06, | |
| "loss": 0.5402, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.6262626262626263, | |
| "grad_norm": 0.8869287498009347, | |
| "learning_rate": 3.0837619926736027e-06, | |
| "loss": 0.4637, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.6278166278166278, | |
| "grad_norm": 0.915327973200595, | |
| "learning_rate": 3.061221047581482e-06, | |
| "loss": 0.4994, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.6293706293706294, | |
| "grad_norm": 0.8673891612365153, | |
| "learning_rate": 3.038726383755531e-06, | |
| "loss": 0.5784, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.6309246309246309, | |
| "grad_norm": 0.8149059444255452, | |
| "learning_rate": 3.0162785381736893e-06, | |
| "loss": 0.487, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.6324786324786325, | |
| "grad_norm": 1.2317767684482286, | |
| "learning_rate": 2.9938780466962768e-06, | |
| "loss": 0.6945, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.634032634032634, | |
| "grad_norm": 0.8114884457654089, | |
| "learning_rate": 2.9715254440532147e-06, | |
| "loss": 0.4866, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.6355866355866356, | |
| "grad_norm": 1.0025564372683453, | |
| "learning_rate": 2.9492212638312458e-06, | |
| "loss": 0.4607, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.6371406371406372, | |
| "grad_norm": 0.7375496261692214, | |
| "learning_rate": 2.9269660384612064e-06, | |
| "loss": 0.5557, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.6386946386946387, | |
| "grad_norm": 0.7955765463661515, | |
| "learning_rate": 2.90476029920532e-06, | |
| "loss": 0.4934, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.6402486402486403, | |
| "grad_norm": 0.8629641026853778, | |
| "learning_rate": 2.882604576144505e-06, | |
| "loss": 0.6129, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.6418026418026418, | |
| "grad_norm": 0.9757114529523541, | |
| "learning_rate": 2.8604993981657247e-06, | |
| "loss": 0.4185, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.6433566433566433, | |
| "grad_norm": 0.7834021215202662, | |
| "learning_rate": 2.8384452929493645e-06, | |
| "loss": 0.3618, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.6449106449106449, | |
| "grad_norm": 1.0670630280828879, | |
| "learning_rate": 2.8164427869566367e-06, | |
| "loss": 0.5638, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.6464646464646465, | |
| "grad_norm": 0.8113716929371272, | |
| "learning_rate": 2.7944924054170087e-06, | |
| "loss": 0.5776, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.6480186480186481, | |
| "grad_norm": 0.9957638811754096, | |
| "learning_rate": 2.7725946723156626e-06, | |
| "loss": 0.4072, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.6495726495726496, | |
| "grad_norm": 0.8596642587969949, | |
| "learning_rate": 2.750750110381001e-06, | |
| "loss": 0.5096, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.6511266511266511, | |
| "grad_norm": 1.0292360670108274, | |
| "learning_rate": 2.728959241072149e-06, | |
| "loss": 0.4778, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.6526806526806527, | |
| "grad_norm": 1.2070046089694704, | |
| "learning_rate": 2.7072225845665256e-06, | |
| "loss": 0.5104, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.6542346542346542, | |
| "grad_norm": 0.7633836364809529, | |
| "learning_rate": 2.6855406597474098e-06, | |
| "loss": 0.5486, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.6557886557886557, | |
| "grad_norm": 1.1041806227920956, | |
| "learning_rate": 2.6639139841915628e-06, | |
| "loss": 0.4444, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.6573426573426573, | |
| "grad_norm": 0.9575125133042081, | |
| "learning_rate": 2.6423430741568746e-06, | |
| "loss": 0.4381, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.6588966588966589, | |
| "grad_norm": 0.9800662598205424, | |
| "learning_rate": 2.6208284445700373e-06, | |
| "loss": 0.5281, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.6604506604506605, | |
| "grad_norm": 0.7475115086795047, | |
| "learning_rate": 2.5993706090142484e-06, | |
| "loss": 0.5738, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.662004662004662, | |
| "grad_norm": 0.7957543863519091, | |
| "learning_rate": 2.577970079716959e-06, | |
| "loss": 0.5223, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.6635586635586636, | |
| "grad_norm": 0.8150818241751017, | |
| "learning_rate": 2.5566273675376386e-06, | |
| "loss": 0.4616, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.6651126651126651, | |
| "grad_norm": 0.867516029458422, | |
| "learning_rate": 2.535342981955591e-06, | |
| "loss": 0.5592, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 0.8202323607458766, | |
| "learning_rate": 2.5141174310577774e-06, | |
| "loss": 0.4761, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.6682206682206682, | |
| "grad_norm": 1.0531709032808028, | |
| "learning_rate": 2.492951221526705e-06, | |
| "loss": 0.5706, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.6697746697746698, | |
| "grad_norm": 0.7925613983131308, | |
| "learning_rate": 2.4718448586283126e-06, | |
| "loss": 0.4943, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.6713286713286714, | |
| "grad_norm": 1.015333414269206, | |
| "learning_rate": 2.4507988461999283e-06, | |
| "loss": 0.547, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.6728826728826729, | |
| "grad_norm": 1.0021582367745274, | |
| "learning_rate": 2.429813686638227e-06, | |
| "loss": 0.6319, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.6744366744366744, | |
| "grad_norm": 1.2720975755074075, | |
| "learning_rate": 2.408889880887246e-06, | |
| "loss": 0.4789, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.675990675990676, | |
| "grad_norm": 0.7759116382434137, | |
| "learning_rate": 2.38802792842642e-06, | |
| "loss": 0.5063, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.6775446775446775, | |
| "grad_norm": 0.7897990263143547, | |
| "learning_rate": 2.3672283272586745e-06, | |
| "loss": 0.6591, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.679098679098679, | |
| "grad_norm": 0.8072958152033197, | |
| "learning_rate": 2.346491573898513e-06, | |
| "loss": 0.4612, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.6806526806526807, | |
| "grad_norm": 0.7877836535078193, | |
| "learning_rate": 2.3258181633601836e-06, | |
| "loss": 0.4869, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.6822066822066822, | |
| "grad_norm": 0.8902691924995161, | |
| "learning_rate": 2.30520858914585e-06, | |
| "loss": 0.5416, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.6837606837606838, | |
| "grad_norm": 0.6369068921434858, | |
| "learning_rate": 2.2846633432338256e-06, | |
| "loss": 0.4979, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.6853146853146853, | |
| "grad_norm": 0.8625022953005387, | |
| "learning_rate": 2.2641829160668137e-06, | |
| "loss": 0.5279, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.6868686868686869, | |
| "grad_norm": 0.8374815085950136, | |
| "learning_rate": 2.243767796540207e-06, | |
| "loss": 0.5359, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.6884226884226884, | |
| "grad_norm": 1.0648834804711098, | |
| "learning_rate": 2.223418471990421e-06, | |
| "loss": 0.6371, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.6899766899766899, | |
| "grad_norm": 1.3640624616380017, | |
| "learning_rate": 2.2031354281832555e-06, | |
| "loss": 0.7164, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.6915306915306916, | |
| "grad_norm": 0.9650947768114302, | |
| "learning_rate": 2.1829191493022974e-06, | |
| "loss": 0.4751, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.6930846930846931, | |
| "grad_norm": 0.9507196966289148, | |
| "learning_rate": 2.1627701179373645e-06, | |
| "loss": 0.5319, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.6946386946386947, | |
| "grad_norm": 1.1406920101918319, | |
| "learning_rate": 2.142688815072986e-06, | |
| "loss": 0.6378, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.6961926961926962, | |
| "grad_norm": 0.9181843236568763, | |
| "learning_rate": 2.1226757200769225e-06, | |
| "loss": 0.428, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.6977466977466977, | |
| "grad_norm": 1.2092134761120403, | |
| "learning_rate": 2.102731310688723e-06, | |
| "loss": 0.5284, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.6993006993006993, | |
| "grad_norm": 1.1089425350721833, | |
| "learning_rate": 2.0828560630083127e-06, | |
| "loss": 0.5574, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.7008547008547008, | |
| "grad_norm": 0.6997904053981342, | |
| "learning_rate": 2.0630504514846372e-06, | |
| "loss": 0.582, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.7024087024087025, | |
| "grad_norm": 0.8680833720150671, | |
| "learning_rate": 2.0433149489043296e-06, | |
| "loss": 0.4877, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.703962703962704, | |
| "grad_norm": 1.0412111329827756, | |
| "learning_rate": 2.0236500263804355e-06, | |
| "loss": 0.5206, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.7055167055167055, | |
| "grad_norm": 0.9511530379710712, | |
| "learning_rate": 2.0040561533411494e-06, | |
| "loss": 0.5571, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.7070707070707071, | |
| "grad_norm": 0.9857137048304959, | |
| "learning_rate": 1.9845337975186297e-06, | |
| "loss": 0.5947, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.7086247086247086, | |
| "grad_norm": 0.7358056924894385, | |
| "learning_rate": 1.9650834249378125e-06, | |
| "loss": 0.446, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.7101787101787101, | |
| "grad_norm": 0.9283899065701541, | |
| "learning_rate": 1.945705499905305e-06, | |
| "loss": 0.5944, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.7117327117327117, | |
| "grad_norm": 0.9985343909554646, | |
| "learning_rate": 1.926400484998289e-06, | |
| "loss": 0.6292, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.7132867132867133, | |
| "grad_norm": 0.8149487764948858, | |
| "learning_rate": 1.9071688410534828e-06, | |
| "loss": 0.4852, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.7148407148407149, | |
| "grad_norm": 1.0046527442068103, | |
| "learning_rate": 1.8880110271561415e-06, | |
| "loss": 0.4948, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.7163947163947164, | |
| "grad_norm": 0.7380450104665942, | |
| "learning_rate": 1.8689275006291035e-06, | |
| "loss": 0.5346, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.717948717948718, | |
| "grad_norm": 0.9071832673516289, | |
| "learning_rate": 1.8499187170218614e-06, | |
| "loss": 0.6154, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.7195027195027195, | |
| "grad_norm": 0.885237217235856, | |
| "learning_rate": 1.8309851300996934e-06, | |
| "loss": 0.546, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.721056721056721, | |
| "grad_norm": 0.8643676814843989, | |
| "learning_rate": 1.8121271918328314e-06, | |
| "loss": 0.5854, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.7226107226107226, | |
| "grad_norm": 0.7637623236114972, | |
| "learning_rate": 1.793345352385678e-06, | |
| "loss": 0.5339, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.7241647241647242, | |
| "grad_norm": 0.920346480267907, | |
| "learning_rate": 1.7746400601060476e-06, | |
| "loss": 0.6024, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.7257187257187258, | |
| "grad_norm": 0.8108469777726085, | |
| "learning_rate": 1.7560117615144717e-06, | |
| "loss": 0.4749, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.7272727272727273, | |
| "grad_norm": 0.7229261666322232, | |
| "learning_rate": 1.7374609012935412e-06, | |
| "loss": 0.496, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.7288267288267288, | |
| "grad_norm": 0.8134361172252622, | |
| "learning_rate": 1.7189879222772894e-06, | |
| "loss": 0.4782, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.7303807303807304, | |
| "grad_norm": 1.1199242105205238, | |
| "learning_rate": 1.7005932654406165e-06, | |
| "loss": 0.5725, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.7319347319347319, | |
| "grad_norm": 0.9531044801518151, | |
| "learning_rate": 1.682277369888769e-06, | |
| "loss": 0.3771, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.7334887334887334, | |
| "grad_norm": 0.8181386506919823, | |
| "learning_rate": 1.6640406728468534e-06, | |
| "loss": 0.4525, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.7350427350427351, | |
| "grad_norm": 0.9822241425856895, | |
| "learning_rate": 1.6458836096494046e-06, | |
| "loss": 0.5829, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.7365967365967366, | |
| "grad_norm": 0.9429402946349031, | |
| "learning_rate": 1.6278066137299898e-06, | |
| "loss": 0.6585, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.7381507381507382, | |
| "grad_norm": 0.8416072050437153, | |
| "learning_rate": 1.6098101166108593e-06, | |
| "loss": 0.4289, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.7397047397047397, | |
| "grad_norm": 0.8273010419688407, | |
| "learning_rate": 1.5918945478926484e-06, | |
| "loss": 0.515, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.7412587412587412, | |
| "grad_norm": 0.85924664459265, | |
| "learning_rate": 1.5740603352441281e-06, | |
| "loss": 0.4741, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.7428127428127428, | |
| "grad_norm": 0.9930289461569045, | |
| "learning_rate": 1.5563079043919843e-06, | |
| "loss": 0.548, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.7443667443667443, | |
| "grad_norm": 0.7836995285858434, | |
| "learning_rate": 1.5386376791106627e-06, | |
| "loss": 0.4101, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.745920745920746, | |
| "grad_norm": 0.8342511178476052, | |
| "learning_rate": 1.5210500812122548e-06, | |
| "loss": 0.4793, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.7474747474747475, | |
| "grad_norm": 1.0560959113411836, | |
| "learning_rate": 1.5035455305364188e-06, | |
| "loss": 0.5464, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.749028749028749, | |
| "grad_norm": 1.039285125963634, | |
| "learning_rate": 1.4861244449403717e-06, | |
| "loss": 0.3743, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.7505827505827506, | |
| "grad_norm": 0.7525227170176826, | |
| "learning_rate": 1.4687872402888991e-06, | |
| "loss": 0.6774, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.7521367521367521, | |
| "grad_norm": 1.0954674880778155, | |
| "learning_rate": 1.451534330444438e-06, | |
| "loss": 0.5184, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.7536907536907537, | |
| "grad_norm": 0.7965815930286931, | |
| "learning_rate": 1.4343661272571967e-06, | |
| "loss": 0.4797, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.7552447552447552, | |
| "grad_norm": 0.9801653007138387, | |
| "learning_rate": 1.4172830405553216e-06, | |
| "loss": 0.7127, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.7567987567987567, | |
| "grad_norm": 0.7911339013108166, | |
| "learning_rate": 1.4002854781351104e-06, | |
| "loss": 0.6389, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.7583527583527584, | |
| "grad_norm": 1.0716997870139735, | |
| "learning_rate": 1.3833738457512842e-06, | |
| "loss": 0.4308, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.7599067599067599, | |
| "grad_norm": 0.8538650301172175, | |
| "learning_rate": 1.3665485471072937e-06, | |
| "loss": 0.5437, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.7614607614607615, | |
| "grad_norm": 0.9268324110689813, | |
| "learning_rate": 1.3498099838456947e-06, | |
| "loss": 0.5925, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.763014763014763, | |
| "grad_norm": 1.0985563955045539, | |
| "learning_rate": 1.3331585555385458e-06, | |
| "loss": 0.5372, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.7645687645687645, | |
| "grad_norm": 0.8917025830540084, | |
| "learning_rate": 1.3165946596778773e-06, | |
| "loss": 0.563, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.7661227661227661, | |
| "grad_norm": 0.9138600611635691, | |
| "learning_rate": 1.3001186916662066e-06, | |
| "loss": 0.656, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.7676767676767676, | |
| "grad_norm": 0.7678531340040682, | |
| "learning_rate": 1.2837310448070929e-06, | |
| "loss": 0.4876, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.7692307692307693, | |
| "grad_norm": 0.8377909993364742, | |
| "learning_rate": 1.2674321102957476e-06, | |
| "loss": 0.4719, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.7707847707847708, | |
| "grad_norm": 0.9620034622741296, | |
| "learning_rate": 1.251222277209702e-06, | |
| "loss": 0.638, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.7723387723387724, | |
| "grad_norm": 0.8691503561119966, | |
| "learning_rate": 1.2351019324995128e-06, | |
| "loss": 0.5645, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.7738927738927739, | |
| "grad_norm": 1.019242268045496, | |
| "learning_rate": 1.2190714609795334e-06, | |
| "loss": 0.5954, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.7754467754467754, | |
| "grad_norm": 0.8600773262545407, | |
| "learning_rate": 1.203131245318721e-06, | |
| "loss": 0.4787, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.777000777000777, | |
| "grad_norm": 0.7582051607025506, | |
| "learning_rate": 1.1872816660315029e-06, | |
| "loss": 0.5564, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.7785547785547785, | |
| "grad_norm": 1.13872043452736, | |
| "learning_rate": 1.171523101468693e-06, | |
| "loss": 0.5101, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.7801087801087802, | |
| "grad_norm": 0.9435747581759769, | |
| "learning_rate": 1.1558559278084647e-06, | |
| "loss": 0.6206, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.7816627816627817, | |
| "grad_norm": 0.8633704185786354, | |
| "learning_rate": 1.1402805190473649e-06, | |
| "loss": 0.5582, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.7832167832167832, | |
| "grad_norm": 0.7417786554815952, | |
| "learning_rate": 1.124797246991387e-06, | |
| "loss": 0.4371, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.7847707847707848, | |
| "grad_norm": 1.0878350516065922, | |
| "learning_rate": 1.1094064812471028e-06, | |
| "loss": 0.495, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.7863247863247863, | |
| "grad_norm": 0.9285222085925927, | |
| "learning_rate": 1.0941085892128272e-06, | |
| "loss": 0.5541, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.7878787878787878, | |
| "grad_norm": 0.8733376888573161, | |
| "learning_rate": 1.07890393606986e-06, | |
| "loss": 0.5254, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.7894327894327894, | |
| "grad_norm": 0.9249119247943376, | |
| "learning_rate": 1.0637928847737594e-06, | |
| "loss": 0.6275, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.790986790986791, | |
| "grad_norm": 0.8091969696708994, | |
| "learning_rate": 1.0487757960456812e-06, | |
| "loss": 0.5614, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.7925407925407926, | |
| "grad_norm": 0.9574853811974608, | |
| "learning_rate": 1.0338530283637704e-06, | |
| "loss": 0.4403, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.7940947940947941, | |
| "grad_norm": 1.2352084520669158, | |
| "learning_rate": 1.0190249379546024e-06, | |
| "loss": 0.6218, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.7956487956487956, | |
| "grad_norm": 0.898957730122624, | |
| "learning_rate": 1.0042918787846757e-06, | |
| "loss": 0.5151, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.7972027972027972, | |
| "grad_norm": 0.9789627682865999, | |
| "learning_rate": 9.896542025519645e-07, | |
| "loss": 0.5126, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.7987567987567987, | |
| "grad_norm": 0.8612205498222681, | |
| "learning_rate": 9.751122586775253e-07, | |
| "loss": 0.5889, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.8003108003108003, | |
| "grad_norm": 1.1219328556676227, | |
| "learning_rate": 9.606663942971568e-07, | |
| "loss": 0.5793, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.8018648018648019, | |
| "grad_norm": 1.0414597225046132, | |
| "learning_rate": 9.463169542531059e-07, | |
| "loss": 0.5137, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.8034188034188035, | |
| "grad_norm": 0.8844131961044368, | |
| "learning_rate": 9.320642810858421e-07, | |
| "loss": 0.5543, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.804972804972805, | |
| "grad_norm": 0.8508175460334907, | |
| "learning_rate": 9.179087150258814e-07, | |
| "loss": 0.5992, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.8065268065268065, | |
| "grad_norm": 0.9527620756716164, | |
| "learning_rate": 9.038505939856612e-07, | |
| "loss": 0.4986, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.8080808080808081, | |
| "grad_norm": 0.8675412414033893, | |
| "learning_rate": 8.898902535514747e-07, | |
| "loss": 0.5465, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.8096348096348096, | |
| "grad_norm": 0.9106070035517946, | |
| "learning_rate": 8.76028026975459e-07, | |
| "loss": 0.6284, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.8111888111888111, | |
| "grad_norm": 0.9448258212071192, | |
| "learning_rate": 8.62264245167641e-07, | |
| "loss": 0.6337, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.8127428127428128, | |
| "grad_norm": 0.8167720543226298, | |
| "learning_rate": 8.485992366880419e-07, | |
| "loss": 0.4287, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.8142968142968143, | |
| "grad_norm": 0.8126136928569179, | |
| "learning_rate": 8.35033327738829e-07, | |
| "loss": 0.4002, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.8158508158508159, | |
| "grad_norm": 0.9251994602254382, | |
| "learning_rate": 8.215668421565276e-07, | |
| "loss": 0.624, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.8174048174048174, | |
| "grad_norm": 1.043989325595444, | |
| "learning_rate": 8.082001014042945e-07, | |
| "loss": 0.5308, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.818958818958819, | |
| "grad_norm": 0.8603513049330545, | |
| "learning_rate": 7.949334245642459e-07, | |
| "loss": 0.418, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.8205128205128205, | |
| "grad_norm": 0.9200257922531035, | |
| "learning_rate": 7.817671283298345e-07, | |
| "loss": 0.5815, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.822066822066822, | |
| "grad_norm": 0.8267010262009848, | |
| "learning_rate": 7.687015269982917e-07, | |
| "loss": 0.5761, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.8236208236208237, | |
| "grad_norm": 1.0208487089497174, | |
| "learning_rate": 7.557369324631303e-07, | |
| "loss": 0.5874, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.8251748251748252, | |
| "grad_norm": 0.9096017765334373, | |
| "learning_rate": 7.4287365420669e-07, | |
| "loss": 0.7353, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.8267288267288267, | |
| "grad_norm": 0.8240298271847039, | |
| "learning_rate": 7.301119992927585e-07, | |
| "loss": 0.4626, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.8282828282828283, | |
| "grad_norm": 0.9551447108213159, | |
| "learning_rate": 7.174522723592342e-07, | |
| "loss": 0.5483, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.8298368298368298, | |
| "grad_norm": 0.8119880605731007, | |
| "learning_rate": 7.048947756108576e-07, | |
| "loss": 0.538, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.8313908313908314, | |
| "grad_norm": 0.696193829551657, | |
| "learning_rate": 6.924398088119988e-07, | |
| "loss": 0.4711, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.8329448329448329, | |
| "grad_norm": 0.8690121207713474, | |
| "learning_rate": 6.800876692794994e-07, | |
| "loss": 0.545, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.8344988344988346, | |
| "grad_norm": 0.9178082058159108, | |
| "learning_rate": 6.678386518755747e-07, | |
| "loss": 0.6015, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.8360528360528361, | |
| "grad_norm": 0.9846180274571776, | |
| "learning_rate": 6.556930490007762e-07, | |
| "loss": 0.5199, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.8376068376068376, | |
| "grad_norm": 0.902090010388194, | |
| "learning_rate": 6.436511505870091e-07, | |
| "loss": 0.4475, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.8391608391608392, | |
| "grad_norm": 0.9794639484139962, | |
| "learning_rate": 6.317132440906188e-07, | |
| "loss": 0.6774, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.8407148407148407, | |
| "grad_norm": 0.959849989606041, | |
| "learning_rate": 6.198796144855168e-07, | |
| "loss": 0.5281, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.8422688422688422, | |
| "grad_norm": 0.839879332088564, | |
| "learning_rate": 6.081505442563912e-07, | |
| "loss": 0.4718, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.8438228438228438, | |
| "grad_norm": 0.8946560239256621, | |
| "learning_rate": 5.965263133919508e-07, | |
| "loss": 0.5591, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.8453768453768454, | |
| "grad_norm": 0.7803446123118281, | |
| "learning_rate": 5.850071993782525e-07, | |
| "loss": 0.487, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.846930846930847, | |
| "grad_norm": 0.8557306905816813, | |
| "learning_rate": 5.735934771920704e-07, | |
| "loss": 0.3648, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.8484848484848485, | |
| "grad_norm": 1.1143798478654294, | |
| "learning_rate": 5.622854192943317e-07, | |
| "loss": 0.617, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.85003885003885, | |
| "grad_norm": 0.8289932147243455, | |
| "learning_rate": 5.510832956236173e-07, | |
| "loss": 0.4645, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.8515928515928516, | |
| "grad_norm": 1.1875959846429844, | |
| "learning_rate": 5.399873735897137e-07, | |
| "loss": 0.574, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.8531468531468531, | |
| "grad_norm": 1.0110337398247589, | |
| "learning_rate": 5.289979180672344e-07, | |
| "loss": 0.5043, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.8547008547008547, | |
| "grad_norm": 1.030393329446215, | |
| "learning_rate": 5.181151913892896e-07, | |
| "loss": 0.6686, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.8562548562548562, | |
| "grad_norm": 0.7985911619479463, | |
| "learning_rate": 5.073394533412296e-07, | |
| "loss": 0.4997, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.8578088578088578, | |
| "grad_norm": 0.7409828694431111, | |
| "learning_rate": 4.966709611544435e-07, | |
| "loss": 0.4658, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.8593628593628594, | |
| "grad_norm": 0.9377326798150777, | |
| "learning_rate": 4.861099695002158e-07, | |
| "loss": 0.5396, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.8609168609168609, | |
| "grad_norm": 0.9284319606041459, | |
| "learning_rate": 4.7565673048364735e-07, | |
| "loss": 0.5622, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.8624708624708625, | |
| "grad_norm": 1.0825280137566733, | |
| "learning_rate": 4.6531149363764126e-07, | |
| "loss": 0.4918, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.864024864024864, | |
| "grad_norm": 0.9050580420904966, | |
| "learning_rate": 4.550745059169398e-07, | |
| "loss": 0.561, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.8655788655788655, | |
| "grad_norm": 0.8106816259375483, | |
| "learning_rate": 4.4494601169223715e-07, | |
| "loss": 0.5667, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.8671328671328671, | |
| "grad_norm": 0.7324122672367018, | |
| "learning_rate": 4.349262527443371e-07, | |
| "loss": 0.4168, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.8686868686868687, | |
| "grad_norm": 0.8563060227977591, | |
| "learning_rate": 4.2501546825838735e-07, | |
| "loss": 0.5519, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.8702408702408703, | |
| "grad_norm": 1.0242601352816976, | |
| "learning_rate": 4.152138948181689e-07, | |
| "loss": 0.6415, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.8717948717948718, | |
| "grad_norm": 0.9408471315505526, | |
| "learning_rate": 4.0552176640045017e-07, | |
| "loss": 0.6281, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.8733488733488733, | |
| "grad_norm": 0.9252425519783338, | |
| "learning_rate": 3.959393143693946e-07, | |
| "loss": 0.5774, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.8749028749028749, | |
| "grad_norm": 0.7918385206518544, | |
| "learning_rate": 3.864667674710454e-07, | |
| "loss": 0.6713, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.8764568764568764, | |
| "grad_norm": 0.7826963848048722, | |
| "learning_rate": 3.7710435182786053e-07, | |
| "loss": 0.4774, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.878010878010878, | |
| "grad_norm": 0.7365534429069017, | |
| "learning_rate": 3.6785229093331987e-07, | |
| "loss": 0.3846, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.8795648795648796, | |
| "grad_norm": 0.7733142200970872, | |
| "learning_rate": 3.587108056465827e-07, | |
| "loss": 0.403, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.8811188811188811, | |
| "grad_norm": 1.022816223688587, | |
| "learning_rate": 3.496801141872225e-07, | |
| "loss": 0.5213, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.8826728826728827, | |
| "grad_norm": 0.9427543161563908, | |
| "learning_rate": 3.407604321300123e-07, | |
| "loss": 0.5404, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.8842268842268842, | |
| "grad_norm": 1.02774835732094, | |
| "learning_rate": 3.3195197239978384e-07, | |
| "loss": 0.5665, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.8857808857808858, | |
| "grad_norm": 1.064922469524254, | |
| "learning_rate": 3.232549452663403e-07, | |
| "loss": 0.5141, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.8873348873348873, | |
| "grad_norm": 0.7085691145526173, | |
| "learning_rate": 3.146695583394377e-07, | |
| "loss": 0.54, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.8888888888888888, | |
| "grad_norm": 0.7520808912542442, | |
| "learning_rate": 3.061960165638317e-07, | |
| "loss": 0.4426, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.8904428904428905, | |
| "grad_norm": 0.8842159851635149, | |
| "learning_rate": 2.9783452221438304e-07, | |
| "loss": 0.5344, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.891996891996892, | |
| "grad_norm": 1.2686448157314256, | |
| "learning_rate": 2.895852748912298e-07, | |
| "loss": 0.512, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.8935508935508936, | |
| "grad_norm": 1.054717271270882, | |
| "learning_rate": 2.814484715150212e-07, | |
| "loss": 0.6288, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.8951048951048951, | |
| "grad_norm": 0.9040626236395383, | |
| "learning_rate": 2.734243063222181e-07, | |
| "loss": 0.4089, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.8966588966588966, | |
| "grad_norm": 1.0743427211769383, | |
| "learning_rate": 2.655129708604576e-07, | |
| "loss": 0.6188, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.8982128982128982, | |
| "grad_norm": 0.8974286721485075, | |
| "learning_rate": 2.5771465398397757e-07, | |
| "loss": 0.5755, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.8997668997668997, | |
| "grad_norm": 1.030531093175199, | |
| "learning_rate": 2.5002954184910887e-07, | |
| "loss": 0.4618, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.9013209013209014, | |
| "grad_norm": 1.0117403154653173, | |
| "learning_rate": 2.424578179098358e-07, | |
| "loss": 0.5362, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.9028749028749029, | |
| "grad_norm": 0.9476481033300673, | |
| "learning_rate": 2.3499966291341213e-07, | |
| "loss": 0.5312, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.9044289044289044, | |
| "grad_norm": 1.0383350051929625, | |
| "learning_rate": 2.2765525489604702e-07, | |
| "loss": 0.5124, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.905982905982906, | |
| "grad_norm": 1.202514418358822, | |
| "learning_rate": 2.2042476917865706e-07, | |
| "loss": 0.592, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.9075369075369075, | |
| "grad_norm": 0.9288774041490958, | |
| "learning_rate": 2.1330837836267882e-07, | |
| "loss": 0.6253, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.9090909090909091, | |
| "grad_norm": 1.0753907470152138, | |
| "learning_rate": 2.0630625232595126e-07, | |
| "loss": 0.5975, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.9106449106449106, | |
| "grad_norm": 0.7903008621784656, | |
| "learning_rate": 1.9941855821865918e-07, | |
| "loss": 0.5071, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.9121989121989122, | |
| "grad_norm": 0.9003087903430828, | |
| "learning_rate": 1.9264546045934196e-07, | |
| "loss": 0.5412, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.9137529137529138, | |
| "grad_norm": 0.855353129364411, | |
| "learning_rate": 1.859871207309688e-07, | |
| "loss": 0.715, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.9153069153069153, | |
| "grad_norm": 0.8096629835043319, | |
| "learning_rate": 1.7944369797708362e-07, | |
| "loss": 0.6314, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.9168609168609169, | |
| "grad_norm": 0.8646658337676514, | |
| "learning_rate": 1.7301534839800348e-07, | |
| "loss": 0.7883, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.9184149184149184, | |
| "grad_norm": 0.8516940037455805, | |
| "learning_rate": 1.6670222544709515e-07, | |
| "loss": 0.4638, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.9199689199689199, | |
| "grad_norm": 1.229821089655676, | |
| "learning_rate": 1.6050447982711214e-07, | |
| "loss": 0.5523, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.9215229215229215, | |
| "grad_norm": 0.823339154576201, | |
| "learning_rate": 1.5442225948659183e-07, | |
| "loss": 0.4838, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 0.9100465786354403, | |
| "learning_rate": 1.4845570961633192e-07, | |
| "loss": 0.4591, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.9246309246309247, | |
| "grad_norm": 0.8249462059650916, | |
| "learning_rate": 1.426049726459172e-07, | |
| "loss": 0.6088, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.9261849261849262, | |
| "grad_norm": 0.8140830885015211, | |
| "learning_rate": 1.368701882403234e-07, | |
| "loss": 0.5409, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.9277389277389277, | |
| "grad_norm": 0.7740424422028419, | |
| "learning_rate": 1.3125149329658083e-07, | |
| "loss": 0.5304, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.9292929292929293, | |
| "grad_norm": 0.7720807782608536, | |
| "learning_rate": 1.2574902194050996e-07, | |
| "loss": 0.5871, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.9308469308469308, | |
| "grad_norm": 0.8881572533340473, | |
| "learning_rate": 1.2036290552351838e-07, | |
| "loss": 0.5654, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.9324009324009324, | |
| "grad_norm": 0.7381756170416801, | |
| "learning_rate": 1.150932726194609e-07, | |
| "loss": 0.5499, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.933954933954934, | |
| "grad_norm": 0.7736924336820196, | |
| "learning_rate": 1.0994024902157674e-07, | |
| "loss": 0.5977, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.9355089355089355, | |
| "grad_norm": 0.9074736160128457, | |
| "learning_rate": 1.0490395773948336e-07, | |
| "loss": 0.4871, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.9370629370629371, | |
| "grad_norm": 0.7621234368730365, | |
| "learning_rate": 9.998451899624007e-08, | |
| "loss": 0.5144, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.9386169386169386, | |
| "grad_norm": 1.014770955103902, | |
| "learning_rate": 9.5182050225478e-08, | |
| "loss": 0.6022, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.9401709401709402, | |
| "grad_norm": 0.9183809943783089, | |
| "learning_rate": 9.049666606859852e-08, | |
| "loss": 0.5193, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.9417249417249417, | |
| "grad_norm": 0.8380871828967922, | |
| "learning_rate": 8.592847837203655e-08, | |
| "loss": 0.5955, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.9432789432789432, | |
| "grad_norm": 0.8781219603592406, | |
| "learning_rate": 8.147759618458706e-08, | |
| "loss": 0.6726, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.9448329448329449, | |
| "grad_norm": 0.9897864403463899, | |
| "learning_rate": 7.714412575480556e-08, | |
| "loss": 0.641, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.9463869463869464, | |
| "grad_norm": 0.9693302536123463, | |
| "learning_rate": 7.292817052847068e-08, | |
| "loss": 0.5614, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.947940947940948, | |
| "grad_norm": 0.8842272966323539, | |
| "learning_rate": 6.882983114611497e-08, | |
| "loss": 0.3866, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.9494949494949495, | |
| "grad_norm": 0.8928788478713224, | |
| "learning_rate": 6.484920544062245e-08, | |
| "loss": 0.7111, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.951048951048951, | |
| "grad_norm": 0.7471058967315293, | |
| "learning_rate": 6.098638843489213e-08, | |
| "loss": 0.4559, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.9526029526029526, | |
| "grad_norm": 1.1989009033463545, | |
| "learning_rate": 5.7241472339572e-08, | |
| "loss": 0.6242, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.9541569541569541, | |
| "grad_norm": 0.825791557449914, | |
| "learning_rate": 5.361454655085529e-08, | |
| "loss": 0.5102, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.9557109557109557, | |
| "grad_norm": 0.9246788337692015, | |
| "learning_rate": 5.0105697648347716e-08, | |
| "loss": 0.5252, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.9572649572649573, | |
| "grad_norm": 0.9616840921861742, | |
| "learning_rate": 4.671500939300133e-08, | |
| "loss": 0.5559, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.9588189588189588, | |
| "grad_norm": 0.9548923348306078, | |
| "learning_rate": 4.344256272511338e-08, | |
| "loss": 0.7163, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.9603729603729604, | |
| "grad_norm": 1.0822537483462542, | |
| "learning_rate": 4.0288435762396164e-08, | |
| "loss": 0.5881, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.9619269619269619, | |
| "grad_norm": 0.7725876652440066, | |
| "learning_rate": 3.725270379811019e-08, | |
| "loss": 0.5614, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.9634809634809635, | |
| "grad_norm": 0.8565244919322886, | |
| "learning_rate": 3.4335439299268414e-08, | |
| "loss": 0.4397, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.965034965034965, | |
| "grad_norm": 0.9606122692265989, | |
| "learning_rate": 3.1536711904904816e-08, | |
| "loss": 0.5277, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.9665889665889665, | |
| "grad_norm": 1.138988513753176, | |
| "learning_rate": 2.8856588424414632e-08, | |
| "loss": 0.6066, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.9681429681429682, | |
| "grad_norm": 0.7295542248220023, | |
| "learning_rate": 2.6295132835956748e-08, | |
| "loss": 0.5854, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.9696969696969697, | |
| "grad_norm": 0.9657806038448509, | |
| "learning_rate": 2.3852406284927687e-08, | |
| "loss": 0.5492, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.9712509712509713, | |
| "grad_norm": 0.9874016708725635, | |
| "learning_rate": 2.152846708250167e-08, | |
| "loss": 0.5257, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.9728049728049728, | |
| "grad_norm": 0.9153930251144268, | |
| "learning_rate": 1.93233707042384e-08, | |
| "loss": 0.5436, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.9743589743589743, | |
| "grad_norm": 0.8803440571286754, | |
| "learning_rate": 1.723716978876133e-08, | |
| "loss": 0.5056, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.9759129759129759, | |
| "grad_norm": 0.8302956653359888, | |
| "learning_rate": 1.5269914136497033e-08, | |
| "loss": 0.5629, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.9774669774669774, | |
| "grad_norm": 1.1113800628583712, | |
| "learning_rate": 1.3421650708487777e-08, | |
| "loss": 0.6096, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.9790209790209791, | |
| "grad_norm": 0.893165695075744, | |
| "learning_rate": 1.1692423625273563e-08, | |
| "loss": 0.4906, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.9805749805749806, | |
| "grad_norm": 0.8508627162029742, | |
| "learning_rate": 1.0082274165834627e-08, | |
| "loss": 0.5798, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.9821289821289821, | |
| "grad_norm": 0.9254388849186278, | |
| "learning_rate": 8.591240766607779e-09, | |
| "loss": 0.5168, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.9836829836829837, | |
| "grad_norm": 0.8978461708685718, | |
| "learning_rate": 7.219359020570471e-09, | |
| "loss": 0.5819, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.9852369852369852, | |
| "grad_norm": 0.8755369497191023, | |
| "learning_rate": 5.966661676388152e-09, | |
| "loss": 0.5777, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.9867909867909868, | |
| "grad_norm": 0.9185099392157843, | |
| "learning_rate": 4.833178637633773e-09, | |
| "loss": 0.6064, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.9883449883449883, | |
| "grad_norm": 0.7565206452951102, | |
| "learning_rate": 3.818936962076136e-09, | |
| "loss": 0.481, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.98989898989899, | |
| "grad_norm": 0.6749881277152423, | |
| "learning_rate": 2.9239608610298618e-09, | |
| "loss": 0.5021, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.9914529914529915, | |
| "grad_norm": 1.0427691738919331, | |
| "learning_rate": 2.148271698781401e-09, | |
| "loss": 0.5495, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.993006993006993, | |
| "grad_norm": 0.6650883713661591, | |
| "learning_rate": 1.4918879920750029e-09, | |
| "loss": 0.4986, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.9945609945609946, | |
| "grad_norm": 0.9035648077675859, | |
| "learning_rate": 9.548254096752862e-10, | |
| "loss": 0.6534, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.9961149961149961, | |
| "grad_norm": 0.8779326174407937, | |
| "learning_rate": 5.370967719897646e-10, | |
| "loss": 0.5301, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.9976689976689976, | |
| "grad_norm": 0.9522652817049506, | |
| "learning_rate": 2.387120507629792e-10, | |
| "loss": 0.516, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.9992229992229992, | |
| "grad_norm": 0.8372559748420602, | |
| "learning_rate": 5.967836884168687e-11, | |
| "loss": 0.4753, | |
| "step": 643 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 643, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 27996768092160.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
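
The log above appears to be the `trainer_state.json` that the Hugging Face `Trainer` writes into a checkpoint directory, with one `log_history` record per optimizer step. As a minimal illustrative sketch (not part of the original file; the filename `trainer_state.json` and its location are assumptions about where such a state file would live), the snippet below loads a file of this shape and summarizes the logged loss and learning-rate schedule:

```python
# Illustrative sketch: load a Trainer state file shaped like the log above
# and summarize its loss curve and learning-rate schedule.
import json

with open("trainer_state.json") as f:  # assumed path inside a checkpoint dir
    state = json.load(f)

history = state["log_history"]

# Each record carries "step", "epoch", "loss", "grad_norm", "learning_rate".
steps = [rec["step"] for rec in history]
losses = [rec["loss"] for rec in history]
lrs = [rec["learning_rate"] for rec in history]

print(f"steps logged : {len(history)} (max_steps={state['max_steps']})")
print(f"final loss   : {losses[-1]:.4f}")
print(f"mean loss    : {sum(losses) / len(losses):.4f}")
print(f"lr range     : {max(lrs):.2e} -> {min(lrs):.2e}")

# The per-step loss is noisy, so smooth it with a simple moving average
# before eyeballing the trend.
window = 20
smoothed = [
    sum(losses[max(0, i - window + 1): i + 1])
    / (i - max(0, i - window + 1) + 1)
    for i in range(len(losses))
]
print(f"smoothed final loss (window={window}): {smoothed[-1]:.4f}")
```

With `logging_steps: 1` and `max_steps: 643` as recorded above, this would report one record per step; the smoothed tail of the loss curve is a better progress signal than any single step's value.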